Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- evalkit_internvl/lib/python3.10/site-packages/anyio-3.7.1.dist-info/INSTALLER +1 -0
- evalkit_internvl/lib/python3.10/site-packages/anyio-3.7.1.dist-info/METADATA +105 -0
- evalkit_internvl/lib/python3.10/site-packages/anyio-3.7.1.dist-info/entry_points.txt +2 -0
- evalkit_internvl/lib/python3.10/site-packages/cachetools/__init__.py +859 -0
- evalkit_internvl/lib/python3.10/site-packages/cachetools/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/cachetools/__pycache__/func.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/cachetools/__pycache__/keys.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/cachetools/func.py +121 -0
- evalkit_internvl/lib/python3.10/site-packages/cachetools/keys.py +62 -0
- evalkit_internvl/lib/python3.10/site-packages/contourpy-1.3.0.dist-info/RECORD +43 -0
- evalkit_internvl/lib/python3.10/site-packages/contourpy-1.3.0.dist-info/REQUESTED +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/_common.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/_version.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/relativedelta.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/rrule.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/tzwin.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/utils.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/_common.py +43 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/__init__.py +61 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/__pycache__/_parser.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/__pycache__/isoparser.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/_parser.py +1613 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/isoparser.py +416 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/_common.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/_factories.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/tz.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/win.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/_common.py +419 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/_factories.py +80 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/tz.py +1849 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/win.py +370 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/__init__.py +167 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz +3 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/rebuild.py +75 -0
- evalkit_internvl/lib/python3.10/site-packages/frozenlist/__init__.py +98 -0
- evalkit_internvl/lib/python3.10/site-packages/frozenlist/__init__.pyi +47 -0
- evalkit_internvl/lib/python3.10/site-packages/frozenlist/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/frozenlist/_frozenlist.pyx +123 -0
- evalkit_internvl/lib/python3.10/site-packages/frozenlist/py.typed +1 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/__init__.py +148 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/_memmapping_reducer.py +657 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/_multiprocessing_helpers.py +53 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/backports.py +177 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/func_inspect.py +369 -0
- evalkit_internvl/lib/python3.10/site-packages/joblib/logger.py +162 -0
evalkit_internvl/lib/python3.10/site-packages/anyio-3.7.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_internvl/lib/python3.10/site-packages/anyio-3.7.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: anyio
|
| 3 |
+
Version: 3.7.1
|
| 4 |
+
Summary: High level compatibility layer for multiple asynchronous event loop implementations
|
| 5 |
+
Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
|
| 6 |
+
License: MIT
|
| 7 |
+
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
|
| 8 |
+
Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
|
| 9 |
+
Project-URL: Source code, https://github.com/agronholm/anyio
|
| 10 |
+
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
|
| 11 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 12 |
+
Classifier: Intended Audience :: Developers
|
| 13 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 14 |
+
Classifier: Framework :: AnyIO
|
| 15 |
+
Classifier: Typing :: Typed
|
| 16 |
+
Classifier: Programming Language :: Python
|
| 17 |
+
Classifier: Programming Language :: Python :: 3
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 23 |
+
Requires-Python: >=3.7
|
| 24 |
+
Description-Content-Type: text/x-rst
|
| 25 |
+
License-File: LICENSE
|
| 26 |
+
Requires-Dist: idna (>=2.8)
|
| 27 |
+
Requires-Dist: sniffio (>=1.1)
|
| 28 |
+
Requires-Dist: exceptiongroup ; python_version < "3.11"
|
| 29 |
+
Requires-Dist: typing-extensions ; python_version < "3.8"
|
| 30 |
+
Provides-Extra: doc
|
| 31 |
+
Requires-Dist: packaging ; extra == 'doc'
|
| 32 |
+
Requires-Dist: Sphinx ; extra == 'doc'
|
| 33 |
+
Requires-Dist: sphinx-rtd-theme (>=1.2.2) ; extra == 'doc'
|
| 34 |
+
Requires-Dist: sphinxcontrib-jquery ; extra == 'doc'
|
| 35 |
+
Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc'
|
| 36 |
+
Provides-Extra: test
|
| 37 |
+
Requires-Dist: anyio[trio] ; extra == 'test'
|
| 38 |
+
Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test'
|
| 39 |
+
Requires-Dist: hypothesis (>=4.0) ; extra == 'test'
|
| 40 |
+
Requires-Dist: psutil (>=5.9) ; extra == 'test'
|
| 41 |
+
Requires-Dist: pytest (>=7.0) ; extra == 'test'
|
| 42 |
+
Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test'
|
| 43 |
+
Requires-Dist: trustme ; extra == 'test'
|
| 44 |
+
Requires-Dist: uvloop (>=0.17) ; (python_version < "3.12" and platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test'
|
| 45 |
+
Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test'
|
| 46 |
+
Provides-Extra: trio
|
| 47 |
+
Requires-Dist: trio (<0.22) ; extra == 'trio'
|
| 48 |
+
|
| 49 |
+
.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
|
| 50 |
+
:target: https://github.com/agronholm/anyio/actions/workflows/test.yml
|
| 51 |
+
:alt: Build Status
|
| 52 |
+
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
|
| 53 |
+
:target: https://coveralls.io/github/agronholm/anyio?branch=master
|
| 54 |
+
:alt: Code Coverage
|
| 55 |
+
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
|
| 56 |
+
:target: https://anyio.readthedocs.io/en/latest/?badge=latest
|
| 57 |
+
:alt: Documentation
|
| 58 |
+
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
|
| 59 |
+
:target: https://gitter.im/python-trio/AnyIO
|
| 60 |
+
:alt: Gitter chat
|
| 61 |
+
|
| 62 |
+
AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
|
| 63 |
+
trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
|
| 64 |
+
with the native SC of trio itself.
|
| 65 |
+
|
| 66 |
+
Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
|
| 67 |
+
trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full
|
| 68 |
+
refactoring necessary. It will blend in with the native libraries of your chosen backend.
|
| 69 |
+
|
| 70 |
+
Documentation
|
| 71 |
+
-------------
|
| 72 |
+
|
| 73 |
+
View full documentation at: https://anyio.readthedocs.io/
|
| 74 |
+
|
| 75 |
+
Features
|
| 76 |
+
--------
|
| 77 |
+
|
| 78 |
+
AnyIO offers the following functionality:
|
| 79 |
+
|
| 80 |
+
* Task groups (nurseries_ in trio terminology)
|
| 81 |
+
* High-level networking (TCP, UDP and UNIX sockets)
|
| 82 |
+
|
| 83 |
+
* `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
|
| 84 |
+
3.8)
|
| 85 |
+
* async/await style UDP sockets (unlike asyncio where you still have to use Transports and
|
| 86 |
+
Protocols)
|
| 87 |
+
|
| 88 |
+
* A versatile API for byte streams and object streams
|
| 89 |
+
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
|
| 90 |
+
streams)
|
| 91 |
+
* Worker threads
|
| 92 |
+
* Subprocesses
|
| 93 |
+
* Asynchronous file I/O (using worker threads)
|
| 94 |
+
* Signal handling
|
| 95 |
+
|
| 96 |
+
AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures.
|
| 97 |
+
It even works with the popular Hypothesis_ library.
|
| 98 |
+
|
| 99 |
+
.. _asyncio: https://docs.python.org/3/library/asyncio.html
|
| 100 |
+
.. _trio: https://github.com/python-trio/trio
|
| 101 |
+
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
|
| 102 |
+
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
|
| 103 |
+
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
|
| 104 |
+
.. _pytest: https://docs.pytest.org/en/latest/
|
| 105 |
+
.. _Hypothesis: https://hypothesis.works/
|
evalkit_internvl/lib/python3.10/site-packages/anyio-3.7.1.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[pytest11]
|
| 2 |
+
anyio = anyio.pytest_plugin
|
evalkit_internvl/lib/python3.10/site-packages/cachetools/__init__.py
ADDED
|
@@ -0,0 +1,859 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Extensible memoizing collections and decorators."""
|
| 2 |
+
|
| 3 |
+
__all__ = (
|
| 4 |
+
"Cache",
|
| 5 |
+
"FIFOCache",
|
| 6 |
+
"LFUCache",
|
| 7 |
+
"LRUCache",
|
| 8 |
+
"MRUCache",
|
| 9 |
+
"RRCache",
|
| 10 |
+
"TLRUCache",
|
| 11 |
+
"TTLCache",
|
| 12 |
+
"cached",
|
| 13 |
+
"cachedmethod",
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
__version__ = "5.5.0"
|
| 17 |
+
|
| 18 |
+
import collections
|
| 19 |
+
import collections.abc
|
| 20 |
+
import functools
|
| 21 |
+
import heapq
|
| 22 |
+
import random
|
| 23 |
+
import time
|
| 24 |
+
|
| 25 |
+
from . import keys
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class _DefaultSize:
|
| 29 |
+
__slots__ = ()
|
| 30 |
+
|
| 31 |
+
def __getitem__(self, _):
|
| 32 |
+
return 1
|
| 33 |
+
|
| 34 |
+
def __setitem__(self, _, value):
|
| 35 |
+
assert value == 1
|
| 36 |
+
|
| 37 |
+
def pop(self, _):
|
| 38 |
+
return 1
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class Cache(collections.abc.MutableMapping):
|
| 42 |
+
"""Mutable mapping to serve as a simple cache or cache base class."""
|
| 43 |
+
|
| 44 |
+
__marker = object()
|
| 45 |
+
|
| 46 |
+
__size = _DefaultSize()
|
| 47 |
+
|
| 48 |
+
def __init__(self, maxsize, getsizeof=None):
|
| 49 |
+
if getsizeof:
|
| 50 |
+
self.getsizeof = getsizeof
|
| 51 |
+
if self.getsizeof is not Cache.getsizeof:
|
| 52 |
+
self.__size = dict()
|
| 53 |
+
self.__data = dict()
|
| 54 |
+
self.__currsize = 0
|
| 55 |
+
self.__maxsize = maxsize
|
| 56 |
+
|
| 57 |
+
def __repr__(self):
|
| 58 |
+
return "%s(%s, maxsize=%r, currsize=%r)" % (
|
| 59 |
+
self.__class__.__name__,
|
| 60 |
+
repr(self.__data),
|
| 61 |
+
self.__maxsize,
|
| 62 |
+
self.__currsize,
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
def __getitem__(self, key):
|
| 66 |
+
try:
|
| 67 |
+
return self.__data[key]
|
| 68 |
+
except KeyError:
|
| 69 |
+
return self.__missing__(key)
|
| 70 |
+
|
| 71 |
+
def __setitem__(self, key, value):
|
| 72 |
+
maxsize = self.__maxsize
|
| 73 |
+
size = self.getsizeof(value)
|
| 74 |
+
if size > maxsize:
|
| 75 |
+
raise ValueError("value too large")
|
| 76 |
+
if key not in self.__data or self.__size[key] < size:
|
| 77 |
+
while self.__currsize + size > maxsize:
|
| 78 |
+
self.popitem()
|
| 79 |
+
if key in self.__data:
|
| 80 |
+
diffsize = size - self.__size[key]
|
| 81 |
+
else:
|
| 82 |
+
diffsize = size
|
| 83 |
+
self.__data[key] = value
|
| 84 |
+
self.__size[key] = size
|
| 85 |
+
self.__currsize += diffsize
|
| 86 |
+
|
| 87 |
+
def __delitem__(self, key):
|
| 88 |
+
size = self.__size.pop(key)
|
| 89 |
+
del self.__data[key]
|
| 90 |
+
self.__currsize -= size
|
| 91 |
+
|
| 92 |
+
def __contains__(self, key):
|
| 93 |
+
return key in self.__data
|
| 94 |
+
|
| 95 |
+
def __missing__(self, key):
|
| 96 |
+
raise KeyError(key)
|
| 97 |
+
|
| 98 |
+
def __iter__(self):
|
| 99 |
+
return iter(self.__data)
|
| 100 |
+
|
| 101 |
+
def __len__(self):
|
| 102 |
+
return len(self.__data)
|
| 103 |
+
|
| 104 |
+
def get(self, key, default=None):
|
| 105 |
+
if key in self:
|
| 106 |
+
return self[key]
|
| 107 |
+
else:
|
| 108 |
+
return default
|
| 109 |
+
|
| 110 |
+
def pop(self, key, default=__marker):
|
| 111 |
+
if key in self:
|
| 112 |
+
value = self[key]
|
| 113 |
+
del self[key]
|
| 114 |
+
elif default is self.__marker:
|
| 115 |
+
raise KeyError(key)
|
| 116 |
+
else:
|
| 117 |
+
value = default
|
| 118 |
+
return value
|
| 119 |
+
|
| 120 |
+
def setdefault(self, key, default=None):
|
| 121 |
+
if key in self:
|
| 122 |
+
value = self[key]
|
| 123 |
+
else:
|
| 124 |
+
self[key] = value = default
|
| 125 |
+
return value
|
| 126 |
+
|
| 127 |
+
@property
|
| 128 |
+
def maxsize(self):
|
| 129 |
+
"""The maximum size of the cache."""
|
| 130 |
+
return self.__maxsize
|
| 131 |
+
|
| 132 |
+
@property
|
| 133 |
+
def currsize(self):
|
| 134 |
+
"""The current size of the cache."""
|
| 135 |
+
return self.__currsize
|
| 136 |
+
|
| 137 |
+
@staticmethod
|
| 138 |
+
def getsizeof(value):
|
| 139 |
+
"""Return the size of a cache element's value."""
|
| 140 |
+
return 1
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
class FIFOCache(Cache):
|
| 144 |
+
"""First In First Out (FIFO) cache implementation."""
|
| 145 |
+
|
| 146 |
+
def __init__(self, maxsize, getsizeof=None):
|
| 147 |
+
Cache.__init__(self, maxsize, getsizeof)
|
| 148 |
+
self.__order = collections.OrderedDict()
|
| 149 |
+
|
| 150 |
+
def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
|
| 151 |
+
cache_setitem(self, key, value)
|
| 152 |
+
try:
|
| 153 |
+
self.__order.move_to_end(key)
|
| 154 |
+
except KeyError:
|
| 155 |
+
self.__order[key] = None
|
| 156 |
+
|
| 157 |
+
def __delitem__(self, key, cache_delitem=Cache.__delitem__):
|
| 158 |
+
cache_delitem(self, key)
|
| 159 |
+
del self.__order[key]
|
| 160 |
+
|
| 161 |
+
def popitem(self):
|
| 162 |
+
"""Remove and return the `(key, value)` pair first inserted."""
|
| 163 |
+
try:
|
| 164 |
+
key = next(iter(self.__order))
|
| 165 |
+
except StopIteration:
|
| 166 |
+
raise KeyError("%s is empty" % type(self).__name__) from None
|
| 167 |
+
else:
|
| 168 |
+
return (key, self.pop(key))
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
class LFUCache(Cache):
|
| 172 |
+
"""Least Frequently Used (LFU) cache implementation."""
|
| 173 |
+
|
| 174 |
+
def __init__(self, maxsize, getsizeof=None):
|
| 175 |
+
Cache.__init__(self, maxsize, getsizeof)
|
| 176 |
+
self.__counter = collections.Counter()
|
| 177 |
+
|
| 178 |
+
def __getitem__(self, key, cache_getitem=Cache.__getitem__):
|
| 179 |
+
value = cache_getitem(self, key)
|
| 180 |
+
if key in self: # __missing__ may not store item
|
| 181 |
+
self.__counter[key] -= 1
|
| 182 |
+
return value
|
| 183 |
+
|
| 184 |
+
def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
|
| 185 |
+
cache_setitem(self, key, value)
|
| 186 |
+
self.__counter[key] -= 1
|
| 187 |
+
|
| 188 |
+
def __delitem__(self, key, cache_delitem=Cache.__delitem__):
|
| 189 |
+
cache_delitem(self, key)
|
| 190 |
+
del self.__counter[key]
|
| 191 |
+
|
| 192 |
+
def popitem(self):
|
| 193 |
+
"""Remove and return the `(key, value)` pair least frequently used."""
|
| 194 |
+
try:
|
| 195 |
+
((key, _),) = self.__counter.most_common(1)
|
| 196 |
+
except ValueError:
|
| 197 |
+
raise KeyError("%s is empty" % type(self).__name__) from None
|
| 198 |
+
else:
|
| 199 |
+
return (key, self.pop(key))
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
class LRUCache(Cache):
|
| 203 |
+
"""Least Recently Used (LRU) cache implementation."""
|
| 204 |
+
|
| 205 |
+
def __init__(self, maxsize, getsizeof=None):
|
| 206 |
+
Cache.__init__(self, maxsize, getsizeof)
|
| 207 |
+
self.__order = collections.OrderedDict()
|
| 208 |
+
|
| 209 |
+
def __getitem__(self, key, cache_getitem=Cache.__getitem__):
|
| 210 |
+
value = cache_getitem(self, key)
|
| 211 |
+
if key in self: # __missing__ may not store item
|
| 212 |
+
self.__update(key)
|
| 213 |
+
return value
|
| 214 |
+
|
| 215 |
+
def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
|
| 216 |
+
cache_setitem(self, key, value)
|
| 217 |
+
self.__update(key)
|
| 218 |
+
|
| 219 |
+
def __delitem__(self, key, cache_delitem=Cache.__delitem__):
|
| 220 |
+
cache_delitem(self, key)
|
| 221 |
+
del self.__order[key]
|
| 222 |
+
|
| 223 |
+
def popitem(self):
|
| 224 |
+
"""Remove and return the `(key, value)` pair least recently used."""
|
| 225 |
+
try:
|
| 226 |
+
key = next(iter(self.__order))
|
| 227 |
+
except StopIteration:
|
| 228 |
+
raise KeyError("%s is empty" % type(self).__name__) from None
|
| 229 |
+
else:
|
| 230 |
+
return (key, self.pop(key))
|
| 231 |
+
|
| 232 |
+
def __update(self, key):
|
| 233 |
+
try:
|
| 234 |
+
self.__order.move_to_end(key)
|
| 235 |
+
except KeyError:
|
| 236 |
+
self.__order[key] = None
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
class MRUCache(Cache):
|
| 240 |
+
"""Most Recently Used (MRU) cache implementation."""
|
| 241 |
+
|
| 242 |
+
def __init__(self, maxsize, getsizeof=None):
|
| 243 |
+
from warnings import warn
|
| 244 |
+
|
| 245 |
+
warn("MRUCache is deprecated", DeprecationWarning, stacklevel=2)
|
| 246 |
+
|
| 247 |
+
Cache.__init__(self, maxsize, getsizeof)
|
| 248 |
+
self.__order = collections.OrderedDict()
|
| 249 |
+
|
| 250 |
+
def __getitem__(self, key, cache_getitem=Cache.__getitem__):
|
| 251 |
+
value = cache_getitem(self, key)
|
| 252 |
+
if key in self: # __missing__ may not store item
|
| 253 |
+
self.__update(key)
|
| 254 |
+
return value
|
| 255 |
+
|
| 256 |
+
def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
|
| 257 |
+
cache_setitem(self, key, value)
|
| 258 |
+
self.__update(key)
|
| 259 |
+
|
| 260 |
+
def __delitem__(self, key, cache_delitem=Cache.__delitem__):
|
| 261 |
+
cache_delitem(self, key)
|
| 262 |
+
del self.__order[key]
|
| 263 |
+
|
| 264 |
+
def popitem(self):
|
| 265 |
+
"""Remove and return the `(key, value)` pair most recently used."""
|
| 266 |
+
try:
|
| 267 |
+
key = next(iter(self.__order))
|
| 268 |
+
except StopIteration:
|
| 269 |
+
raise KeyError("%s is empty" % type(self).__name__) from None
|
| 270 |
+
else:
|
| 271 |
+
return (key, self.pop(key))
|
| 272 |
+
|
| 273 |
+
def __update(self, key):
|
| 274 |
+
try:
|
| 275 |
+
self.__order.move_to_end(key, last=False)
|
| 276 |
+
except KeyError:
|
| 277 |
+
self.__order[key] = None
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
class RRCache(Cache):
|
| 281 |
+
"""Random Replacement (RR) cache implementation."""
|
| 282 |
+
|
| 283 |
+
def __init__(self, maxsize, choice=random.choice, getsizeof=None):
|
| 284 |
+
Cache.__init__(self, maxsize, getsizeof)
|
| 285 |
+
self.__choice = choice
|
| 286 |
+
|
| 287 |
+
@property
|
| 288 |
+
def choice(self):
|
| 289 |
+
"""The `choice` function used by the cache."""
|
| 290 |
+
return self.__choice
|
| 291 |
+
|
| 292 |
+
def popitem(self):
|
| 293 |
+
"""Remove and return a random `(key, value)` pair."""
|
| 294 |
+
try:
|
| 295 |
+
key = self.__choice(list(self))
|
| 296 |
+
except IndexError:
|
| 297 |
+
raise KeyError("%s is empty" % type(self).__name__) from None
|
| 298 |
+
else:
|
| 299 |
+
return (key, self.pop(key))
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
class _TimedCache(Cache):
|
| 303 |
+
"""Base class for time aware cache implementations."""
|
| 304 |
+
|
| 305 |
+
class _Timer:
|
| 306 |
+
def __init__(self, timer):
|
| 307 |
+
self.__timer = timer
|
| 308 |
+
self.__nesting = 0
|
| 309 |
+
|
| 310 |
+
def __call__(self):
|
| 311 |
+
if self.__nesting == 0:
|
| 312 |
+
return self.__timer()
|
| 313 |
+
else:
|
| 314 |
+
return self.__time
|
| 315 |
+
|
| 316 |
+
def __enter__(self):
|
| 317 |
+
if self.__nesting == 0:
|
| 318 |
+
self.__time = time = self.__timer()
|
| 319 |
+
else:
|
| 320 |
+
time = self.__time
|
| 321 |
+
self.__nesting += 1
|
| 322 |
+
return time
|
| 323 |
+
|
| 324 |
+
def __exit__(self, *exc):
|
| 325 |
+
self.__nesting -= 1
|
| 326 |
+
|
| 327 |
+
def __reduce__(self):
|
| 328 |
+
return _TimedCache._Timer, (self.__timer,)
|
| 329 |
+
|
| 330 |
+
def __getattr__(self, name):
|
| 331 |
+
return getattr(self.__timer, name)
|
| 332 |
+
|
| 333 |
+
def __init__(self, maxsize, timer=time.monotonic, getsizeof=None):
|
| 334 |
+
Cache.__init__(self, maxsize, getsizeof)
|
| 335 |
+
self.__timer = _TimedCache._Timer(timer)
|
| 336 |
+
|
| 337 |
+
def __repr__(self, cache_repr=Cache.__repr__):
|
| 338 |
+
with self.__timer as time:
|
| 339 |
+
self.expire(time)
|
| 340 |
+
return cache_repr(self)
|
| 341 |
+
|
| 342 |
+
def __len__(self, cache_len=Cache.__len__):
|
| 343 |
+
with self.__timer as time:
|
| 344 |
+
self.expire(time)
|
| 345 |
+
return cache_len(self)
|
| 346 |
+
|
| 347 |
+
@property
|
| 348 |
+
def currsize(self):
|
| 349 |
+
with self.__timer as time:
|
| 350 |
+
self.expire(time)
|
| 351 |
+
return super().currsize
|
| 352 |
+
|
| 353 |
+
@property
|
| 354 |
+
def timer(self):
|
| 355 |
+
"""The timer function used by the cache."""
|
| 356 |
+
return self.__timer
|
| 357 |
+
|
| 358 |
+
def clear(self):
|
| 359 |
+
with self.__timer as time:
|
| 360 |
+
self.expire(time)
|
| 361 |
+
Cache.clear(self)
|
| 362 |
+
|
| 363 |
+
def get(self, *args, **kwargs):
|
| 364 |
+
with self.__timer:
|
| 365 |
+
return Cache.get(self, *args, **kwargs)
|
| 366 |
+
|
| 367 |
+
def pop(self, *args, **kwargs):
|
| 368 |
+
with self.__timer:
|
| 369 |
+
return Cache.pop(self, *args, **kwargs)
|
| 370 |
+
|
| 371 |
+
def setdefault(self, *args, **kwargs):
|
| 372 |
+
with self.__timer:
|
| 373 |
+
return Cache.setdefault(self, *args, **kwargs)
|
| 374 |
+
|
| 375 |
+
|
| 376 |
+
class TTLCache(_TimedCache):
|
| 377 |
+
"""LRU Cache implementation with per-item time-to-live (TTL) value."""
|
| 378 |
+
|
| 379 |
+
class _Link:
|
| 380 |
+
__slots__ = ("key", "expires", "next", "prev")
|
| 381 |
+
|
| 382 |
+
def __init__(self, key=None, expires=None):
|
| 383 |
+
self.key = key
|
| 384 |
+
self.expires = expires
|
| 385 |
+
|
| 386 |
+
def __reduce__(self):
|
| 387 |
+
return TTLCache._Link, (self.key, self.expires)
|
| 388 |
+
|
| 389 |
+
def unlink(self):
|
| 390 |
+
next = self.next
|
| 391 |
+
prev = self.prev
|
| 392 |
+
prev.next = next
|
| 393 |
+
next.prev = prev
|
| 394 |
+
|
| 395 |
+
def __init__(self, maxsize, ttl, timer=time.monotonic, getsizeof=None):
|
| 396 |
+
_TimedCache.__init__(self, maxsize, timer, getsizeof)
|
| 397 |
+
self.__root = root = TTLCache._Link()
|
| 398 |
+
root.prev = root.next = root
|
| 399 |
+
self.__links = collections.OrderedDict()
|
| 400 |
+
self.__ttl = ttl
|
| 401 |
+
|
| 402 |
+
def __contains__(self, key):
|
| 403 |
+
try:
|
| 404 |
+
link = self.__links[key] # no reordering
|
| 405 |
+
except KeyError:
|
| 406 |
+
return False
|
| 407 |
+
else:
|
| 408 |
+
return self.timer() < link.expires
|
| 409 |
+
|
| 410 |
+
def __getitem__(self, key, cache_getitem=Cache.__getitem__):
|
| 411 |
+
try:
|
| 412 |
+
link = self.__getlink(key)
|
| 413 |
+
except KeyError:
|
| 414 |
+
expired = False
|
| 415 |
+
else:
|
| 416 |
+
expired = not (self.timer() < link.expires)
|
| 417 |
+
if expired:
|
| 418 |
+
return self.__missing__(key)
|
| 419 |
+
else:
|
| 420 |
+
return cache_getitem(self, key)
|
| 421 |
+
|
| 422 |
+
def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
|
| 423 |
+
with self.timer as time:
|
| 424 |
+
self.expire(time)
|
| 425 |
+
cache_setitem(self, key, value)
|
| 426 |
+
try:
|
| 427 |
+
link = self.__getlink(key)
|
| 428 |
+
except KeyError:
|
| 429 |
+
self.__links[key] = link = TTLCache._Link(key)
|
| 430 |
+
else:
|
| 431 |
+
link.unlink()
|
| 432 |
+
link.expires = time + self.__ttl
|
| 433 |
+
link.next = root = self.__root
|
| 434 |
+
link.prev = prev = root.prev
|
| 435 |
+
prev.next = root.prev = link
|
| 436 |
+
|
| 437 |
+
def __delitem__(self, key, cache_delitem=Cache.__delitem__):
|
| 438 |
+
cache_delitem(self, key)
|
| 439 |
+
link = self.__links.pop(key)
|
| 440 |
+
link.unlink()
|
| 441 |
+
if not (self.timer() < link.expires):
|
| 442 |
+
raise KeyError(key)
|
| 443 |
+
|
| 444 |
+
def __iter__(self):
|
| 445 |
+
root = self.__root
|
| 446 |
+
curr = root.next
|
| 447 |
+
while curr is not root:
|
| 448 |
+
# "freeze" time for iterator access
|
| 449 |
+
with self.timer as time:
|
| 450 |
+
if time < curr.expires:
|
| 451 |
+
yield curr.key
|
| 452 |
+
curr = curr.next
|
| 453 |
+
|
| 454 |
+
def __setstate__(self, state):
    # Unpickling support: rebuild the circular linked list in expiry
    # order (links are restored without valid prev/next pointers),
    # then drop anything that expired while serialized.
    self.__dict__.update(state)
    root = self.__root
    root.prev = root.next = root
    for link in sorted(self.__links.values(), key=lambda obj: obj.expires):
        link.next = root
        link.prev = prev = root.prev
        prev.next = root.prev = link
    self.expire(self.timer())
|
| 463 |
+
|
| 464 |
+
@property
def ttl(self):
    """The fixed time-to-live (in timer units) applied to stored items."""
    return self.__ttl
|
| 468 |
+
|
| 469 |
+
def expire(self, time=None):
    """Remove expired items from the cache and return an iterable of the
    expired `(key, value)` pairs.

    """
    if time is None:
        time = self.timer()
    root = self.__root
    curr = root.next
    links = self.__links
    expired = []
    # Use the base-class accessors directly so eviction does not
    # re-enter the TTL/LRU bookkeeping of this class.
    cache_delitem = Cache.__delitem__
    cache_getitem = Cache.__getitem__
    # Links are kept in expiry order, so stop at the first live item.
    while curr is not root and not (time < curr.expires):
        expired.append((curr.key, cache_getitem(self, curr.key)))
        cache_delitem(self, curr.key)
        del links[curr.key]
        next = curr.next
        curr.unlink()
        curr = next
    return expired
|
| 490 |
+
|
| 491 |
+
def popitem(self):
    """Remove and return the `(key, value)` pair least recently used that
    has not already expired.

    """
    with self.timer as time:
        self.expire(time)
        try:
            # First key of the OrderedDict is the least recently used.
            key = next(iter(self.__links))
        except StopIteration:
            raise KeyError("%s is empty" % type(self).__name__) from None
        else:
            return (key, self.pop(key))
|
| 504 |
+
|
| 505 |
+
def __getlink(self, key):
    # Fetch the link for `key` and mark it most recently used by
    # moving it to the end of the OrderedDict (LRU bookkeeping).
    value = self.__links[key]
    self.__links.move_to_end(key)
    return value
|
| 509 |
+
|
| 510 |
+
|
| 511 |
+
class TLRUCache(_TimedCache):
    """Time aware Least Recently Used (TLRU) cache implementation."""

    @functools.total_ordering
    class _Item:
        # Heap entry pairing a key with its expiry time.  `removed`
        # flags stale entries that are skipped lazily, since removing
        # from the middle of a heap would be O(n).
        __slots__ = ("key", "expires", "removed")

        def __init__(self, key=None, expires=None):
            self.key = key
            self.expires = expires
            self.removed = False

        def __lt__(self, other):
            # Heap order: earliest expiry first.
            return self.expires < other.expires

    def __init__(self, maxsize, ttu, timer=time.monotonic, getsizeof=None):
        _TimedCache.__init__(self, maxsize, timer, getsizeof)
        self.__items = collections.OrderedDict()  # key -> _Item, in LRU order
        self.__order = []  # heap of _Item objects, ordered by expiry
        self.__ttu = ttu  # ttu(key, value, now) -> absolute expiry time

    def __contains__(self, key):
        # An expired-but-not-yet-evicted entry is reported as absent.
        try:
            item = self.__items[key]  # no reordering
        except KeyError:
            return False
        else:
            return self.timer() < item.expires

    def __getitem__(self, key, cache_getitem=Cache.__getitem__):
        # Same pattern as TTLCache: expired entries fall through to
        # __missing__ instead of being returned.
        try:
            item = self.__getitem(key)
        except KeyError:
            expired = False
        else:
            expired = not (self.timer() < item.expires)
        if expired:
            return self.__missing__(key)
        else:
            return cache_getitem(self, key)

    def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
        with self.timer as time:
            # Ask the user-supplied time-to-use function for the expiry.
            expires = self.__ttu(key, value, time)
            if not (time < expires):
                return  # skip expired items
            self.expire(time)
            cache_setitem(self, key, value)
        # removing an existing item would break the heap structure, so
        # only mark it as removed for now
        try:
            self.__getitem(key).removed = True
        except KeyError:
            pass
        self.__items[key] = item = TLRUCache._Item(key, expires)
        heapq.heappush(self.__order, item)

    def __delitem__(self, key, cache_delitem=Cache.__delitem__):
        with self.timer as time:
            # no self.expire() for performance reasons, e.g. self.clear() [#67]
            cache_delitem(self, key)
        item = self.__items.pop(key)
        item.removed = True
        if not (time < item.expires):
            # Expired key counts as "not present".
            raise KeyError(key)

    def __iter__(self):
        # Iterate the heap list directly; skip removed markers and
        # entries that expired before they are visited.
        for curr in self.__order:
            # "freeze" time for iterator access
            with self.timer as time:
                if time < curr.expires and not curr.removed:
                    yield curr.key

    @property
    def ttu(self):
        """The local time-to-use function used by the cache."""
        return self.__ttu

    def expire(self, time=None):
        """Remove expired items from the cache and return an iterable of the
        expired `(key, value)` pairs.

        """
        if time is None:
            time = self.timer()
        items = self.__items
        order = self.__order
        # clean up the heap if too many items are marked as removed
        if len(order) > len(items) * 2:
            self.__order = order = [item for item in order if not item.removed]
            heapq.heapify(order)
        expired = []
        # Use base-class accessors directly to avoid re-entering the
        # bookkeeping in this class during eviction.
        cache_delitem = Cache.__delitem__
        cache_getitem = Cache.__getitem__
        while order and (order[0].removed or not (time < order[0].expires)):
            item = heapq.heappop(order)
            if not item.removed:
                expired.append((item.key, cache_getitem(self, item.key)))
                cache_delitem(self, item.key)
                del items[item.key]
        return expired

    def popitem(self):
        """Remove and return the `(key, value)` pair least recently used that
        has not already expired.

        """
        with self.timer as time:
            self.expire(time)
            try:
                # First key of the OrderedDict is least recently used.
                key = next(iter(self.__items))
            except StopIteration:
                raise KeyError("%s is empty" % self.__class__.__name__) from None
            else:
                return (key, self.pop(key))

    def __getitem(self, key):
        # Fetch the item for `key` and mark it most recently used.
        value = self.__items[key]
        self.__items.move_to_end(key)
        return value
|
| 631 |
+
|
| 632 |
+
|
| 633 |
+
# Statistics tuple returned by the cache_info() of @cached wrappers,
# mirroring functools.lru_cache's CacheInfo.
_CacheInfo = collections.namedtuple(
    "CacheInfo", ["hits", "misses", "maxsize", "currsize"]
)
|
| 636 |
+
|
| 637 |
+
|
| 638 |
+
def cached(cache, key=keys.hashkey, lock=None, info=False):
    """Decorator to wrap a function with a memoizing callable that saves
    results in a cache.

    """

    def decorator(func):
        # Six wrapper variants are generated, selected by (info, cache,
        # lock).  With info=True the wrappers also count hits/misses.
        if info:
            hits = misses = 0

            # Choose a cache_info() implementation matching the kind of
            # cache supplied (cachetools Cache, plain mapping, or None).
            if isinstance(cache, Cache):

                def getinfo():
                    nonlocal hits, misses
                    return _CacheInfo(hits, misses, cache.maxsize, cache.currsize)

            elif isinstance(cache, collections.abc.Mapping):

                def getinfo():
                    nonlocal hits, misses
                    return _CacheInfo(hits, misses, None, len(cache))

            else:

                def getinfo():
                    nonlocal hits, misses
                    return _CacheInfo(hits, misses, 0, 0)

            if cache is None:
                # No cache at all: every call is a miss.

                def wrapper(*args, **kwargs):
                    nonlocal misses
                    misses += 1
                    return func(*args, **kwargs)

                def cache_clear():
                    nonlocal hits, misses
                    hits = misses = 0

                cache_info = getinfo

            elif lock is None:
                # Unsynchronized caching with statistics.

                def wrapper(*args, **kwargs):
                    nonlocal hits, misses
                    k = key(*args, **kwargs)
                    try:
                        result = cache[k]
                        hits += 1
                        return result
                    except KeyError:
                        misses += 1
                    v = func(*args, **kwargs)
                    try:
                        cache[k] = v
                    except ValueError:
                        pass  # value too large
                    return v

                def cache_clear():
                    nonlocal hits, misses
                    cache.clear()
                    hits = misses = 0

                cache_info = getinfo

            else:
                # Synchronized caching: the cache (and counters) are only
                # touched while holding `lock`; `func` runs unlocked.

                def wrapper(*args, **kwargs):
                    nonlocal hits, misses
                    k = key(*args, **kwargs)
                    try:
                        with lock:
                            result = cache[k]
                            hits += 1
                        return result
                    except KeyError:
                        with lock:
                            misses += 1
                    v = func(*args, **kwargs)
                    # in case of a race, prefer the item already in the cache
                    try:
                        with lock:
                            return cache.setdefault(k, v)
                    except ValueError:
                        return v  # value too large

                def cache_clear():
                    nonlocal hits, misses
                    with lock:
                        cache.clear()
                        hits = misses = 0

                def cache_info():
                    with lock:
                        return getinfo()

        else:
            if cache is None:
                # No cache, no statistics: passthrough wrapper.

                def wrapper(*args, **kwargs):
                    return func(*args, **kwargs)

                def cache_clear():
                    pass

            elif lock is None:
                # Unsynchronized caching without statistics.

                def wrapper(*args, **kwargs):
                    k = key(*args, **kwargs)
                    try:
                        return cache[k]
                    except KeyError:
                        pass  # key not found
                    v = func(*args, **kwargs)
                    try:
                        cache[k] = v
                    except ValueError:
                        pass  # value too large
                    return v

                def cache_clear():
                    cache.clear()

            else:
                # Synchronized caching without statistics.

                def wrapper(*args, **kwargs):
                    k = key(*args, **kwargs)
                    try:
                        with lock:
                            return cache[k]
                    except KeyError:
                        pass  # key not found
                    v = func(*args, **kwargs)
                    # in case of a race, prefer the item already in the cache
                    try:
                        with lock:
                            return cache.setdefault(k, v)
                    except ValueError:
                        return v  # value too large

                def cache_clear():
                    with lock:
                        cache.clear()

            cache_info = None

        # Expose the caching machinery on the wrapper for introspection.
        wrapper.cache = cache
        wrapper.cache_key = key
        wrapper.cache_lock = lock
        wrapper.cache_clear = cache_clear
        wrapper.cache_info = cache_info

        return functools.update_wrapper(wrapper, func)

    return decorator
|
| 794 |
+
|
| 795 |
+
|
| 796 |
+
def cachedmethod(cache, key=keys.methodkey, lock=None):
    """Decorator to wrap a class or instance method with a memoizing
    callable that saves results in a cache.

    """

    def decorator(method):
        # `cache` and `lock` are callables taking `self`, so each
        # instance may carry its own cache/lock; cache(self) returning
        # None disables caching for that instance.
        if lock is None:

            def wrapper(self, *args, **kwargs):
                c = cache(self)
                if c is None:
                    return method(self, *args, **kwargs)
                k = key(self, *args, **kwargs)
                try:
                    return c[k]
                except KeyError:
                    pass  # key not found
                v = method(self, *args, **kwargs)
                try:
                    c[k] = v
                except ValueError:
                    pass  # value too large
                return v

            def clear(self):
                c = cache(self)
                if c is not None:
                    c.clear()

        else:

            def wrapper(self, *args, **kwargs):
                c = cache(self)
                if c is None:
                    return method(self, *args, **kwargs)
                k = key(self, *args, **kwargs)
                try:
                    with lock(self):
                        return c[k]
                except KeyError:
                    pass  # key not found
                v = method(self, *args, **kwargs)
                # in case of a race, prefer the item already in the cache
                try:
                    with lock(self):
                        return c.setdefault(k, v)
                except ValueError:
                    return v  # value too large

            def clear(self):
                c = cache(self)
                if c is not None:
                    with lock(self):
                        c.clear()

        # Expose the caching machinery on the wrapper for introspection.
        wrapper.cache = cache
        wrapper.cache_key = key
        wrapper.cache_lock = lock
        wrapper.cache_clear = clear

        return functools.update_wrapper(wrapper, method)

    return decorator
|
evalkit_internvl/lib/python3.10/site-packages/cachetools/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (25.6 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/cachetools/__pycache__/func.cpython-310.pyc
ADDED
|
Binary file (3.89 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/cachetools/__pycache__/keys.cpython-310.pyc
ADDED
|
Binary file (2.43 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/cachetools/func.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""`functools.lru_cache` compatible memoizing function decorators."""
|
| 2 |
+
|
| 3 |
+
__all__ = ("fifo_cache", "lfu_cache", "lru_cache", "mru_cache", "rr_cache", "ttl_cache")
|
| 4 |
+
|
| 5 |
+
import math
|
| 6 |
+
import random
|
| 7 |
+
import time
|
| 8 |
+
|
| 9 |
+
try:
|
| 10 |
+
from threading import RLock
|
| 11 |
+
except ImportError: # pragma: no cover
|
| 12 |
+
from dummy_threading import RLock
|
| 13 |
+
|
| 14 |
+
from . import FIFOCache, LFUCache, LRUCache, MRUCache, RRCache, TTLCache
|
| 15 |
+
from . import cached
|
| 16 |
+
from . import keys
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class _UnboundTTLCache(TTLCache):
    # TTL cache without a size bound, used by ttl_cache(maxsize=None).
    def __init__(self, ttl, timer):
        TTLCache.__init__(self, math.inf, ttl, timer)

    @property
    def maxsize(self):
        # Report None (unbounded), matching functools.lru_cache.
        return None
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _cache(cache, maxsize, typed):
    # Shared implementation for the lru_cache-style decorators below:
    # wraps `cached` with a per-function RLock and statistics, and adds
    # the cache_parameters() introspection hook of functools.lru_cache.
    def decorator(func):
        key = keys.typedkey if typed else keys.hashkey
        wrapper = cached(cache=cache, key=key, lock=RLock(), info=True)(func)
        wrapper.cache_parameters = lambda: {"maxsize": maxsize, "typed": typed}
        return wrapper

    return decorator
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def fifo_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a First In First Out (FIFO)
    algorithm.

    """
    # Bare usage (@fifo_cache without parentheses) passes the function
    # itself as `maxsize`; fall back to the default size of 128 then.
    if callable(maxsize):
        return _cache(FIFOCache(128), 128, typed)(maxsize)
    if maxsize is None:
        return _cache({}, None, typed)
    return _cache(FIFOCache(maxsize), maxsize, typed)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def lfu_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Frequently Used (LFU)
    algorithm.

    """
    # Bare usage (@lfu_cache without parentheses) passes the function
    # itself as `maxsize`; fall back to the default size of 128 then.
    if callable(maxsize):
        return _cache(LFUCache(128), 128, typed)(maxsize)
    if maxsize is None:
        return _cache({}, None, typed)
    return _cache(LFUCache(maxsize), maxsize, typed)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def lru_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm.

    """
    # Bare usage (@lru_cache without parentheses) passes the function
    # itself as `maxsize`; fall back to the default size of 128 then.
    if callable(maxsize):
        return _cache(LRUCache(128), 128, typed)(maxsize)
    if maxsize is None:
        return _cache({}, None, typed)
    return _cache(LRUCache(maxsize), maxsize, typed)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def mru_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Most Recently Used (MRU)
    algorithm.
    """
    # Deprecated: emits a DeprecationWarning pointing at the caller.
    from warnings import warn

    warn("@mru_cache is deprecated", DeprecationWarning, stacklevel=2)

    if maxsize is None:
        return _cache({}, None, typed)
    elif callable(maxsize):
        # Bare usage (@mru_cache without parentheses).
        return _cache(MRUCache(128), 128, typed)(maxsize)
    else:
        return _cache(MRUCache(maxsize), maxsize, typed)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def rr_cache(maxsize=128, choice=random.choice, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Random Replacement (RR)
    algorithm.

    """
    # Bare usage (@rr_cache without parentheses) passes the function
    # itself as `maxsize`; fall back to the default size of 128 then.
    if callable(maxsize):
        return _cache(RRCache(128, choice), 128, typed)(maxsize)
    if maxsize is None:
        return _cache({}, None, typed)
    return _cache(RRCache(maxsize, choice), maxsize, typed)
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm with a per-item time-to-live (TTL) value.
    """
    # Bare usage (@ttl_cache without parentheses) passes the function
    # itself as `maxsize`; fall back to the default size of 128 then.
    if callable(maxsize):
        return _cache(TTLCache(128, ttl, timer), 128, typed)(maxsize)
    if maxsize is None:
        return _cache(_UnboundTTLCache(ttl, timer), None, typed)
    return _cache(TTLCache(maxsize, ttl, timer), maxsize, typed)
|
evalkit_internvl/lib/python3.10/site-packages/cachetools/keys.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Key functions for memoizing decorators."""
|
| 2 |
+
|
| 3 |
+
__all__ = ("hashkey", "methodkey", "typedkey", "typedmethodkey")
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class _HashedTuple(tuple):
    """A tuple that ensures that hash() will be called no more than once
    per element, since cache decorators will hash the key multiple
    times on a cache miss. See also _HashedSeq in the standard
    library functools implementation.

    """

    # Cached hash value; None until the first __hash__ call.
    __hashvalue = None

    def __hash__(self, hash=tuple.__hash__):
        # Compute the tuple hash lazily and memoize it on the instance.
        # `hash` is bound as a default argument so the lookup happens
        # once at definition time.
        hashvalue = self.__hashvalue
        if hashvalue is None:
            self.__hashvalue = hashvalue = hash(self)
        return hashvalue

    def __add__(self, other, add=tuple.__add__):
        # Preserve the _HashedTuple type under concatenation.
        return _HashedTuple(add(self, other))

    def __radd__(self, other, add=tuple.__add__):
        return _HashedTuple(add(other, self))

    def __getstate__(self):
        # Do not pickle the cached hash; it is recomputed on demand.
        return {}
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
# used for separating keyword arguments; we do not use an object
# instance here so identity is preserved when pickling/unpickling
_kwmark = (_HashedTuple,)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def hashkey(*args, **kwargs):
    """Return a cache key for the specified hashable arguments."""

    if kwargs:
        # Flatten sorted (name, value) pairs after the positional args,
        # separated by the _kwmark sentinel so ("a",) with kwarg b=1
        # cannot collide with purely positional ("a", "b", 1).
        return _HashedTuple(args + sum(sorted(kwargs.items()), _kwmark))
    else:
        return _HashedTuple(args)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def methodkey(self, *args, **kwargs):
    """Return a cache key for use with cached methods."""
    # `self` is deliberately excluded from the key so that all
    # instances sharing a cache share entries.
    return hashkey(*args, **kwargs)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def typedkey(*args, **kwargs):
    """Return a typed cache key for the specified hashable arguments."""

    # Append the argument types so that e.g. 1 and 1.0 map to
    # distinct keys, mirroring functools.lru_cache(typed=True).
    key = hashkey(*args, **kwargs)
    key += tuple(type(v) for v in args)
    key += tuple(type(v) for _, v in sorted(kwargs.items()))
    return key
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def typedmethodkey(self, *args, **kwargs):
    """Return a typed cache key for use with cached methods."""
    # `self` is deliberately excluded from the key.
    return typedkey(*args, **kwargs)
|
evalkit_internvl/lib/python3.10/site-packages/contourpy-1.3.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
contourpy-1.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
contourpy-1.3.0.dist-info/LICENSE,sha256=x9ChU7_6oQQERGPrxjN5PUUXIu_TE4tf_SUntA8VBaI,1534
|
| 3 |
+
contourpy-1.3.0.dist-info/METADATA,sha256=qwEg3UmSNobAzlYWB8boJNDNqz32NHyx7ipS1ryICAw,5434
|
| 4 |
+
contourpy-1.3.0.dist-info/RECORD,,
|
| 5 |
+
contourpy-1.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
contourpy-1.3.0.dist-info/WHEEL,sha256=sZM_NeUMz2G4fDenMf11eikcCxcLaQWiYRmjwQBavQs,137
|
| 7 |
+
contourpy/__init__.py,sha256=Vi2YbtUhM9VxYPY3PBvxfu0xZYr6fBysl5gQPJEo88k,11831
|
| 8 |
+
contourpy/__pycache__/__init__.cpython-310.pyc,,
|
| 9 |
+
contourpy/__pycache__/_version.cpython-310.pyc,,
|
| 10 |
+
contourpy/__pycache__/array.cpython-310.pyc,,
|
| 11 |
+
contourpy/__pycache__/chunk.cpython-310.pyc,,
|
| 12 |
+
contourpy/__pycache__/convert.cpython-310.pyc,,
|
| 13 |
+
contourpy/__pycache__/dechunk.cpython-310.pyc,,
|
| 14 |
+
contourpy/__pycache__/enum_util.cpython-310.pyc,,
|
| 15 |
+
contourpy/__pycache__/typecheck.cpython-310.pyc,,
|
| 16 |
+
contourpy/__pycache__/types.cpython-310.pyc,,
|
| 17 |
+
contourpy/_contourpy.cpython-310-x86_64-linux-gnu.so,sha256=_pyI8vQnvplXJ0dm9WeRkYCCDKd153q3UXMULyUBzk8,844208
|
| 18 |
+
contourpy/_contourpy.pyi,sha256=DZwPL5Zr3rHWYr8FuSBNYkjRgON2OFinHhqwSwi4W3w,7151
|
| 19 |
+
contourpy/_version.py,sha256=F5mW07pSyGrqDNY2Ehr-UpDzpBtN-FsYU0QGZWf6PJE,22
|
| 20 |
+
contourpy/array.py,sha256=sHDbt7NOQ-4IuJwCdt2qMt-em7Gl3YjJk0vtxsUkiP8,8986
|
| 21 |
+
contourpy/chunk.py,sha256=8njDQqlpuD22RjaaCyA75FXQsSQDY5hZGJSrxFpvGGU,3279
|
| 22 |
+
contourpy/convert.py,sha256=3R1O-GjrhjP85uZiyyG-L4Yw7cRU5TsbQXlIHLLX1kA,26142
|
| 23 |
+
contourpy/dechunk.py,sha256=EgFL6hw5H54ccuof4tJ2ehdnktT7trgZjiZqppsH8QI,7756
|
| 24 |
+
contourpy/enum_util.py,sha256=o8MItJRs08oqzwPP3IwC75BBAY9Qq95saIzjkXBXwqA,1519
|
| 25 |
+
contourpy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 26 |
+
contourpy/typecheck.py,sha256=t1nvvCuKMYva1Zx4fc30EpdKFcO0Enz3n_UFfXBsq9o,10747
|
| 27 |
+
contourpy/types.py,sha256=2K4T5tJpMIjYrkkg1Lqh3C2ZKlnOhnMtYmtwz92l_y8,247
|
| 28 |
+
contourpy/util/__init__.py,sha256=eVhJ_crOHL7nkG4Kb0dOo7NL4WHMy_Px665aAN_3d-8,118
|
| 29 |
+
contourpy/util/__pycache__/__init__.cpython-310.pyc,,
|
| 30 |
+
contourpy/util/__pycache__/_build_config.cpython-310.pyc,,
|
| 31 |
+
contourpy/util/__pycache__/bokeh_renderer.cpython-310.pyc,,
|
| 32 |
+
contourpy/util/__pycache__/bokeh_util.cpython-310.pyc,,
|
| 33 |
+
contourpy/util/__pycache__/data.cpython-310.pyc,,
|
| 34 |
+
contourpy/util/__pycache__/mpl_renderer.cpython-310.pyc,,
|
| 35 |
+
contourpy/util/__pycache__/mpl_util.cpython-310.pyc,,
|
| 36 |
+
contourpy/util/__pycache__/renderer.cpython-310.pyc,,
|
| 37 |
+
contourpy/util/_build_config.py,sha256=UbZm0vMLsy7p3EBg-8qSYLj7fsmyiALBHSwOHmmQc1s,1848
|
| 38 |
+
contourpy/util/bokeh_renderer.py,sha256=goO7ORdOejP19YZ7UnPlnBuKI4dVJbKRa_G_HXbfGSg,13717
|
| 39 |
+
contourpy/util/bokeh_util.py,sha256=wc-S3ewBUYWyIkEv9jkhFySIergjLQl4Z0UEVnE0HhA,2804
|
| 40 |
+
contourpy/util/data.py,sha256=-7SSGMLX_gN-1H2JzpNSEB_EcEF_uMtYdOo_ePRIcg8,2586
|
| 41 |
+
contourpy/util/mpl_renderer.py,sha256=AGyNAyetPnmin2E4IaxlvrQv2evrw7dK-yQb361SdPI,20071
|
| 42 |
+
contourpy/util/mpl_util.py,sha256=q2OO2gGSO7bYgpNj3DCJtRUC8BjuBSrQClrZ92trT0U,3438
|
| 43 |
+
contourpy/util/renderer.py,sha256=8CBHzPmVsFPfqsWxqrxGBhqFpJhVeFHFeDzVXAgT8Fc,5118
|
evalkit_internvl/lib/python3.10/site-packages/contourpy-1.3.0.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (937 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/_common.cpython-310.pyc
ADDED
|
Binary file (1.42 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/_version.cpython-310.pyc
ADDED
|
Binary file (271 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/relativedelta.cpython-310.pyc
ADDED
|
Binary file (15.7 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/rrule.cpython-310.pyc
ADDED
|
Binary file (43.3 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/tzwin.cpython-310.pyc
ADDED
|
Binary file (190 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (2.25 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/_common.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Common code used in multiple modules.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class weekday(object):
    """A weekday specifier with an optional ordinal, e.g. MO or MO(+2)."""

    __slots__ = ["weekday", "n"]

    def __init__(self, weekday, n=None):
        self.weekday = weekday
        self.n = n

    def __call__(self, n):
        # Calling with the current ordinal returns self unchanged;
        # otherwise produce a copy carrying the new ordinal.
        return self if n == self.n else self.__class__(self.weekday, n)

    def __eq__(self, other):
        # Objects without weekday/n attributes compare unequal.
        try:
            return self.weekday == other.weekday and self.n == other.n
        except AttributeError:
            return False

    def __hash__(self):
        return hash((self.weekday, self.n))

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        names = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")
        label = names[self.weekday]
        return label if not self.n else "%s(%+d)" % (label, self.n)
|
| 42 |
+
|
| 43 |
+
# vim:ts=4:sw=4:et
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/__init__.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
from ._parser import parse, parser, parserinfo, ParserError
|
| 3 |
+
from ._parser import DEFAULTPARSER, DEFAULTTZPARSER
|
| 4 |
+
from ._parser import UnknownTimezoneWarning
|
| 5 |
+
|
| 6 |
+
from ._parser import __doc__
|
| 7 |
+
|
| 8 |
+
from .isoparser import isoparser, isoparse
|
| 9 |
+
|
| 10 |
+
__all__ = ['parse', 'parser', 'parserinfo',
|
| 11 |
+
'isoparse', 'isoparser',
|
| 12 |
+
'ParserError',
|
| 13 |
+
'UnknownTimezoneWarning']
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
###
|
| 17 |
+
# Deprecate portions of the private interface so that downstream code that
|
| 18 |
+
# is improperly relying on it is given *some* notice.
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def __deprecated_private_func(f):
    # Wrap function `f` so that every call emits a DeprecationWarning
    # naming it as private API.
    from functools import wraps
    import warnings

    msg = ('{name} is a private function and may break without warning, '
           'it will be moved and or renamed in future versions.')
    msg = msg.format(name=f.__name__)

    @wraps(f)
    def deprecated_func(*args, **kwargs):
        warnings.warn(msg, DeprecationWarning)
        return f(*args, **kwargs)

    return deprecated_func
|
| 35 |
+
|
| 36 |
+
def __deprecate_private_class(c):
    # Return a subclass of `c` whose constructor emits a
    # DeprecationWarning naming the class as private API.
    import warnings

    msg = ('{name} is a private class and may break without warning, '
           'it will be moved and or renamed in future versions.')
    msg = msg.format(name=c.__name__)

    class private_class(c):
        __doc__ = c.__doc__

        def __init__(self, *args, **kwargs):
            warnings.warn(msg, DeprecationWarning)
            super(private_class, self).__init__(*args, **kwargs)

    # Keep the original class name for repr()/tracebacks.
    private_class.__name__ = c.__name__

    return private_class
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# Rebind the private parser internals through the deprecation wrappers
# so downstream imports of these names get a DeprecationWarning.
from ._parser import _timelex, _resultbase
from ._parser import _tzparser, _parsetz

_timelex = __deprecate_private_class(_timelex)
_tzparser = __deprecate_private_class(_tzparser)
_resultbase = __deprecate_private_class(_resultbase)
_parsetz = __deprecated_private_func(_parsetz)
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (2.07 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/__pycache__/_parser.cpython-310.pyc
ADDED
|
Binary file (40.5 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/__pycache__/isoparser.cpython-310.pyc
ADDED
|
Binary file (11.3 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/_parser.py
ADDED
|
@@ -0,0 +1,1613 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
This module offers a generic date/time string parser which is able to parse
|
| 4 |
+
most known formats to represent a date and/or time.
|
| 5 |
+
|
| 6 |
+
This module attempts to be forgiving with regards to unlikely input formats,
|
| 7 |
+
returning a datetime object even for dates which are ambiguous. If an element
|
| 8 |
+
of a date/time stamp is omitted, the following rules are applied:
|
| 9 |
+
|
| 10 |
+
- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour
|
| 11 |
+
on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is
|
| 12 |
+
specified.
|
| 13 |
+
- If a time zone is omitted, a timezone-naive datetime is returned.
|
| 14 |
+
|
| 15 |
+
If any other elements are missing, they are taken from the
|
| 16 |
+
:class:`datetime.datetime` object passed to the parameter ``default``. If this
|
| 17 |
+
results in a day number exceeding the valid number of days per month, the
|
| 18 |
+
value falls back to the end of the month.
|
| 19 |
+
|
| 20 |
+
Additional resources about date/time string formats can be found below:
|
| 21 |
+
|
| 22 |
+
- `A summary of the international standard date and time notation
|
| 23 |
+
<https://www.cl.cam.ac.uk/~mgk25/iso-time.html>`_
|
| 24 |
+
- `W3C Date and Time Formats <https://www.w3.org/TR/NOTE-datetime>`_
|
| 25 |
+
- `Time Formats (Planetary Rings Node) <https://pds-rings.seti.org:443/tools/time_formats.html>`_
|
| 26 |
+
- `CPAN ParseDate module
|
| 27 |
+
<https://metacpan.org/pod/release/MUIR/Time-modules-2013.0912/lib/Time/ParseDate.pm>`_
|
| 28 |
+
- `Java SimpleDateFormat Class
|
| 29 |
+
<https://docs.oracle.com/javase/6/docs/api/java/text/SimpleDateFormat.html>`_
|
| 30 |
+
"""
|
| 31 |
+
from __future__ import unicode_literals
|
| 32 |
+
|
| 33 |
+
import datetime
|
| 34 |
+
import re
|
| 35 |
+
import string
|
| 36 |
+
import time
|
| 37 |
+
import warnings
|
| 38 |
+
|
| 39 |
+
from calendar import monthrange
|
| 40 |
+
from io import StringIO
|
| 41 |
+
|
| 42 |
+
import six
|
| 43 |
+
from six import integer_types, text_type
|
| 44 |
+
|
| 45 |
+
from decimal import Decimal
|
| 46 |
+
|
| 47 |
+
from warnings import warn
|
| 48 |
+
|
| 49 |
+
from .. import relativedelta
|
| 50 |
+
from .. import tz
|
| 51 |
+
|
| 52 |
+
__all__ = ["parse", "parserinfo", "ParserError"]
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth
|
| 56 |
+
# making public and/or figuring out if there is something we can
|
| 57 |
+
# take off their plate.
|
| 58 |
+
class _timelex(object):
    """Lexer that splits a date/time string into tokens.

    Tokens are maximal runs of letters, runs of digits (possibly with one
    embedded decimal separator), single whitespace tokens, or single
    punctuation characters.  Input may be str, bytes/bytearray (decoded
    with the default codec), or any object with a ``read`` method.
    """
    # Fractional seconds are sometimes split by a comma
    _split_decimal = re.compile("([.,])")

    def __init__(self, instream):
        # Normalize bytes input to text before wrapping it in a stream.
        if isinstance(instream, (bytes, bytearray)):
            instream = instream.decode()

        if isinstance(instream, text_type):
            instream = StringIO(instream)
        elif getattr(instream, 'read', None) is None:
            # Anything that is not text must at least be a readable stream.
            raise TypeError('Parser must be a string or character stream, not '
                            '{itype}'.format(itype=instream.__class__.__name__))

        self.instream = instream
        # Characters read ahead but not yet consumed by a token.
        self.charstack = []
        # Tokens produced ahead of time when a dotted run is split up.
        self.tokenstack = []
        self.eof = False

    def get_token(self):
        """
        This function breaks the time string into lexical units (tokens), which
        can be parsed by the parser. Lexical units are demarcated by changes in
        the character set, so any continuous string of letters is considered
        one unit, any continuous string of numbers is considered one unit.

        The main complication arises from the fact that dots ('.') can be used
        both as separators (e.g. "Sep.20.2009") or decimal points (e.g.
        "4:30:21.447"). As such, it is necessary to read the full context of
        any dot-separated strings before breaking it into tokens; as such, this
        function maintains a "token stack", for when the ambiguous context
        demands that multiple tokens be parsed at once.
        """
        # Serve any tokens queued up by a previous dotted-run split first.
        if self.tokenstack:
            return self.tokenstack.pop(0)

        seenletters = False
        token = None
        # state: None = no token started, 'a' = word, '0' = number,
        # 'a.' = dotted word run, '0.' = dotted number run.
        state = None

        while not self.eof:
            # We only realize that we've reached the end of a token when we
            # find a character that's not part of the current token - since
            # that character may be part of the next token, it's stored in the
            # charstack.
            if self.charstack:
                nextchar = self.charstack.pop(0)
            else:
                nextchar = self.instream.read(1)
                while nextchar == '\x00':
                    nextchar = self.instream.read(1)

            if not nextchar:
                self.eof = True
                break
            elif not state:
                # First character of the token - determines if we're starting
                # to parse a word, a number or something else.
                token = nextchar
                if self.isword(nextchar):
                    state = 'a'
                elif self.isnum(nextchar):
                    state = '0'
                elif self.isspace(nextchar):
                    token = ' '
                    break  # emit token
                else:
                    break  # emit token
            elif state == 'a':
                # If we've already started reading a word, we keep reading
                # letters until we find something that's not part of a word.
                seenletters = True
                if self.isword(nextchar):
                    token += nextchar
                elif nextchar == '.':
                    token += nextchar
                    state = 'a.'
                else:
                    self.charstack.append(nextchar)
                    break  # emit token
            elif state == '0':
                # If we've already started reading a number, we keep reading
                # numbers until we find something that doesn't fit.
                if self.isnum(nextchar):
                    token += nextchar
                elif nextchar == '.' or (nextchar == ',' and len(token) >= 2):
                    token += nextchar
                    state = '0.'
                else:
                    self.charstack.append(nextchar)
                    break  # emit token
            elif state == 'a.':
                # If we've seen some letters and a dot separator, continue
                # parsing, and the tokens will be broken up later.
                seenletters = True
                if nextchar == '.' or self.isword(nextchar):
                    token += nextchar
                elif self.isnum(nextchar) and token[-1] == '.':
                    token += nextchar
                    state = '0.'
                else:
                    self.charstack.append(nextchar)
                    break  # emit token
            elif state == '0.':
                # If we've seen at least one dot separator, keep going, we'll
                # break up the tokens later.
                if nextchar == '.' or self.isnum(nextchar):
                    token += nextchar
                elif self.isword(nextchar) and token[-1] == '.':
                    token += nextchar
                    state = 'a.'
                else:
                    self.charstack.append(nextchar)
                    break  # emit token

        # A dotted run that contained letters, multiple dots, or a trailing
        # separator was punctuation-separated text, not a decimal number:
        # split it back into individual tokens, queueing the surplus.
        if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or
                                       token[-1] in '.,')):
            l = self._split_decimal.split(token)
            token = l[0]
            for tok in l[1:]:
                if tok:
                    self.tokenstack.append(tok)

        # A comma in a pure numeric token is a decimal separator
        # (e.g. "4:30:21,447" in some locales) - normalize it to a dot.
        if state == '0.' and token.count('.') == 0:
            token = token.replace(',', '.')

        return token

    def __iter__(self):
        return self

    def __next__(self):
        token = self.get_token()
        if token is None:
            raise StopIteration

        return token

    def next(self):
        return self.__next__()  # Python 2.x support

    @classmethod
    def split(cls, s):
        # Convenience: tokenize the whole string *s* in one shot.
        return list(cls(s))

    @classmethod
    def isword(cls, nextchar):
        """ Whether or not the next character is part of a word """
        return nextchar.isalpha()

    @classmethod
    def isnum(cls, nextchar):
        """ Whether the next character is part of a number """
        return nextchar.isdigit()

    @classmethod
    def isspace(cls, nextchar):
        """ Whether the next character is whitespace """
        return nextchar.isspace()
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
class _resultbase(object):
|
| 220 |
+
|
| 221 |
+
def __init__(self):
|
| 222 |
+
for attr in self.__slots__:
|
| 223 |
+
setattr(self, attr, None)
|
| 224 |
+
|
| 225 |
+
def _repr(self, classname):
|
| 226 |
+
l = []
|
| 227 |
+
for attr in self.__slots__:
|
| 228 |
+
value = getattr(self, attr)
|
| 229 |
+
if value is not None:
|
| 230 |
+
l.append("%s=%s" % (attr, repr(value)))
|
| 231 |
+
return "%s(%s)" % (classname, ", ".join(l))
|
| 232 |
+
|
| 233 |
+
def __len__(self):
|
| 234 |
+
return (sum(getattr(self, attr) is not None
|
| 235 |
+
for attr in self.__slots__))
|
| 236 |
+
|
| 237 |
+
def __repr__(self):
|
| 238 |
+
return self._repr(self.__class__.__name__)
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
class parserinfo(object):
    """
    Class which handles what inputs are accepted. Subclass this to customize
    the language and acceptable values for each parameter.

    :param dayfirst:
        Whether to interpret the first value in an ambiguous 3-integer date
        (e.g. 01/05/09) as the day (``True``) or month (``False``). If
        ``yearfirst`` is set to ``True``, this distinguishes between YDM
        and YMD. Default is ``False``.

    :param yearfirst:
        Whether to interpret the first value in an ambiguous 3-integer date
        (e.g. 01/05/09) as the year. If ``True``, the first number is taken
        to be the year, otherwise the last number is taken to be the year.
        Default is ``False``.
    """

    # m from a.m/p.m, t from ISO T separator
    JUMP = [" ", ".", ",", ";", "-", "/", "'",
            "at", "on", "and", "ad", "m", "t", "of",
            "st", "nd", "rd", "th"]

    WEEKDAYS = [("Mon", "Monday"),
                ("Tue", "Tuesday"),  # TODO: "Tues"
                ("Wed", "Wednesday"),
                ("Thu", "Thursday"),  # TODO: "Thurs"
                ("Fri", "Friday"),
                ("Sat", "Saturday"),
                ("Sun", "Sunday")]
    MONTHS = [("Jan", "January"),
              ("Feb", "February"),  # TODO: "Febr"
              ("Mar", "March"),
              ("Apr", "April"),
              ("May", "May"),
              ("Jun", "June"),
              ("Jul", "July"),
              ("Aug", "August"),
              ("Sep", "Sept", "September"),
              ("Oct", "October"),
              ("Nov", "November"),
              ("Dec", "December")]
    HMS = [("h", "hour", "hours"),
           ("m", "minute", "minutes"),
           ("s", "second", "seconds")]
    AMPM = [("am", "a"),
            ("pm", "p")]
    UTCZONE = ["UTC", "GMT", "Z", "z"]
    PERTAIN = ["of"]
    TZOFFSET = {}
    # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate",
    #              "Anno Domini", "Year of Our Lord"]

    def __init__(self, dayfirst=False, yearfirst=False):
        # Build case-insensitive lookup tables from the class-level lists
        # above so subclass overrides are picked up automatically.
        self._jump = self._convert(self.JUMP)
        self._weekdays = self._convert(self.WEEKDAYS)
        self._months = self._convert(self.MONTHS)
        self._hms = self._convert(self.HMS)
        self._ampm = self._convert(self.AMPM)
        self._utczone = self._convert(self.UTCZONE)
        self._pertain = self._convert(self.PERTAIN)

        self.dayfirst = dayfirst
        self.yearfirst = yearfirst

        # Anchor for two-digit year resolution (current local year).
        self._year = time.localtime().tm_year
        self._century = self._year // 100 * 100

    def _convert(self, lst):
        """Map every lower-cased spelling in *lst* to its index."""
        dct = {}
        for i, entry in enumerate(lst):
            # An entry may be a single spelling or a tuple of synonyms.
            spellings = entry if isinstance(entry, tuple) else (entry,)
            for spelling in spellings:
                dct[spelling.lower()] = i
        return dct

    def jump(self, name):
        """Whether *name* is a skippable filler token."""
        return name.lower() in self._jump

    def weekday(self, name):
        """Weekday index (Mon=0) for *name*, or None if unrecognized."""
        return self._weekdays.get(name.lower())

    def month(self, name):
        """Month number (Jan=1) for *name*, or None if unrecognized."""
        idx = self._months.get(name.lower())
        if idx is None:
            return None
        return idx + 1

    def hms(self, name):
        """Index 0/1/2 for hour/minute/second unit words, or None."""
        return self._hms.get(name.lower())

    def ampm(self, name):
        """0 for AM spellings, 1 for PM spellings, None otherwise."""
        return self._ampm.get(name.lower())

    def pertain(self, name):
        """Whether *name* is a pertain word (e.g. "of" in "25th of Sep")."""
        return name.lower() in self._pertain

    def utczone(self, name):
        """Case-insensitive test for a UTC-equivalent zone name."""
        return name.lower() in self._utczone

    def tzoffset(self, name):
        # NOTE(review): unlike utczone(), this membership test does not
        # lower-case *name*, so only spellings that survive lower-casing
        # (e.g. "z") hit the UTC fast path — preserved as-is.
        if name in self._utczone:
            return 0

        return self.TZOFFSET.get(name)

    def convertyear(self, year, century_specified=False):
        """
        Converts two-digit years to year within [-50, 49]
        range of self._year (current local time)
        """

        # Function contract is that the year is always positive
        assert year >= 0

        if year < 100 and not century_specified:
            # assume current century to start
            year += self._century

            if year >= self._year + 50:  # if too far in future
                year -= 100
            elif year < self._year - 50:  # if too far in past
                year += 100

        return year

    def validate(self, res):
        """Normalize a parse result in place; always returns True."""
        # move to info
        if res.year is not None:
            res.year = self.convertyear(res.year, res.century_specified)

        if ((res.tzoffset == 0 and not res.tzname) or
                (res.tzname == 'Z' or res.tzname == 'z')):
            # A zero offset (or ISO "Z") is canonicalized to UTC.
            res.tzname = "UTC"
            res.tzoffset = 0
        elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname):
            # A named UTC zone always means offset zero.
            res.tzoffset = 0
        return True
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
class _ymd(list):
    """List of up to three year/month/day tokens, in input order.

    Tracks which positions have been positively identified as year, month
    or day (``ystridx``/``mstridx``/``dstridx``) so that
    :meth:`resolve_ymd` can disambiguate the remaining members using the
    ``yearfirst``/``dayfirst`` hints.

    Fix over the original: ``super(self.__class__, self)`` is replaced by
    ``super(_ymd, self)`` in ``__init__`` and ``append`` — the former
    recurses forever as soon as this class is subclassed, because
    ``self.__class__`` is then the subclass and the MRO walk restarts at
    the same frame.
    """

    def __init__(self, *args, **kwargs):
        super(_ymd, self).__init__(*args, **kwargs)
        self.century_specified = False  # saw an explicit 3+ digit year
        self.dstridx = None  # index of the known day, if any
        self.mstridx = None  # index of the known month, if any
        self.ystridx = None  # index of the known year, if any

    @property
    def has_year(self):
        return self.ystridx is not None

    @property
    def has_month(self):
        return self.mstridx is not None

    @property
    def has_day(self):
        return self.dstridx is not None

    def could_be_day(self, value):
        """Whether *value* is a plausible day number given what is known."""
        if self.has_day:
            return False
        elif not self.has_month:
            return 1 <= value <= 31
        elif not self.has_year:
            # Be permissive, assume leap year
            month = self[self.mstridx]
            return 1 <= value <= monthrange(2000, month)[1]
        else:
            month = self[self.mstridx]
            year = self[self.ystridx]
            return 1 <= value <= monthrange(year, month)[1]

    def append(self, val, label=None):
        """Append a token, recording its label ('Y', 'M' or 'D') if known.

        A digit string longer than two characters (or an int > 100) is an
        explicit, century-qualified year and forces label 'Y'.

        :raises ValueError:
            If *label* conflicts with the inferred year label, or if the
            labeled slot (year/month/day) was already set.
        """
        if hasattr(val, '__len__'):
            if val.isdigit() and len(val) > 2:
                self.century_specified = True
                if label not in [None, 'Y']:  # pragma: no cover
                    raise ValueError(label)
                label = 'Y'
        elif val > 100:
            self.century_specified = True
            if label not in [None, 'Y']:  # pragma: no cover
                raise ValueError(label)
            label = 'Y'

        super(_ymd, self).append(int(val))

        if label == 'M':
            if self.has_month:
                raise ValueError('Month is already set')
            self.mstridx = len(self) - 1
        elif label == 'D':
            if self.has_day:
                raise ValueError('Day is already set')
            self.dstridx = len(self) - 1
        elif label == 'Y':
            if self.has_year:
                raise ValueError('Year is already set')
            self.ystridx = len(self) - 1

    def _resolve_from_stridxs(self, strids):
        """
        Try to resolve the identities of year/month/day elements using
        ystridx, mstridx, and dstridx, if enough of these are specified.
        """
        if len(self) == 3 and len(strids) == 2:
            # we can back out the remaining stridx value
            missing = [x for x in range(3) if x not in strids.values()]
            key = [x for x in ['y', 'm', 'd'] if x not in strids]
            assert len(missing) == len(key) == 1
            key = key[0]
            val = missing[0]
            strids[key] = val

        assert len(self) == len(strids)  # otherwise this should not be called
        out = {key: self[strids[key]] for key in strids}
        return (out.get('y'), out.get('m'), out.get('d'))

    def resolve_ymd(self, yearfirst, dayfirst):
        """Return ``(year, month, day)``; members that cannot be
        determined are None.

        :param yearfirst: prefer reading the first ambiguous value as year.
        :param dayfirst: prefer reading an ambiguous value as day over month.
        :raises ValueError: if more than three values were collected.
        """
        len_ymd = len(self)
        year, month, day = (None, None, None)

        strids = (('y', self.ystridx),
                  ('m', self.mstridx),
                  ('d', self.dstridx))

        strids = {key: val for key, val in strids if val is not None}
        if (len(self) == len(strids) > 0 or
                (len(self) == 3 and len(strids) == 2)):
            # Enough members are positively labeled; no guessing needed.
            return self._resolve_from_stridxs(strids)

        mstridx = self.mstridx

        if len_ymd > 3:
            raise ValueError("More than three YMD values")
        elif len_ymd == 1 or (mstridx is not None and len_ymd == 2):
            # One member, or two members with a month string
            if mstridx is not None:
                month = self[mstridx]
                # since mstridx is 0 or 1, self[mstridx-1] always
                # looks up the other element
                other = self[mstridx - 1]
            else:
                other = self[0]

            if len_ymd > 1 or mstridx is None:
                if other > 31:
                    year = other
                else:
                    day = other

        elif len_ymd == 2:
            # Two members with numbers
            if self[0] > 31:
                # 99-01
                year, month = self
            elif self[1] > 31:
                # 01-99
                month, year = self
            elif dayfirst and self[1] <= 12:
                # 13-01
                day, month = self
            else:
                # 01-13
                month, day = self

        elif len_ymd == 3:
            # Three members
            if mstridx == 0:
                if self[1] > 31:
                    # Apr-2003-25
                    month, year, day = self
                else:
                    month, day, year = self
            elif mstridx == 1:
                if self[0] > 31 or (yearfirst and self[2] <= 31):
                    # 99-Jan-01
                    year, month, day = self
                else:
                    # 01-Jan-01
                    # Give precedence to day-first, since
                    # two-digit years is usually hand-written.
                    day, month, year = self

            elif mstridx == 2:
                # WTF!?
                if self[1] > 31:
                    # 01-99-Jan
                    day, year, month = self
                else:
                    # 99-01-Jan
                    year, day, month = self

            else:
                if (self[0] > 31 or
                        self.ystridx == 0 or
                        (yearfirst and self[1] <= 12 and self[2] <= 31)):
                    # 99-01-01
                    if dayfirst and self[2] <= 12:
                        year, day, month = self
                    else:
                        year, month, day = self
                elif self[0] > 12 or (dayfirst and self[1] <= 12):
                    # 13-01-01
                    day, month, year = self
                else:
                    # 01-13-01
                    month, day, year = self

        return year, month, day
|
| 566 |
+
|
| 567 |
+
|
| 568 |
+
class parser(object):
    """Date/time string parser.

    Wraps a :class:`parserinfo` instance (supplying month/weekday names,
    timezone aliases, etc.) and exposes :meth:`parse` as the public entry
    point.  All ``_``-prefixed methods are internal helpers.
    """

    def __init__(self, info=None):
        # Fall back to a default parserinfo when none is supplied.
        self.info = info or parserinfo()

    def parse(self, timestr, default=None,
              ignoretz=False, tzinfos=None, **kwargs):
        """
        Parse the date/time string into a :class:`datetime.datetime` object.

        :param timestr:
            Any date/time string using the supported formats.

        :param default:
            The default datetime object, if this is a datetime object and not
            ``None``, elements specified in ``timestr`` replace elements in the
            default object.

        :param ignoretz:
            If set ``True``, time zones in parsed strings are ignored and a
            naive :class:`datetime.datetime` object is returned.

        :param tzinfos:
            Additional time zone names / aliases which may be present in the
            string. This argument maps time zone names (and optionally offsets
            from those time zones) to time zones. This parameter can be a
            dictionary with timezone aliases mapping time zone names to time
            zones or a function taking two parameters (``tzname`` and
            ``tzoffset``) and returning a time zone.

            The timezones to which the names are mapped can be an integer
            offset from UTC in seconds or a :class:`tzinfo` object.

            .. doctest::
               :options: +NORMALIZE_WHITESPACE

                >>> from dateutil.parser import parse
                >>> from dateutil.tz import gettz
                >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")}
                >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos)
                datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200))
                >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos)
                datetime.datetime(2012, 1, 19, 17, 21,
                                  tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago'))

            This parameter is ignored if ``ignoretz`` is set.

        :param \\*\\*kwargs:
            Keyword arguments as passed to ``_parse()``.

        :return:
            Returns a :class:`datetime.datetime` object or, if the
            ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the
            first element being a :class:`datetime.datetime` object, the second
            a tuple containing the fuzzy tokens.

        :raises ParserError:
            Raised for invalid or unknown string format, if the provided
            :class:`tzinfo` is not in a valid format, or if an invalid date
            would be created.

        :raises TypeError:
            Raised for non-string or character stream input.

        :raises OverflowError:
            Raised if the parsed date exceeds the largest valid C integer on
            your system.
        """

        if default is None:
            # Default anchor is midnight of the current day; fields found in
            # timestr replace fields of this anchor.
            default = datetime.datetime.now().replace(hour=0, minute=0,
                                                      second=0, microsecond=0)

        res, skipped_tokens = self._parse(timestr, **kwargs)

        if res is None:
            raise ParserError("Unknown string format: %s", timestr)

        if len(res) == 0:
            raise ParserError("String does not contain a date: %s", timestr)

        try:
            ret = self._build_naive(res, default)
        except ValueError as e:
            # Re-raise as ParserError while preserving the original cause.
            six.raise_from(ParserError(str(e) + ": %s", timestr), e)

        if not ignoretz:
            ret = self._build_tzaware(ret, res, tzinfos)

        if kwargs.get('fuzzy_with_tokens', False):
            return ret, skipped_tokens
        else:
            return ret

    class _result(_resultbase):
        # Container for the raw fields extracted by _parse(); all slots start
        # as None and only the fields actually found in the string are set.
        __slots__ = ["year", "month", "day", "weekday",
                     "hour", "minute", "second", "microsecond",
                     "tzname", "tzoffset", "ampm","any_unused_tokens"]

    def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False,
               fuzzy_with_tokens=False):
        """
        Private method which performs the heavy lifting of parsing, called from
        ``parse()``, which passes on its ``kwargs`` to this function.

        :param timestr:
            The string to parse.

        :param dayfirst:
            Whether to interpret the first value in an ambiguous 3-integer date
            (e.g. 01/05/09) as the day (``True``) or month (``False``). If
            ``yearfirst`` is set to ``True``, this distinguishes between YDM
            and YMD. If set to ``None``, this value is retrieved from the
            current :class:`parserinfo` object (which itself defaults to
            ``False``).

        :param yearfirst:
            Whether to interpret the first value in an ambiguous 3-integer date
            (e.g. 01/05/09) as the year. If ``True``, the first number is taken
            to be the year, otherwise the last number is taken to be the year.
            If this is set to ``None``, the value is retrieved from the current
            :class:`parserinfo` object (which itself defaults to ``False``).

        :param fuzzy:
            Whether to allow fuzzy parsing, allowing for string like "Today is
            January 1, 2047 at 8:21:00AM".

        :param fuzzy_with_tokens:
            If ``True``, ``fuzzy`` is automatically set to True, and the parser
            will return a tuple where the first element is the parsed
            :class:`datetime.datetime` datetimestamp and the second element is
            a tuple containing the portions of the string which were ignored:

            .. doctest::

                >>> from dateutil.parser import parse
                >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True)
                (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at '))

        """
        if fuzzy_with_tokens:
            fuzzy = True

        info = self.info

        if dayfirst is None:
            dayfirst = info.dayfirst

        if yearfirst is None:
            yearfirst = info.yearfirst

        res = self._result()
        l = _timelex.split(timestr)         # Splits the timestr into tokens

        skipped_idxs = []

        # year/month/day list
        ymd = _ymd()

        len_l = len(l)
        i = 0
        try:
            while i < len_l:

                # Check if it's a number
                value_repr = l[i]
                try:
                    value = float(value_repr)
                except ValueError:
                    value = None

                if value is not None:
                    # Numeric token
                    i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy)

                # Check weekday
                elif info.weekday(l[i]) is not None:
                    value = info.weekday(l[i])
                    res.weekday = value

                # Check month name
                elif info.month(l[i]) is not None:
                    value = info.month(l[i])
                    ymd.append(value, 'M')

                    if i + 1 < len_l:
                        if l[i + 1] in ('-', '/'):
                            # Jan-01[-99]
                            sep = l[i + 1]
                            ymd.append(l[i + 2])

                            if i + 3 < len_l and l[i + 3] == sep:
                                # Jan-01-99
                                ymd.append(l[i + 4])
                                i += 2

                            i += 2

                        elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and
                              info.pertain(l[i + 2])):
                            # Jan of 01
                            # In this case, 01 is clearly year
                            if l[i + 4].isdigit():
                                # Convert it here to become unambiguous
                                value = int(l[i + 4])
                                year = str(info.convertyear(value))
                                ymd.append(year, 'Y')
                            else:
                                # Wrong guess
                                pass
                                # TODO: not hit in tests
                            i += 4

                # Check am/pm
                elif info.ampm(l[i]) is not None:
                    value = info.ampm(l[i])
                    val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy)

                    if val_is_ampm:
                        res.hour = self._adjust_ampm(res.hour, value)
                        res.ampm = value

                    elif fuzzy:
                        skipped_idxs.append(i)

                # Check for a timezone name
                elif self._could_be_tzname(res.hour, res.tzname,
                                           res.tzoffset, l[i]):
                    res.tzname = l[i]
                    res.tzoffset = info.tzoffset(res.tzname)

                    # Check for something like GMT+3, or BRST+3. Notice
                    # that it doesn't mean "I am 3 hours after GMT", but
                    # "my time +3 is GMT". If found, we reverse the
                    # logic so that timezone parsing code will get it
                    # right.
                    if i + 1 < len_l and l[i + 1] in ('+', '-'):
                        l[i + 1] = ('+', '-')[l[i + 1] == '+']
                        res.tzoffset = None
                        if info.utczone(res.tzname):
                            # With something like GMT+3, the timezone
                            # is *not* GMT.
                            res.tzname = None

                # Check for a numbered timezone
                elif res.hour is not None and l[i] in ('+', '-'):
                    signal = (-1, 1)[l[i] == '+']
                    len_li = len(l[i + 1])

                    # TODO: check that l[i + 1] is integer?
                    if len_li == 4:
                        # -0300
                        hour_offset = int(l[i + 1][:2])
                        min_offset = int(l[i + 1][2:])
                    elif i + 2 < len_l and l[i + 2] == ':':
                        # -03:00
                        hour_offset = int(l[i + 1])
                        min_offset = int(l[i + 3])  # TODO: Check that l[i+3] is minute-like?
                        i += 2
                    elif len_li <= 2:
                        # -[0]3
                        hour_offset = int(l[i + 1][:2])
                        min_offset = 0
                    else:
                        raise ValueError(timestr)

                    res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60)

                    # Look for a timezone name between parenthesis
                    if (i + 5 < len_l and
                            info.jump(l[i + 2]) and l[i + 3] == '(' and
                            l[i + 5] == ')' and
                            3 <= len(l[i + 4]) and
                            self._could_be_tzname(res.hour, res.tzname,
                                                  None, l[i + 4])):
                        # -0300 (BRST)
                        res.tzname = l[i + 4]
                        i += 4

                    i += 1

                # Check jumps
                elif not (info.jump(l[i]) or fuzzy):
                    raise ValueError(timestr)

                else:
                    skipped_idxs.append(i)
                i += 1

            # Process year/month/day
            year, month, day = ymd.resolve_ymd(yearfirst, dayfirst)

            res.century_specified = ymd.century_specified
            res.year = year
            res.month = month
            res.day = day

        except (IndexError, ValueError):
            # Any token-indexing or conversion failure means the string
            # could not be parsed; callers treat (None, None) as failure.
            return None, None

        if not info.validate(res):
            return None, None

        if fuzzy_with_tokens:
            skipped_tokens = self._recombine_skipped(l, skipped_idxs)
            return res, tuple(skipped_tokens)
        else:
            return res, None

    def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy):
        """Consume the numeric token at ``idx`` (and possibly followers),
        filling ``ymd``/``res`` in place; returns the last index consumed."""
        # Token is a number
        value_repr = tokens[idx]
        try:
            value = self._to_decimal(value_repr)
        except Exception as e:
            six.raise_from(ValueError('Unknown numeric token'), e)

        len_li = len(value_repr)

        len_l = len(tokens)

        if (len(ymd) == 3 and len_li in (2, 4) and
            res.hour is None and
                (idx + 1 >= len_l or
                 (tokens[idx + 1] != ':' and
                  info.hms(tokens[idx + 1]) is None))):
            # 19990101T23[59]
            s = tokens[idx]
            res.hour = int(s[:2])

            if len_li == 4:
                res.minute = int(s[2:])

        elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6):
            # YYMMDD or HHMMSS[.ss]
            s = tokens[idx]

            if not ymd and '.' not in tokens[idx]:
                ymd.append(s[:2])
                ymd.append(s[2:4])
                ymd.append(s[4:])
            else:
                # 19990101T235959[.59]

                # TODO: Check if res attributes already set.
                res.hour = int(s[:2])
                res.minute = int(s[2:4])
                res.second, res.microsecond = self._parsems(s[4:])

        elif len_li in (8, 12, 14):
            # YYYYMMDD
            s = tokens[idx]
            ymd.append(s[:4], 'Y')
            ymd.append(s[4:6])
            ymd.append(s[6:8])

            if len_li > 8:
                res.hour = int(s[8:10])
                res.minute = int(s[10:12])

                if len_li > 12:
                    res.second = int(s[12:])

        elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None:
            # HH[ ]h or MM[ ]m or SS[.ss][ ]s
            hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True)
            (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx)
            if hms is not None:
                # TODO: checking that hour/minute/second are not
                # already set?
                self._assign_hms(res, value_repr, hms)

        elif idx + 2 < len_l and tokens[idx + 1] == ':':
            # HH:MM[:SS[.ss]]
            res.hour = int(value)
            value = self._to_decimal(tokens[idx + 2])  # TODO: try/except for this?
            (res.minute, res.second) = self._parse_min_sec(value)

            if idx + 4 < len_l and tokens[idx + 3] == ':':
                res.second, res.microsecond = self._parsems(tokens[idx + 4])

                idx += 2

            idx += 2

        elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'):
            sep = tokens[idx + 1]
            ymd.append(value_repr)

            if idx + 2 < len_l and not info.jump(tokens[idx + 2]):
                if tokens[idx + 2].isdigit():
                    # 01-01[-01]
                    ymd.append(tokens[idx + 2])
                else:
                    # 01-Jan[-01]
                    value = info.month(tokens[idx + 2])

                    if value is not None:
                        ymd.append(value, 'M')
                    else:
                        raise ValueError()

                if idx + 3 < len_l and tokens[idx + 3] == sep:
                    # We have three members
                    value = info.month(tokens[idx + 4])

                    if value is not None:
                        ymd.append(value, 'M')
                    else:
                        ymd.append(tokens[idx + 4])
                    idx += 2

                idx += 1
            idx += 1

        elif idx + 1 >= len_l or info.jump(tokens[idx + 1]):
            if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None:
                # 12 am
                hour = int(value)
                res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2]))
                idx += 1
            else:
                # Year, month or day
                ymd.append(value)
            idx += 1

        elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24):
            # 12am
            hour = int(value)
            res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1]))
            idx += 1

        elif ymd.could_be_day(value):
            ymd.append(value)

        elif not fuzzy:
            raise ValueError()

        return idx

    def _find_hms_idx(self, idx, tokens, info, allow_jump):
        """Return the index of the h/m/s label token associated with the
        numeric token at ``idx``, or ``None`` if there is no such label."""
        len_l = len(tokens)

        if idx+1 < len_l and info.hms(tokens[idx+1]) is not None:
            # There is an "h", "m", or "s" label following this token.  We
            # assign the upcoming label to the current token.
            # e.g. the "12" in 12h"
            hms_idx = idx + 1

        elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and
              info.hms(tokens[idx+2]) is not None):
            # There is a space and then an "h", "m", or "s" label.
            # e.g. the "12" in "12 h"
            hms_idx = idx + 2

        elif idx > 0 and info.hms(tokens[idx-1]) is not None:
            # There is a "h", "m", or "s" preceding this token. Since neither
            # of the previous cases was hit, there is no label following this
            # token, so we use the previous label.
            # e.g. the "04" in "12h04"
            hms_idx = idx-1

        elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and
              info.hms(tokens[idx-2]) is not None):
            # If we are looking at the final token, we allow for a
            # backward-looking check to skip over a space.
            # TODO: Are we sure this is the right condition here?
            hms_idx = idx - 2

        else:
            hms_idx = None

        return hms_idx

    def _assign_hms(self, res, value_repr, hms):
        """Store ``value_repr`` into the hour/minute/second slot selected by
        ``hms`` (0=hour, 1=minute, 2=second), carrying fractions down."""
        # See GH issue #427, fixing float rounding
        value = self._to_decimal(value_repr)

        if hms == 0:
            # Hour
            res.hour = int(value)
            if value % 1:
                # Fractional hours become minutes (e.g. "1.5h" -> 1:30).
                res.minute = int(60*(value % 1))

        elif hms == 1:
            (res.minute, res.second) = self._parse_min_sec(value)

        elif hms == 2:
            (res.second, res.microsecond) = self._parsems(value_repr)

    def _could_be_tzname(self, hour, tzname, tzoffset, token):
        """Heuristic: a token may be a tz name only after an hour was parsed,
        before any tz info was set, and if it looks like a short abbreviation."""
        return (hour is not None and
                tzname is None and
                tzoffset is None and
                len(token) <= 5 and
                (all(x in string.ascii_uppercase for x in token)
                 or token in self.info.UTCZONE))

    def _ampm_valid(self, hour, ampm, fuzzy):
        """
        For fuzzy parsing, 'a' or 'am' (both valid English words)
        may erroneously trigger the AM/PM flag. Deal with that
        here.
        """
        val_is_ampm = True

        # If there's already an AM/PM flag, this one isn't one.
        if fuzzy and ampm is not None:
            val_is_ampm = False

        # If AM/PM is found and hour is not, raise a ValueError
        if hour is None:
            if fuzzy:
                val_is_ampm = False
            else:
                raise ValueError('No hour specified with AM or PM flag.')
        elif not 0 <= hour <= 12:
            # If AM/PM is found, it's a 12 hour clock, so raise
            # an error for invalid range
            if fuzzy:
                val_is_ampm = False
            else:
                raise ValueError('Invalid hour specified for 12-hour clock.')

        return val_is_ampm

    def _adjust_ampm(self, hour, ampm):
        """Convert a 12-hour clock ``hour`` to 24-hour given ampm (0=AM, 1=PM)."""
        if hour < 12 and ampm == 1:
            hour += 12
        elif hour == 12 and ampm == 0:
            # 12 AM is midnight.
            hour = 0
        return hour

    def _parse_min_sec(self, value):
        # TODO: Every usage of this function sets res.second to the return
        # value. Are there any cases where second will be returned as None and
        # we *don't* want to set res.second = None?
        minute = int(value)
        second = None

        sec_remainder = value % 1
        if sec_remainder:
            second = int(60 * sec_remainder)
        return (minute, second)

    def _parse_hms(self, idx, tokens, info, hms_idx):
        """Resolve an hms label index into (new_idx, hms-code), where the
        code is that of the labeled unit (incremented when looking back)."""
        # TODO: Is this going to admit a lot of false-positives for when we
        # just happen to have digits and "h", "m" or "s" characters in non-date
        # text?  I guess hex hashes won't have that problem, but there's plenty
        # of random junk out there.
        if hms_idx is None:
            hms = None
            new_idx = idx
        elif hms_idx > idx:
            hms = info.hms(tokens[hms_idx])
            new_idx = hms_idx
        else:
            # Looking backwards, increment one.
            hms = info.hms(tokens[hms_idx]) + 1
            new_idx = idx

        return (new_idx, hms)

    # ------------------------------------------------------------------
    # Handling for individual tokens.  These are kept as methods instead
    # of functions for the sake of customizability via subclassing.

    def _parsems(self, value):
        """Parse a I[.F] seconds value into (seconds, microseconds)."""
        if "." not in value:
            return int(value), 0
        else:
            i, f = value.split(".")
            # Pad/truncate the fraction to exactly six digits (microseconds).
            return int(i), int(f.ljust(6, "0")[:6])

    def _to_decimal(self, val):
        """Convert ``val`` to :class:`decimal.Decimal`, rejecting non-finite
        values; raises ValueError (chained) on any failure."""
        try:
            decimal_value = Decimal(val)
            # See GH 662, edge case, infinite value should not be converted
            # via `_to_decimal`
            if not decimal_value.is_finite():
                raise ValueError("Converted decimal value is infinite or NaN")
        except Exception as e:
            msg = "Could not convert %s to decimal" % val
            six.raise_from(ValueError(msg), e)
        else:
            return decimal_value

    # ------------------------------------------------------------------
    # Post-Parsing construction of datetime output.  These are kept as
    # methods instead of functions for the sake of customizability via
    # subclassing.

    def _build_tzinfo(self, tzinfos, tzname, tzoffset):
        """Resolve ``tzname``/``tzoffset`` against the user-supplied
        ``tzinfos`` mapping or callable into a tzinfo (or None)."""
        if callable(tzinfos):
            tzdata = tzinfos(tzname, tzoffset)
        else:
            tzdata = tzinfos.get(tzname)
        # handle case where tzinfo is passed an option that returns None
        # eg tzinfos = {'BRST' : None}
        if isinstance(tzdata, datetime.tzinfo) or tzdata is None:
            tzinfo = tzdata
        elif isinstance(tzdata, text_type):
            tzinfo = tz.tzstr(tzdata)
        elif isinstance(tzdata, integer_types):
            # Integer values are seconds east of UTC.
            tzinfo = tz.tzoffset(tzname, tzdata)
        else:
            raise TypeError("Offset must be tzinfo subclass, tz string, "
                            "or int offset.")
        return tzinfo

    def _build_tzaware(self, naive, res, tzinfos):
        """Attach timezone information from ``res``/``tzinfos`` to ``naive``;
        returns the (possibly still naive) datetime."""
        if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)):
            tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset)
            aware = naive.replace(tzinfo=tzinfo)
            aware = self._assign_tzname(aware, res.tzname)

        elif res.tzname and res.tzname in time.tzname:
            aware = naive.replace(tzinfo=tz.tzlocal())

            # Handle ambiguous local datetime
            aware = self._assign_tzname(aware, res.tzname)

            # This is mostly relevant for winter GMT zones parsed in the UK
            if (aware.tzname() != res.tzname and
                    res.tzname in self.info.UTCZONE):
                aware = aware.replace(tzinfo=tz.UTC)

        elif res.tzoffset == 0:
            aware = naive.replace(tzinfo=tz.UTC)

        elif res.tzoffset:
            aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset))

        elif not res.tzname and not res.tzoffset:
            # i.e. no timezone information was found.
            aware = naive

        elif res.tzname:
            # tz-like string was parsed but we don't know what to do
            # with it
            warnings.warn("tzname {tzname} identified but not understood. "
                          "Pass `tzinfos` argument in order to correctly "
                          "return a timezone-aware datetime. In a future "
                          "version, this will raise an "
                          "exception.".format(tzname=res.tzname),
                          category=UnknownTimezoneWarning)
            aware = naive

        return aware

    def _build_naive(self, res, default):
        """Merge the fields parsed into ``res`` onto the ``default`` datetime,
        clamping the day to the month's length and applying any weekday."""
        repl = {}
        for attr in ("year", "month", "day", "hour",
                     "minute", "second", "microsecond"):
            value = getattr(res, attr)
            if value is not None:
                repl[attr] = value

        if 'day' not in repl:
            # If the default day exceeds the last day of the month, fall back
            # to the end of the month.
            cyear = default.year if res.year is None else res.year
            cmonth = default.month if res.month is None else res.month
            cday = default.day if res.day is None else res.day

            if cday > monthrange(cyear, cmonth)[1]:
                repl['day'] = monthrange(cyear, cmonth)[1]

        naive = default.replace(**repl)

        if res.weekday is not None and not res.day:
            naive = naive + relativedelta.relativedelta(weekday=res.weekday)

        return naive

    def _assign_tzname(self, dt, tzname):
        """Prefer the fold of ``dt`` whose tzname matches the parsed name,
        handling ambiguous local times; returns ``dt`` unchanged otherwise."""
        if dt.tzname() != tzname:
            new_dt = tz.enfold(dt, fold=1)
            if new_dt.tzname() == tzname:
                return new_dt

        return dt

    def _recombine_skipped(self, tokens, skipped_idxs):
        """
        Join runs of adjacent skipped tokens back into strings.

        >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"]
        >>> skipped_idxs = [0, 1, 2, 5]
        >>> _recombine_skipped(tokens, skipped_idxs)
        ["foo bar", "baz"]
        """
        skipped_tokens = []
        for i, idx in enumerate(sorted(skipped_idxs)):
            if i > 0 and idx - 1 == skipped_idxs[i - 1]:
                # Contiguous with the previous skipped token: merge.
                skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx]
            else:
                skipped_tokens.append(tokens[idx])

        return skipped_tokens
|
| 1265 |
+
|
| 1266 |
+
|
| 1267 |
+
# Module-level parser instance with default parserinfo, shared by the
# convenience function parse() below.
DEFAULTPARSER = parser()
|
| 1268 |
+
|
| 1269 |
+
|
| 1270 |
+
def parse(timestr, parserinfo=None, **kwargs):
    """

    Parse a string in one of the supported formats, using the
    ``parserinfo`` parameters.

    :param timestr:
        A string containing a date/time stamp.

    :param parserinfo:
        A :class:`parserinfo` object containing parameters for the parser.
        If ``None``, the default arguments to the :class:`parserinfo`
        constructor are used.

    The ``**kwargs`` parameter takes the following keyword arguments:

    :param default:
        The default datetime object, if this is a datetime object and not
        ``None``, elements specified in ``timestr`` replace elements in the
        default object.

    :param ignoretz:
        If set ``True``, time zones in parsed strings are ignored and a naive
        :class:`datetime` object is returned.

    :param tzinfos:
        Additional time zone names / aliases which may be present in the
        string. This argument maps time zone names (and optionally offsets
        from those time zones) to time zones. This parameter can be a
        dictionary with timezone aliases mapping time zone names to time
        zones or a function taking two parameters (``tzname`` and
        ``tzoffset``) and returning a time zone.

        The timezones to which the names are mapped can be an integer
        offset from UTC in seconds or a :class:`tzinfo` object.

        .. doctest::
           :options: +NORMALIZE_WHITESPACE

            >>> from dateutil.parser import parse
            >>> from dateutil.tz import gettz
            >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")}
            >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos)
            datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200))
            >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos)
            datetime.datetime(2012, 1, 19, 17, 21,
                              tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago'))

        This parameter is ignored if ``ignoretz`` is set.

    :param dayfirst:
        Whether to interpret the first value in an ambiguous 3-integer date
        (e.g. 01/05/09) as the day (``True``) or month (``False``). If
        ``yearfirst`` is set to ``True``, this distinguishes between YDM and
        YMD. If set to ``None``, this value is retrieved from the current
        :class:`parserinfo` object (which itself defaults to ``False``).

    :param yearfirst:
        Whether to interpret the first value in an ambiguous 3-integer date
        (e.g. 01/05/09) as the year. If ``True``, the first number is taken to
        be the year, otherwise the last number is taken to be the year. If
        this is set to ``None``, the value is retrieved from the current
        :class:`parserinfo` object (which itself defaults to ``False``).

    :param fuzzy:
        Whether to allow fuzzy parsing, allowing for string like "Today is
        January 1, 2047 at 8:21:00AM".

    :param fuzzy_with_tokens:
        If ``True``, ``fuzzy`` is automatically set to True, and the parser
        will return a tuple where the first element is the parsed
        :class:`datetime.datetime` datetimestamp and the second element is
        a tuple containing the portions of the string which were ignored:

        .. doctest::

            >>> from dateutil.parser import parse
            >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True)
            (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at '))

    :return:
        Returns a :class:`datetime.datetime` object or, if the
        ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the
        first element being a :class:`datetime.datetime` object, the second
        a tuple containing the fuzzy tokens.

    :raises ParserError:
        Raised for invalid or unknown string formats, if the provided
        :class:`tzinfo` is not in a valid format, or if an invalid date would
        be created.

    :raises OverflowError:
        Raised if the parsed date exceeds the largest valid C integer on
        your system.
    """
    # Build a one-off parser when a custom parserinfo is supplied; otherwise
    # dispatch to the shared module-level default instance.
    chosen = parser(parserinfo) if parserinfo else DEFAULTPARSER
    return chosen.parse(timestr, **kwargs)
|
| 1369 |
+
|
| 1370 |
+
|
| 1371 |
+
class _tzparser(object):
|
| 1372 |
+
|
| 1373 |
+
class _result(_resultbase):
    """Parsed components of a POSIX-style TZ specification string:
    standard/DST abbreviations and offsets, plus the DST start/end rules."""

    __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset",
                 "start", "end"]

    class _attr(_resultbase):
        # One DST transition rule: month/week/weekday, year-day variants,
        # day-of-month, and time-of-day fields.
        __slots__ = ["month", "week", "weekday",
                     "yday", "jyday", "day", "time"]

    def __repr__(self):
        return self._repr("")

    def __init__(self):
        _resultbase.__init__(self)
        # start/end hold the DST transition rules, filled in during parse.
        self.start = self._attr()
        self.end = self._attr()
|
| 1389 |
+
|
| 1390 |
+
def parse(self, tzstr):
|
| 1391 |
+
res = self._result()
|
| 1392 |
+
l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x]
|
| 1393 |
+
used_idxs = list()
|
| 1394 |
+
try:
|
| 1395 |
+
|
| 1396 |
+
len_l = len(l)
|
| 1397 |
+
|
| 1398 |
+
i = 0
|
| 1399 |
+
while i < len_l:
|
| 1400 |
+
# BRST+3[BRDT[+2]]
|
| 1401 |
+
j = i
|
| 1402 |
+
while j < len_l and not [x for x in l[j]
|
| 1403 |
+
if x in "0123456789:,-+"]:
|
| 1404 |
+
j += 1
|
| 1405 |
+
if j != i:
|
| 1406 |
+
if not res.stdabbr:
|
| 1407 |
+
offattr = "stdoffset"
|
| 1408 |
+
res.stdabbr = "".join(l[i:j])
|
| 1409 |
+
else:
|
| 1410 |
+
offattr = "dstoffset"
|
| 1411 |
+
res.dstabbr = "".join(l[i:j])
|
| 1412 |
+
|
| 1413 |
+
for ii in range(j):
|
| 1414 |
+
used_idxs.append(ii)
|
| 1415 |
+
i = j
|
| 1416 |
+
if (i < len_l and (l[i] in ('+', '-') or l[i][0] in
|
| 1417 |
+
"0123456789")):
|
| 1418 |
+
if l[i] in ('+', '-'):
|
| 1419 |
+
# Yes, that's right. See the TZ variable
|
| 1420 |
+
# documentation.
|
| 1421 |
+
signal = (1, -1)[l[i] == '+']
|
| 1422 |
+
used_idxs.append(i)
|
| 1423 |
+
i += 1
|
| 1424 |
+
else:
|
| 1425 |
+
signal = -1
|
| 1426 |
+
len_li = len(l[i])
|
| 1427 |
+
if len_li == 4:
|
| 1428 |
+
# -0300
|
| 1429 |
+
setattr(res, offattr, (int(l[i][:2]) * 3600 +
|
| 1430 |
+
int(l[i][2:]) * 60) * signal)
|
| 1431 |
+
elif i + 1 < len_l and l[i + 1] == ':':
|
| 1432 |
+
# -03:00
|
| 1433 |
+
setattr(res, offattr,
|
| 1434 |
+
(int(l[i]) * 3600 +
|
| 1435 |
+
int(l[i + 2]) * 60) * signal)
|
| 1436 |
+
used_idxs.append(i)
|
| 1437 |
+
i += 2
|
| 1438 |
+
elif len_li <= 2:
|
| 1439 |
+
# -[0]3
|
| 1440 |
+
setattr(res, offattr,
|
| 1441 |
+
int(l[i][:2]) * 3600 * signal)
|
| 1442 |
+
else:
|
| 1443 |
+
return None
|
| 1444 |
+
used_idxs.append(i)
|
| 1445 |
+
i += 1
|
| 1446 |
+
if res.dstabbr:
|
| 1447 |
+
break
|
| 1448 |
+
else:
|
| 1449 |
+
break
|
| 1450 |
+
|
| 1451 |
+
|
| 1452 |
+
if i < len_l:
|
| 1453 |
+
for j in range(i, len_l):
|
| 1454 |
+
if l[j] == ';':
|
| 1455 |
+
l[j] = ','
|
| 1456 |
+
|
| 1457 |
+
assert l[i] == ','
|
| 1458 |
+
|
| 1459 |
+
i += 1
|
| 1460 |
+
|
| 1461 |
+
if i >= len_l:
|
| 1462 |
+
pass
|
| 1463 |
+
elif (8 <= l.count(',') <= 9 and
|
| 1464 |
+
not [y for x in l[i:] if x != ','
|
| 1465 |
+
for y in x if y not in "0123456789+-"]):
|
| 1466 |
+
# GMT0BST,3,0,30,3600,10,0,26,7200[,3600]
|
| 1467 |
+
for x in (res.start, res.end):
|
| 1468 |
+
x.month = int(l[i])
|
| 1469 |
+
used_idxs.append(i)
|
| 1470 |
+
i += 2
|
| 1471 |
+
if l[i] == '-':
|
| 1472 |
+
value = int(l[i + 1]) * -1
|
| 1473 |
+
used_idxs.append(i)
|
| 1474 |
+
i += 1
|
| 1475 |
+
else:
|
| 1476 |
+
value = int(l[i])
|
| 1477 |
+
used_idxs.append(i)
|
| 1478 |
+
i += 2
|
| 1479 |
+
if value:
|
| 1480 |
+
x.week = value
|
| 1481 |
+
x.weekday = (int(l[i]) - 1) % 7
|
| 1482 |
+
else:
|
| 1483 |
+
x.day = int(l[i])
|
| 1484 |
+
used_idxs.append(i)
|
| 1485 |
+
i += 2
|
| 1486 |
+
x.time = int(l[i])
|
| 1487 |
+
used_idxs.append(i)
|
| 1488 |
+
i += 2
|
| 1489 |
+
if i < len_l:
|
| 1490 |
+
if l[i] in ('-', '+'):
|
| 1491 |
+
signal = (-1, 1)[l[i] == "+"]
|
| 1492 |
+
used_idxs.append(i)
|
| 1493 |
+
i += 1
|
| 1494 |
+
else:
|
| 1495 |
+
signal = 1
|
| 1496 |
+
used_idxs.append(i)
|
| 1497 |
+
res.dstoffset = (res.stdoffset + int(l[i]) * signal)
|
| 1498 |
+
|
| 1499 |
+
# This was a made-up format that is not in normal use
|
| 1500 |
+
warn(('Parsed time zone "%s"' % tzstr) +
|
| 1501 |
+
'is in a non-standard dateutil-specific format, which ' +
|
| 1502 |
+
'is now deprecated; support for parsing this format ' +
|
| 1503 |
+
'will be removed in future versions. It is recommended ' +
|
| 1504 |
+
'that you switch to a standard format like the GNU ' +
|
| 1505 |
+
'TZ variable format.', tz.DeprecatedTzFormatWarning)
|
| 1506 |
+
elif (l.count(',') == 2 and l[i:].count('/') <= 2 and
|
| 1507 |
+
not [y for x in l[i:] if x not in (',', '/', 'J', 'M',
|
| 1508 |
+
'.', '-', ':')
|
| 1509 |
+
for y in x if y not in "0123456789"]):
|
| 1510 |
+
for x in (res.start, res.end):
|
| 1511 |
+
if l[i] == 'J':
|
| 1512 |
+
# non-leap year day (1 based)
|
| 1513 |
+
used_idxs.append(i)
|
| 1514 |
+
i += 1
|
| 1515 |
+
x.jyday = int(l[i])
|
| 1516 |
+
elif l[i] == 'M':
|
| 1517 |
+
# month[-.]week[-.]weekday
|
| 1518 |
+
used_idxs.append(i)
|
| 1519 |
+
i += 1
|
| 1520 |
+
x.month = int(l[i])
|
| 1521 |
+
used_idxs.append(i)
|
| 1522 |
+
i += 1
|
| 1523 |
+
assert l[i] in ('-', '.')
|
| 1524 |
+
used_idxs.append(i)
|
| 1525 |
+
i += 1
|
| 1526 |
+
x.week = int(l[i])
|
| 1527 |
+
if x.week == 5:
|
| 1528 |
+
x.week = -1
|
| 1529 |
+
used_idxs.append(i)
|
| 1530 |
+
i += 1
|
| 1531 |
+
assert l[i] in ('-', '.')
|
| 1532 |
+
used_idxs.append(i)
|
| 1533 |
+
i += 1
|
| 1534 |
+
x.weekday = (int(l[i]) - 1) % 7
|
| 1535 |
+
else:
|
| 1536 |
+
# year day (zero based)
|
| 1537 |
+
x.yday = int(l[i]) + 1
|
| 1538 |
+
|
| 1539 |
+
used_idxs.append(i)
|
| 1540 |
+
i += 1
|
| 1541 |
+
|
| 1542 |
+
if i < len_l and l[i] == '/':
|
| 1543 |
+
used_idxs.append(i)
|
| 1544 |
+
i += 1
|
| 1545 |
+
# start time
|
| 1546 |
+
len_li = len(l[i])
|
| 1547 |
+
if len_li == 4:
|
| 1548 |
+
# -0300
|
| 1549 |
+
x.time = (int(l[i][:2]) * 3600 +
|
| 1550 |
+
int(l[i][2:]) * 60)
|
| 1551 |
+
elif i + 1 < len_l and l[i + 1] == ':':
|
| 1552 |
+
# -03:00
|
| 1553 |
+
x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60
|
| 1554 |
+
used_idxs.append(i)
|
| 1555 |
+
i += 2
|
| 1556 |
+
if i + 1 < len_l and l[i + 1] == ':':
|
| 1557 |
+
used_idxs.append(i)
|
| 1558 |
+
i += 2
|
| 1559 |
+
x.time += int(l[i])
|
| 1560 |
+
elif len_li <= 2:
|
| 1561 |
+
# -[0]3
|
| 1562 |
+
x.time = (int(l[i][:2]) * 3600)
|
| 1563 |
+
else:
|
| 1564 |
+
return None
|
| 1565 |
+
used_idxs.append(i)
|
| 1566 |
+
i += 1
|
| 1567 |
+
|
| 1568 |
+
assert i == len_l or l[i] == ','
|
| 1569 |
+
|
| 1570 |
+
i += 1
|
| 1571 |
+
|
| 1572 |
+
assert i >= len_l
|
| 1573 |
+
|
| 1574 |
+
except (IndexError, ValueError, AssertionError):
|
| 1575 |
+
return None
|
| 1576 |
+
|
| 1577 |
+
unused_idxs = set(range(len_l)).difference(used_idxs)
|
| 1578 |
+
res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"})
|
| 1579 |
+
return res
|
| 1580 |
+
|
| 1581 |
+
|
| 1582 |
+
DEFAULTTZPARSER = _tzparser()
|
| 1583 |
+
|
| 1584 |
+
|
| 1585 |
+
def _parsetz(tzstr):
|
| 1586 |
+
return DEFAULTTZPARSER.parse(tzstr)
|
| 1587 |
+
|
| 1588 |
+
|
| 1589 |
+
class ParserError(ValueError):
|
| 1590 |
+
"""Exception subclass used for any failure to parse a datetime string.
|
| 1591 |
+
|
| 1592 |
+
This is a subclass of :py:exc:`ValueError`, and should be raised any time
|
| 1593 |
+
earlier versions of ``dateutil`` would have raised ``ValueError``.
|
| 1594 |
+
|
| 1595 |
+
.. versionadded:: 2.8.1
|
| 1596 |
+
"""
|
| 1597 |
+
def __str__(self):
|
| 1598 |
+
try:
|
| 1599 |
+
return self.args[0] % self.args[1:]
|
| 1600 |
+
except (TypeError, IndexError):
|
| 1601 |
+
return super(ParserError, self).__str__()
|
| 1602 |
+
|
| 1603 |
+
def __repr__(self):
|
| 1604 |
+
args = ", ".join("'%s'" % arg for arg in self.args)
|
| 1605 |
+
return "%s(%s)" % (self.__class__.__name__, args)
|
| 1606 |
+
|
| 1607 |
+
|
| 1608 |
+
class UnknownTimezoneWarning(RuntimeWarning):
|
| 1609 |
+
"""Raised when the parser finds a timezone it cannot parse into a tzinfo.
|
| 1610 |
+
|
| 1611 |
+
.. versionadded:: 2.7.0
|
| 1612 |
+
"""
|
| 1613 |
+
# vim:ts=4:sw=4:et
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/parser/isoparser.py
ADDED
|
@@ -0,0 +1,416 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
This module offers a parser for ISO-8601 strings
|
| 4 |
+
|
| 5 |
+
It is intended to support all valid date, time and datetime formats per the
|
| 6 |
+
ISO-8601 specification.
|
| 7 |
+
|
| 8 |
+
..versionadded:: 2.7.0
|
| 9 |
+
"""
|
| 10 |
+
from datetime import datetime, timedelta, time, date
|
| 11 |
+
import calendar
|
| 12 |
+
from dateutil import tz
|
| 13 |
+
|
| 14 |
+
from functools import wraps
|
| 15 |
+
|
| 16 |
+
import re
|
| 17 |
+
import six
|
| 18 |
+
|
| 19 |
+
__all__ = ["isoparse", "isoparser"]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _takes_ascii(f):
|
| 23 |
+
@wraps(f)
|
| 24 |
+
def func(self, str_in, *args, **kwargs):
|
| 25 |
+
# If it's a stream, read the whole thing
|
| 26 |
+
str_in = getattr(str_in, 'read', lambda: str_in)()
|
| 27 |
+
|
| 28 |
+
# If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII
|
| 29 |
+
if isinstance(str_in, six.text_type):
|
| 30 |
+
# ASCII is the same in UTF-8
|
| 31 |
+
try:
|
| 32 |
+
str_in = str_in.encode('ascii')
|
| 33 |
+
except UnicodeEncodeError as e:
|
| 34 |
+
msg = 'ISO-8601 strings should contain only ASCII characters'
|
| 35 |
+
six.raise_from(ValueError(msg), e)
|
| 36 |
+
|
| 37 |
+
return f(self, str_in, *args, **kwargs)
|
| 38 |
+
|
| 39 |
+
return func
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class isoparser(object):
|
| 43 |
+
def __init__(self, sep=None):
|
| 44 |
+
"""
|
| 45 |
+
:param sep:
|
| 46 |
+
A single character that separates date and time portions. If
|
| 47 |
+
``None``, the parser will accept any single character.
|
| 48 |
+
For strict ISO-8601 adherence, pass ``'T'``.
|
| 49 |
+
"""
|
| 50 |
+
if sep is not None:
|
| 51 |
+
if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'):
|
| 52 |
+
raise ValueError('Separator must be a single, non-numeric ' +
|
| 53 |
+
'ASCII character')
|
| 54 |
+
|
| 55 |
+
sep = sep.encode('ascii')
|
| 56 |
+
|
| 57 |
+
self._sep = sep
|
| 58 |
+
|
| 59 |
+
@_takes_ascii
|
| 60 |
+
def isoparse(self, dt_str):
|
| 61 |
+
"""
|
| 62 |
+
Parse an ISO-8601 datetime string into a :class:`datetime.datetime`.
|
| 63 |
+
|
| 64 |
+
An ISO-8601 datetime string consists of a date portion, followed
|
| 65 |
+
optionally by a time portion - the date and time portions are separated
|
| 66 |
+
by a single character separator, which is ``T`` in the official
|
| 67 |
+
standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be
|
| 68 |
+
combined with a time portion.
|
| 69 |
+
|
| 70 |
+
Supported date formats are:
|
| 71 |
+
|
| 72 |
+
Common:
|
| 73 |
+
|
| 74 |
+
- ``YYYY``
|
| 75 |
+
- ``YYYY-MM``
|
| 76 |
+
- ``YYYY-MM-DD`` or ``YYYYMMDD``
|
| 77 |
+
|
| 78 |
+
Uncommon:
|
| 79 |
+
|
| 80 |
+
- ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0)
|
| 81 |
+
- ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day
|
| 82 |
+
|
| 83 |
+
The ISO week and day numbering follows the same logic as
|
| 84 |
+
:func:`datetime.date.isocalendar`.
|
| 85 |
+
|
| 86 |
+
Supported time formats are:
|
| 87 |
+
|
| 88 |
+
- ``hh``
|
| 89 |
+
- ``hh:mm`` or ``hhmm``
|
| 90 |
+
- ``hh:mm:ss`` or ``hhmmss``
|
| 91 |
+
- ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits)
|
| 92 |
+
|
| 93 |
+
Midnight is a special case for `hh`, as the standard supports both
|
| 94 |
+
00:00 and 24:00 as a representation. The decimal separator can be
|
| 95 |
+
either a dot or a comma.
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
.. caution::
|
| 99 |
+
|
| 100 |
+
Support for fractional components other than seconds is part of the
|
| 101 |
+
ISO-8601 standard, but is not currently implemented in this parser.
|
| 102 |
+
|
| 103 |
+
Supported time zone offset formats are:
|
| 104 |
+
|
| 105 |
+
- `Z` (UTC)
|
| 106 |
+
- `±HH:MM`
|
| 107 |
+
- `±HHMM`
|
| 108 |
+
- `±HH`
|
| 109 |
+
|
| 110 |
+
Offsets will be represented as :class:`dateutil.tz.tzoffset` objects,
|
| 111 |
+
with the exception of UTC, which will be represented as
|
| 112 |
+
:class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such
|
| 113 |
+
as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`.
|
| 114 |
+
|
| 115 |
+
:param dt_str:
|
| 116 |
+
A string or stream containing only an ISO-8601 datetime string
|
| 117 |
+
|
| 118 |
+
:return:
|
| 119 |
+
Returns a :class:`datetime.datetime` representing the string.
|
| 120 |
+
Unspecified components default to their lowest value.
|
| 121 |
+
|
| 122 |
+
.. warning::
|
| 123 |
+
|
| 124 |
+
As of version 2.7.0, the strictness of the parser should not be
|
| 125 |
+
considered a stable part of the contract. Any valid ISO-8601 string
|
| 126 |
+
that parses correctly with the default settings will continue to
|
| 127 |
+
parse correctly in future versions, but invalid strings that
|
| 128 |
+
currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not
|
| 129 |
+
guaranteed to continue failing in future versions if they encode
|
| 130 |
+
a valid date.
|
| 131 |
+
|
| 132 |
+
.. versionadded:: 2.7.0
|
| 133 |
+
"""
|
| 134 |
+
components, pos = self._parse_isodate(dt_str)
|
| 135 |
+
|
| 136 |
+
if len(dt_str) > pos:
|
| 137 |
+
if self._sep is None or dt_str[pos:pos + 1] == self._sep:
|
| 138 |
+
components += self._parse_isotime(dt_str[pos + 1:])
|
| 139 |
+
else:
|
| 140 |
+
raise ValueError('String contains unknown ISO components')
|
| 141 |
+
|
| 142 |
+
if len(components) > 3 and components[3] == 24:
|
| 143 |
+
components[3] = 0
|
| 144 |
+
return datetime(*components) + timedelta(days=1)
|
| 145 |
+
|
| 146 |
+
return datetime(*components)
|
| 147 |
+
|
| 148 |
+
@_takes_ascii
|
| 149 |
+
def parse_isodate(self, datestr):
|
| 150 |
+
"""
|
| 151 |
+
Parse the date portion of an ISO string.
|
| 152 |
+
|
| 153 |
+
:param datestr:
|
| 154 |
+
The string portion of an ISO string, without a separator
|
| 155 |
+
|
| 156 |
+
:return:
|
| 157 |
+
Returns a :class:`datetime.date` object
|
| 158 |
+
"""
|
| 159 |
+
components, pos = self._parse_isodate(datestr)
|
| 160 |
+
if pos < len(datestr):
|
| 161 |
+
raise ValueError('String contains unknown ISO ' +
|
| 162 |
+
'components: {!r}'.format(datestr.decode('ascii')))
|
| 163 |
+
return date(*components)
|
| 164 |
+
|
| 165 |
+
@_takes_ascii
|
| 166 |
+
def parse_isotime(self, timestr):
|
| 167 |
+
"""
|
| 168 |
+
Parse the time portion of an ISO string.
|
| 169 |
+
|
| 170 |
+
:param timestr:
|
| 171 |
+
The time portion of an ISO string, without a separator
|
| 172 |
+
|
| 173 |
+
:return:
|
| 174 |
+
Returns a :class:`datetime.time` object
|
| 175 |
+
"""
|
| 176 |
+
components = self._parse_isotime(timestr)
|
| 177 |
+
if components[0] == 24:
|
| 178 |
+
components[0] = 0
|
| 179 |
+
return time(*components)
|
| 180 |
+
|
| 181 |
+
@_takes_ascii
|
| 182 |
+
def parse_tzstr(self, tzstr, zero_as_utc=True):
|
| 183 |
+
"""
|
| 184 |
+
Parse a valid ISO time zone string.
|
| 185 |
+
|
| 186 |
+
See :func:`isoparser.isoparse` for details on supported formats.
|
| 187 |
+
|
| 188 |
+
:param tzstr:
|
| 189 |
+
A string representing an ISO time zone offset
|
| 190 |
+
|
| 191 |
+
:param zero_as_utc:
|
| 192 |
+
Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones
|
| 193 |
+
|
| 194 |
+
:return:
|
| 195 |
+
Returns :class:`dateutil.tz.tzoffset` for offsets and
|
| 196 |
+
:class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is
|
| 197 |
+
specified) offsets equivalent to UTC.
|
| 198 |
+
"""
|
| 199 |
+
return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc)
|
| 200 |
+
|
| 201 |
+
# Constants
|
| 202 |
+
_DATE_SEP = b'-'
|
| 203 |
+
_TIME_SEP = b':'
|
| 204 |
+
_FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)')
|
| 205 |
+
|
| 206 |
+
def _parse_isodate(self, dt_str):
|
| 207 |
+
try:
|
| 208 |
+
return self._parse_isodate_common(dt_str)
|
| 209 |
+
except ValueError:
|
| 210 |
+
return self._parse_isodate_uncommon(dt_str)
|
| 211 |
+
|
| 212 |
+
def _parse_isodate_common(self, dt_str):
|
| 213 |
+
len_str = len(dt_str)
|
| 214 |
+
components = [1, 1, 1]
|
| 215 |
+
|
| 216 |
+
if len_str < 4:
|
| 217 |
+
raise ValueError('ISO string too short')
|
| 218 |
+
|
| 219 |
+
# Year
|
| 220 |
+
components[0] = int(dt_str[0:4])
|
| 221 |
+
pos = 4
|
| 222 |
+
if pos >= len_str:
|
| 223 |
+
return components, pos
|
| 224 |
+
|
| 225 |
+
has_sep = dt_str[pos:pos + 1] == self._DATE_SEP
|
| 226 |
+
if has_sep:
|
| 227 |
+
pos += 1
|
| 228 |
+
|
| 229 |
+
# Month
|
| 230 |
+
if len_str - pos < 2:
|
| 231 |
+
raise ValueError('Invalid common month')
|
| 232 |
+
|
| 233 |
+
components[1] = int(dt_str[pos:pos + 2])
|
| 234 |
+
pos += 2
|
| 235 |
+
|
| 236 |
+
if pos >= len_str:
|
| 237 |
+
if has_sep:
|
| 238 |
+
return components, pos
|
| 239 |
+
else:
|
| 240 |
+
raise ValueError('Invalid ISO format')
|
| 241 |
+
|
| 242 |
+
if has_sep:
|
| 243 |
+
if dt_str[pos:pos + 1] != self._DATE_SEP:
|
| 244 |
+
raise ValueError('Invalid separator in ISO string')
|
| 245 |
+
pos += 1
|
| 246 |
+
|
| 247 |
+
# Day
|
| 248 |
+
if len_str - pos < 2:
|
| 249 |
+
raise ValueError('Invalid common day')
|
| 250 |
+
components[2] = int(dt_str[pos:pos + 2])
|
| 251 |
+
return components, pos + 2
|
| 252 |
+
|
| 253 |
+
def _parse_isodate_uncommon(self, dt_str):
|
| 254 |
+
if len(dt_str) < 4:
|
| 255 |
+
raise ValueError('ISO string too short')
|
| 256 |
+
|
| 257 |
+
# All ISO formats start with the year
|
| 258 |
+
year = int(dt_str[0:4])
|
| 259 |
+
|
| 260 |
+
has_sep = dt_str[4:5] == self._DATE_SEP
|
| 261 |
+
|
| 262 |
+
pos = 4 + has_sep # Skip '-' if it's there
|
| 263 |
+
if dt_str[pos:pos + 1] == b'W':
|
| 264 |
+
# YYYY-?Www-?D?
|
| 265 |
+
pos += 1
|
| 266 |
+
weekno = int(dt_str[pos:pos + 2])
|
| 267 |
+
pos += 2
|
| 268 |
+
|
| 269 |
+
dayno = 1
|
| 270 |
+
if len(dt_str) > pos:
|
| 271 |
+
if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep:
|
| 272 |
+
raise ValueError('Inconsistent use of dash separator')
|
| 273 |
+
|
| 274 |
+
pos += has_sep
|
| 275 |
+
|
| 276 |
+
dayno = int(dt_str[pos:pos + 1])
|
| 277 |
+
pos += 1
|
| 278 |
+
|
| 279 |
+
base_date = self._calculate_weekdate(year, weekno, dayno)
|
| 280 |
+
else:
|
| 281 |
+
# YYYYDDD or YYYY-DDD
|
| 282 |
+
if len(dt_str) - pos < 3:
|
| 283 |
+
raise ValueError('Invalid ordinal day')
|
| 284 |
+
|
| 285 |
+
ordinal_day = int(dt_str[pos:pos + 3])
|
| 286 |
+
pos += 3
|
| 287 |
+
|
| 288 |
+
if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)):
|
| 289 |
+
raise ValueError('Invalid ordinal day' +
|
| 290 |
+
' {} for year {}'.format(ordinal_day, year))
|
| 291 |
+
|
| 292 |
+
base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1)
|
| 293 |
+
|
| 294 |
+
components = [base_date.year, base_date.month, base_date.day]
|
| 295 |
+
return components, pos
|
| 296 |
+
|
| 297 |
+
def _calculate_weekdate(self, year, week, day):
|
| 298 |
+
"""
|
| 299 |
+
Calculate the day of corresponding to the ISO year-week-day calendar.
|
| 300 |
+
|
| 301 |
+
This function is effectively the inverse of
|
| 302 |
+
:func:`datetime.date.isocalendar`.
|
| 303 |
+
|
| 304 |
+
:param year:
|
| 305 |
+
The year in the ISO calendar
|
| 306 |
+
|
| 307 |
+
:param week:
|
| 308 |
+
The week in the ISO calendar - range is [1, 53]
|
| 309 |
+
|
| 310 |
+
:param day:
|
| 311 |
+
The day in the ISO calendar - range is [1 (MON), 7 (SUN)]
|
| 312 |
+
|
| 313 |
+
:return:
|
| 314 |
+
Returns a :class:`datetime.date`
|
| 315 |
+
"""
|
| 316 |
+
if not 0 < week < 54:
|
| 317 |
+
raise ValueError('Invalid week: {}'.format(week))
|
| 318 |
+
|
| 319 |
+
if not 0 < day < 8: # Range is 1-7
|
| 320 |
+
raise ValueError('Invalid weekday: {}'.format(day))
|
| 321 |
+
|
| 322 |
+
# Get week 1 for the specific year:
|
| 323 |
+
jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it
|
| 324 |
+
week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1)
|
| 325 |
+
|
| 326 |
+
# Now add the specific number of weeks and days to get what we want
|
| 327 |
+
week_offset = (week - 1) * 7 + (day - 1)
|
| 328 |
+
return week_1 + timedelta(days=week_offset)
|
| 329 |
+
|
| 330 |
+
def _parse_isotime(self, timestr):
|
| 331 |
+
len_str = len(timestr)
|
| 332 |
+
components = [0, 0, 0, 0, None]
|
| 333 |
+
pos = 0
|
| 334 |
+
comp = -1
|
| 335 |
+
|
| 336 |
+
if len_str < 2:
|
| 337 |
+
raise ValueError('ISO time too short')
|
| 338 |
+
|
| 339 |
+
has_sep = False
|
| 340 |
+
|
| 341 |
+
while pos < len_str and comp < 5:
|
| 342 |
+
comp += 1
|
| 343 |
+
|
| 344 |
+
if timestr[pos:pos + 1] in b'-+Zz':
|
| 345 |
+
# Detect time zone boundary
|
| 346 |
+
components[-1] = self._parse_tzstr(timestr[pos:])
|
| 347 |
+
pos = len_str
|
| 348 |
+
break
|
| 349 |
+
|
| 350 |
+
if comp == 1 and timestr[pos:pos+1] == self._TIME_SEP:
|
| 351 |
+
has_sep = True
|
| 352 |
+
pos += 1
|
| 353 |
+
elif comp == 2 and has_sep:
|
| 354 |
+
if timestr[pos:pos+1] != self._TIME_SEP:
|
| 355 |
+
raise ValueError('Inconsistent use of colon separator')
|
| 356 |
+
pos += 1
|
| 357 |
+
|
| 358 |
+
if comp < 3:
|
| 359 |
+
# Hour, minute, second
|
| 360 |
+
components[comp] = int(timestr[pos:pos + 2])
|
| 361 |
+
pos += 2
|
| 362 |
+
|
| 363 |
+
if comp == 3:
|
| 364 |
+
# Fraction of a second
|
| 365 |
+
frac = self._FRACTION_REGEX.match(timestr[pos:])
|
| 366 |
+
if not frac:
|
| 367 |
+
continue
|
| 368 |
+
|
| 369 |
+
us_str = frac.group(1)[:6] # Truncate to microseconds
|
| 370 |
+
components[comp] = int(us_str) * 10**(6 - len(us_str))
|
| 371 |
+
pos += len(frac.group())
|
| 372 |
+
|
| 373 |
+
if pos < len_str:
|
| 374 |
+
raise ValueError('Unused components in ISO string')
|
| 375 |
+
|
| 376 |
+
if components[0] == 24:
|
| 377 |
+
# Standard supports 00:00 and 24:00 as representations of midnight
|
| 378 |
+
if any(component != 0 for component in components[1:4]):
|
| 379 |
+
raise ValueError('Hour may only be 24 at 24:00:00.000')
|
| 380 |
+
|
| 381 |
+
return components
|
| 382 |
+
|
| 383 |
+
def _parse_tzstr(self, tzstr, zero_as_utc=True):
|
| 384 |
+
if tzstr == b'Z' or tzstr == b'z':
|
| 385 |
+
return tz.UTC
|
| 386 |
+
|
| 387 |
+
if len(tzstr) not in {3, 5, 6}:
|
| 388 |
+
raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters')
|
| 389 |
+
|
| 390 |
+
if tzstr[0:1] == b'-':
|
| 391 |
+
mult = -1
|
| 392 |
+
elif tzstr[0:1] == b'+':
|
| 393 |
+
mult = 1
|
| 394 |
+
else:
|
| 395 |
+
raise ValueError('Time zone offset requires sign')
|
| 396 |
+
|
| 397 |
+
hours = int(tzstr[1:3])
|
| 398 |
+
if len(tzstr) == 3:
|
| 399 |
+
minutes = 0
|
| 400 |
+
else:
|
| 401 |
+
minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):])
|
| 402 |
+
|
| 403 |
+
if zero_as_utc and hours == 0 and minutes == 0:
|
| 404 |
+
return tz.UTC
|
| 405 |
+
else:
|
| 406 |
+
if minutes > 59:
|
| 407 |
+
raise ValueError('Invalid minutes in time zone offset')
|
| 408 |
+
|
| 409 |
+
if hours > 23:
|
| 410 |
+
raise ValueError('Invalid hours in time zone offset')
|
| 411 |
+
|
| 412 |
+
return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60)
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
DEFAULT_ISOPARSER = isoparser()
|
| 416 |
+
isoparse = DEFAULT_ISOPARSER.isoparse
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (663 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/_common.cpython-310.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/_factories.cpython-310.pyc
ADDED
|
Binary file (2.93 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/tz.cpython-310.pyc
ADDED
|
Binary file (44.9 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__pycache__/win.cpython-310.pyc
ADDED
|
Binary file (11.4 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/_common.py
ADDED
|
@@ -0,0 +1,419 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from six import PY2
|
| 2 |
+
|
| 3 |
+
from functools import wraps
|
| 4 |
+
|
| 5 |
+
from datetime import datetime, timedelta, tzinfo
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
ZERO = timedelta(0)
|
| 9 |
+
|
| 10 |
+
__all__ = ['tzname_in_python2', 'enfold']
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def tzname_in_python2(namefunc):
|
| 14 |
+
"""Change unicode output into bytestrings in Python 2
|
| 15 |
+
|
| 16 |
+
tzname() API changed in Python 3. It used to return bytes, but was changed
|
| 17 |
+
to unicode strings
|
| 18 |
+
"""
|
| 19 |
+
if PY2:
|
| 20 |
+
@wraps(namefunc)
|
| 21 |
+
def adjust_encoding(*args, **kwargs):
|
| 22 |
+
name = namefunc(*args, **kwargs)
|
| 23 |
+
if name is not None:
|
| 24 |
+
name = name.encode()
|
| 25 |
+
|
| 26 |
+
return name
|
| 27 |
+
|
| 28 |
+
return adjust_encoding
|
| 29 |
+
else:
|
| 30 |
+
return namefunc
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# The following is adapted from Alexander Belopolsky's tz library
|
| 34 |
+
# https://github.com/abalkin/tz
|
| 35 |
+
if hasattr(datetime, 'fold'):
|
| 36 |
+
# This is the pre-python 3.6 fold situation
|
| 37 |
+
def enfold(dt, fold=1):
|
| 38 |
+
"""
|
| 39 |
+
Provides a unified interface for assigning the ``fold`` attribute to
|
| 40 |
+
datetimes both before and after the implementation of PEP-495.
|
| 41 |
+
|
| 42 |
+
:param fold:
|
| 43 |
+
The value for the ``fold`` attribute in the returned datetime. This
|
| 44 |
+
should be either 0 or 1.
|
| 45 |
+
|
| 46 |
+
:return:
|
| 47 |
+
Returns an object for which ``getattr(dt, 'fold', 0)`` returns
|
| 48 |
+
``fold`` for all versions of Python. In versions prior to
|
| 49 |
+
Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
|
| 50 |
+
subclass of :py:class:`datetime.datetime` with the ``fold``
|
| 51 |
+
attribute added, if ``fold`` is 1.
|
| 52 |
+
|
| 53 |
+
.. versionadded:: 2.6.0
|
| 54 |
+
"""
|
| 55 |
+
return dt.replace(fold=fold)
|
| 56 |
+
|
| 57 |
+
else:
|
| 58 |
+
class _DatetimeWithFold(datetime):
|
| 59 |
+
"""
|
| 60 |
+
This is a class designed to provide a PEP 495-compliant interface for
|
| 61 |
+
Python versions before 3.6. It is used only for dates in a fold, so
|
| 62 |
+
the ``fold`` attribute is fixed at ``1``.
|
| 63 |
+
|
| 64 |
+
.. versionadded:: 2.6.0
|
| 65 |
+
"""
|
| 66 |
+
__slots__ = ()
|
| 67 |
+
|
| 68 |
+
def replace(self, *args, **kwargs):
|
| 69 |
+
"""
|
| 70 |
+
Return a datetime with the same attributes, except for those
|
| 71 |
+
attributes given new values by whichever keyword arguments are
|
| 72 |
+
specified. Note that tzinfo=None can be specified to create a naive
|
| 73 |
+
datetime from an aware datetime with no conversion of date and time
|
| 74 |
+
data.
|
| 75 |
+
|
| 76 |
+
This is reimplemented in ``_DatetimeWithFold`` because pypy3 will
|
| 77 |
+
return a ``datetime.datetime`` even if ``fold`` is unchanged.
|
| 78 |
+
"""
|
| 79 |
+
argnames = (
|
| 80 |
+
'year', 'month', 'day', 'hour', 'minute', 'second',
|
| 81 |
+
'microsecond', 'tzinfo'
|
| 82 |
+
)
|
| 83 |
+
|
| 84 |
+
for arg, argname in zip(args, argnames):
|
| 85 |
+
if argname in kwargs:
|
| 86 |
+
raise TypeError('Duplicate argument: {}'.format(argname))
|
| 87 |
+
|
| 88 |
+
kwargs[argname] = arg
|
| 89 |
+
|
| 90 |
+
for argname in argnames:
|
| 91 |
+
if argname not in kwargs:
|
| 92 |
+
kwargs[argname] = getattr(self, argname)
|
| 93 |
+
|
| 94 |
+
dt_class = self.__class__ if kwargs.get('fold', 1) else datetime
|
| 95 |
+
|
| 96 |
+
return dt_class(**kwargs)
|
| 97 |
+
|
| 98 |
+
@property
|
| 99 |
+
def fold(self):
|
| 100 |
+
return 1
|
| 101 |
+
|
| 102 |
+
def enfold(dt, fold=1):
    """
    Uniform way of attaching a ``fold`` value to a datetime on interpreters
    that predate PEP 495.

    :param fold:
        The value the returned datetime should report for ``fold``; this
        should be either 0 or 1.

    :return:
        An object for which ``getattr(dt, 'fold', 0)`` yields ``fold`` on
        every supported Python.  Before Python 3.6 this is a
        ``_DatetimeWithFold`` object -- a subclass of
        :py:class:`datetime.datetime` carrying a ``fold`` attribute --
        whenever ``fold`` is 1.

    .. versionadded:: 2.6.0
    """
    # Nothing to do when the datetime already reports the requested fold.
    if getattr(dt, 'fold', 0) == fold:
        return dt

    components = dt.timetuple()[:6] + (dt.microsecond, dt.tzinfo)

    if fold:
        return _DatetimeWithFold(*components)
    return datetime(*components)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def _validate_fromutc_inputs(f):
|
| 133 |
+
"""
|
| 134 |
+
The CPython version of ``fromutc`` checks that the input is a ``datetime``
|
| 135 |
+
object and that ``self`` is attached as its ``tzinfo``.
|
| 136 |
+
"""
|
| 137 |
+
@wraps(f)
|
| 138 |
+
def fromutc(self, dt):
|
| 139 |
+
if not isinstance(dt, datetime):
|
| 140 |
+
raise TypeError("fromutc() requires a datetime argument")
|
| 141 |
+
if dt.tzinfo is not self:
|
| 142 |
+
raise ValueError("dt.tzinfo is not self")
|
| 143 |
+
|
| 144 |
+
return f(self, dt)
|
| 145 |
+
|
| 146 |
+
return fromutc
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
class _tzinfo(tzinfo):
    """
    Base class shared by all ``dateutil`` ``tzinfo`` implementations.
    """

    def is_ambiguous(self, dt):
        """
        Report whether the "wall time" of a given datetime is ambiguous in
        this zone.

        :param dt:
            A :py:class:`datetime.datetime`, naive or time zone aware.

        :return:
            Returns ``True`` if ambiguous, ``False`` otherwise.

        .. versionadded:: 2.6.0
        """
        dt = dt.replace(tzinfo=self)

        first = enfold(dt, fold=0)
        second = enfold(dt, fold=1)

        # Ambiguity: identical wall-clock reading, different UTC offsets.
        offsets_differ = first.utcoffset() != second.utcoffset()
        walls_match = (first.replace(tzinfo=None) ==
                       second.replace(tzinfo=None))

        return walls_match and offsets_differ

    def _fold_status(self, dt_utc, dt_wall):
        """
        Compute the fold status of a "wall" datetime given a (naive) UTC
        representation of the same instant.  Relies on the assumption that
        ``dt.utcoffset() - dt.dst()`` is constant for all datetimes and
        equals the actual separation between ``dt_utc`` and ``dt_wall``.

        :param dt_utc:
            Representation of the datetime as UTC.

        :param dt_wall:
            Representation of the datetime as "wall time"; it must either
            have a ``fold`` attribute or a fold-naive
            :class:`datetime.tzinfo` attached, otherwise the calculation
            may fail.
        """
        if not self.is_ambiguous(dt_wall):
            return 0

        separation = dt_wall - dt_utc
        return int(separation == (dt_utc.utcoffset() - dt_utc.dst()))

    def _fold(self, dt):
        # Datetimes without the attribute (pre-3.6) are treated as fold 0.
        return getattr(dt, 'fold', 0)

    def _fromutc(self, dt):
        """
        Convert a timezone-aware datetime in this zone's UTC representation
        into wall time, mirroring the algorithm from CPython's datetime.py.

        :param dt:
            A timezone-aware :class:`datetime.datetime` object.
        """
        offset = dt.utcoffset()
        if offset is None:
            raise ValueError("fromutc() requires a non-None utcoffset() "
                             "result")

        saved = dt.dst()
        if saved is None:
            raise ValueError("fromutc() requires a non-None dst() result")

        # Shift to standard time first.  datetime.py assumes dst() defaults
        # to zero during ambiguous times; PEP 495 inverts that presumption,
        # hence the fold=1 probe below for pre-495 interpreters.
        dt += offset - saved

        saved = enfold(dt, fold=1).dst()
        if saved is None:
            raise ValueError("fromutc(): dt.dst gave inconsistent "
                             "results; cannot convert")
        return dt + saved

    @_validate_fromutc_inputs
    def fromutc(self, dt):
        """
        Convert a timezone-aware datetime from UTC into this time zone.

        Since this is the one point where the datetime is known
        unambiguously, it also decides whether the result is ambiguous and
        sits in a "fold" (i.e. is the chronologically first occurrence of
        an ambiguous datetime).

        :param dt:
            A timezone-aware :class:`datetime.datetime` object.
        """
        wall = self._fromutc(dt)

        # Set the default fold value for ambiguous dates.
        return enfold(wall, fold=self._fold_status(dt, wall))
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
class tzrangebase(_tzinfo):
    """
    Abstract base class for time zones defined by a single annual
    transition into DST and one out of it.  Child classes should implement:

        * ``__init__(self, *args, **kwargs)``
        * ``transitions(self, year)`` -- expected to return a tuple of
          datetimes representing the DST-on and DST-off transitions in
          standard time.

    A fully initialized ``tzrangebase`` subclass should also provide:

        * ``hasdst``: Boolean, whether or not the zone uses DST.
        * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta`
          objects giving the respective UTC offsets.
        * ``_dst_abbr`` / ``_std_abbr``: short timezone abbreviations for
          DST and standard time, respectively.
        * ``_hasdst``: Whether or not the zone has DST.

    .. versionadded:: 2.6.0
    """
    def __init__(self):
        raise NotImplementedError('tzrangebase is an abstract base class')

    def utcoffset(self, dt):
        active = self._isdst(dt)

        if active is None:
            return None
        return self._dst_offset if active else self._std_offset

    def dst(self, dt):
        active = self._isdst(dt)

        if active is None:
            return None
        return self._dst_base_offset if active else ZERO

    @tzname_in_python2
    def tzname(self, dt):
        return self._dst_abbr if self._isdst(dt) else self._std_abbr

    def fromutc(self, dt):
        """ Given a datetime in UTC, return local time """
        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")

        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")

        # A zone with no transitions behaves as a fixed offset.
        transitions = self.transitions(dt.year)
        if transitions is None:
            return dt + self.utcoffset(dt)

        # Express the transition instants in UTC.
        dston, dstoff = transitions
        utc_transitions = (dston - self._std_offset,
                           dstoff - self._std_offset)

        naive_utc = dt.replace(tzinfo=None)
        active = self._naive_isdst(naive_utc, utc_transitions)

        dt_wall = dt + (self._dst_offset if active else self._std_offset)

        _fold = int(not active and self.is_ambiguous(dt_wall))

        return enfold(dt_wall, fold=_fold)

    def is_ambiguous(self, dt):
        """
        Report whether the "wall time" of a given datetime is ambiguous in
        this zone.

        :param dt:
            A :py:class:`datetime.datetime`, naive or time zone aware.

        :return:
            Returns ``True`` if ambiguous, ``False`` otherwise.

        .. versionadded:: 2.6.0
        """
        if not self.hasdst:
            return False

        start, end = self.transitions(dt.year)

        naive = dt.replace(tzinfo=None)
        # Only the wall times replayed just after the DST-off transition
        # are ambiguous.
        return end <= naive < end + self._dst_base_offset

    def _isdst(self, dt):
        if not self.hasdst:
            return False
        elif dt is None:
            return None

        transitions = self.transitions(dt.year)

        if transitions is None:
            return False

        naive = dt.replace(tzinfo=None)

        active = self._naive_isdst(naive, transitions)

        # In the ambiguous window the fold decides: fold 0 (first
        # occurrence) still counts as DST, fold 1 as standard time.
        if not active and self.is_ambiguous(naive):
            return not self._fold(naive)
        return active

    def _naive_isdst(self, dt, transitions):
        dston, dstoff = transitions

        naive = dt.replace(tzinfo=None)

        if dston < dstoff:
            # DST interval lies inside the calendar year.
            return dston <= naive < dstoff
        # DST interval wraps around the year boundary (southern hemisphere).
        return not (dstoff <= naive < dston)

    @property
    def _dst_base_offset(self):
        return self._dst_offset - self._std_offset

    __hash__ = None

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        return "%s(...)" % self.__class__.__name__

    __reduce__ = object.__reduce__
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/_factories.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import timedelta
|
| 2 |
+
import weakref
|
| 3 |
+
from collections import OrderedDict
|
| 4 |
+
|
| 5 |
+
from six.moves import _thread
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class _TzSingleton(type):
|
| 9 |
+
def __init__(cls, *args, **kwargs):
|
| 10 |
+
cls.__instance = None
|
| 11 |
+
super(_TzSingleton, cls).__init__(*args, **kwargs)
|
| 12 |
+
|
| 13 |
+
def __call__(cls):
|
| 14 |
+
if cls.__instance is None:
|
| 15 |
+
cls.__instance = super(_TzSingleton, cls).__call__()
|
| 16 |
+
return cls.__instance
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class _TzFactory(type):
|
| 20 |
+
def instance(cls, *args, **kwargs):
|
| 21 |
+
"""Alternate constructor that returns a fresh instance"""
|
| 22 |
+
return type.__call__(cls, *args, **kwargs)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class _TzOffsetFactory(_TzFactory):
    """Caching metaclass for fixed-offset zones, keyed on (name, offset)."""
    def __init__(cls, *args, **kwargs):
        # The weak dictionary lets unused instances be collected; the small
        # strong cache keeps the most recently used ones alive.
        cls.__instances = weakref.WeakValueDictionary()
        cls.__strong_cache = OrderedDict()
        cls.__strong_cache_size = 8

        cls._cache_lock = _thread.allocate_lock()

    def __call__(cls, name, offset):
        # Normalize timedelta offsets so equal offsets share one cache key.
        if isinstance(offset, timedelta):
            key = (name, offset.total_seconds())
        else:
            key = (name, offset)

        cached = cls.__instances.get(key, None)
        if cached is None:
            # setdefault ensures two racing threads hand out one winner.
            cached = cls.__instances.setdefault(key,
                                                cls.instance(name, offset))

        # This lock may not be necessary in Python 3. See GH issue #901
        with cls._cache_lock:
            # Move (or insert) the key at the most-recently-used end.
            cls.__strong_cache[key] = cls.__strong_cache.pop(key, cached)

            # Remove an item if the strong cache is overpopulated
            if len(cls.__strong_cache) > cls.__strong_cache_size:
                cls.__strong_cache.popitem(last=False)

        return cached
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class _TzStrFactory(_TzFactory):
    """Caching metaclass for TZ-string zones, keyed on (s, posix_offset)."""
    def __init__(cls, *args, **kwargs):
        # Weak cache for collectability, plus a bounded strong cache that
        # pins the most recently used instances.
        cls.__instances = weakref.WeakValueDictionary()
        cls.__strong_cache = OrderedDict()
        cls.__strong_cache_size = 8

        cls.__cache_lock = _thread.allocate_lock()

    def __call__(cls, s, posix_offset=False):
        key = (s, posix_offset)
        cached = cls.__instances.get(key, None)

        if cached is None:
            # setdefault ensures two racing threads hand out one winner.
            cached = cls.__instances.setdefault(key,
                                                cls.instance(s, posix_offset))

        # This lock may not be necessary in Python 3. See GH issue #901
        with cls.__cache_lock:
            # Move (or insert) the key at the most-recently-used end.
            cls.__strong_cache[key] = cls.__strong_cache.pop(key, cached)

            # Remove an item if the strong cache is overpopulated
            if len(cls.__strong_cache) > cls.__strong_cache_size:
                cls.__strong_cache.popitem(last=False)

        return cached
|
| 80 |
+
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/tz.py
ADDED
|
@@ -0,0 +1,1849 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
This module offers timezone implementations subclassing the abstract
|
| 4 |
+
:py:class:`datetime.tzinfo` type. There are classes to handle tzfile format
|
| 5 |
+
files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`,
|
| 6 |
+
etc), TZ environment string (in all known formats), given ranges (with help
|
| 7 |
+
from relative deltas), local machine timezone, fixed offset timezone, and UTC
|
| 8 |
+
timezone.
|
| 9 |
+
"""
|
| 10 |
+
import datetime
|
| 11 |
+
import struct
|
| 12 |
+
import time
|
| 13 |
+
import sys
|
| 14 |
+
import os
|
| 15 |
+
import bisect
|
| 16 |
+
import weakref
|
| 17 |
+
from collections import OrderedDict
|
| 18 |
+
|
| 19 |
+
import six
|
| 20 |
+
from six import string_types
|
| 21 |
+
from six.moves import _thread
|
| 22 |
+
from ._common import tzname_in_python2, _tzinfo
|
| 23 |
+
from ._common import tzrangebase, enfold
|
| 24 |
+
from ._common import _validate_fromutc_inputs
|
| 25 |
+
|
| 26 |
+
from ._factories import _TzSingleton, _TzOffsetFactory
|
| 27 |
+
from ._factories import _TzStrFactory
|
| 28 |
+
try:
|
| 29 |
+
from .win import tzwin, tzwinlocal
|
| 30 |
+
except ImportError:
|
| 31 |
+
tzwin = tzwinlocal = None
|
| 32 |
+
|
| 33 |
+
# For warning about rounding tzinfo
|
| 34 |
+
from warnings import warn
|
| 35 |
+
|
| 36 |
+
ZERO = datetime.timedelta(0)
|
| 37 |
+
EPOCH = datetime.datetime(1970, 1, 1, 0, 0)
|
| 38 |
+
EPOCHORDINAL = EPOCH.toordinal()
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@six.add_metaclass(_TzSingleton)
class tzutc(datetime.tzinfo):
    """
    A ``tzinfo`` object representing the UTC time zone.

    **Examples:**

    .. doctest::

        >>> from datetime import *
        >>> from dateutil.tz import *

        >>> datetime.now()
        datetime.datetime(2003, 9, 27, 9, 40, 1, 521290)

        >>> datetime.now(tzutc())
        datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc())

        >>> datetime.now(tzutc()).tzname()
        'UTC'

    .. versionchanged:: 2.7.0
        ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will
        always return the same object.

        .. doctest::

            >>> from dateutil.tz import tzutc, UTC
            >>> tzutc() is tzutc()
            True
            >>> tzutc() is UTC
            True
    """
    def utcoffset(self, dt):
        return ZERO

    def dst(self, dt):
        return ZERO

    @tzname_in_python2
    def tzname(self, dt):
        return "UTC"

    def is_ambiguous(self, dt):
        """
        Report whether the "wall time" of a given datetime is ambiguous in
        this zone.  UTC has no transitions, so this is always ``False``.

        :param dt:
            A :py:class:`datetime.datetime`, naive or time zone aware.

        :return:
            Returns ``True`` if ambiguous, ``False`` otherwise.

        .. versionadded:: 2.6.0
        """
        return False

    @_validate_fromutc_inputs
    def fromutc(self, dt):
        """
        Fast-track version of fromutc(): UTC wall time equals UTC, so any
        valid :py:class:`datetime.datetime` object is returned unchanged.
        """
        return dt

    def __eq__(self, other):
        if not isinstance(other, (tzutc, tzoffset)):
            return NotImplemented

        if isinstance(other, tzutc):
            return True
        # A fixed zero offset compares equal to UTC.
        return other._offset == ZERO

    __hash__ = None

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        return "%s()" % self.__class__.__name__

    __reduce__ = object.__reduce__


#: Convenience constant providing a :class:`tzutc()` instance
#:
#: .. versionadded:: 2.7.0
UTC = tzutc()
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
@six.add_metaclass(_TzOffsetFactory)
class tzoffset(datetime.tzinfo):
    """
    A simple ``tzinfo`` class representing a fixed offset from UTC.

    :param name:
        The timezone name, to be returned when ``tzname()`` is called.
    :param offset:
        The time zone offset in seconds, or (since version 2.6.0) a
        :py:class:`datetime.timedelta` object.
    """
    def __init__(self, name, offset):
        self._name = name

        try:
            # Allow a timedelta
            offset = offset.total_seconds()
        except (TypeError, AttributeError):
            pass

        self._offset = datetime.timedelta(
            seconds=_get_supported_offset(offset))

    def utcoffset(self, dt):
        return self._offset

    def dst(self, dt):
        return ZERO

    @tzname_in_python2
    def tzname(self, dt):
        return self._name

    @_validate_fromutc_inputs
    def fromutc(self, dt):
        # Fixed offset: UTC-to-wall conversion is a plain shift.
        return dt + self._offset

    def is_ambiguous(self, dt):
        """
        Report whether the "wall time" of a given datetime is ambiguous in
        this zone.  Fixed offsets never transition, so always ``False``.

        :param dt:
            A :py:class:`datetime.datetime`, naive or time zone aware.
        :return:
            Returns ``True`` if ambiguous, ``False`` otherwise.

        .. versionadded:: 2.6.0
        """
        return False

    def __eq__(self, other):
        if not isinstance(other, tzoffset):
            return NotImplemented

        return self._offset == other._offset

    __hash__ = None

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        return "%s(%s, %s)" % (self.__class__.__name__,
                               repr(self._name),
                               int(self._offset.total_seconds()))

    __reduce__ = object.__reduce__
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class tzlocal(_tzinfo):
|
| 202 |
+
"""
|
| 203 |
+
A :class:`tzinfo` subclass built around the ``time`` timezone functions.
|
| 204 |
+
"""
|
| 205 |
+
def __init__(self):
|
| 206 |
+
super(tzlocal, self).__init__()
|
| 207 |
+
|
| 208 |
+
self._std_offset = datetime.timedelta(seconds=-time.timezone)
|
| 209 |
+
if time.daylight:
|
| 210 |
+
self._dst_offset = datetime.timedelta(seconds=-time.altzone)
|
| 211 |
+
else:
|
| 212 |
+
self._dst_offset = self._std_offset
|
| 213 |
+
|
| 214 |
+
self._dst_saved = self._dst_offset - self._std_offset
|
| 215 |
+
self._hasdst = bool(self._dst_saved)
|
| 216 |
+
self._tznames = tuple(time.tzname)
|
| 217 |
+
|
| 218 |
+
def utcoffset(self, dt):
|
| 219 |
+
if dt is None and self._hasdst:
|
| 220 |
+
return None
|
| 221 |
+
|
| 222 |
+
if self._isdst(dt):
|
| 223 |
+
return self._dst_offset
|
| 224 |
+
else:
|
| 225 |
+
return self._std_offset
|
| 226 |
+
|
| 227 |
+
def dst(self, dt):
|
| 228 |
+
if dt is None and self._hasdst:
|
| 229 |
+
return None
|
| 230 |
+
|
| 231 |
+
if self._isdst(dt):
|
| 232 |
+
return self._dst_offset - self._std_offset
|
| 233 |
+
else:
|
| 234 |
+
return ZERO
|
| 235 |
+
|
| 236 |
+
@tzname_in_python2
|
| 237 |
+
def tzname(self, dt):
|
| 238 |
+
return self._tznames[self._isdst(dt)]
|
| 239 |
+
|
| 240 |
+
def is_ambiguous(self, dt):
|
| 241 |
+
"""
|
| 242 |
+
Whether or not the "wall time" of a given datetime is ambiguous in this
|
| 243 |
+
zone.
|
| 244 |
+
|
| 245 |
+
:param dt:
|
| 246 |
+
A :py:class:`datetime.datetime`, naive or time zone aware.
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
:return:
|
| 250 |
+
Returns ``True`` if ambiguous, ``False`` otherwise.
|
| 251 |
+
|
| 252 |
+
.. versionadded:: 2.6.0
|
| 253 |
+
"""
|
| 254 |
+
naive_dst = self._naive_is_dst(dt)
|
| 255 |
+
return (not naive_dst and
|
| 256 |
+
(naive_dst != self._naive_is_dst(dt - self._dst_saved)))
|
| 257 |
+
|
| 258 |
+
def _naive_is_dst(self, dt):
|
| 259 |
+
timestamp = _datetime_to_timestamp(dt)
|
| 260 |
+
return time.localtime(timestamp + time.timezone).tm_isdst
|
| 261 |
+
|
| 262 |
+
def _isdst(self, dt, fold_naive=True):
|
| 263 |
+
# We can't use mktime here. It is unstable when deciding if
|
| 264 |
+
# the hour near to a change is DST or not.
|
| 265 |
+
#
|
| 266 |
+
# timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour,
|
| 267 |
+
# dt.minute, dt.second, dt.weekday(), 0, -1))
|
| 268 |
+
# return time.localtime(timestamp).tm_isdst
|
| 269 |
+
#
|
| 270 |
+
# The code above yields the following result:
|
| 271 |
+
#
|
| 272 |
+
# >>> import tz, datetime
|
| 273 |
+
# >>> t = tz.tzlocal()
|
| 274 |
+
# >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
|
| 275 |
+
# 'BRDT'
|
| 276 |
+
# >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname()
|
| 277 |
+
# 'BRST'
|
| 278 |
+
# >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
|
| 279 |
+
# 'BRST'
|
| 280 |
+
# >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname()
|
| 281 |
+
# 'BRDT'
|
| 282 |
+
# >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
|
| 283 |
+
# 'BRDT'
|
| 284 |
+
#
|
| 285 |
+
# Here is a more stable implementation:
|
| 286 |
+
#
|
| 287 |
+
if not self._hasdst:
|
| 288 |
+
return False
|
| 289 |
+
|
| 290 |
+
# Check for ambiguous times:
|
| 291 |
+
dstval = self._naive_is_dst(dt)
|
| 292 |
+
fold = getattr(dt, 'fold', None)
|
| 293 |
+
|
| 294 |
+
if self.is_ambiguous(dt):
|
| 295 |
+
if fold is not None:
|
| 296 |
+
return not self._fold(dt)
|
| 297 |
+
else:
|
| 298 |
+
return True
|
| 299 |
+
|
| 300 |
+
return dstval
|
| 301 |
+
|
| 302 |
+
def __eq__(self, other):
|
| 303 |
+
if isinstance(other, tzlocal):
|
| 304 |
+
return (self._std_offset == other._std_offset and
|
| 305 |
+
self._dst_offset == other._dst_offset)
|
| 306 |
+
elif isinstance(other, tzutc):
|
| 307 |
+
return (not self._hasdst and
|
| 308 |
+
self._tznames[0] in {'UTC', 'GMT'} and
|
| 309 |
+
self._std_offset == ZERO)
|
| 310 |
+
elif isinstance(other, tzoffset):
|
| 311 |
+
return (not self._hasdst and
|
| 312 |
+
self._tznames[0] == other._name and
|
| 313 |
+
self._std_offset == other._offset)
|
| 314 |
+
else:
|
| 315 |
+
return NotImplemented
|
| 316 |
+
|
| 317 |
+
__hash__ = None
|
| 318 |
+
|
| 319 |
+
def __ne__(self, other):
|
| 320 |
+
return not (self == other)
|
| 321 |
+
|
| 322 |
+
def __repr__(self):
|
| 323 |
+
return "%s()" % self.__class__.__name__
|
| 324 |
+
|
| 325 |
+
__reduce__ = object.__reduce__
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
class _ttinfo(object):
|
| 329 |
+
__slots__ = ["offset", "delta", "isdst", "abbr",
|
| 330 |
+
"isstd", "isgmt", "dstoffset"]
|
| 331 |
+
|
| 332 |
+
def __init__(self):
|
| 333 |
+
for attr in self.__slots__:
|
| 334 |
+
setattr(self, attr, None)
|
| 335 |
+
|
| 336 |
+
def __repr__(self):
|
| 337 |
+
l = []
|
| 338 |
+
for attr in self.__slots__:
|
| 339 |
+
value = getattr(self, attr)
|
| 340 |
+
if value is not None:
|
| 341 |
+
l.append("%s=%s" % (attr, repr(value)))
|
| 342 |
+
return "%s(%s)" % (self.__class__.__name__, ", ".join(l))
|
| 343 |
+
|
| 344 |
+
def __eq__(self, other):
|
| 345 |
+
if not isinstance(other, _ttinfo):
|
| 346 |
+
return NotImplemented
|
| 347 |
+
|
| 348 |
+
return (self.offset == other.offset and
|
| 349 |
+
self.delta == other.delta and
|
| 350 |
+
self.isdst == other.isdst and
|
| 351 |
+
self.abbr == other.abbr and
|
| 352 |
+
self.isstd == other.isstd and
|
| 353 |
+
self.isgmt == other.isgmt and
|
| 354 |
+
self.dstoffset == other.dstoffset)
|
| 355 |
+
|
| 356 |
+
__hash__ = None
|
| 357 |
+
|
| 358 |
+
def __ne__(self, other):
|
| 359 |
+
return not (self == other)
|
| 360 |
+
|
| 361 |
+
def __getstate__(self):
|
| 362 |
+
state = {}
|
| 363 |
+
for name in self.__slots__:
|
| 364 |
+
state[name] = getattr(self, name, None)
|
| 365 |
+
return state
|
| 366 |
+
|
| 367 |
+
def __setstate__(self, state):
|
| 368 |
+
for name in self.__slots__:
|
| 369 |
+
if name in state:
|
| 370 |
+
setattr(self, name, state[name])
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
class _tzfile(object):
|
| 374 |
+
"""
|
| 375 |
+
Lightweight class for holding the relevant transition and time zone
|
| 376 |
+
information read from binary tzfiles.
|
| 377 |
+
"""
|
| 378 |
+
attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list',
|
| 379 |
+
'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first']
|
| 380 |
+
|
| 381 |
+
def __init__(self, **kwargs):
|
| 382 |
+
for attr in self.attrs:
|
| 383 |
+
setattr(self, attr, kwargs.get(attr, None))
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
class tzfile(_tzinfo):
|
| 387 |
+
"""
|
| 388 |
+
This is a ``tzinfo`` subclass that allows one to use the ``tzfile(5)``
|
| 389 |
+
format timezone files to extract current and historical zone information.
|
| 390 |
+
|
| 391 |
+
:param fileobj:
|
| 392 |
+
This can be an opened file stream or a file name that the time zone
|
| 393 |
+
information can be read from.
|
| 394 |
+
|
| 395 |
+
:param filename:
|
| 396 |
+
This is an optional parameter specifying the source of the time zone
|
| 397 |
+
information in the event that ``fileobj`` is a file object. If omitted
|
| 398 |
+
and ``fileobj`` is a file stream, this parameter will be set either to
|
| 399 |
+
``fileobj``'s ``name`` attribute or to ``repr(fileobj)``.
|
| 400 |
+
|
| 401 |
+
See `Sources for Time Zone and Daylight Saving Time Data
|
| 402 |
+
<https://data.iana.org/time-zones/tz-link.html>`_ for more information.
|
| 403 |
+
Time zone files can be compiled from the `IANA Time Zone database files
|
| 404 |
+
<https://www.iana.org/time-zones>`_ with the `zic time zone compiler
|
| 405 |
+
<https://www.freebsd.org/cgi/man.cgi?query=zic&sektion=8>`_
|
| 406 |
+
|
| 407 |
+
.. note::
|
| 408 |
+
|
| 409 |
+
Only construct a ``tzfile`` directly if you have a specific timezone
|
| 410 |
+
file on disk that you want to read into a Python ``tzinfo`` object.
|
| 411 |
+
If you want to get a ``tzfile`` representing a specific IANA zone,
|
| 412 |
+
(e.g. ``'America/New_York'``), you should call
|
| 413 |
+
:func:`dateutil.tz.gettz` with the zone identifier.
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
**Examples:**
|
| 417 |
+
|
| 418 |
+
Using the US Eastern time zone as an example, we can see that a ``tzfile``
|
| 419 |
+
provides time zone information for the standard Daylight Saving offsets:
|
| 420 |
+
|
| 421 |
+
.. testsetup:: tzfile
|
| 422 |
+
|
| 423 |
+
from dateutil.tz import gettz
|
| 424 |
+
from datetime import datetime
|
| 425 |
+
|
| 426 |
+
.. doctest:: tzfile
|
| 427 |
+
|
| 428 |
+
>>> NYC = gettz('America/New_York')
|
| 429 |
+
>>> NYC
|
| 430 |
+
tzfile('/usr/share/zoneinfo/America/New_York')
|
| 431 |
+
|
| 432 |
+
>>> print(datetime(2016, 1, 3, tzinfo=NYC)) # EST
|
| 433 |
+
2016-01-03 00:00:00-05:00
|
| 434 |
+
|
| 435 |
+
>>> print(datetime(2016, 7, 7, tzinfo=NYC)) # EDT
|
| 436 |
+
2016-07-07 00:00:00-04:00
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
The ``tzfile`` structure contains a fully history of the time zone,
|
| 440 |
+
so historical dates will also have the right offsets. For example, before
|
| 441 |
+
the adoption of the UTC standards, New York used local solar mean time:
|
| 442 |
+
|
| 443 |
+
.. doctest:: tzfile
|
| 444 |
+
|
| 445 |
+
>>> print(datetime(1901, 4, 12, tzinfo=NYC)) # LMT
|
| 446 |
+
1901-04-12 00:00:00-04:56
|
| 447 |
+
|
| 448 |
+
And during World War II, New York was on "Eastern War Time", which was a
|
| 449 |
+
state of permanent daylight saving time:
|
| 450 |
+
|
| 451 |
+
.. doctest:: tzfile
|
| 452 |
+
|
| 453 |
+
>>> print(datetime(1944, 2, 7, tzinfo=NYC)) # EWT
|
| 454 |
+
1944-02-07 00:00:00-04:00
|
| 455 |
+
|
| 456 |
+
"""
|
| 457 |
+
|
| 458 |
+
def __init__(self, fileobj, filename=None):
|
| 459 |
+
super(tzfile, self).__init__()
|
| 460 |
+
|
| 461 |
+
file_opened_here = False
|
| 462 |
+
if isinstance(fileobj, string_types):
|
| 463 |
+
self._filename = fileobj
|
| 464 |
+
fileobj = open(fileobj, 'rb')
|
| 465 |
+
file_opened_here = True
|
| 466 |
+
elif filename is not None:
|
| 467 |
+
self._filename = filename
|
| 468 |
+
elif hasattr(fileobj, "name"):
|
| 469 |
+
self._filename = fileobj.name
|
| 470 |
+
else:
|
| 471 |
+
self._filename = repr(fileobj)
|
| 472 |
+
|
| 473 |
+
if fileobj is not None:
|
| 474 |
+
if not file_opened_here:
|
| 475 |
+
fileobj = _nullcontext(fileobj)
|
| 476 |
+
|
| 477 |
+
with fileobj as file_stream:
|
| 478 |
+
tzobj = self._read_tzfile(file_stream)
|
| 479 |
+
|
| 480 |
+
self._set_tzdata(tzobj)
|
| 481 |
+
|
| 482 |
+
def _set_tzdata(self, tzobj):
|
| 483 |
+
""" Set the time zone data of this object from a _tzfile object """
|
| 484 |
+
# Copy the relevant attributes over as private attributes
|
| 485 |
+
for attr in _tzfile.attrs:
|
| 486 |
+
setattr(self, '_' + attr, getattr(tzobj, attr))
|
| 487 |
+
|
| 488 |
+
def _read_tzfile(self, fileobj):
|
| 489 |
+
out = _tzfile()
|
| 490 |
+
|
| 491 |
+
# From tzfile(5):
|
| 492 |
+
#
|
| 493 |
+
# The time zone information files used by tzset(3)
|
| 494 |
+
# begin with the magic characters "TZif" to identify
|
| 495 |
+
# them as time zone information files, followed by
|
| 496 |
+
# sixteen bytes reserved for future use, followed by
|
| 497 |
+
# six four-byte values of type long, written in a
|
| 498 |
+
# ``standard'' byte order (the high-order byte
|
| 499 |
+
# of the value is written first).
|
| 500 |
+
if fileobj.read(4).decode() != "TZif":
|
| 501 |
+
raise ValueError("magic not found")
|
| 502 |
+
|
| 503 |
+
fileobj.read(16)
|
| 504 |
+
|
| 505 |
+
(
|
| 506 |
+
# The number of UTC/local indicators stored in the file.
|
| 507 |
+
ttisgmtcnt,
|
| 508 |
+
|
| 509 |
+
# The number of standard/wall indicators stored in the file.
|
| 510 |
+
ttisstdcnt,
|
| 511 |
+
|
| 512 |
+
# The number of leap seconds for which data is
|
| 513 |
+
# stored in the file.
|
| 514 |
+
leapcnt,
|
| 515 |
+
|
| 516 |
+
# The number of "transition times" for which data
|
| 517 |
+
# is stored in the file.
|
| 518 |
+
timecnt,
|
| 519 |
+
|
| 520 |
+
# The number of "local time types" for which data
|
| 521 |
+
# is stored in the file (must not be zero).
|
| 522 |
+
typecnt,
|
| 523 |
+
|
| 524 |
+
# The number of characters of "time zone
|
| 525 |
+
# abbreviation strings" stored in the file.
|
| 526 |
+
charcnt,
|
| 527 |
+
|
| 528 |
+
) = struct.unpack(">6l", fileobj.read(24))
|
| 529 |
+
|
| 530 |
+
# The above header is followed by tzh_timecnt four-byte
|
| 531 |
+
# values of type long, sorted in ascending order.
|
| 532 |
+
# These values are written in ``standard'' byte order.
|
| 533 |
+
# Each is used as a transition time (as returned by
|
| 534 |
+
# time(2)) at which the rules for computing local time
|
| 535 |
+
# change.
|
| 536 |
+
|
| 537 |
+
if timecnt:
|
| 538 |
+
out.trans_list_utc = list(struct.unpack(">%dl" % timecnt,
|
| 539 |
+
fileobj.read(timecnt*4)))
|
| 540 |
+
else:
|
| 541 |
+
out.trans_list_utc = []
|
| 542 |
+
|
| 543 |
+
# Next come tzh_timecnt one-byte values of type unsigned
|
| 544 |
+
# char; each one tells which of the different types of
|
| 545 |
+
# ``local time'' types described in the file is associated
|
| 546 |
+
# with the same-indexed transition time. These values
|
| 547 |
+
# serve as indices into an array of ttinfo structures that
|
| 548 |
+
# appears next in the file.
|
| 549 |
+
|
| 550 |
+
if timecnt:
|
| 551 |
+
out.trans_idx = struct.unpack(">%dB" % timecnt,
|
| 552 |
+
fileobj.read(timecnt))
|
| 553 |
+
else:
|
| 554 |
+
out.trans_idx = []
|
| 555 |
+
|
| 556 |
+
# Each ttinfo structure is written as a four-byte value
|
| 557 |
+
# for tt_gmtoff of type long, in a standard byte
|
| 558 |
+
# order, followed by a one-byte value for tt_isdst
|
| 559 |
+
# and a one-byte value for tt_abbrind. In each
|
| 560 |
+
# structure, tt_gmtoff gives the number of
|
| 561 |
+
# seconds to be added to UTC, tt_isdst tells whether
|
| 562 |
+
# tm_isdst should be set by localtime(3), and
|
| 563 |
+
# tt_abbrind serves as an index into the array of
|
| 564 |
+
# time zone abbreviation characters that follow the
|
| 565 |
+
# ttinfo structure(s) in the file.
|
| 566 |
+
|
| 567 |
+
ttinfo = []
|
| 568 |
+
|
| 569 |
+
for i in range(typecnt):
|
| 570 |
+
ttinfo.append(struct.unpack(">lbb", fileobj.read(6)))
|
| 571 |
+
|
| 572 |
+
abbr = fileobj.read(charcnt).decode()
|
| 573 |
+
|
| 574 |
+
# Then there are tzh_leapcnt pairs of four-byte
|
| 575 |
+
# values, written in standard byte order; the
|
| 576 |
+
# first value of each pair gives the time (as
|
| 577 |
+
# returned by time(2)) at which a leap second
|
| 578 |
+
# occurs; the second gives the total number of
|
| 579 |
+
# leap seconds to be applied after the given time.
|
| 580 |
+
# The pairs of values are sorted in ascending order
|
| 581 |
+
# by time.
|
| 582 |
+
|
| 583 |
+
# Not used, for now (but seek for correct file position)
|
| 584 |
+
if leapcnt:
|
| 585 |
+
fileobj.seek(leapcnt * 8, os.SEEK_CUR)
|
| 586 |
+
|
| 587 |
+
# Then there are tzh_ttisstdcnt standard/wall
|
| 588 |
+
# indicators, each stored as a one-byte value;
|
| 589 |
+
# they tell whether the transition times associated
|
| 590 |
+
# with local time types were specified as standard
|
| 591 |
+
# time or wall clock time, and are used when
|
| 592 |
+
# a time zone file is used in handling POSIX-style
|
| 593 |
+
# time zone environment variables.
|
| 594 |
+
|
| 595 |
+
if ttisstdcnt:
|
| 596 |
+
isstd = struct.unpack(">%db" % ttisstdcnt,
|
| 597 |
+
fileobj.read(ttisstdcnt))
|
| 598 |
+
|
| 599 |
+
# Finally, there are tzh_ttisgmtcnt UTC/local
|
| 600 |
+
# indicators, each stored as a one-byte value;
|
| 601 |
+
# they tell whether the transition times associated
|
| 602 |
+
# with local time types were specified as UTC or
|
| 603 |
+
# local time, and are used when a time zone file
|
| 604 |
+
# is used in handling POSIX-style time zone envi-
|
| 605 |
+
# ronment variables.
|
| 606 |
+
|
| 607 |
+
if ttisgmtcnt:
|
| 608 |
+
isgmt = struct.unpack(">%db" % ttisgmtcnt,
|
| 609 |
+
fileobj.read(ttisgmtcnt))
|
| 610 |
+
|
| 611 |
+
# Build ttinfo list
|
| 612 |
+
out.ttinfo_list = []
|
| 613 |
+
for i in range(typecnt):
|
| 614 |
+
gmtoff, isdst, abbrind = ttinfo[i]
|
| 615 |
+
gmtoff = _get_supported_offset(gmtoff)
|
| 616 |
+
tti = _ttinfo()
|
| 617 |
+
tti.offset = gmtoff
|
| 618 |
+
tti.dstoffset = datetime.timedelta(0)
|
| 619 |
+
tti.delta = datetime.timedelta(seconds=gmtoff)
|
| 620 |
+
tti.isdst = isdst
|
| 621 |
+
tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)]
|
| 622 |
+
tti.isstd = (ttisstdcnt > i and isstd[i] != 0)
|
| 623 |
+
tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0)
|
| 624 |
+
out.ttinfo_list.append(tti)
|
| 625 |
+
|
| 626 |
+
# Replace ttinfo indexes for ttinfo objects.
|
| 627 |
+
out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx]
|
| 628 |
+
|
| 629 |
+
# Set standard, dst, and before ttinfos. before will be
|
| 630 |
+
# used when a given time is before any transitions,
|
| 631 |
+
# and will be set to the first non-dst ttinfo, or to
|
| 632 |
+
# the first dst, if all of them are dst.
|
| 633 |
+
out.ttinfo_std = None
|
| 634 |
+
out.ttinfo_dst = None
|
| 635 |
+
out.ttinfo_before = None
|
| 636 |
+
if out.ttinfo_list:
|
| 637 |
+
if not out.trans_list_utc:
|
| 638 |
+
out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0]
|
| 639 |
+
else:
|
| 640 |
+
for i in range(timecnt-1, -1, -1):
|
| 641 |
+
tti = out.trans_idx[i]
|
| 642 |
+
if not out.ttinfo_std and not tti.isdst:
|
| 643 |
+
out.ttinfo_std = tti
|
| 644 |
+
elif not out.ttinfo_dst and tti.isdst:
|
| 645 |
+
out.ttinfo_dst = tti
|
| 646 |
+
|
| 647 |
+
if out.ttinfo_std and out.ttinfo_dst:
|
| 648 |
+
break
|
| 649 |
+
else:
|
| 650 |
+
if out.ttinfo_dst and not out.ttinfo_std:
|
| 651 |
+
out.ttinfo_std = out.ttinfo_dst
|
| 652 |
+
|
| 653 |
+
for tti in out.ttinfo_list:
|
| 654 |
+
if not tti.isdst:
|
| 655 |
+
out.ttinfo_before = tti
|
| 656 |
+
break
|
| 657 |
+
else:
|
| 658 |
+
out.ttinfo_before = out.ttinfo_list[0]
|
| 659 |
+
|
| 660 |
+
# Now fix transition times to become relative to wall time.
|
| 661 |
+
#
|
| 662 |
+
# I'm not sure about this. In my tests, the tz source file
|
| 663 |
+
# is setup to wall time, and in the binary file isstd and
|
| 664 |
+
# isgmt are off, so it should be in wall time. OTOH, it's
|
| 665 |
+
# always in gmt time. Let me know if you have comments
|
| 666 |
+
# about this.
|
| 667 |
+
lastdst = None
|
| 668 |
+
lastoffset = None
|
| 669 |
+
lastdstoffset = None
|
| 670 |
+
lastbaseoffset = None
|
| 671 |
+
out.trans_list = []
|
| 672 |
+
|
| 673 |
+
for i, tti in enumerate(out.trans_idx):
|
| 674 |
+
offset = tti.offset
|
| 675 |
+
dstoffset = 0
|
| 676 |
+
|
| 677 |
+
if lastdst is not None:
|
| 678 |
+
if tti.isdst:
|
| 679 |
+
if not lastdst:
|
| 680 |
+
dstoffset = offset - lastoffset
|
| 681 |
+
|
| 682 |
+
if not dstoffset and lastdstoffset:
|
| 683 |
+
dstoffset = lastdstoffset
|
| 684 |
+
|
| 685 |
+
tti.dstoffset = datetime.timedelta(seconds=dstoffset)
|
| 686 |
+
lastdstoffset = dstoffset
|
| 687 |
+
|
| 688 |
+
# If a time zone changes its base offset during a DST transition,
|
| 689 |
+
# then you need to adjust by the previous base offset to get the
|
| 690 |
+
# transition time in local time. Otherwise you use the current
|
| 691 |
+
# base offset. Ideally, I would have some mathematical proof of
|
| 692 |
+
# why this is true, but I haven't really thought about it enough.
|
| 693 |
+
baseoffset = offset - dstoffset
|
| 694 |
+
adjustment = baseoffset
|
| 695 |
+
if (lastbaseoffset is not None and baseoffset != lastbaseoffset
|
| 696 |
+
and tti.isdst != lastdst):
|
| 697 |
+
# The base DST has changed
|
| 698 |
+
adjustment = lastbaseoffset
|
| 699 |
+
|
| 700 |
+
lastdst = tti.isdst
|
| 701 |
+
lastoffset = offset
|
| 702 |
+
lastbaseoffset = baseoffset
|
| 703 |
+
|
| 704 |
+
out.trans_list.append(out.trans_list_utc[i] + adjustment)
|
| 705 |
+
|
| 706 |
+
out.trans_idx = tuple(out.trans_idx)
|
| 707 |
+
out.trans_list = tuple(out.trans_list)
|
| 708 |
+
out.trans_list_utc = tuple(out.trans_list_utc)
|
| 709 |
+
|
| 710 |
+
return out
|
| 711 |
+
|
| 712 |
+
def _find_last_transition(self, dt, in_utc=False):
|
| 713 |
+
# If there's no list, there are no transitions to find
|
| 714 |
+
if not self._trans_list:
|
| 715 |
+
return None
|
| 716 |
+
|
| 717 |
+
timestamp = _datetime_to_timestamp(dt)
|
| 718 |
+
|
| 719 |
+
# Find where the timestamp fits in the transition list - if the
|
| 720 |
+
# timestamp is a transition time, it's part of the "after" period.
|
| 721 |
+
trans_list = self._trans_list_utc if in_utc else self._trans_list
|
| 722 |
+
idx = bisect.bisect_right(trans_list, timestamp)
|
| 723 |
+
|
| 724 |
+
# We want to know when the previous transition was, so subtract off 1
|
| 725 |
+
return idx - 1
|
| 726 |
+
|
| 727 |
+
def _get_ttinfo(self, idx):
|
| 728 |
+
# For no list or after the last transition, default to _ttinfo_std
|
| 729 |
+
if idx is None or (idx + 1) >= len(self._trans_list):
|
| 730 |
+
return self._ttinfo_std
|
| 731 |
+
|
| 732 |
+
# If there is a list and the time is before it, return _ttinfo_before
|
| 733 |
+
if idx < 0:
|
| 734 |
+
return self._ttinfo_before
|
| 735 |
+
|
| 736 |
+
return self._trans_idx[idx]
|
| 737 |
+
|
| 738 |
+
def _find_ttinfo(self, dt):
|
| 739 |
+
idx = self._resolve_ambiguous_time(dt)
|
| 740 |
+
|
| 741 |
+
return self._get_ttinfo(idx)
|
| 742 |
+
|
| 743 |
+
def fromutc(self, dt):
|
| 744 |
+
"""
|
| 745 |
+
The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`.
|
| 746 |
+
|
| 747 |
+
:param dt:
|
| 748 |
+
A :py:class:`datetime.datetime` object.
|
| 749 |
+
|
| 750 |
+
:raises TypeError:
|
| 751 |
+
Raised if ``dt`` is not a :py:class:`datetime.datetime` object.
|
| 752 |
+
|
| 753 |
+
:raises ValueError:
|
| 754 |
+
Raised if this is called with a ``dt`` which does not have this
|
| 755 |
+
``tzinfo`` attached.
|
| 756 |
+
|
| 757 |
+
:return:
|
| 758 |
+
Returns a :py:class:`datetime.datetime` object representing the
|
| 759 |
+
wall time in ``self``'s time zone.
|
| 760 |
+
"""
|
| 761 |
+
# These isinstance checks are in datetime.tzinfo, so we'll preserve
|
| 762 |
+
# them, even if we don't care about duck typing.
|
| 763 |
+
if not isinstance(dt, datetime.datetime):
|
| 764 |
+
raise TypeError("fromutc() requires a datetime argument")
|
| 765 |
+
|
| 766 |
+
if dt.tzinfo is not self:
|
| 767 |
+
raise ValueError("dt.tzinfo is not self")
|
| 768 |
+
|
| 769 |
+
# First treat UTC as wall time and get the transition we're in.
|
| 770 |
+
idx = self._find_last_transition(dt, in_utc=True)
|
| 771 |
+
tti = self._get_ttinfo(idx)
|
| 772 |
+
|
| 773 |
+
dt_out = dt + datetime.timedelta(seconds=tti.offset)
|
| 774 |
+
|
| 775 |
+
fold = self.is_ambiguous(dt_out, idx=idx)
|
| 776 |
+
|
| 777 |
+
return enfold(dt_out, fold=int(fold))
|
| 778 |
+
|
| 779 |
+
def is_ambiguous(self, dt, idx=None):
|
| 780 |
+
"""
|
| 781 |
+
Whether or not the "wall time" of a given datetime is ambiguous in this
|
| 782 |
+
zone.
|
| 783 |
+
|
| 784 |
+
:param dt:
|
| 785 |
+
A :py:class:`datetime.datetime`, naive or time zone aware.
|
| 786 |
+
|
| 787 |
+
|
| 788 |
+
:return:
|
| 789 |
+
Returns ``True`` if ambiguous, ``False`` otherwise.
|
| 790 |
+
|
| 791 |
+
.. versionadded:: 2.6.0
|
| 792 |
+
"""
|
| 793 |
+
if idx is None:
|
| 794 |
+
idx = self._find_last_transition(dt)
|
| 795 |
+
|
| 796 |
+
# Calculate the difference in offsets from current to previous
|
| 797 |
+
timestamp = _datetime_to_timestamp(dt)
|
| 798 |
+
tti = self._get_ttinfo(idx)
|
| 799 |
+
|
| 800 |
+
if idx is None or idx <= 0:
|
| 801 |
+
return False
|
| 802 |
+
|
| 803 |
+
od = self._get_ttinfo(idx - 1).offset - tti.offset
|
| 804 |
+
tt = self._trans_list[idx] # Transition time
|
| 805 |
+
|
| 806 |
+
return timestamp < tt + od
|
| 807 |
+
|
| 808 |
+
def _resolve_ambiguous_time(self, dt):
|
| 809 |
+
idx = self._find_last_transition(dt)
|
| 810 |
+
|
| 811 |
+
# If we have no transitions, return the index
|
| 812 |
+
_fold = self._fold(dt)
|
| 813 |
+
if idx is None or idx == 0:
|
| 814 |
+
return idx
|
| 815 |
+
|
| 816 |
+
# If it's ambiguous and we're in a fold, shift to a different index.
|
| 817 |
+
idx_offset = int(not _fold and self.is_ambiguous(dt, idx))
|
| 818 |
+
|
| 819 |
+
return idx - idx_offset
|
| 820 |
+
|
| 821 |
+
def utcoffset(self, dt):
|
| 822 |
+
if dt is None:
|
| 823 |
+
return None
|
| 824 |
+
|
| 825 |
+
if not self._ttinfo_std:
|
| 826 |
+
return ZERO
|
| 827 |
+
|
| 828 |
+
return self._find_ttinfo(dt).delta
|
| 829 |
+
|
| 830 |
+
def dst(self, dt):
|
| 831 |
+
if dt is None:
|
| 832 |
+
return None
|
| 833 |
+
|
| 834 |
+
if not self._ttinfo_dst:
|
| 835 |
+
return ZERO
|
| 836 |
+
|
| 837 |
+
tti = self._find_ttinfo(dt)
|
| 838 |
+
|
| 839 |
+
if not tti.isdst:
|
| 840 |
+
return ZERO
|
| 841 |
+
|
| 842 |
+
# The documentation says that utcoffset()-dst() must
|
| 843 |
+
# be constant for every dt.
|
| 844 |
+
return tti.dstoffset
|
| 845 |
+
|
| 846 |
+
@tzname_in_python2
|
| 847 |
+
def tzname(self, dt):
|
| 848 |
+
if not self._ttinfo_std or dt is None:
|
| 849 |
+
return None
|
| 850 |
+
return self._find_ttinfo(dt).abbr
|
| 851 |
+
|
| 852 |
+
def __eq__(self, other):
|
| 853 |
+
if not isinstance(other, tzfile):
|
| 854 |
+
return NotImplemented
|
| 855 |
+
return (self._trans_list == other._trans_list and
|
| 856 |
+
self._trans_idx == other._trans_idx and
|
| 857 |
+
self._ttinfo_list == other._ttinfo_list)
|
| 858 |
+
|
| 859 |
+
__hash__ = None
|
| 860 |
+
|
| 861 |
+
def __ne__(self, other):
|
| 862 |
+
return not (self == other)
|
| 863 |
+
|
| 864 |
+
def __repr__(self):
|
| 865 |
+
return "%s(%s)" % (self.__class__.__name__, repr(self._filename))
|
| 866 |
+
|
| 867 |
+
def __reduce__(self):
|
| 868 |
+
return self.__reduce_ex__(None)
|
| 869 |
+
|
| 870 |
+
def __reduce_ex__(self, protocol):
|
| 871 |
+
return (self.__class__, (None, self._filename), self.__dict__)
|
| 872 |
+
|
| 873 |
+
|
| 874 |
+
class tzrange(tzrangebase):
|
| 875 |
+
"""
|
| 876 |
+
The ``tzrange`` object is a time zone specified by a set of offsets and
|
| 877 |
+
abbreviations, equivalent to the way the ``TZ`` variable can be specified
|
| 878 |
+
in POSIX-like systems, but using Python delta objects to specify DST
|
| 879 |
+
start, end and offsets.
|
| 880 |
+
|
| 881 |
+
:param stdabbr:
|
| 882 |
+
The abbreviation for standard time (e.g. ``'EST'``).
|
| 883 |
+
|
| 884 |
+
:param stdoffset:
|
| 885 |
+
An integer or :class:`datetime.timedelta` object or equivalent
|
| 886 |
+
specifying the base offset from UTC.
|
| 887 |
+
|
| 888 |
+
If unspecified, +00:00 is used.
|
| 889 |
+
|
| 890 |
+
:param dstabbr:
|
| 891 |
+
The abbreviation for DST / "Summer" time (e.g. ``'EDT'``).
|
| 892 |
+
|
| 893 |
+
If specified, with no other DST information, DST is assumed to occur
|
| 894 |
+
and the default behavior or ``dstoffset``, ``start`` and ``end`` is
|
| 895 |
+
used. If unspecified and no other DST information is specified, it
|
| 896 |
+
is assumed that this zone has no DST.
|
| 897 |
+
|
| 898 |
+
If this is unspecified and other DST information is *is* specified,
|
| 899 |
+
DST occurs in the zone but the time zone abbreviation is left
|
| 900 |
+
unchanged.
|
| 901 |
+
|
| 902 |
+
:param dstoffset:
|
| 903 |
+
A an integer or :class:`datetime.timedelta` object or equivalent
|
| 904 |
+
specifying the UTC offset during DST. If unspecified and any other DST
|
| 905 |
+
information is specified, it is assumed to be the STD offset +1 hour.
|
| 906 |
+
|
| 907 |
+
:param start:
|
| 908 |
+
A :class:`relativedelta.relativedelta` object or equivalent specifying
|
| 909 |
+
the time and time of year that daylight savings time starts. To
|
| 910 |
+
specify, for example, that DST starts at 2AM on the 2nd Sunday in
|
| 911 |
+
March, pass:
|
| 912 |
+
|
| 913 |
+
``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))``
|
| 914 |
+
|
| 915 |
+
If unspecified and any other DST information is specified, the default
|
| 916 |
+
value is 2 AM on the first Sunday in April.
|
| 917 |
+
|
| 918 |
+
:param end:
|
| 919 |
+
A :class:`relativedelta.relativedelta` object or equivalent
|
| 920 |
+
representing the time and time of year that daylight savings time
|
| 921 |
+
ends, with the same specification method as in ``start``. One note is
|
| 922 |
+
that this should point to the first time in the *standard* zone, so if
|
| 923 |
+
a transition occurs at 2AM in the DST zone and the clocks are set back
|
| 924 |
+
1 hour to 1AM, set the ``hours`` parameter to +1.
|
| 925 |
+
|
| 926 |
+
|
| 927 |
+
**Examples:**
|
| 928 |
+
|
| 929 |
+
.. testsetup:: tzrange
|
| 930 |
+
|
| 931 |
+
from dateutil.tz import tzrange, tzstr
|
| 932 |
+
|
| 933 |
+
.. doctest:: tzrange
|
| 934 |
+
|
| 935 |
+
>>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT")
|
| 936 |
+
True
|
| 937 |
+
|
| 938 |
+
>>> from dateutil.relativedelta import *
|
| 939 |
+
>>> range1 = tzrange("EST", -18000, "EDT")
|
| 940 |
+
>>> range2 = tzrange("EST", -18000, "EDT", -14400,
|
| 941 |
+
... relativedelta(hours=+2, month=4, day=1,
|
| 942 |
+
... weekday=SU(+1)),
|
| 943 |
+
... relativedelta(hours=+1, month=10, day=31,
|
| 944 |
+
... weekday=SU(-1)))
|
| 945 |
+
>>> tzstr('EST5EDT') == range1 == range2
|
| 946 |
+
True
|
| 947 |
+
|
| 948 |
+
"""
|
| 949 |
+
def __init__(self, stdabbr, stdoffset=None,
|
| 950 |
+
dstabbr=None, dstoffset=None,
|
| 951 |
+
start=None, end=None):
|
| 952 |
+
|
| 953 |
+
global relativedelta
|
| 954 |
+
from dateutil import relativedelta
|
| 955 |
+
|
| 956 |
+
self._std_abbr = stdabbr
|
| 957 |
+
self._dst_abbr = dstabbr
|
| 958 |
+
|
| 959 |
+
try:
|
| 960 |
+
stdoffset = stdoffset.total_seconds()
|
| 961 |
+
except (TypeError, AttributeError):
|
| 962 |
+
pass
|
| 963 |
+
|
| 964 |
+
try:
|
| 965 |
+
dstoffset = dstoffset.total_seconds()
|
| 966 |
+
except (TypeError, AttributeError):
|
| 967 |
+
pass
|
| 968 |
+
|
| 969 |
+
if stdoffset is not None:
|
| 970 |
+
self._std_offset = datetime.timedelta(seconds=stdoffset)
|
| 971 |
+
else:
|
| 972 |
+
self._std_offset = ZERO
|
| 973 |
+
|
| 974 |
+
if dstoffset is not None:
|
| 975 |
+
self._dst_offset = datetime.timedelta(seconds=dstoffset)
|
| 976 |
+
elif dstabbr and stdoffset is not None:
|
| 977 |
+
self._dst_offset = self._std_offset + datetime.timedelta(hours=+1)
|
| 978 |
+
else:
|
| 979 |
+
self._dst_offset = ZERO
|
| 980 |
+
|
| 981 |
+
if dstabbr and start is None:
|
| 982 |
+
self._start_delta = relativedelta.relativedelta(
|
| 983 |
+
hours=+2, month=4, day=1, weekday=relativedelta.SU(+1))
|
| 984 |
+
else:
|
| 985 |
+
self._start_delta = start
|
| 986 |
+
|
| 987 |
+
if dstabbr and end is None:
|
| 988 |
+
self._end_delta = relativedelta.relativedelta(
|
| 989 |
+
hours=+1, month=10, day=31, weekday=relativedelta.SU(-1))
|
| 990 |
+
else:
|
| 991 |
+
self._end_delta = end
|
| 992 |
+
|
| 993 |
+
self._dst_base_offset_ = self._dst_offset - self._std_offset
|
| 994 |
+
self.hasdst = bool(self._start_delta)
|
| 995 |
+
|
| 996 |
+
    def transitions(self, year):
        """
        For a given year, get the DST on and off transition times, expressed
        always on the standard time side. For zones with no transitions, this
        function returns ``None``.

        :param year:
            The year whose transitions you would like to query.

        :return:
            Returns a :class:`tuple` of :class:`datetime.datetime` objects,
            ``(dston, dstoff)`` for zones with an annual DST transition, or
            ``None`` for fixed offset zones.
        """
        if not self.hasdst:
            return None

        # Anchor at January 1 of the requested year; the relativedelta
        # onset rules then resolve to concrete datetimes for that year.
        base_year = datetime.datetime(year, 1, 1)

        start = base_year + self._start_delta
        end = base_year + self._end_delta

        return (start, end)
    def __eq__(self, other):
        """Two tzrange instances are equal iff all defining fields match."""
        if not isinstance(other, tzrange):
            # Let the other operand's __eq__ have a chance.
            return NotImplemented

        return (self._std_abbr == other._std_abbr and
                self._dst_abbr == other._dst_abbr and
                self._std_offset == other._std_offset and
                self._dst_offset == other._dst_offset and
                self._start_delta == other._start_delta and
                self._end_delta == other._end_delta)
    @property
    def _dst_base_offset(self):
        # Difference between DST and standard offsets, precomputed once
        # in __init__ (used by the _tzinfo base class machinery).
        return self._dst_base_offset_
@six.add_metaclass(_TzStrFactory)
class tzstr(tzrange):
    """
    ``tzstr`` objects are time zone objects specified by a time-zone string as
    it would be passed to a ``TZ`` variable on POSIX-style systems (see
    the `GNU C Library: TZ Variable`_ for more details).

    There is one notable exception, which is that POSIX-style time zones use an
    inverted offset format, so normally ``GMT+3`` would be parsed as an offset
    3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an
    offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX
    behavior, pass a ``True`` value to ``posix_offset``.

    The :class:`tzrange` object provides the same functionality, but is
    specified using :class:`relativedelta.relativedelta` objects. rather than
    strings.

    :param s:
        A time zone string in ``TZ`` variable format. This can be a
        :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x:
        :class:`unicode`) or a stream emitting unicode characters
        (e.g. :class:`StringIO`).

    :param posix_offset:
        Optional. If set to ``True``, interpret strings such as ``GMT+3`` or
        ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the
        POSIX standard.

    .. caution::

        Prior to version 2.7.0, this function also supported time zones
        in the format:

        * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600``
        * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600``

        This format is non-standard and has been deprecated; this function
        will raise a :class:`DeprecatedTZFormatWarning` until
        support is removed in a future version.

    .. _`GNU C Library: TZ Variable`:
        https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html
    """
    def __init__(self, s, posix_offset=False):
        # Deferred import: dateutil.parser imports dateutil.tz, so importing
        # at module scope would create a circular import.
        global parser
        from dateutil.parser import _parser as parser

        self._s = s

        res = parser._parsetz(s)
        if res is None or res.any_unused_tokens:
            raise ValueError("unknown string format")

        # Here we break the compatibility with the TZ variable handling.
        # GMT-3 actually *means* the timezone -3.
        if res.stdabbr in ("GMT", "UTC") and not posix_offset:
            res.stdoffset *= -1

        # We must initialize it first, since _delta() needs
        # _std_offset and _dst_offset set. Use False in start/end
        # to avoid building it two times.
        tzrange.__init__(self, res.stdabbr, res.stdoffset,
                         res.dstabbr, res.dstoffset,
                         start=False, end=False)

        if not res.dstabbr:
            self._start_delta = None
            self._end_delta = None
        else:
            self._start_delta = self._delta(res.start)
            if self._start_delta:
                self._end_delta = self._delta(res.end, isend=1)

        self.hasdst = bool(self._start_delta)

    def _delta(self, x, isend=0):
        """Translate a parsed transition rule ``x`` into a relativedelta.

        ``isend=1`` marks the DST-end rule, whose time is shifted onto the
        standard-time side (see the tzinfo class documentation).
        """
        from dateutil import relativedelta
        kwargs = {}
        if x.month is not None:
            kwargs["month"] = x.month
            if x.weekday is not None:
                # Nth weekday of the month; positive weeks count from the
                # start of the month (day=1), negative from the end (day=31).
                kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week)
                if x.week > 0:
                    kwargs["day"] = 1
                else:
                    kwargs["day"] = 31
            elif x.day:
                kwargs["day"] = x.day
        elif x.yday is not None:
            kwargs["yearday"] = x.yday
        elif x.jyday is not None:
            kwargs["nlyearday"] = x.jyday
        if not kwargs:
            # Default is to start on first sunday of april, and end
            # on last sunday of october.
            if not isend:
                kwargs["month"] = 4
                kwargs["day"] = 1
                kwargs["weekday"] = relativedelta.SU(+1)
            else:
                kwargs["month"] = 10
                kwargs["day"] = 31
                kwargs["weekday"] = relativedelta.SU(-1)
        if x.time is not None:
            kwargs["seconds"] = x.time
        else:
            # Default is 2AM.
            kwargs["seconds"] = 7200
        if isend:
            # Convert to standard time, to follow the documented way
            # of working with the extra hour. See the documentation
            # of the tzinfo class.
            delta = self._dst_offset - self._std_offset
            kwargs["seconds"] -= delta.seconds + delta.days * 86400
        return relativedelta.relativedelta(**kwargs)

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, repr(self._s))
class _tzicalvtzcomp(object):
    """One STANDARD or DAYLIGHT component of a parsed VTIMEZONE block.

    Holds the from/to offsets (seconds in, timedeltas stored), whether the
    component is a DST component, its name, and its recurrence rule.
    """
    def __init__(self, tzoffsetfrom, tzoffsetto, isdst,
                 tzname=None, rrule=None):
        self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom)
        self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto)
        # Net change applied when this component takes effect.
        self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom
        self.isdst = isdst
        self.tzname = tzname
        self.rrule = rrule
class _tzicalvtz(_tzinfo):
    """tzinfo implementation backed by the components of one VTIMEZONE."""

    def __init__(self, tzid, comps=[]):
        # NOTE: the mutable default is retained for interface compatibility;
        # the list is never mutated by this class.
        super(_tzicalvtz, self).__init__()

        self._tzid = tzid
        self._comps = comps
        # Small MRU cache of (naive datetime, fold) -> component lookups.
        self._cachedate = []
        self._cachecomp = []
        self._cache_lock = _thread.allocate_lock()

    def _find_comp(self, dt):
        """Return the component in effect at naive/aware datetime ``dt``."""
        if len(self._comps) == 1:
            return self._comps[0]

        dt = dt.replace(tzinfo=None)

        # EAFP cache probe: list.index raises ValueError on a miss.
        try:
            with self._cache_lock:
                return self._cachecomp[self._cachedate.index(
                    (dt, self._fold(dt)))]
        except ValueError:
            pass

        # The winning component is the one with the latest onset <= dt.
        lastcompdt = None
        lastcomp = None

        for comp in self._comps:
            compdt = self._find_compdt(comp, dt)

            if compdt and (not lastcompdt or lastcompdt < compdt):
                lastcompdt = compdt
                lastcomp = comp

        if not lastcomp:
            # RFC says nothing about what to do when a given
            # time is before the first onset date. We'll look for the
            # first standard component, or the first component, if
            # none is found.
            for comp in self._comps:
                if not comp.isdst:
                    lastcomp = comp
                    break
            else:
                # BUGFIX: the original indexed the loop variable
                # (``comp[0]``), which raised TypeError since a component
                # is not subscriptable; fall back to the first component.
                lastcomp = self._comps[0]

        with self._cache_lock:
            self._cachedate.insert(0, (dt, self._fold(dt)))
            self._cachecomp.insert(0, lastcomp)

            # Keep the cache bounded to the 10 most recent lookups.
            if len(self._cachedate) > 10:
                self._cachedate.pop()
                self._cachecomp.pop()

        return lastcomp

    def _find_compdt(self, comp, dt):
        """Latest onset of ``comp`` at or before ``dt`` (None if none)."""
        # On the fold side of a backward transition, shift dt so the
        # recurrence search lands in the earlier occurrence.
        if comp.tzoffsetdiff < ZERO and self._fold(dt):
            dt -= comp.tzoffsetdiff

        compdt = comp.rrule.before(dt, inc=True)

        return compdt

    def utcoffset(self, dt):
        if dt is None:
            return None

        return self._find_comp(dt).tzoffsetto

    def dst(self, dt):
        comp = self._find_comp(dt)
        if comp.isdst:
            return comp.tzoffsetdiff
        else:
            return ZERO

    @tzname_in_python2
    def tzname(self, dt):
        return self._find_comp(dt).tzname

    def __repr__(self):
        return "<tzicalvtz %s>" % repr(self._tzid)

    __reduce__ = object.__reduce__
class tzical(object):
    """
    This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure
    as set out in `RFC 5545`_ Section 4.6.5 into one or more `tzinfo` objects.

    :param `fileobj`:
        A file or stream in iCalendar format, which should be UTF-8 encoded
        with CRLF endings.

    .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545
    """
    def __init__(self, fileobj):
        # Deferred import to avoid a circular dependency with dateutil.rrule.
        global rrule
        from dateutil import rrule

        if isinstance(fileobj, string_types):
            self._s = fileobj
            # ical should be encoded in UTF-8 with CRLF
            fileobj = open(fileobj, 'r')
        else:
            self._s = getattr(fileobj, 'name', repr(fileobj))
            # Wrap caller-provided streams so we don't close them.
            fileobj = _nullcontext(fileobj)

        self._vtz = {}

        with fileobj as fobj:
            self._parse_rfc(fobj.read())

    def keys(self):
        """
        Retrieves the available time zones as a list.
        """
        return list(self._vtz.keys())

    def get(self, tzid=None):
        """
        Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``.

        :param tzid:
            If there is exactly one time zone available, omitting ``tzid``
            or passing :py:const:`None` value returns it. Otherwise a valid
            key (which can be retrieved from :func:`keys`) is required.

        :raises ValueError:
            Raised if ``tzid`` is not specified but there are either more
            or fewer than 1 zone defined.

        :returns:
            Returns either a :py:class:`datetime.tzinfo` object representing
            the relevant time zone or :py:const:`None` if the ``tzid`` was
            not found.
        """
        if tzid is None:
            if len(self._vtz) == 0:
                raise ValueError("no timezones defined")
            elif len(self._vtz) > 1:
                raise ValueError("more than one timezone available")
            tzid = next(iter(self._vtz))

        return self._vtz.get(tzid)

    def _parse_offset(self, s):
        """Parse a ±HHMM or ±HHMMSS UTC offset string into signed seconds."""
        s = s.strip()
        if not s:
            raise ValueError("empty offset")
        if s[0] in ('+', '-'):
            signal = (-1, +1)[s[0] == '+']
            s = s[1:]
        else:
            signal = +1
        if len(s) == 4:
            return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal
        elif len(s) == 6:
            return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal
        else:
            raise ValueError("invalid offset: " + s)

    def _parse_rfc(self, s):
        """Parse the unfolded iCalendar text and populate ``self._vtz``."""
        lines = s.splitlines()
        if not lines:
            raise ValueError("empty string")

        # Unfold: per RFC 5545, a line starting with a space continues the
        # previous line; drop blank lines along the way.
        i = 0
        while i < len(lines):
            line = lines[i].rstrip()
            if not line:
                del lines[i]
            elif i > 0 and line[0] == " ":
                lines[i-1] += line[1:]
                del lines[i]
            else:
                i += 1

        tzid = None
        comps = []
        invtz = False          # inside a BEGIN:VTIMEZONE block
        comptype = None        # "STANDARD"/"DAYLIGHT" while inside a component
        for line in lines:
            if not line:
                continue
            name, value = line.split(':', 1)
            parms = name.split(';')
            if not parms:
                raise ValueError("empty property name")
            name = parms[0].upper()
            parms = parms[1:]
            if invtz:
                if name == "BEGIN":
                    if value in ("STANDARD", "DAYLIGHT"):
                        # Process component
                        pass
                    else:
                        raise ValueError("unknown component: "+value)
                    comptype = value
                    founddtstart = False
                    tzoffsetfrom = None
                    tzoffsetto = None
                    rrulelines = []
                    tzname = None
                elif name == "END":
                    if value == "VTIMEZONE":
                        if comptype:
                            raise ValueError("component not closed: "+comptype)
                        if not tzid:
                            raise ValueError("mandatory TZID not found")
                        if not comps:
                            raise ValueError(
                                "at least one component is needed")
                        # Process vtimezone
                        self._vtz[tzid] = _tzicalvtz(tzid, comps)
                        invtz = False
                    elif value == comptype:
                        if not founddtstart:
                            raise ValueError("mandatory DTSTART not found")
                        if tzoffsetfrom is None:
                            raise ValueError(
                                "mandatory TZOFFSETFROM not found")
                        if tzoffsetto is None:
                            # BUGFIX: this message previously said
                            # TZOFFSETFROM (copy-paste error).
                            raise ValueError(
                                "mandatory TZOFFSETTO not found")
                        # Process component
                        rr = None
                        if rrulelines:
                            rr = rrule.rrulestr("\n".join(rrulelines),
                                                compatible=True,
                                                ignoretz=True,
                                                cache=True)
                        comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto,
                                              (comptype == "DAYLIGHT"),
                                              tzname, rr)
                        comps.append(comp)
                        comptype = None
                    else:
                        raise ValueError("invalid component end: "+value)
                elif comptype:
                    if name == "DTSTART":
                        # DTSTART in VTIMEZONE takes a subset of valid RRULE
                        # values under RFC 5545.
                        for parm in parms:
                            if parm != 'VALUE=DATE-TIME':
                                msg = ('Unsupported DTSTART param in ' +
                                       'VTIMEZONE: ' + parm)
                                raise ValueError(msg)
                        rrulelines.append(line)
                        founddtstart = True
                    elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"):
                        rrulelines.append(line)
                    elif name == "TZOFFSETFROM":
                        if parms:
                            raise ValueError(
                                "unsupported %s parm: %s " % (name, parms[0]))
                        tzoffsetfrom = self._parse_offset(value)
                    elif name == "TZOFFSETTO":
                        if parms:
                            raise ValueError(
                                "unsupported TZOFFSETTO parm: "+parms[0])
                        tzoffsetto = self._parse_offset(value)
                    elif name == "TZNAME":
                        if parms:
                            raise ValueError(
                                "unsupported TZNAME parm: "+parms[0])
                        tzname = value
                    elif name == "COMMENT":
                        pass
                    else:
                        raise ValueError("unsupported property: "+name)
                else:
                    if name == "TZID":
                        if parms:
                            raise ValueError(
                                "unsupported TZID parm: "+parms[0])
                        tzid = value
                    elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"):
                        pass
                    else:
                        raise ValueError("unsupported property: "+name)
            elif name == "BEGIN" and value == "VTIMEZONE":
                tzid = None
                comps = []
                invtz = True

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, repr(self._s))
# Candidate localtime files and zoneinfo search directories for gettz().
# Windows has no tzfile(5) database, so both lists are empty there.
if sys.platform != "win32":
    TZFILES = ["/etc/localtime", "localtime"]
    TZPATHS = ["/usr/share/zoneinfo",
               "/usr/lib/zoneinfo",
               "/usr/share/lib/zoneinfo",
               "/etc/zoneinfo"]
else:
    TZFILES = []
    TZPATHS = []
def __get_gettz():
    """Build and return the module-level callable ``gettz`` singleton.

    Wrapping the cache state in a factory keeps the weak/strong caches
    private to the returned GettzFunc instance.
    """
    tzlocal_classes = (tzlocal,)
    if tzwinlocal is not None:
        tzlocal_classes += (tzwinlocal,)

    class GettzFunc(object):
        """
        Retrieve a time zone object from a string representation

        This function is intended to retrieve the :py:class:`tzinfo` subclass
        that best represents the time zone that would be used if a POSIX
        `TZ variable`_ were set to the same value.

        If no argument or an empty string is passed to ``gettz``, local time
        is returned:

        .. code-block:: python3

            >>> gettz()
            tzfile('/etc/localtime')

        This function is also the preferred way to map IANA tz database keys
        to :class:`tzfile` objects:

        .. code-block:: python3

            >>> gettz('Pacific/Kiritimati')
            tzfile('/usr/share/zoneinfo/Pacific/Kiritimati')

        On Windows, the standard is extended to include the Windows-specific
        zone names provided by the operating system:

        .. code-block:: python3

            >>> gettz('Egypt Standard Time')
            tzwin('Egypt Standard Time')

        Passing a GNU ``TZ`` style string time zone specification returns a
        :class:`tzstr` object:

        .. code-block:: python3

            >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3')
            tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3')

        :param name:
            A time zone name (IANA, or, on Windows, Windows keys), location of
            a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone
            specifier. An empty string, no argument or ``None`` is interpreted
            as local time.

        :return:
            Returns an instance of one of ``dateutil``'s :py:class:`tzinfo`
            subclasses.

        .. versionchanged:: 2.7.0

            After version 2.7.0, any two calls to ``gettz`` using the same
            input strings will return the same object:

            .. code-block:: python3

                >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago')
                True

            In addition to improving performance, this ensures that
            `"same zone" semantics`_ are used for datetimes in the same zone.


        .. _`TZ variable`:
            https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html

        .. _`"same zone" semantics`:
            https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html
        """
        def __init__(self):
            # Weak cache so unused zones can be collected, plus a small
            # strong LRU so the most recent lookups stay alive.
            self.__instances = weakref.WeakValueDictionary()
            self.__strong_cache_size = 8
            self.__strong_cache = OrderedDict()
            self._cache_lock = _thread.allocate_lock()

        def __call__(self, name=None):
            with self._cache_lock:
                rv = self.__instances.get(name, None)

                if rv is None:
                    rv = self.nocache(name=name)
                    if not (name is None
                            or isinstance(rv, tzlocal_classes)
                            or rv is None):
                        # tzlocal is slightly more complicated than the other
                        # time zone providers because it depends on environment
                        # at construction time, so don't cache that.
                        #
                        # We also cannot store weak references to None, so we
                        # will also not store that.
                        self.__instances[name] = rv
                    else:
                        # No need for strong caching, return immediately
                        return rv

                # Move-to-end for LRU behavior (pop + reinsert).
                self.__strong_cache[name] = self.__strong_cache.pop(name, rv)

                if len(self.__strong_cache) > self.__strong_cache_size:
                    self.__strong_cache.popitem(last=False)

            return rv

        def set_cache_size(self, size):
            # Resize the strong LRU, evicting oldest entries if shrinking.
            with self._cache_lock:
                self.__strong_cache_size = size
                while len(self.__strong_cache) > size:
                    self.__strong_cache.popitem(last=False)

        def cache_clear(self):
            # Drop both caches.
            with self._cache_lock:
                self.__instances = weakref.WeakValueDictionary()
                self.__strong_cache.clear()

        @staticmethod
        def nocache(name=None):
            """A non-cached version of gettz"""
            tz = None
            if not name:
                # Fall back to the TZ environment variable, if set.
                try:
                    name = os.environ["TZ"]
                except KeyError:
                    pass
            if name is None or name in ("", ":"):
                # Empty/":" means local time: probe the known tzfile paths.
                for filepath in TZFILES:
                    if not os.path.isabs(filepath):
                        filename = filepath
                        for path in TZPATHS:
                            filepath = os.path.join(path, filename)
                            if os.path.isfile(filepath):
                                break
                        else:
                            continue
                    if os.path.isfile(filepath):
                        try:
                            tz = tzfile(filepath)
                            break
                        except (IOError, OSError, ValueError):
                            pass
                else:
                    # No readable tzfile found; use the environment-based
                    # local zone.
                    tz = tzlocal()
            else:
                # A leading ":" per POSIX just marks a pathname; strip it.
                try:
                    if name.startswith(":"):
                        name = name[1:]
                except TypeError as e:
                    if isinstance(name, bytes):
                        new_msg = "gettz argument should be str, not bytes"
                        six.raise_from(TypeError(new_msg), e)
                    else:
                        raise
                if os.path.isabs(name):
                    if os.path.isfile(name):
                        tz = tzfile(name)
                    else:
                        tz = None
                else:
                    # Relative name: search the zoneinfo directories,
                    # tolerating space/underscore differences.
                    for path in TZPATHS:
                        filepath = os.path.join(path, name)
                        if not os.path.isfile(filepath):
                            filepath = filepath.replace(' ', '_')
                            if not os.path.isfile(filepath):
                                continue
                        try:
                            tz = tzfile(filepath)
                            break
                        except (IOError, OSError, ValueError):
                            pass
                    else:
                        tz = None
                        if tzwin is not None:
                            try:
                                tz = tzwin(name)
                            except (WindowsError, UnicodeEncodeError):
                                # UnicodeEncodeError is for Python 2.7 compat
                                tz = None

                        if not tz:
                            # Try the bundled zoneinfo database.
                            from dateutil.zoneinfo import get_zonefile_instance
                            tz = get_zonefile_instance().get(name)

                        if not tz:
                            for c in name:
                                # name is not a tzstr unless it has at least
                                # one offset. For short values of "name", an
                                # explicit for loop seems to be the fastest way
                                # To determine if a string contains a digit
                                if c in "0123456789":
                                    try:
                                        tz = tzstr(name)
                                    except ValueError:
                                        pass
                                    break
                            else:
                                if name in ("GMT", "UTC"):
                                    tz = UTC
                                elif name in time.tzname:
                                    tz = tzlocal()
            return tz

    return GettzFunc()
# Instantiate the module-level gettz callable and drop the factory so the
# cache state cannot be recreated accidentally.
gettz = __get_gettz()
del __get_gettz
def datetime_exists(dt, tz=None):
    """
    Given a datetime and a time zone, determine whether or not a given datetime
    would fall in a gap.

    :param dt:
        A :class:`datetime.datetime` (whose time zone will be ignored if ``tz``
        is provided.)

    :param tz:
        A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If
        ``None`` or not provided, the datetime's own time zone will be used.

    :return:
        Returns a boolean value whether or not the "wall time" exists in
        ``tz``.

    .. versionadded:: 2.7.0
    """
    if tz is None:
        if dt.tzinfo is None:
            raise ValueError('Datetime is naive and no time zone provided.')
        tz = dt.tzinfo

    dt = dt.replace(tzinfo=None)

    # This is essentially a test of whether or not the datetime can survive
    # a round trip to UTC.
    dt_rt = dt.replace(tzinfo=tz).astimezone(UTC).astimezone(tz)
    dt_rt = dt_rt.replace(tzinfo=None)

    return dt == dt_rt
def datetime_ambiguous(dt, tz=None):
    """
    Given a datetime and a time zone, determine whether or not a given datetime
    is ambiguous (i.e if there are two times differentiated only by their DST
    status).

    :param dt:
        A :class:`datetime.datetime` (whose time zone will be ignored if ``tz``
        is provided.)

    :param tz:
        A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If
        ``None`` or not provided, the datetime's own time zone will be used.

    :return:
        Returns a boolean value whether or not the "wall time" is ambiguous in
        ``tz``.

    .. versionadded:: 2.6.0
    """
    if tz is None:
        if dt.tzinfo is None:
            raise ValueError('Datetime is naive and no time zone provided.')

        tz = dt.tzinfo

    # If a time zone defines its own "is_ambiguous" function, we'll use that.
    is_ambiguous_fn = getattr(tz, 'is_ambiguous', None)
    if is_ambiguous_fn is not None:
        # Best-effort: fall through to the generic check if the zone's own
        # implementation fails for any reason.
        try:
            return tz.is_ambiguous(dt)
        except Exception:
            pass

    # If it doesn't come out and tell us it's ambiguous, we'll just check if
    # the fold attribute has any effect on this particular date and time.
    dt = dt.replace(tzinfo=tz)
    wall_0 = enfold(dt, fold=0)
    wall_1 = enfold(dt, fold=1)

    same_offset = wall_0.utcoffset() == wall_1.utcoffset()
    same_dst = wall_0.dst() == wall_1.dst()

    return not (same_offset and same_dst)
def resolve_imaginary(dt):
    """
    Given a datetime that may be imaginary, return an existing datetime.

    This function assumes that an imaginary datetime represents what the
    wall time would be in a zone had the offset transition not occurred, so
    it will always fall forward by the transition's change in offset.

    .. doctest::

        >>> from dateutil import tz
        >>> from datetime import datetime
        >>> NYC = tz.gettz('America/New_York')
        >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC)))
        2017-03-12 03:30:00-04:00

        >>> KIR = tz.gettz('Pacific/Kiritimati')
        >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR)))
        1995-01-02 12:30:00+14:00

    As a note, :func:`datetime.astimezone` is guaranteed to produce a valid,
    existing datetime, so a round-trip to and from UTC is sufficient to get
    an extant datetime, however, this generally "falls back" to an earlier time
    rather than falling forward to the STD side (though no guarantees are made
    about this behavior).

    :param dt:
        A :class:`datetime.datetime` which may or may not exist.

    :return:
        Returns an existing :class:`datetime.datetime`. If ``dt`` was not
        imaginary, the datetime returned is guaranteed to be the same object
        passed to the function.

    .. versionadded:: 2.7.0
    """
    if dt.tzinfo is not None and not datetime_exists(dt):
        # Measure the offset change across the transition by sampling a day
        # on either side, then shift forward by that difference.
        curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset()
        old_offset = (dt - datetime.timedelta(hours=24)).utcoffset()

        dt += curr_offset - old_offset

    return dt
def _datetime_to_timestamp(dt):
    """
    Convert a :class:`datetime.datetime` object to an epoch timestamp in
    seconds since January 1, 1970, ignoring the time zone.
    """
    # EPOCH is the module-level naive datetime(1970, 1, 1); stripping tzinfo
    # makes this a pure wall-clock difference.
    return (dt.replace(tzinfo=None) - EPOCH).total_seconds()
if sys.version_info >= (3, 6):
|
| 1818 |
+
def _get_supported_offset(second_offset):
|
| 1819 |
+
return second_offset
|
| 1820 |
+
else:
|
| 1821 |
+
def _get_supported_offset(second_offset):
|
| 1822 |
+
# For python pre-3.6, round to full-minutes if that's not the case.
|
| 1823 |
+
# Python's datetime doesn't accept sub-minute timezones. Check
|
| 1824 |
+
# http://python.org/sf/1447945 or https://bugs.python.org/issue5288
|
| 1825 |
+
# for some information.
|
| 1826 |
+
old_offset = second_offset
|
| 1827 |
+
calculated_offset = 60 * ((second_offset + 30) // 60)
|
| 1828 |
+
return calculated_offset
|
| 1829 |
+
|
| 1830 |
+
|
| 1831 |
+
try:
    # Python 3.7 feature
    from contextlib import nullcontext as _nullcontext
except ImportError:
    class _nullcontext(object):
        """
        Class for wrapping contexts so that they are passed through in a
        with statement.
        """
        def __init__(self, context):
            self.context = context

        def __enter__(self):
            return self.context

        # NOTE: no explicit ``self`` — the instance is absorbed into *args,
        # which is harmless since the arguments are ignored.
        def __exit__(*args, **kwargs):
            pass
# vim:ts=4:sw=4:et
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/win.py
ADDED
|
@@ -0,0 +1,370 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
This module provides an interface to the native time zone data on Windows,
|
| 4 |
+
including :py:class:`datetime.tzinfo` implementations.
|
| 5 |
+
|
| 6 |
+
Attempting to import this module on a non-Windows platform will raise an
|
| 7 |
+
:py:obj:`ImportError`.
|
| 8 |
+
"""
|
| 9 |
+
# This code was originally contributed by Jeffrey Harris.
|
| 10 |
+
import datetime
|
| 11 |
+
import struct
|
| 12 |
+
|
| 13 |
+
from six.moves import winreg
|
| 14 |
+
from six import text_type
|
| 15 |
+
|
| 16 |
+
try:
|
| 17 |
+
import ctypes
|
| 18 |
+
from ctypes import wintypes
|
| 19 |
+
except ValueError:
|
| 20 |
+
# ValueError is raised on non-Windows systems for some horrible reason.
|
| 21 |
+
raise ImportError("Running tzwin on non-Windows system")
|
| 22 |
+
|
| 23 |
+
from ._common import tzrangebase
|
| 24 |
+
|
| 25 |
+
__all__ = ["tzwin", "tzwinlocal", "tzres"]
|
| 26 |
+
|
| 27 |
+
ONEWEEK = datetime.timedelta(7)
|
| 28 |
+
|
| 29 |
+
TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
|
| 30 |
+
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
|
| 31 |
+
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _settzkeyname():
    """Probe the registry and return the time-zone key path for this OS.

    Returns ``TZKEYNAMENT`` when the NT-style key exists, otherwise falls
    back to the Windows-9x-style key ``TZKEYNAME9X``.
    """
    handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
    try:
        # Opening the NT key is the existence test; close it immediately.
        winreg.OpenKey(handle, TZKEYNAMENT).Close()
        TZKEYNAME = TZKEYNAMENT
    except WindowsError:
        TZKEYNAME = TZKEYNAME9X
    handle.Close()
    return TZKEYNAME


# Resolved once at import time; all registry lookups below use this path.
TZKEYNAME = _settzkeyname()
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class tzres(object):
    """
    Class for accessing ``tzres.dll``, which contains timezone name related
    resources.

    .. versionadded:: 2.5.0
    """
    p_wchar = ctypes.POINTER(wintypes.WCHAR)  # Pointer to a wide char

    def __init__(self, tzres_loc='tzres.dll'):
        # Load the user32 DLL so we can load strings from tzres
        user32 = ctypes.WinDLL('user32')

        # Specify the LoadStringW function
        user32.LoadStringW.argtypes = (wintypes.HINSTANCE,
                                       wintypes.UINT,
                                       wintypes.LPWSTR,
                                       ctypes.c_int)

        self.LoadStringW = user32.LoadStringW
        self._tzres = ctypes.WinDLL(tzres_loc)
        self.tzres_loc = tzres_loc

    def load_name(self, offset):
        """
        Load a timezone name from a DLL offset (integer).

        >>> from dateutil.tzwin import tzres
        >>> tzr = tzres()
        >>> print(tzr.load_name(112))
        'Eastern Standard Time'

        :param offset:
            A positive integer value referring to a string from the tzres dll.

        .. note::

            Offsets found in the registry are generally of the form
            ``@tzres.dll,-114``. The offset in this case is 114, not -114.

        """
        resource = self.p_wchar()
        # Passing a buffer length of 0 makes LoadStringW return a read-only
        # pointer to the resource itself rather than copying it.
        lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR)
        nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0)
        return resource[:nchar]

    def name_from_string(self, tzname_str):
        """
        Parse strings as returned from the Windows registry into the time zone
        name as defined in the registry.

        >>> from dateutil.tzwin import tzres
        >>> tzr = tzres()
        >>> print(tzr.name_from_string('@tzres.dll,-251'))
        'Dateline Daylight Time'
        >>> print(tzr.name_from_string('Eastern Standard Time'))
        'Eastern Standard Time'

        :param tzname_str:
            A timezone name string as returned from a Windows registry key.

        :return:
            Returns the localized timezone string from tzres.dll if the string
            is of the form `@tzres.dll,-offset`, else returns the input string.
        """
        if not tzname_str.startswith('@'):
            return tzname_str

        name_splt = tzname_str.split(',-')
        try:
            offset = int(name_splt[1])
        except (IndexError, ValueError):
            # Fix: previously a bare ``except:`` which would also swallow
            # KeyboardInterrupt/SystemExit. Only a missing or non-numeric
            # offset makes the string malformed.
            raise ValueError("Malformed timezone string.")

        return self.load_name(offset)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class tzwinbase(tzrangebase):
    """tzinfo class based on win32's timezones available in the registry.

    Abstract base: subclasses (``tzwin``, ``tzwinlocal``) are expected to
    populate the ``_std*``/``_dst*`` attributes read by the methods below.
    """
    def __init__(self):
        raise NotImplementedError('tzwinbase is an abstract base class')

    def __eq__(self, other):
        # Compare on all relevant dimensions, including name.
        if not isinstance(other, tzwinbase):
            return NotImplemented

        return (self._std_offset == other._std_offset and
                self._dst_offset == other._dst_offset and
                self._stddayofweek == other._stddayofweek and
                self._dstdayofweek == other._dstdayofweek and
                self._stdweeknumber == other._stdweeknumber and
                self._dstweeknumber == other._dstweeknumber and
                self._stdhour == other._stdhour and
                self._dsthour == other._dsthour and
                self._stdminute == other._stdminute and
                self._dstminute == other._dstminute and
                self._std_abbr == other._std_abbr and
                self._dst_abbr == other._dst_abbr)

    @staticmethod
    def list():
        """Return a list of all time zones known to the system."""
        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
            with winreg.OpenKey(handle, TZKEYNAME) as tzkey:
                # Index 0 of QueryInfoKey is the number of subkeys.
                result = [winreg.EnumKey(tzkey, i)
                          for i in range(winreg.QueryInfoKey(tzkey)[0])]
        return result

    def display(self):
        """
        Return the display name of the time zone.
        """
        return self._display

    def transitions(self, year):
        """
        For a given year, get the DST on and off transition times, expressed
        always on the standard time side. For zones with no transitions, this
        function returns ``None``.

        :param year:
            The year whose transitions you would like to query.

        :return:
            Returns a :class:`tuple` of :class:`datetime.datetime` objects,
            ``(dston, dstoff)`` for zones with an annual DST transition, or
            ``None`` for fixed offset zones.
        """

        if not self.hasdst:
            return None

        dston = picknthweekday(year, self._dstmonth, self._dstdayofweek,
                               self._dsthour, self._dstminute,
                               self._dstweeknumber)

        dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek,
                                self._stdhour, self._stdminute,
                                self._stdweeknumber)

        # Ambiguous dates default to the STD side
        dstoff -= self._dst_base_offset

        return dston, dstoff

    def _get_hasdst(self):
        # A DST start month of 0 is the registry's way of saying "no DST".
        return self._dstmonth != 0

    @property
    def _dst_base_offset(self):
        # Set by subclasses as _dst_base_offset_ = dst_offset - std_offset.
        return self._dst_base_offset_
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
class tzwin(tzwinbase):
    """
    Time zone object created from the zone info in the Windows registry

    These are similar to :py:class:`dateutil.tz.tzrange` objects in that
    the time zone data is provided in the format of a single offset rule
    for either 0 or 2 time zone transitions per year.

    :param: name
        The name of a Windows time zone key, e.g. "Eastern Standard Time".
        The full list of keys can be retrieved with :func:`tzwin.list`.
    """

    def __init__(self, name):
        self._name = name

        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
            tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name)
            with winreg.OpenKey(handle, tzkeyname) as tzkey:
                keydict = valuestodict(tzkey)

        self._std_abbr = keydict["Std"]
        self._dst_abbr = keydict["Dlt"]

        self._display = keydict["Display"]

        # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
        # TZI layout: 3 LONGs (Bias, StandardBias, DaylightBias) followed by
        # two SYSTEMTIME structures (8 SHORTs each).
        tup = struct.unpack("=3l16h", keydict["TZI"])
        stdoffset = -tup[0]-tup[1]          # Bias + StandardBias * -1
        dstoffset = stdoffset-tup[2]        # + DaylightBias * -1
        self._std_offset = datetime.timedelta(minutes=stdoffset)
        self._dst_offset = datetime.timedelta(minutes=dstoffset)

        # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
        # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
        (self._stdmonth,
         self._stddayofweek,   # Sunday = 0
         self._stdweeknumber,  # Last = 5
         self._stdhour,
         self._stdminute) = tup[4:9]

        (self._dstmonth,
         self._dstdayofweek,   # Sunday = 0
         self._dstweeknumber,  # Last = 5
         self._dsthour,
         self._dstminute) = tup[12:17]

        self._dst_base_offset_ = self._dst_offset - self._std_offset
        self.hasdst = self._get_hasdst()

    def __repr__(self):
        return "tzwin(%s)" % repr(self._name)

    def __reduce__(self):
        # Pickle support: reconstruct from the registry key name.
        return (self.__class__, (self._name,))
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
class tzwinlocal(tzwinbase):
    """
    Class representing the local time zone information in the Windows registry

    While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time`
    module) to retrieve time zone information, ``tzwinlocal`` retrieves the
    rules directly from the Windows registry and creates an object like
    :class:`dateutil.tz.tzwin`.

    Because Windows does not have an equivalent of :func:`time.tzset`, on
    Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the
    time zone settings *at the time that the process was started*, meaning
    changes to the machine's time zone settings during the run of a program
    on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`.
    Because ``tzwinlocal`` reads the registry directly, it is unaffected by
    this issue.
    """
    def __init__(self):
        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
            with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey:
                keydict = valuestodict(tzlocalkey)

            self._std_abbr = keydict["StandardName"]
            self._dst_abbr = keydict["DaylightName"]

            # The display string lives under the per-zone key named after the
            # standard abbreviation; it may be absent, hence the fallback.
            try:
                tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME,
                                                           sn=self._std_abbr)
                with winreg.OpenKey(handle, tzkeyname) as tzkey:
                    _keydict = valuestodict(tzkey)
                    self._display = _keydict["Display"]
            except OSError:
                self._display = None

        stdoffset = -keydict["Bias"]-keydict["StandardBias"]
        dstoffset = stdoffset-keydict["DaylightBias"]

        self._std_offset = datetime.timedelta(minutes=stdoffset)
        self._dst_offset = datetime.timedelta(minutes=dstoffset)

        # For reasons unclear, in this particular key, the day of week has been
        # moved to the END of the SYSTEMTIME structure.
        tup = struct.unpack("=8h", keydict["StandardStart"])

        (self._stdmonth,
         self._stdweeknumber,  # Last = 5
         self._stdhour,
         self._stdminute) = tup[1:5]

        self._stddayofweek = tup[7]

        tup = struct.unpack("=8h", keydict["DaylightStart"])

        (self._dstmonth,
         self._dstweeknumber,  # Last = 5
         self._dsthour,
         self._dstminute) = tup[1:5]

        self._dstdayofweek = tup[7]

        self._dst_base_offset_ = self._dst_offset - self._std_offset
        self.hasdst = self._get_hasdst()

    def __repr__(self):
        return "tzwinlocal()"

    def __str__(self):
        # str will return the standard name, not the daylight name.
        return "tzwinlocal(%s)" % repr(self._std_abbr)

    def __reduce__(self):
        # Pickle support: re-read the registry on unpickle.
        return (self.__class__, ())
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
    """Return the nth occurrence of a weekday within a month.

    ``dayofweek == 0`` means Sunday; ``whichweek == 5`` means the last
    instance of that weekday in the month.
    """
    month_start = datetime.datetime(year, month, 1, hour, minute)

    # Works whether dayofweek is ISO weekday (1-7) or Microsoft-style (0-6),
    # because 7 % 7 == 0.
    first_match = ((dayofweek - month_start.isoweekday()) % 7) + 1
    candidate = month_start.replace(day=first_match) + (whichweek - 1) * ONEWEEK

    # Overshooting into the next month means the requested week doesn't
    # exist (the "last instance" case) -- step back one week.
    if candidate.month != month:
        candidate -= ONEWEEK

    return candidate
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
def valuestodict(key):
    """Convert a registry key's values to a dictionary.

    :param key: an open :mod:`winreg` key handle.
    :return: dict mapping value names to decoded values (DWORDs as signed
        ints, strings localized via ``tzres`` where applicable).
    """
    dout = {}
    # Index 1 of QueryInfoKey is the number of values under the key.
    size = winreg.QueryInfoKey(key)[1]
    tz_res = None  # lazily constructed only if a @tzres reference appears

    for i in range(size):
        key_name, value, dtype = winreg.EnumValue(key, i)
        if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN:
            # If it's a DWORD (32-bit integer), it's stored as unsigned - convert
            # that to a proper signed integer
            if value & (1 << 31):
                value = value - (1 << 32)
        elif dtype == winreg.REG_SZ:
            # If it's a reference to the tzres DLL, load the actual string
            if value.startswith('@tzres'):
                tz_res = tz_res or tzres()
                value = tz_res.name_from_string(value)

            value = value.rstrip('\x00')  # Remove trailing nulls

        dout[key_name] = value

    return dout
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/__init__.py
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
import warnings
|
| 3 |
+
import json
|
| 4 |
+
|
| 5 |
+
from tarfile import TarFile
|
| 6 |
+
from pkgutil import get_data
|
| 7 |
+
from io import BytesIO
|
| 8 |
+
|
| 9 |
+
from dateutil.tz import tzfile as _tzfile
|
| 10 |
+
|
| 11 |
+
__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]
|
| 12 |
+
|
| 13 |
+
ZONEFILENAME = "dateutil-zoneinfo.tar.gz"
|
| 14 |
+
METADATA_FN = 'METADATA'
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class tzfile(_tzfile):
    """A :class:`dateutil.tz.tzfile` subclass that pickles by zone name."""

    def __reduce__(self):
        # Re-resolve the zone through gettz() on unpickle rather than
        # serializing the parsed transition data.
        return (gettz, (self._filename,))
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def getzoneinfofile_stream():
    """Return a :class:`BytesIO` over the bundled zoneinfo tarball.

    Returns ``None`` (after emitting a warning) if the package data cannot
    be read.
    """
    try:
        return BytesIO(get_data(__name__, ZONEFILENAME))
    except IOError as e:  # TODO  switch to FileNotFoundError?
        warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror))
        return None
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class ZoneInfoFile(object):
    """In-memory index of the time zones stored in a zoneinfo tarball.

    Parses every regular member of the archive into a :class:`tzfile` and
    resolves hard/symbolic links to their targets, so aliases share one
    object. The optional ``METADATA`` member, if present, is parsed as JSON.
    """
    def __init__(self, zonefile_stream=None):
        if zonefile_stream is not None:
            with TarFile.open(fileobj=zonefile_stream) as tf:
                self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name)
                              for zf in tf.getmembers()
                              if zf.isfile() and zf.name != METADATA_FN}
                # deal with links: They'll point to their parent object. Less
                # waste of memory
                links = {zl.name: self.zones[zl.linkname]
                         for zl in tf.getmembers() if
                         zl.islnk() or zl.issym()}
                self.zones.update(links)
                try:
                    metadata_json = tf.extractfile(tf.getmember(METADATA_FN))
                    metadata_str = metadata_json.read().decode('UTF-8')
                    self.metadata = json.loads(metadata_str)
                except KeyError:
                    # no metadata in tar file
                    self.metadata = None
        else:
            # No stream: behave as an empty database.
            self.zones = {}
            self.metadata = None

    def get(self, name, default=None):
        """
        Wrapper for :func:`ZoneInfoFile.zones.get`. This is a convenience method
        for retrieving zones from the zone dictionary.

        :param name:
            The name of the zone to retrieve. (Generally IANA zone names)

        :param default:
            The value to return in the event of a missing key.

        .. versionadded:: 2.6.0

        """
        return self.zones.get(name, default)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
# The current API has gettz as a module function, although in fact it taps into
|
| 72 |
+
# a stateful class. So as a workaround for now, without changing the API, we
|
| 73 |
+
# will create a new "global" class instance the first time a user requests a
|
| 74 |
+
# timezone. Ugly, but adheres to the api.
|
| 75 |
+
#
|
| 76 |
+
# TODO: Remove after deprecation period.
|
| 77 |
+
_CLASS_ZONE_INSTANCE = []
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def get_zonefile_instance(new_instance=False):
    """Return a (cached) :class:`ZoneInfoFile` built from the bundled data.

    :param new_instance:
        If ``True``, a fresh :class:`ZoneInfoFile` is constructed and becomes
        the cached instance for subsequent calls; otherwise the cached
        instance is reused when available.

    :return:
        Returns a :class:`ZoneInfoFile` object.

    .. versionadded:: 2.6
    """
    # The cache lives as an attribute on the function object itself.
    if new_instance:
        cached = None
    else:
        cached = getattr(get_zonefile_instance, '_cached_instance', None)

    if cached is None:
        cached = ZoneInfoFile(getzoneinfofile_stream())
        get_zonefile_instance._cached_instance = cached

    return cached
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def gettz(name):
    """Retrieve a time zone from the zoneinfo tarball packaged with dateutil.

    :param name:
        An IANA-style time zone name, as found in the zoneinfo file.

    :return:
        Returns a :class:`dateutil.tz.tzfile` time zone object.

    .. warning::
        It is generally inadvisable to use this function, and it is only
        provided for API compatibility with earlier versions. This is *not*
        equivalent to ``dateutil.tz.gettz()``, which selects an appropriate
        time zone based on the inputs, favoring system zoneinfo. This is ONLY
        for accessing the dateutil-specific zoneinfo (which may be out of
        date compared to the system zoneinfo).

    .. deprecated:: 2.6
        If you need to use a specific zoneinfofile over the system zoneinfo,
        instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call
        :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead.

        Use :func:`get_zonefile_instance` to retrieve an instance of the
        dateutil-provided zoneinfo.
    """
    warnings.warn(
        "zoneinfo.gettz() will be removed in future versions, "
        "to use the dateutil-provided zoneinfo files, instantiate a "
        "ZoneInfoFile object and use ZoneInfoFile.zones.get() "
        "instead. See the documentation for details.",
        DeprecationWarning)

    # Populate the module-level singleton lazily on first use.
    if not _CLASS_ZONE_INSTANCE:
        _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))
    return _CLASS_ZONE_INSTANCE[0].zones.get(name)
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
def gettz_db_metadata():
    """ Get the zonefile metadata

    See `zonefile_metadata`_

    :returns:
        A dictionary with the database metadata

    .. deprecated:: 2.6
        See deprecation warning in :func:`zoneinfo.gettz`. To get metadata,
        query the attribute ``zoneinfo.ZoneInfoFile.metadata``.
    """
    # Fix: the original warning read "...zoneinfo files, ZoneInfoFile object
    # and query..." -- the words "instantiate a" were missing, making the
    # sentence ungrammatical (compare the parallel message in gettz()).
    warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future "
                  "versions, to use the dateutil-provided zoneinfo files, "
                  "instantiate a ZoneInfoFile object and query the "
                  "'metadata' attribute instead. See the documentation for "
                  "details.",
                  DeprecationWarning)

    # Populate the module-level singleton lazily on first use.
    if len(_CLASS_ZONE_INSTANCE) == 0:
        _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))
    return _CLASS_ZONE_INSTANCE[0].metadata
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (5.76 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-310.pyc
ADDED
|
Binary file (2.69 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d3ea52e7b6e968de0d884df1288193596fa95b803db4f92a18279a7398004475
|
| 3 |
+
size 156400
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/zoneinfo/rebuild.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
import tempfile
|
| 4 |
+
import shutil
|
| 5 |
+
import json
|
| 6 |
+
from subprocess import check_call, check_output
|
| 7 |
+
from tarfile import TarFile
|
| 8 |
+
|
| 9 |
+
from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def rebuild(filename, tag=None, format="gz", zonegroups=(), metadata=None):
    """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar*

    filename is the timezone tarball from ``ftp.iana.org/tz``.

    :param filename: path to the IANA tz source tarball.
    :param tag: not referenced here; kept for backward compatibility.
    :param format: compression suffix for the output archive (e.g. ``"gz"``).
    :param zonegroups: iterable of archive member names to extract/compile.
    :param metadata: JSON-serializable object written as the METADATA member.
    """
    # Fix: ``zonegroups`` previously defaulted to a mutable list literal
    # (shared-default pitfall). An empty tuple is equivalent for the
    # read-only iteration performed below.
    tmpdir = tempfile.mkdtemp()
    zonedir = os.path.join(tmpdir, "zoneinfo")
    moduledir = os.path.dirname(__file__)
    try:
        with TarFile.open(filename) as tf:
            for name in zonegroups:
                tf.extract(name, tmpdir)
            filepaths = [os.path.join(tmpdir, n) for n in zonegroups]

        # Compile the extracted sources into TZif binaries under zonedir.
        _run_zic(zonedir, filepaths)

        # write metadata file
        with open(os.path.join(zonedir, METADATA_FN), 'w') as f:
            json.dump(metadata, f, indent=4, sort_keys=True)
        target = os.path.join(moduledir, ZONEFILENAME)
        with TarFile.open(target, "w:%s" % format) as tf:
            for entry in os.listdir(zonedir):
                entrypath = os.path.join(zonedir, entry)
                tf.add(entrypath, entry)
    finally:
        # Always remove the scratch directory, even if zic fails.
        shutil.rmtree(tmpdir)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def _run_zic(zonedir, filepaths):
    """Invoke the ``zic`` compiler so that it produces a "fat" binary.

    Recent versions of ``zic`` default to ``-b slim``, while older versions
    don't even have the ``-b`` option (but default to "fat" binaries). The
    current version of dateutil does not support Version 2+ TZif files, which
    causes problems when used in conjunction with "slim" binaries, so this
    function is used to ensure that we always get a "fat" binary.
    """
    # Probe the installed zic for -b support before compiling.
    try:
        help_text = check_output(["zic", "--help"])
    except OSError as e:
        _print_on_nosuchfile(e)
        raise

    bloat_args = ["-b", "fat"] if b"-b " in help_text else []

    check_call(["zic"] + bloat_args + ["-d", zonedir] + filepaths)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def _print_on_nosuchfile(e):
|
| 66 |
+
"""Print helpful troubleshooting message
|
| 67 |
+
|
| 68 |
+
e is an exception raised by subprocess.check_call()
|
| 69 |
+
|
| 70 |
+
"""
|
| 71 |
+
if e.errno == 2:
|
| 72 |
+
logging.error(
|
| 73 |
+
"Could not find zic. Perhaps you need to install "
|
| 74 |
+
"libc-bin or some other package that provides it, "
|
| 75 |
+
"or it's not in your PATH?")
|
evalkit_internvl/lib/python3.10/site-packages/frozenlist/__init__.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
import types
|
| 4 |
+
from collections.abc import MutableSequence
|
| 5 |
+
from functools import total_ordering
|
| 6 |
+
from typing import Any, Type
|
| 7 |
+
|
| 8 |
+
__version__ = "1.5.0"
|
| 9 |
+
|
| 10 |
+
__all__ = ("FrozenList", "PyFrozenList") # type: Tuple[str, ...]
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
NO_EXTENSIONS = bool(os.environ.get("FROZENLIST_NO_EXTENSIONS")) # type: bool
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@total_ordering
class FrozenList(MutableSequence):
    """A list-like mutable sequence that can be irreversibly frozen.

    While unfrozen it behaves like a plain ``list``; after :meth:`freeze`
    every mutating operation raises :class:`RuntimeError` and the instance
    becomes hashable.
    """

    __slots__ = ("_frozen", "_items")

    if sys.version_info >= (3, 9):
        __class_getitem__ = classmethod(types.GenericAlias)
    else:

        @classmethod
        def __class_getitem__(
            cls: Type["FrozenList"],
            cls_item: Any,
        ) -> Type["FrozenList"]:
            # Pre-3.9 fallback: subscription is accepted but ignored.
            return cls

    def __init__(self, items=None):
        self._frozen = False
        self._items = [] if items is None else list(items)

    @property
    def frozen(self):
        # True once freeze() has been called.
        return self._frozen

    def freeze(self):
        """Permanently disallow further mutation of this list."""
        self._frozen = True

    def _check_mutable(self):
        # Single mutation guard shared by every mutator below.
        if self._frozen:
            raise RuntimeError("Cannot modify frozen list.")

    def __getitem__(self, index):
        return self._items[index]

    def __setitem__(self, index, value):
        self._check_mutable()
        self._items[index] = value

    def __delitem__(self, index):
        self._check_mutable()
        del self._items[index]

    def insert(self, pos, item):
        self._check_mutable()
        self._items.insert(pos, item)

    def __len__(self):
        return len(self._items)

    def __iter__(self):
        return iter(self._items)

    def __reversed__(self):
        return reversed(self._items)

    def __eq__(self, other):
        # Delegate to list equality; @total_ordering derives the rest
        # from __eq__ and __le__.
        return list(self) == other

    def __le__(self, other):
        return list(self) <= other

    def __repr__(self):
        return f"<FrozenList(frozen={self._frozen}, {self._items!r})>"

    def __hash__(self):
        if not self._frozen:
            raise RuntimeError("Cannot hash unfrozen list.")
        return hash(tuple(self))
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
PyFrozenList = FrozenList
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
if not NO_EXTENSIONS:
|
| 93 |
+
try:
|
| 94 |
+
from ._frozenlist import FrozenList as CFrozenList # type: ignore
|
| 95 |
+
except ImportError: # pragma: no cover
|
| 96 |
+
pass
|
| 97 |
+
else:
|
| 98 |
+
FrozenList = CFrozenList # type: ignore
|
evalkit_internvl/lib/python3.10/site-packages/frozenlist/__init__.pyi
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import (
|
| 2 |
+
Generic,
|
| 3 |
+
Iterable,
|
| 4 |
+
Iterator,
|
| 5 |
+
List,
|
| 6 |
+
MutableSequence,
|
| 7 |
+
Optional,
|
| 8 |
+
TypeVar,
|
| 9 |
+
Union,
|
| 10 |
+
overload,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
_T = TypeVar("_T")
|
| 14 |
+
_Arg = Union[List[_T], Iterable[_T]]
|
| 15 |
+
|
| 16 |
+
class FrozenList(MutableSequence[_T], Generic[_T]):
|
| 17 |
+
def __init__(self, items: Optional[_Arg[_T]] = None) -> None: ...
|
| 18 |
+
@property
|
| 19 |
+
def frozen(self) -> bool: ...
|
| 20 |
+
def freeze(self) -> None: ...
|
| 21 |
+
@overload
|
| 22 |
+
def __getitem__(self, i: int) -> _T: ...
|
| 23 |
+
@overload
|
| 24 |
+
def __getitem__(self, s: slice) -> FrozenList[_T]: ...
|
| 25 |
+
@overload
|
| 26 |
+
def __setitem__(self, i: int, o: _T) -> None: ...
|
| 27 |
+
@overload
|
| 28 |
+
def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
|
| 29 |
+
@overload
|
| 30 |
+
def __delitem__(self, i: int) -> None: ...
|
| 31 |
+
@overload
|
| 32 |
+
def __delitem__(self, i: slice) -> None: ...
|
| 33 |
+
def __len__(self) -> int: ...
|
| 34 |
+
def __iter__(self) -> Iterator[_T]: ...
|
| 35 |
+
def __reversed__(self) -> Iterator[_T]: ...
|
| 36 |
+
def __eq__(self, other: object) -> bool: ...
|
| 37 |
+
def __le__(self, other: FrozenList[_T]) -> bool: ...
|
| 38 |
+
def __ne__(self, other: object) -> bool: ...
|
| 39 |
+
def __lt__(self, other: FrozenList[_T]) -> bool: ...
|
| 40 |
+
def __ge__(self, other: FrozenList[_T]) -> bool: ...
|
| 41 |
+
def __gt__(self, other: FrozenList[_T]) -> bool: ...
|
| 42 |
+
def insert(self, pos: int, item: _T) -> None: ...
|
| 43 |
+
def __repr__(self) -> str: ...
|
| 44 |
+
def __hash__(self) -> int: ...
|
| 45 |
+
|
| 46 |
+
# types for C accelerators are the same
|
| 47 |
+
CFrozenList = PyFrozenList = FrozenList
|
evalkit_internvl/lib/python3.10/site-packages/frozenlist/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (3.29 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/frozenlist/_frozenlist.pyx
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import types
|
| 3 |
+
from collections.abc import MutableSequence
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
cdef class FrozenList:
    """A list-like object that can be permanently frozen.

    Cython-accelerated counterpart of the pure-Python ``FrozenList``:
    once :meth:`freeze` is called, every mutating operation raises
    ``RuntimeError`` and the instance becomes hashable.
    """

    if sys.version_info >= (3, 9):
        # Native PEP 585 parametrization: FrozenList[int] works directly.
        __class_getitem__ = classmethod(types.GenericAlias)
    else:
        @classmethod
        def __class_getitem__(cls, cls_item):
            # Pre-3.9 fallback: accept and ignore the type parameter.
            return cls

    # readonly: visible from Python as the ``frozen`` attribute, but only
    # mutable from within this extension type.
    cdef readonly bint frozen
    # Backing storage for the sequence contents.
    cdef list _items

    def __init__(self, items=None):
        self.frozen = False
        if items is not None:
            items = list(items)
        else:
            items = []
        self._items = items

    cdef object _check_frozen(self):
        # Guard shared by every mutating method below.
        if self.frozen:
            raise RuntimeError("Cannot modify frozen list.")

    cdef inline object _fast_len(self):
        return len(self._items)

    def freeze(self):
        """Permanently disable mutation of this list."""
        self.frozen = True

    def __getitem__(self, index):
        return self._items[index]

    def __setitem__(self, index, value):
        self._check_frozen()
        self._items[index] = value

    def __delitem__(self, index):
        self._check_frozen()
        del self._items[index]

    def __len__(self):
        return self._fast_len()

    def __iter__(self):
        return self._items.__iter__()

    def __reversed__(self):
        return self._items.__reversed__()

    def __richcmp__(self, other, op):
        # Cython routes all rich comparisons through this single hook;
        # ``op`` follows the CPython Py_LT..Py_GE numbering (0..5).
        if op == 0:  # <
            return list(self) < other
        if op == 1:  # <=
            return list(self) <= other
        if op == 2:  # ==
            return list(self) == other
        if op == 3:  # !=
            return list(self) != other
        if op == 4:  # >
            return list(self) > other
        if op == 5:  # >=
            return list(self) >= other

    def insert(self, pos, item):
        self._check_frozen()
        self._items.insert(pos, item)

    def __contains__(self, item):
        return item in self._items

    def __iadd__(self, items):
        self._check_frozen()
        self._items += list(items)
        return self

    def index(self, item):
        return self._items.index(item)

    def remove(self, item):
        self._check_frozen()
        self._items.remove(item)

    def clear(self):
        self._check_frozen()
        self._items.clear()

    def extend(self, items):
        self._check_frozen()
        self._items += list(items)

    def reverse(self):
        self._check_frozen()
        self._items.reverse()

    def pop(self, index=-1):
        self._check_frozen()
        return self._items.pop(index)

    def append(self, item):
        self._check_frozen()
        return self._items.append(item)

    def count(self, item):
        return self._items.count(item)

    def __repr__(self):
        return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
                                                      self._items)

    def __hash__(self):
        # Hashing is only legal once the contents can no longer change.
        if self.frozen:
            return hash(tuple(self._items))
        else:
            raise RuntimeError("Cannot hash unfrozen list.")
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
MutableSequence.register(FrozenList)
|
evalkit_internvl/lib/python3.10/site-packages/frozenlist/py.typed
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Marker
|
evalkit_internvl/lib/python3.10/site-packages/joblib/__init__.py
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Joblib is a set of tools to provide **lightweight pipelining in
|
| 2 |
+
Python**. In particular:
|
| 3 |
+
|
| 4 |
+
1. transparent disk-caching of functions and lazy re-evaluation
|
| 5 |
+
(memoize pattern)
|
| 6 |
+
|
| 7 |
+
2. easy simple parallel computing
|
| 8 |
+
|
| 9 |
+
Joblib is optimized to be **fast** and **robust** on large
|
| 10 |
+
data in particular and has specific optimizations for `numpy` arrays. It is
|
| 11 |
+
**BSD-licensed**.
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
==================== ===============================================
|
| 15 |
+
**Documentation:** https://joblib.readthedocs.io
|
| 16 |
+
|
| 17 |
+
**Download:** https://pypi.python.org/pypi/joblib#downloads
|
| 18 |
+
|
| 19 |
+
**Source code:** https://github.com/joblib/joblib
|
| 20 |
+
|
| 21 |
+
**Report issues:** https://github.com/joblib/joblib/issues
|
| 22 |
+
==================== ===============================================
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
Vision
|
| 26 |
+
--------
|
| 27 |
+
|
| 28 |
+
The vision is to provide tools to easily achieve better performance and
|
| 29 |
+
reproducibility when working with long running jobs.
|
| 30 |
+
|
| 31 |
+
* **Avoid computing the same thing twice**: code is often rerun again and
|
| 32 |
+
again, for instance when prototyping computational-heavy jobs (as in
|
| 33 |
+
scientific development), but hand-crafted solutions to alleviate this
|
| 34 |
+
issue are error-prone and often lead to unreproducible results.
|
| 35 |
+
|
| 36 |
+
* **Persist to disk transparently**: efficiently persisting
|
| 37 |
+
arbitrary objects containing large data is hard. Using
|
| 38 |
+
joblib's caching mechanism avoids hand-written persistence and
|
| 39 |
+
implicitly links the file on disk to the execution context of
|
| 40 |
+
the original Python object. As a result, joblib's persistence is
|
| 41 |
+
good for resuming an application status or computational job, eg
|
| 42 |
+
after a crash.
|
| 43 |
+
|
| 44 |
+
Joblib addresses these problems while **leaving your code and your flow
|
| 45 |
+
control as unmodified as possible** (no framework, no new paradigms).
|
| 46 |
+
|
| 47 |
+
Main features
|
| 48 |
+
------------------
|
| 49 |
+
|
| 50 |
+
1) **Transparent and fast disk-caching of output value:** a memoize or
|
| 51 |
+
make-like functionality for Python functions that works well for
|
| 52 |
+
arbitrary Python objects, including very large numpy arrays. Separate
|
| 53 |
+
persistence and flow-execution logic from domain logic or algorithmic
|
| 54 |
+
code by writing the operations as a set of steps with well-defined
|
| 55 |
+
inputs and outputs: Python functions. Joblib can save their
|
| 56 |
+
computation to disk and rerun it only if necessary::
|
| 57 |
+
|
| 58 |
+
>>> from joblib import Memory
|
| 59 |
+
>>> cachedir = 'your_cache_dir_goes_here'
|
| 60 |
+
>>> mem = Memory(cachedir)
|
| 61 |
+
>>> import numpy as np
|
| 62 |
+
>>> a = np.vander(np.arange(3)).astype(float)
|
| 63 |
+
>>> square = mem.cache(np.square)
|
| 64 |
+
>>> b = square(a) # doctest: +ELLIPSIS
|
| 65 |
+
______________________________________________________________________...
|
| 66 |
+
[Memory] Calling square...
|
| 67 |
+
square(array([[0., 0., 1.],
|
| 68 |
+
[1., 1., 1.],
|
| 69 |
+
[4., 2., 1.]]))
|
| 70 |
+
_________________________________________________...square - ...s, 0.0min
|
| 71 |
+
|
| 72 |
+
>>> c = square(a)
|
| 73 |
+
>>> # The above call did not trigger an evaluation
|
| 74 |
+
|
| 75 |
+
2) **Embarrassingly parallel helper:** to make it easy to write readable
|
| 76 |
+
parallel code and debug it quickly::
|
| 77 |
+
|
| 78 |
+
>>> from joblib import Parallel, delayed
|
| 79 |
+
>>> from math import sqrt
|
| 80 |
+
>>> Parallel(n_jobs=1)(delayed(sqrt)(i**2) for i in range(10))
|
| 81 |
+
[0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
3) **Fast compressed Persistence**: a replacement for pickle to work
|
| 85 |
+
efficiently on Python objects containing large data (
|
| 86 |
+
*joblib.dump* & *joblib.load* ).
|
| 87 |
+
|
| 88 |
+
..
|
| 89 |
+
>>> import shutil ; shutil.rmtree(cachedir)
|
| 90 |
+
|
| 91 |
+
"""
|
| 92 |
+
|
| 93 |
+
# PEP0440 compatible formatted version, see:
|
| 94 |
+
# https://www.python.org/dev/peps/pep-0440/
|
| 95 |
+
#
|
| 96 |
+
# Generic release markers:
|
| 97 |
+
# X.Y
|
| 98 |
+
# X.Y.Z # For bugfix releases
|
| 99 |
+
#
|
| 100 |
+
# Admissible pre-release markers:
|
| 101 |
+
# X.YaN # Alpha release
|
| 102 |
+
# X.YbN # Beta release
|
| 103 |
+
# X.YrcN # Release Candidate
|
| 104 |
+
# X.Y # Final release
|
| 105 |
+
#
|
| 106 |
+
# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
|
| 107 |
+
# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
|
| 108 |
+
#
|
| 109 |
+
__version__ = '1.4.2'
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
import os
|
| 113 |
+
|
| 114 |
+
from .memory import Memory
|
| 115 |
+
from .memory import MemorizedResult
|
| 116 |
+
from .memory import register_store_backend
|
| 117 |
+
from .memory import expires_after
|
| 118 |
+
|
| 119 |
+
from .logger import PrintTime
|
| 120 |
+
from .logger import Logger
|
| 121 |
+
|
| 122 |
+
from .hashing import hash
|
| 123 |
+
|
| 124 |
+
from .numpy_pickle import dump
|
| 125 |
+
from .numpy_pickle import load
|
| 126 |
+
|
| 127 |
+
from .compressor import register_compressor
|
| 128 |
+
|
| 129 |
+
from .parallel import Parallel
|
| 130 |
+
from .parallel import delayed
|
| 131 |
+
from .parallel import cpu_count
|
| 132 |
+
from .parallel import register_parallel_backend
|
| 133 |
+
from .parallel import parallel_backend
|
| 134 |
+
from .parallel import parallel_config
|
| 135 |
+
from .parallel import effective_n_jobs
|
| 136 |
+
from ._cloudpickle_wrapper import wrap_non_picklable_objects
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
__all__ = ['Memory', 'MemorizedResult', 'PrintTime', 'Logger', 'hash', 'dump',
|
| 140 |
+
'load', 'Parallel', 'delayed', 'cpu_count', 'effective_n_jobs',
|
| 141 |
+
'register_parallel_backend', 'parallel_backend', 'expires_after',
|
| 142 |
+
'register_store_backend', 'register_compressor',
|
| 143 |
+
'wrap_non_picklable_objects', 'parallel_config']
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
# Workaround issue discovered in intel-openmp 2019.5:
|
| 147 |
+
# https://github.com/ContinuumIO/anaconda-issues/issues/11294
|
| 148 |
+
os.environ.setdefault("KMP_INIT_AT_FORK", "FALSE")
|
evalkit_internvl/lib/python3.10/site-packages/joblib/_memmapping_reducer.py
ADDED
|
@@ -0,0 +1,657 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Reducer using memory mapping for numpy arrays
|
| 3 |
+
"""
|
| 4 |
+
# Author: Thomas Moreau <thomas.moreau.2010@gmail.com>
|
| 5 |
+
# Copyright: 2017, Thomas Moreau
|
| 6 |
+
# License: BSD 3 clause
|
| 7 |
+
|
| 8 |
+
from mmap import mmap
|
| 9 |
+
import errno
|
| 10 |
+
import os
|
| 11 |
+
import stat
|
| 12 |
+
import threading
|
| 13 |
+
import atexit
|
| 14 |
+
import tempfile
|
| 15 |
+
import time
|
| 16 |
+
import warnings
|
| 17 |
+
import weakref
|
| 18 |
+
from uuid import uuid4
|
| 19 |
+
from multiprocessing import util
|
| 20 |
+
|
| 21 |
+
from pickle import whichmodule, loads, dumps, HIGHEST_PROTOCOL, PicklingError
|
| 22 |
+
|
| 23 |
+
try:
|
| 24 |
+
WindowsError
|
| 25 |
+
except NameError:
|
| 26 |
+
WindowsError = type(None)
|
| 27 |
+
|
| 28 |
+
try:
|
| 29 |
+
import numpy as np
|
| 30 |
+
from numpy.lib.stride_tricks import as_strided
|
| 31 |
+
except ImportError:
|
| 32 |
+
np = None
|
| 33 |
+
|
| 34 |
+
from .numpy_pickle import dump, load, load_temporary_memmap
|
| 35 |
+
from .backports import make_memmap
|
| 36 |
+
from .disk import delete_folder
|
| 37 |
+
from .externals.loky.backend import resource_tracker
|
| 38 |
+
|
| 39 |
+
# Some system have a ramdisk mounted by default, we can use it instead of /tmp
|
| 40 |
+
# as the default folder to dump big arrays to share with subprocesses.
|
| 41 |
+
SYSTEM_SHARED_MEM_FS = '/dev/shm'
|
| 42 |
+
|
| 43 |
+
# Minimal number of bytes available on SYSTEM_SHARED_MEM_FS to consider using
|
| 44 |
+
# it as the default folder to dump big arrays to share with subprocesses.
|
| 45 |
+
SYSTEM_SHARED_MEM_FS_MIN_SIZE = int(2e9)
|
| 46 |
+
|
| 47 |
+
# Folder and file permissions to chmod temporary files generated by the
|
| 48 |
+
# memmapping pool. Only the owner of the Python process can access the
|
| 49 |
+
# temporary files and folder.
|
| 50 |
+
FOLDER_PERMISSIONS = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
|
| 51 |
+
FILE_PERMISSIONS = stat.S_IRUSR | stat.S_IWUSR
|
| 52 |
+
|
| 53 |
+
# Set used in joblib workers, referencing the filenames of temporary memmaps
|
| 54 |
+
# created by joblib to speed up data communication. In child processes, we add
|
| 55 |
+
# a finalizer to these memmaps that sends a maybe_unlink call to the
|
| 56 |
+
# resource_tracker, in order to free main memory as fast as possible.
|
| 57 |
+
JOBLIB_MMAPS = set()
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _log_and_unlink(filename):
    """Ask the resource tracker to drop one reference to *filename*.

    Used as a weakref finalizer on joblib-owned temporary memmaps so the
    backing file can be deleted as soon as the last reference dies.
    """
    from .externals.loky.backend.resource_tracker import _resource_tracker
    message = (
        "[FINALIZER CALL] object mapping to {} about to be deleted,"
        " decrementing the refcount of the file (pid: {})".format(
            os.path.basename(filename), os.getpid()))
    util.debug(message)
    _resource_tracker.maybe_unlink(filename, "file")
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def add_maybe_unlink_finalizer(memmap):
    """Attach a finalizer that unrefs *memmap*'s file when it is collected."""
    message = (
        "[FINALIZER ADD] adding finalizer to {} (id {}, filename {}, pid {})"
        "".format(type(memmap), id(memmap), os.path.basename(memmap.filename),
                  os.getpid()))
    util.debug(message)
    weakref.finalize(memmap, _log_and_unlink, memmap.filename)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def unlink_file(filename):
    """Remove *filename*, retrying on transient ``PermissionError``.

    The retry loop works around a race on Windows: a maybe_unlink request
    sent by the finalizer of a mmap-backed array can be processed by the
    resource tracker before the last reference to the mmap is actually
    closed, making the first unlink attempts fail with PermissionError.
    """
    max_attempts = 10
    for attempt in range(1, max_attempts + 1):
        try:
            os.unlink(filename)
            break
        except PermissionError:
            util.debug(
                '[ResourceTracker] tried to unlink {}, got '
                'PermissionError'.format(filename)
            )
            if attempt == max_attempts:
                raise
            time.sleep(.2)
        except FileNotFoundError:
            # Race with temporary-folder deletion: the file is already
            # gone, which is the desired end state anyway — stay quiet.
            pass
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
resource_tracker._CLEANUP_FUNCS['file'] = unlink_file
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class _WeakArrayKeyMap:
|
| 112 |
+
"""A variant of weakref.WeakKeyDictionary for unhashable numpy arrays.
|
| 113 |
+
|
| 114 |
+
This datastructure will be used with numpy arrays as obj keys, therefore we
|
| 115 |
+
do not use the __get__ / __set__ methods to avoid any conflict with the
|
| 116 |
+
numpy fancy indexing syntax.
|
| 117 |
+
"""
|
| 118 |
+
|
| 119 |
+
def __init__(self):
|
| 120 |
+
self._data = {}
|
| 121 |
+
|
| 122 |
+
def get(self, obj):
|
| 123 |
+
ref, val = self._data[id(obj)]
|
| 124 |
+
if ref() is not obj:
|
| 125 |
+
# In case of race condition with on_destroy: could never be
|
| 126 |
+
# triggered by the joblib tests with CPython.
|
| 127 |
+
raise KeyError(obj)
|
| 128 |
+
return val
|
| 129 |
+
|
| 130 |
+
def set(self, obj, value):
|
| 131 |
+
key = id(obj)
|
| 132 |
+
try:
|
| 133 |
+
ref, _ = self._data[key]
|
| 134 |
+
if ref() is not obj:
|
| 135 |
+
# In case of race condition with on_destroy: could never be
|
| 136 |
+
# triggered by the joblib tests with CPython.
|
| 137 |
+
raise KeyError(obj)
|
| 138 |
+
except KeyError:
|
| 139 |
+
# Insert the new entry in the mapping along with a weakref
|
| 140 |
+
# callback to automatically delete the entry from the mapping
|
| 141 |
+
# as soon as the object used as key is garbage collected.
|
| 142 |
+
def on_destroy(_):
|
| 143 |
+
del self._data[key]
|
| 144 |
+
ref = weakref.ref(obj, on_destroy)
|
| 145 |
+
self._data[key] = ref, value
|
| 146 |
+
|
| 147 |
+
def __getstate__(self):
|
| 148 |
+
raise PicklingError("_WeakArrayKeyMap is not pickleable")
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
###############################################################################
|
| 152 |
+
# Support for efficient transient pickling of numpy data structures
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def _get_backing_memmap(a):
|
| 156 |
+
"""Recursively look up the original np.memmap instance base if any."""
|
| 157 |
+
b = getattr(a, 'base', None)
|
| 158 |
+
if b is None:
|
| 159 |
+
# TODO: check scipy sparse datastructure if scipy is installed
|
| 160 |
+
# a nor its descendants do not have a memmap base
|
| 161 |
+
return None
|
| 162 |
+
|
| 163 |
+
elif isinstance(b, mmap):
|
| 164 |
+
# a is already a real memmap instance.
|
| 165 |
+
return a
|
| 166 |
+
|
| 167 |
+
else:
|
| 168 |
+
# Recursive exploration of the base ancestry
|
| 169 |
+
return _get_backing_memmap(b)
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def _get_temp_dir(pool_folder_name, temp_folder=None):
    """Get the full path to a subfolder inside the temporary folder.

    Parameters
    ----------
    pool_folder_name : str
        Sub-folder name used for the serialization of a pool instance.

    temp_folder: str, optional
        Folder to be used by the pool for memmapping large arrays
        for sharing memory with worker processes. If None, this will try in
        order:

        - a folder pointed by the JOBLIB_TEMP_FOLDER environment variable,
        - /dev/shm if the folder exists and is writable: this is a
          RAMdisk filesystem available by default on modern Linux
          distributions,
        - the default system temporary folder that can be overridden
          with TMP, TMPDIR or TEMP environment variables, typically
          /tmp under Unix operating systems.

    Returns
    -------
    pool_folder : str
        full path to the temporary folder
    use_shared_mem : bool
        whether the temporary folder is written to the system shared memory
        folder or some other temporary folder.
    """
    use_shared_mem = False
    if temp_folder is None:
        temp_folder = os.environ.get('JOBLIB_TEMP_FOLDER', None)
    if temp_folder is None and os.path.exists(SYSTEM_SHARED_MEM_FS) \
            and hasattr(os, 'statvfs'):
        try:
            stats = os.statvfs(SYSTEM_SHARED_MEM_FS)
            if stats.f_bsize * stats.f_bavail > SYSTEM_SHARED_MEM_FS_MIN_SIZE:
                # Only use the ramdisk when it is reasonably large (2GB or
                # more) and we can actually create our sub-folder in it.
                temp_folder = SYSTEM_SHARED_MEM_FS
                pool_folder = os.path.join(temp_folder, pool_folder_name)
                if not os.path.exists(pool_folder):
                    os.makedirs(pool_folder)
                use_shared_mem = True
        except (IOError, OSError):
            # Missing write access to /dev/shm: fall back to the regular
            # temp folder below.
            temp_folder = None
    if temp_folder is None:
        # Fallback to the default tmp folder, typically /tmp.
        temp_folder = tempfile.gettempdir()
    temp_folder = os.path.abspath(os.path.expanduser(temp_folder))
    return os.path.join(temp_folder, pool_folder_name), use_shared_mem
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def has_shareable_memory(a):
    """Return True when *a* is (directly or transitively) mmap-backed."""
    backing = _get_backing_memmap(a)
    return backing is not None
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def _strided_from_memmap(filename, dtype, mode, offset, order, shape, strides,
                         total_buffer_len, unlink_on_gc_collect):
    """Reconstruct an array view on a memory mapped file."""
    # Never reopen in 'w+' mode on unpickling: that would zero out the
    # persisted data.
    if mode == 'w+':
        mode = 'r+'

    if strides is None:
        # Contiguous case: map the requested shape directly.
        return make_memmap(
            filename, dtype=dtype, shape=shape, mode=mode, offset=offset,
            order=order, unlink_on_gc_collect=unlink_on_gc_collect
        )

    # Non-contiguous case: map the whole enclosing buffer, then carve the
    # strided view out of it with numpy's stride-tricks API.
    buffer_map = make_memmap(
        filename, dtype=dtype, shape=total_buffer_len, offset=offset,
        mode=mode, order=order, unlink_on_gc_collect=unlink_on_gc_collect
    )
    return as_strided(buffer_map, shape=shape, strides=strides)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def _reduce_memmap_backed(a, m):
    """Pickling reduction for memmap backed arrays.

    a is expected to be an instance of np.ndarray (or np.memmap)
    m is expected to be an instance of np.memmap on the top of the ``base``
    attribute ancestry of a. ``m.base`` should be the real python mmap object.

    Returns a standard ``__reduce__`` pair so the consumer rebuilds ``a``
    as a view on the same file instead of copying the data.
    """
    util.debug('[MEMMAP REDUCE] reducing a memmap-backed array '
               '(shape, {}, pid: {})'.format(a.shape, os.getpid()))
    try:
        from numpy.lib.array_utils import byte_bounds
    except (ModuleNotFoundError, ImportError):
        # Backward-compat for numpy < 2.0
        from numpy import byte_bounds
    # Offset that comes from the striding differences between a and m:
    # distance in bytes between a's first element and m's buffer start.
    a_start, a_end = byte_bounds(a)
    m_start = byte_bounds(m)[0]
    offset = a_start - m_start

    # Add the offset of the backing memmap itself inside the file.
    offset += m.offset

    if m.flags['F_CONTIGUOUS']:
        order = 'F'
    else:
        # The backing memmap buffer is necessarily contiguous hence C if not
        # Fortran
        order = 'C'

    if a.flags['F_CONTIGUOUS'] or a.flags['C_CONTIGUOUS']:
        # If the array is a contiguous view, no need to pass the strides
        strides = None
        total_buffer_len = None
    else:
        # Compute the total number of items to map from which the strided
        # view will be extracted.
        strides = a.strides
        total_buffer_len = (a_end - a_start) // a.itemsize

    # ``unlink_on_gc_collect`` is False: the file belongs to the caller,
    # not to the reconstructed view.
    return (_strided_from_memmap,
            (m.filename, a.dtype, m.mode, offset, order, a.shape, strides,
             total_buffer_len, False))
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
def reduce_array_memmap_backward(a):
    """reduce a np.array or a np.memmap from a child process"""
    backing = _get_backing_memmap(a)
    if isinstance(backing, np.memmap) and backing.filename not in JOBLIB_MMAPS:
        # a is backed by a memmapped file that joblib does not manage:
        # reconstruct a in the receiving process by re-opening that file.
        return _reduce_memmap_backed(a, backing)

    # a is either a regular (not memmap-backed) numpy array, or an array
    # backed by a shared temporary file created by joblib. In the latter
    # case, in order to limit the lifespan of these temporary files, we
    # serialize the memmap as a regular numpy array, and decref the
    # file backing the memmap (done implicitly in a previously registered
    # finalizer, see ``unlink_on_gc_collect`` for more details)
    payload = dumps(np.asarray(a), protocol=HIGHEST_PROTOCOL)
    return (loads, (payload, ))
|
| 320 |
+
|
| 321 |
+
|
| 322 |
+
class ArrayMemmapForwardReducer(object):
    """Reducer callable to dump large arrays to memmap files.

    Parameters
    ----------
    max_nbytes: int
        Threshold to trigger memmapping of large arrays to files created
        a folder.
    temp_folder_resolver: callable
        An callable in charge of resolving a temporary folder name where files
        for backing memmapped arrays are created.
    mmap_mode: 'r', 'r+' or 'c'
        Mode for the created memmap datastructure. See the documentation of
        numpy.memmap for more details. Note: 'w+' is coerced to 'r+'
        automatically to avoid zeroing the data on unpickling.
    verbose: int, optional, 0 by default
        If verbose > 0, memmap creations are logged.
        If verbose > 1, both memmap creations, reuse and array pickling are
        logged.
    prewarm: bool, optional, False by default.
        Force a read on newly memmapped array to make sure that OS pre-cache it
        memory. This can be useful to avoid concurrent disk access when the
        same data array is passed to different worker processes.
    """

    def __init__(self, max_nbytes, temp_folder_resolver, mmap_mode,
                 unlink_on_gc_collect, verbose=0, prewarm=True):
        self._max_nbytes = max_nbytes
        self._temp_folder_resolver = temp_folder_resolver
        self._mmap_mode = mmap_mode
        self.verbose = int(verbose)
        if prewarm == "auto":
            # Prewarming only helps when the backing file lives on a real
            # disk: shared-memory filesystems need no page-cache warmup.
            self._prewarm = not self._temp_folder.startswith(
                SYSTEM_SHARED_MEM_FS
            )
        else:
            self._prewarm = prewarm
        # BUGFIX: the original code unconditionally re-assigned
        # ``self._prewarm = prewarm`` here, which clobbered the value computed
        # by the ``prewarm == "auto"`` branch above (turning the auto
        # detection into dead code). The redundant assignment is removed.
        self._memmaped_arrays = _WeakArrayKeyMap()
        self._temporary_memmaped_filenames = set()
        self._unlink_on_gc_collect = unlink_on_gc_collect

    @property
    def _temp_folder(self):
        # Resolved lazily so that the folder is only created when needed.
        return self._temp_folder_resolver()

    def __reduce__(self):
        # The ArrayMemmapForwardReducer is passed to the children processes: it
        # needs to be pickled but the _WeakArrayKeyMap need to be skipped as
        # it's only guaranteed to be consistent with the parent process memory
        # garbage collection.
        # Although this reducer is pickled, it is not needed in its destination
        # process (child processes), as we only use this reducer to send
        # memmaps from the parent process to the children processes. For this
        # reason, we can afford skipping the resolver, (which would otherwise
        # be unpicklable), and pass it as None instead.
        args = (self._max_nbytes, None, self._mmap_mode,
                self._unlink_on_gc_collect)
        kwargs = {
            'verbose': self.verbose,
            'prewarm': self._prewarm,
        }
        return ArrayMemmapForwardReducer, args, kwargs

    def __call__(self, a):
        """Reduce array ``a``, memmapping it to disk when large enough."""
        m = _get_backing_memmap(a)
        if m is not None and isinstance(m, np.memmap):
            # a is already backed by a memmap file, let's reuse it directly
            return _reduce_memmap_backed(a, m)

        if (not a.dtype.hasobject and self._max_nbytes is not None and
                a.nbytes > self._max_nbytes):
            # check that the folder exists (lazily create the pool temp folder
            # if required)
            try:
                os.makedirs(self._temp_folder)
                os.chmod(self._temp_folder, FOLDER_PERMISSIONS)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise e

            try:
                basename = self._memmaped_arrays.get(a)
            except KeyError:
                # Generate a new unique random filename. The process and thread
                # ids are only useful for debugging purpose and to make it
                # easier to cleanup orphaned files in case of hard process
                # kill (e.g. by "kill -9" or segfault).
                basename = "{}-{}-{}.pkl".format(
                    os.getpid(), id(threading.current_thread()), uuid4().hex)
                self._memmaped_arrays.set(a, basename)
            filename = os.path.join(self._temp_folder, basename)

            # In case the same array with the same content is passed several
            # times to the pool subprocess children, serialize it only once

            is_new_memmap = filename not in self._temporary_memmaped_filenames

            # add the memmap to the list of temporary memmaps created by joblib
            self._temporary_memmaped_filenames.add(filename)

            if self._unlink_on_gc_collect:
                # Bump reference count of the memmap by 1 to account for
                # shared usage of the memmap by a child process. The
                # corresponding decref call will be executed upon calling
                # resource_tracker.maybe_unlink, registered as a finalizer in
                # the child.
                # the incref/decref calls here are only possible when the child
                # and the parent share the same resource_tracker. It is not the
                # case for the multiprocessing backend, but it does not matter
                # because unlinking a memmap from a child process is only
                # useful to control the memory usage of long-lasting child
                # processes, while the multiprocessing-based pools terminate
                # their workers at the end of a map() call.
                resource_tracker.register(filename, "file")

            if is_new_memmap:
                # Incref each temporary memmap created by joblib one extra
                # time. This means that these memmaps will only be deleted
                # once an extra maybe_unlink() is called, which is done once
                # all the jobs have completed (or been canceled) in the
                # Parallel._terminate_backend() method.
                resource_tracker.register(filename, "file")

            if not os.path.exists(filename):
                util.debug(
                    "[ARRAY DUMP] Pickling new array (shape={}, dtype={}) "
                    "creating a new memmap at {}".format(
                        a.shape, a.dtype, filename))
                for dumped_filename in dump(a, filename):
                    os.chmod(dumped_filename, FILE_PERMISSIONS)

                if self._prewarm:
                    # Warm up the data by accessing it. This operation ensures
                    # that the disk access required to create the memmapping
                    # file are performed in the reducing process and avoids
                    # concurrent memmap creation in multiple children
                    # processes.
                    load(filename, mmap_mode=self._mmap_mode).max()

            else:
                util.debug(
                    "[ARRAY DUMP] Pickling known array (shape={}, dtype={}) "
                    "reusing memmap file: {}".format(
                        a.shape, a.dtype, os.path.basename(filename)))

            # The worker process will use joblib.load to memmap the data
            return (
                (load_temporary_memmap, (filename, self._mmap_mode,
                                         self._unlink_on_gc_collect))
            )
        else:
            # do not convert a into memmap, let pickler do its usual copy with
            # the default system pickler
            util.debug(
                '[ARRAY DUMP] Pickling array (NO MEMMAPPING) (shape={}, '
                ' dtype={}).'.format(a.shape, a.dtype))
            return (loads, (dumps(a, protocol=HIGHEST_PROTOCOL),))
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def get_memmapping_reducers(
        forward_reducers=None, backward_reducers=None,
        temp_folder_resolver=None, max_nbytes=1e6, mmap_mode='r', verbose=0,
        prewarm=False, unlink_on_gc_collect=True, **kwargs):
    """Construct a pair of memmapping reducer linked to a tmpdir.

    This function manage the creation and the clean up of the temporary folders
    underlying the memory maps and should be use to get the reducers necessary
    to construct joblib pool or executor.
    """
    forward_reducers = {} if forward_reducers is None else forward_reducers
    backward_reducers = {} if backward_reducers is None else backward_reducers

    if np is not None:
        # Register smart numpy.ndarray reducers that detects memmap backed
        # arrays and that is also able to dump to memmap large in-memory
        # arrays over the max_nbytes threshold
        array_reducer = ArrayMemmapForwardReducer(
            max_nbytes, temp_folder_resolver, mmap_mode, unlink_on_gc_collect,
            verbose, prewarm=prewarm)
        for array_type in (np.ndarray, np.memmap):
            forward_reducers[array_type] = array_reducer
            # Communication from child process to the parent process always
            # pickles in-memory numpy.ndarray without dumping them as memmap
            # to avoid confusing the caller and make it tricky to collect the
            # temporary folder
            backward_reducers[array_type] = reduce_array_memmap_backward

    return forward_reducers, backward_reducers
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
class TemporaryResourcesManager(object):
    """Stateful object able to manage temporary folder and pickles

    It exposes:
    - a per-context folder name resolving API that memmap-based reducers will
      rely on to know where to pickle the temporary memmaps
    - a temporary file/folder management API that internally uses the
      resource_tracker.
    """

    def __init__(self, temp_folder_root=None, context_id=None):
        # Lazily-resolved folder for the active context (set on first use).
        self._current_temp_folder = None
        self._temp_folder_root = temp_folder_root
        self._use_shared_mem = None
        # Maps context_id -> resolved temporary folder path.
        self._cached_temp_folders = dict()
        self._id = uuid4().hex
        # Maps context_id -> atexit-registered cleanup callback.
        self._finalizers = {}
        if context_id is None:
            # It would be safer to not assign a default context id (less silent
            # bugs), but doing this while maintaining backward compatibility
            # with the previous, context-unaware version get_memmaping_executor
            # exposes too many low-level details.
            context_id = uuid4().hex
        self.set_current_context(context_id)

    def set_current_context(self, context_id):
        # Activate ``context_id`` and make sure its folder name is registered.
        self._current_context_id = context_id
        self.register_new_context(context_id)

    def register_new_context(self, context_id):
        # Prepare a sub-folder name specific to a context (usually a unique id
        # generated by each instance of the Parallel class). Do not create in
        # advance to spare FS write access if no array is to be dumped).
        if context_id in self._cached_temp_folders:
            return
        else:
            # During its lifecycle, one Parallel object can have several
            # executors associated to it (for instance, if a loky worker raises
            # an exception, joblib shutdowns the executor and instantly
            # recreates a new one before raising the error - see
            # ``ensure_ready``. Because we don't want two executors tied to
            # the same Parallel object (and thus the same context id) to
            # register/use/delete the same folder, we also add an id specific
            # to the current Manager (and thus specific to its associated
            # executor) to the folder name.
            new_folder_name = (
                "joblib_memmapping_folder_{}_{}_{}".format(
                    os.getpid(), self._id, context_id)
            )
            new_folder_path, _ = _get_temp_dir(
                new_folder_name, self._temp_folder_root
            )
            self.register_folder_finalizer(new_folder_path, context_id)
            self._cached_temp_folders[context_id] = new_folder_path

    def resolve_temp_folder_name(self):
        """Return a folder name specific to the currently activated context"""
        return self._cached_temp_folders[self._current_context_id]

    # resource management API

    def register_folder_finalizer(self, pool_subfolder, context_id):
        # Register the garbage collector at program exit in case caller forgets
        # to call terminate explicitly: note we do not pass any reference to
        # ensure that this callback won't prevent garbage collection of
        # parallel instance and related file handler resources such as POSIX
        # semaphores and pipes
        pool_module_name = whichmodule(delete_folder, 'delete_folder')
        resource_tracker.register(pool_subfolder, "folder")

        def _cleanup():
            # In some cases the Python runtime seems to set delete_folder to
            # None just before exiting when accessing the delete_folder
            # function from the closure namespace. So instead we reimport
            # the delete_folder function explicitly.
            # https://github.com/joblib/joblib/issues/328
            # We cannot just use from 'joblib.pool import delete_folder'
            # because joblib should only use relative imports to allow
            # easy vendoring.
            delete_folder = __import__(
                pool_module_name, fromlist=['delete_folder']
            ).delete_folder
            try:
                delete_folder(pool_subfolder, allow_non_empty=True)
                resource_tracker.unregister(pool_subfolder, "folder")
            except OSError:
                warnings.warn("Failed to delete temporary folder: {}"
                              .format(pool_subfolder))

        self._finalizers[context_id] = atexit.register(_cleanup)

    def _clean_temporary_resources(self, context_id=None, force=False,
                                   allow_non_empty=False):
        """Clean temporary resources created by a process-based pool"""
        if context_id is None:
            # Iterates over a copy of the cache keys to avoid Error due to
            # iterating over a changing size dictionary.
            for context_id in list(self._cached_temp_folders):
                self._clean_temporary_resources(
                    context_id, force=force, allow_non_empty=allow_non_empty
                )
        else:
            temp_folder = self._cached_temp_folders.get(context_id)
            if temp_folder and os.path.exists(temp_folder):
                for filename in os.listdir(temp_folder):
                    if force:
                        # Some workers have failed and the ref counted might
                        # be off. The workers should have shut down by this
                        # time so forcefully clean up the files.
                        resource_tracker.unregister(
                            os.path.join(temp_folder, filename), "file"
                        )
                    else:
                        resource_tracker.maybe_unlink(
                            os.path.join(temp_folder, filename), "file"
                        )

                # When forcing clean-up, try to delete the folder even if some
                # files are still in it. Otherwise, try to delete the folder
                allow_non_empty |= force

                # Clean up the folder if possible, either if it is empty or
                # if none of the files in it are in used and allow_non_empty.
                try:
                    delete_folder(
                        temp_folder, allow_non_empty=allow_non_empty
                    )
                    # Forget the folder once it has been deleted
                    self._cached_temp_folders.pop(context_id, None)
                    resource_tracker.unregister(temp_folder, "folder")

                    # Also cancel the finalizers that gets triggered at gc.
                    finalizer = self._finalizers.pop(context_id, None)
                    if finalizer is not None:
                        atexit.unregister(finalizer)

                except OSError:
                    # Temporary folder cannot be deleted right now.
                    # This folder will be cleaned up by an atexit
                    # finalizer registered by the memmapping_reducer.
                    pass
|
evalkit_internvl/lib/python3.10/site-packages/joblib/_multiprocessing_helpers.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper module to factorize the conditional multiprocessing import logic
|
| 2 |
+
|
| 3 |
+
We use a distinct module to simplify import statements and avoid introducing
|
| 4 |
+
circular dependencies (for instance for the assert_spawning name).
|
| 5 |
+
"""
|
| 6 |
+
import os
|
| 7 |
+
import warnings
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
# Obtain possible configuration from the environment, assuming 1 (on)
|
| 11 |
+
# by default, upon 0 set to None. Should instructively fail if some non
|
| 12 |
+
# 0/1 value is set.
|
| 13 |
+
mp = int(os.environ.get('JOBLIB_MULTIPROCESSING', 1)) or None
|
| 14 |
+
if mp:
|
| 15 |
+
try:
|
| 16 |
+
import multiprocessing as mp
|
| 17 |
+
import _multiprocessing # noqa
|
| 18 |
+
except ImportError:
|
| 19 |
+
mp = None
|
| 20 |
+
|
| 21 |
+
# 2nd stage: validate that locking is available on the system and
|
| 22 |
+
# issue a warning if not
|
| 23 |
+
if mp is not None:
|
| 24 |
+
try:
|
| 25 |
+
# try to create a named semaphore using SemLock to make sure they are
|
| 26 |
+
# available on this platform. We use the low level object
|
| 27 |
+
# _multiprocessing.SemLock to avoid spawning a resource tracker on
|
| 28 |
+
# Unix system or changing the default backend.
|
| 29 |
+
import tempfile
|
| 30 |
+
from _multiprocessing import SemLock
|
| 31 |
+
|
| 32 |
+
_rand = tempfile._RandomNameSequence()
|
| 33 |
+
for i in range(100):
|
| 34 |
+
try:
|
| 35 |
+
name = '/joblib-{}-{}' .format(
|
| 36 |
+
os.getpid(), next(_rand))
|
| 37 |
+
_sem = SemLock(0, 0, 1, name=name, unlink=True)
|
| 38 |
+
del _sem # cleanup
|
| 39 |
+
break
|
| 40 |
+
except FileExistsError as e: # pragma: no cover
|
| 41 |
+
if i >= 99:
|
| 42 |
+
raise FileExistsError(
|
| 43 |
+
'cannot find name for semaphore') from e
|
| 44 |
+
except (FileExistsError, AttributeError, ImportError, OSError) as e:
|
| 45 |
+
mp = None
|
| 46 |
+
warnings.warn('%s. joblib will operate in serial mode' % (e,))
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
# 3rd stage: backward compat for the assert_spawning helper
|
| 50 |
+
if mp is not None:
|
| 51 |
+
from multiprocessing.context import assert_spawning
|
| 52 |
+
else:
|
| 53 |
+
assert_spawning = None
|
evalkit_internvl/lib/python3.10/site-packages/joblib/backports.py
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Backports of fixes for joblib dependencies
|
| 3 |
+
"""
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import time
|
| 7 |
+
|
| 8 |
+
from os.path import basename
|
| 9 |
+
from multiprocessing import util
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class Version:
    """Backport from deprecated distutils

    We maintain this backport to avoid introducing a new dependency on
    `packaging`.

    We might rexplore this choice in the future if all major Python projects
    introduce a dependency on packaging anyway.
    """

    def __init__(self, vstring=None):
        if vstring:
            self.parse(vstring)

    def __repr__(self):
        return "%s ('%s')" % (self.__class__.__name__, str(self))

    # Each rich comparison delegates to the subclass-provided ``_cmp`` and
    # propagates ``NotImplemented`` untouched so Python can try the
    # reflected operation on the other operand.

    def __eq__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome == 0

    def __lt__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome < 0

    def __le__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome <= 0

    def __gt__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome > 0

    def __ge__(self, other):
        outcome = self._cmp(other)
        return outcome if outcome is NotImplemented else outcome >= 0
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class LooseVersion(Version):
    """Backport from deprecated distutils

    We maintain this backport to avoid introducing a new dependency on
    `packaging`.

    We might rexplore this choice in the future if all major Python projects
    introduce a dependency on packaging anyway.
    """

    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

    def __init__(self, vstring=None):
        if vstring:
            self.parse(vstring)

    def parse(self, vstring):
        # I've given up on thinking I can reconstruct the version string
        # from the parsed tuple -- so I just store the string here for
        # use by __str__
        self.vstring = vstring
        raw_parts = [part for part in self.component_re.split(vstring)
                     if part and part != '.']
        parsed = []
        for part in raw_parts:
            try:
                parsed.append(int(part))
            except ValueError:
                # Non-numeric components (e.g. 'rc', 'a') stay as strings.
                parsed.append(part)

        self.version = parsed

    def __str__(self):
        return self.vstring

    def __repr__(self):
        return "LooseVersion ('%s')" % str(self)

    def _cmp(self, other):
        if isinstance(other, str):
            other = LooseVersion(other)
        elif not isinstance(other, LooseVersion):
            return NotImplemented

        # Component-wise list comparison decides the ordering.
        if self.version < other.version:
            return -1
        if self.version > other.version:
            return 1
        if self.version == other.version:
            return 0
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
try:
    import numpy as np

    def make_memmap(filename, dtype='uint8', mode='r+', offset=0,
                    shape=None, order='C', unlink_on_gc_collect=False):
        """Custom memmap constructor compatible with numpy.memmap.

        This function:
        - is a backport the numpy memmap offset fix (See
          https://github.com/numpy/numpy/pull/8443 for more details.
          The numpy fix is available starting numpy 1.13)
        - adds ``unlink_on_gc_collect``, which specifies explicitly whether
          the process re-constructing the memmap owns a reference to the
          underlying file. If set to True, it adds a finalizer to the
          newly-created memmap that sends a maybe_unlink request for the
          memmaped file to resource_tracker.
        """
        util.debug(
            "[MEMMAP READ] creating a memmap (shape {}, filename {}, "
            "pid {})".format(shape, basename(filename), os.getpid())
        )

        mm = np.memmap(filename, dtype=dtype, mode=mode, offset=offset,
                       shape=shape, order=order)
        if LooseVersion(np.__version__) < '1.13':
            # Restore the offset attribute dropped by old numpy so that
            # downstream reducers can compute byte offsets correctly.
            mm.offset = offset
        if unlink_on_gc_collect:
            # Imported lazily to avoid a circular import at module load time.
            from ._memmapping_reducer import add_maybe_unlink_finalizer
            add_maybe_unlink_finalizer(mm)
        return mm
except ImportError:
    def make_memmap(filename, dtype='uint8', mode='r+', offset=0,
                    shape=None, order='C', unlink_on_gc_collect=False):
        # numpy is an optional dependency: only fail when the memmapping
        # feature is actually requested.
        raise NotImplementedError(
            "'joblib.backports.make_memmap' should not be used "
            'if numpy is not installed.')
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
if os.name == 'nt':
    # https://github.com/joblib/joblib/issues/540
    access_denied_errors = (5, 13)
    from os import replace

    def concurrency_safe_rename(src, dst):
        """Renames ``src`` into ``dst`` overwriting ``dst`` if it exists.

        On Windows os.replace can yield permission errors if executed by two
        different processes.
        """
        max_sleep_time = 1
        total_sleep_time = 0
        sleep_time = 0.001
        while total_sleep_time < max_sleep_time:
            try:
                replace(src, dst)
                break
            except Exception as exc:
                if getattr(exc, 'winerror', None) in access_denied_errors:
                    # Access denied: another process holds the file. Back off
                    # exponentially before retrying.
                    time.sleep(sleep_time)
                    total_sleep_time += sleep_time
                    sleep_time *= 2
                else:
                    raise
        else:
            # NOTE(review): this while-else bare ``raise`` fires when the
            # retry budget is exhausted; outside an except block it raises
            # RuntimeError rather than re-raising the last access-denied
            # error — presumably an intentional hard failure; confirm
            # against upstream behavior.
            raise
else:
    from os import replace as concurrency_safe_rename  # noqa
|
evalkit_internvl/lib/python3.10/site-packages/joblib/func_inspect.py
ADDED
|
@@ -0,0 +1,369 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
My own variation on function-specific inspect-like features.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
|
| 6 |
+
# Copyright (c) 2009 Gael Varoquaux
|
| 7 |
+
# License: BSD Style, 3 clauses.
|
| 8 |
+
|
| 9 |
+
import inspect
|
| 10 |
+
import warnings
|
| 11 |
+
import re
|
| 12 |
+
import os
|
| 13 |
+
import collections
|
| 14 |
+
|
| 15 |
+
from itertools import islice
|
| 16 |
+
from tokenize import open as open_py_source
|
| 17 |
+
|
| 18 |
+
from .logger import pformat
|
| 19 |
+
|
| 20 |
+
full_argspec_fields = ('args varargs varkw defaults kwonlyargs '
|
| 21 |
+
'kwonlydefaults annotations')
|
| 22 |
+
full_argspec_type = collections.namedtuple('FullArgSpec', full_argspec_fields)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def get_func_code(func):
    """ Attempts to retrieve a reliable function code hash.

    The reason we don't use inspect.getsource is that it caches the
    source, whereas we want this to be modified on the fly when the
    function is modified.

    Returns
    -------
    func_code: string
        The function code
    source_file: string
        The path to the file in which the function is defined.
    first_line: int
        The first line of the code in the source file.

    Notes
    ------
    This function does a bit more magic than inspect, and is thus
    more robust.
    """
    source_file = None
    try:
        code = func.__code__
        source_file = code.co_filename
        if not os.path.exists(source_file):
            # Use inspect for lambda functions and functions defined in an
            # interactive shell, or in doctests
            source_code = ''.join(inspect.getsourcelines(func)[0])
            line_no = 1
            if source_file.startswith('<doctest '):
                match = re.match(
                    r'\<doctest (.*\.rst)\[(.*)\]\>', source_file)
                rst_file, line_str = match.groups()
                line_no = int(line_str)
                source_file = '<doctest %s>' % rst_file
            return source_code, source_file, line_no
        # Try to retrieve the source code.
        first_line = code.co_firstlineno
        with open_py_source(source_file) as source_file_obj:
            # All the lines after the function definition:
            source_lines = list(islice(source_file_obj, first_line - 1, None))
            return ''.join(inspect.getblock(source_lines)), source_file, first_line
    except:  # noqa: E722
        # If the source code fails, we use the hash. This is fragile and
        # might change from one session to another.
        if hasattr(func, '__code__'):
            # Python 3.X
            return str(func.__code__.__hash__()), source_file, -1
        # Weird objects like numpy ufunc don't have __code__
        # This is fragile, as quite often the id of the object is
        # in the repr, so it might not persist across sessions,
        # however it will work for ufuncs.
        return repr(func), source_file, -1
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def _clean_win_chars(string):
|
| 82 |
+
"""Windows cannot encode some characters in filename."""
|
| 83 |
+
import urllib
|
| 84 |
+
if hasattr(urllib, 'quote'):
|
| 85 |
+
quote = urllib.quote
|
| 86 |
+
else:
|
| 87 |
+
# In Python 3, quote is elsewhere
|
| 88 |
+
import urllib.parse
|
| 89 |
+
quote = urllib.parse.quote
|
| 90 |
+
for char in ('<', '>', '!', ':', '\\'):
|
| 91 |
+
string = string.replace(char, quote(char))
|
| 92 |
+
return string
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
def get_func_name(func, resolv_alias=True, win_characters=True):
    """ Return the function import path (as a list of module names), and
        a name for the function.

        Parameters
        ----------
        func: callable
            The func to inspect
        resolv_alias: boolean, optional
            If true, possible local aliases are indicated.
        win_characters: boolean, optional
            If true, substitute special characters using urllib.quote
            This is useful in Windows, as it cannot encode some filenames

        Returns
        -------
        module: list of str
            The module path split on '.', possibly extended with the
            mangled filename (for __main__) and enclosing qualname parts.
        name: str
            A name for the function, suffixed with '-alias' when a local
            alias is detected and resolv_alias is True.
    """
    # Prefer the cheap __module__ attribute; fall back to inspect, which
    # can raise TypeError for builtins and other exotic callables.
    if hasattr(func, '__module__'):
        module = func.__module__
    else:
        try:
            module = inspect.getmodule(func)
        except TypeError:
            if hasattr(func, '__class__'):
                module = func.__class__.__module__
            else:
                module = 'unknown'
    if module is None:
        # Happens in doctests, eg
        module = ''
    if module == '__main__':
        # Functions defined in scripts/notebooks all share the '__main__'
        # module name, so disambiguate them with a mangled source filename.
        try:
            filename = os.path.abspath(inspect.getsourcefile(func))
        except:  # noqa: E722
            filename = None
        if filename is not None:
            # mangling of full path to filename
            parts = filename.split(os.sep)
            if parts[-1].startswith('<ipython-input'):
                # We're in a IPython (or notebook) session. parts[-1] comes
                # from func.__code__.co_filename and is of the form
                # <ipython-input-N-XYZ>, where:
                # - N is the cell number where the function was defined
                # - XYZ is a hash representing the function's code (and name).
                #   It will be consistent across sessions and kernel restarts,
                #   and will change if the function's code/name changes
                # We remove N so that cache is properly hit if the cell where
                # the func is defined is re-exectuted.
                # The XYZ hash should avoid collisions between functions with
                # the same name, both within the same notebook but also across
                # notebooks
                splitted = parts[-1].split('-')
                parts[-1] = '-'.join(splitted[:2] + splitted[3:])
            elif len(parts) > 2 and parts[-2].startswith('ipykernel_'):
                # In a notebook session (ipykernel). Filename seems to be 'xyz'
                # of above. parts[-2] has the structure ipykernel_XXXXXX where
                # XXXXXX is a six-digit number identifying the current run (?).
                # If we split it off, the function again has the same
                # identifier across runs.
                parts[-2] = 'ipykernel'
            filename = '-'.join(parts)
            if filename.endswith('.py'):
                filename = filename[:-3]
            module = module + '-' + filename
    module = module.split('.')
    # 'func_name' is the Python 2 spelling; kept for weird callables that
    # still expose it.
    if hasattr(func, 'func_name'):
        name = func.func_name
    elif hasattr(func, '__name__'):
        name = func.__name__
    else:
        name = 'unknown'
    # Hack to detect functions not defined at the module-level
    if resolv_alias:
        # TODO: Maybe add a warning here?
        if hasattr(func, 'func_globals') and name in func.func_globals:
            if not func.func_globals[name] is func:
                name = '%s-alias' % name
    if hasattr(func, '__qualname__') and func.__qualname__ != name:
        # Extend the module name in case of nested functions to avoid
        # (module, name) collisions
        module.extend(func.__qualname__.split(".")[:-1])
    if inspect.ismethod(func):
        # We need to add the name of the class
        # NOTE: 'im_class' is a Python 2 attribute; on Python 3 the class
        # is already captured via __qualname__ above.
        if hasattr(func, 'im_class'):
            klass = func.im_class
            module.append(klass.__name__)
    if os.name == 'nt' and win_characters:
        # Windows can't encode certain characters in filenames
        name = _clean_win_chars(name)
        module = [_clean_win_chars(s) for s in module]
    return module, name
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def _signature_str(function_name, arg_sig):
|
| 186 |
+
"""Helper function to output a function signature"""
|
| 187 |
+
return '{}{}'.format(function_name, arg_sig)
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
def _function_called_str(function_name, args, kwargs):
|
| 191 |
+
"""Helper function to output a function call"""
|
| 192 |
+
template_str = '{0}({1}, {2})'
|
| 193 |
+
|
| 194 |
+
args_str = repr(args)[1:-1]
|
| 195 |
+
kwargs_str = ', '.join('%s=%s' % (k, v)
|
| 196 |
+
for k, v in kwargs.items())
|
| 197 |
+
return template_str.format(function_name, args_str,
|
| 198 |
+
kwargs_str)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def filter_args(func, ignore_lst, args=(), kwargs=dict()):
    """ Filters the given args and kwargs using a list of arguments to
        ignore, and a function specification.

        Parameters
        ----------
        func: callable
            Function giving the argument specification
        ignore_lst: list of strings
            List of arguments to ignore (either a name of an argument
            in the function spec, or '*', or '**')
        *args: list
            Positional arguments passed to the function.
        **kwargs: dict
            Keyword arguments passed to the function

        Returns
        -------
        filtered_args: dict
            Mapping of argument name -> value for the arguments kept,
            plus optional '*' (extra positional) and '**' (extra keyword)
            entries when the function accepts them.

        Raises
        ------
        ValueError
            If ignore_lst is a string, if a keyword-only parameter is
            passed positionally, if a required argument is missing, or if
            an ignored name is not a parameter of func.
        TypeError
            If a keyword argument does not match the signature and func
            takes no **kwargs.
    """
    # NOTE(review): kwargs has a mutable default; it is only read here,
    # never mutated, so this is safe as written.
    args = list(args)
    if isinstance(ignore_lst, str):
        # Catch a common mistake
        raise ValueError(
            'ignore_lst must be a list of parameters to ignore '
            '%s (type %s) was given' % (ignore_lst, type(ignore_lst)))
    # Special case for functools.partial objects
    if (not inspect.ismethod(func) and not inspect.isfunction(func)):
        if ignore_lst:
            warnings.warn('Cannot inspect object %s, ignore list will '
                          'not work.' % func, stacklevel=2)
        # Cannot introspect: treat everything as varargs/varkwargs.
        return {'*': args, '**': kwargs}
    arg_sig = inspect.signature(func)
    # Flatten the signature into parallel lists: named parameters (in
    # declaration order), keyword-only names, defaults, and the names of
    # the *args / **kwargs catch-alls if present.
    arg_names = []
    arg_defaults = []
    arg_kwonlyargs = []
    arg_varargs = None
    arg_varkw = None
    for param in arg_sig.parameters.values():
        if param.kind is param.POSITIONAL_OR_KEYWORD:
            arg_names.append(param.name)
        elif param.kind is param.KEYWORD_ONLY:
            arg_names.append(param.name)
            arg_kwonlyargs.append(param.name)
        elif param.kind is param.VAR_POSITIONAL:
            arg_varargs = param.name
        elif param.kind is param.VAR_KEYWORD:
            arg_varkw = param.name
        if param.default is not param.empty:
            arg_defaults.append(param.default)
    if inspect.ismethod(func):
        # First argument is 'self', it has been removed by Python
        # we need to add it back:
        args = [func.__self__, ] + args
        # func is an instance method, inspect.signature(func) does not
        # include self, we need to fetch it from the class method, i.e
        # func.__func__
        class_method_sig = inspect.signature(func.__func__)
        self_name = next(iter(class_method_sig.parameters))
        arg_names = [self_name] + arg_names
    # XXX: Maybe I need an inspect.isbuiltin to detect C-level methods, such
    # as on ndarrays.

    _, name = get_func_name(func, resolv_alias=False)
    # Bind each named parameter to its value: positionally first, then from
    # kwargs, then from the declared default.
    arg_dict = dict()
    arg_position = -1
    for arg_position, arg_name in enumerate(arg_names):
        if arg_position < len(args):
            # Positional argument or keyword argument given as positional
            if arg_name not in arg_kwonlyargs:
                arg_dict[arg_name] = args[arg_position]
            else:
                raise ValueError(
                    "Keyword-only parameter '%s' was passed as "
                    'positional parameter for %s:\n'
                    '     %s was called.'
                    % (arg_name,
                       _signature_str(name, arg_sig),
                       _function_called_str(name, args, kwargs))
                )

        else:
            # 'position' is negative: index the defaults list from the end,
            # since defaults align with the trailing parameters.
            position = arg_position - len(arg_names)
            if arg_name in kwargs:
                arg_dict[arg_name] = kwargs[arg_name]
            else:
                try:
                    arg_dict[arg_name] = arg_defaults[position]
                except (IndexError, KeyError) as e:
                    # Missing argument
                    raise ValueError(
                        'Wrong number of arguments for %s:\n'
                        '     %s was called.'
                        % (_signature_str(name, arg_sig),
                           _function_called_str(name, args, kwargs))
                    ) from e

    # Keyword arguments override positional/default bindings; names not in
    # the signature go to **kwargs (or are an error if func has none).
    # Sorted iteration keeps the resulting dict order deterministic.
    varkwargs = dict()
    for arg_name, arg_value in sorted(kwargs.items()):
        if arg_name in arg_dict:
            arg_dict[arg_name] = arg_value
        elif arg_varkw is not None:
            varkwargs[arg_name] = arg_value
        else:
            raise TypeError("Ignore list for %s() contains an unexpected "
                            "keyword argument '%s'" % (name, arg_name))

    if arg_varkw is not None:
        arg_dict['**'] = varkwargs
    if arg_varargs is not None:
        # Leftover positional arguments beyond the named parameters.
        varargs = args[arg_position + 1:]
        arg_dict['*'] = varargs

    # Now remove the arguments to be ignored
    for item in ignore_lst:
        if item in arg_dict:
            arg_dict.pop(item)
        else:
            raise ValueError("Ignore list: argument '%s' is not defined for "
                             "function %s"
                             % (item,
                                _signature_str(name, arg_sig))
                             )
    # XXX: Return a sorted list of pairs?
    return arg_dict
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
def _format_arg(arg):
    """Pretty-print a single argument, abbreviating very long reprs."""
    text = pformat(arg, indent=2)
    # Keep log messages bounded: anything over 1500 chars is truncated.
    if len(text) > 1500:
        text = '%s...' % text[:700]
    return text
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
def format_signature(func, *args, **kwargs):
    """Return ``(module_path, signature)`` strings describing a call."""
    # XXX: Should this use inspect.formatargvalues/formatargspec?
    module, name = get_func_name(func)
    path_parts = [m for m in module if m]
    if path_parts:
        module_path = '.'.join(path_parts + [name])
    else:
        module_path = name
    pieces = []
    prev_len = 0
    for arg in args:
        rendered = _format_arg(arg)
        # Start a new line once the previous rendered argument got long.
        if prev_len > 80:
            rendered = '\n%s' % rendered
        prev_len = len(rendered)
        pieces.append(rendered)
    for key, value in kwargs.items():
        pieces.append('%s=%s' % (key, _format_arg(value)))
    signature = '%s(%s)' % (name, ', '.join(pieces))
    return module_path, signature
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
def format_call(func, args, kwargs, object_name="Memory"):
    """ Returns a nicely formatted statement displaying the function
        call with the given arguments.
    """
    path, signature = format_signature(func, *args, **kwargs)
    header = 80 * '_'
    return '%s\n[%s] Calling %s...\n%s' % (header, object_name,
                                           path, signature)
    # XXX: Not using logging framework
    # self.debug(msg)
|
evalkit_internvl/lib/python3.10/site-packages/joblib/logger.py
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Helpers for logging.
|
| 3 |
+
|
| 4 |
+
This module needs much love to become useful.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
|
| 8 |
+
# Copyright (c) 2008 Gael Varoquaux
|
| 9 |
+
# License: BSD Style, 3 clauses.
|
| 10 |
+
|
| 11 |
+
from __future__ import print_function
|
| 12 |
+
|
| 13 |
+
import time
|
| 14 |
+
import sys
|
| 15 |
+
import os
|
| 16 |
+
import shutil
|
| 17 |
+
import logging
|
| 18 |
+
import pprint
|
| 19 |
+
|
| 20 |
+
from .disk import mkdirp
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def _squeeze_time(t):
|
| 24 |
+
"""Remove .1s to the time under Windows: this is the time it take to
|
| 25 |
+
stat files. This is needed to make results similar to timings under
|
| 26 |
+
Unix, for tests
|
| 27 |
+
"""
|
| 28 |
+
if sys.platform.startswith('win'):
|
| 29 |
+
return max(0, t - .1)
|
| 30 |
+
else:
|
| 31 |
+
return t
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def format_time(t):
    """Format an elapsed time as e.g. ``"12.3s, 0.2min"``."""
    squeezed = _squeeze_time(t)
    return "%.1fs, %.1fmin" % (squeezed, squeezed / 60.)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def short_format_time(t):
    """Compact elapsed-time rendering: minutes above 60s, else seconds."""
    squeezed = _squeeze_time(t)
    if squeezed > 60:
        return "%4.1fmin" % (squeezed / 60.)
    return " %5.1fs" % (squeezed)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def pformat(obj, indent=0, depth=3):
    """Pretty-print *obj* to a string, with compact numpy array output.

    If numpy is already loaded, its global print options are temporarily
    tightened (low precision, small threshold) so that large arrays do not
    flood logs, and are restored afterwards.

    Parameters
    ----------
    obj: object
        The object to format.
    indent: int, optional
        Indentation passed to :func:`pprint.pformat`.
    depth: int, optional
        Maximum nesting depth printed; deeper structures are elided.

    Returns
    -------
    str
        The formatted representation.
    """
    if 'numpy' in sys.modules:
        import numpy as np
        print_options = np.get_printoptions()
        np.set_printoptions(precision=6, threshold=64, edgeitems=1)
    else:
        print_options = None
    try:
        return pprint.pformat(obj, depth=depth, indent=indent)
    finally:
        # Restore the caller's global numpy print options even if
        # pprint.pformat raised (the original left them clobbered).
        if print_options:
            np.set_printoptions(**print_options)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
###############################################################################
|
| 61 |
+
# class `Logger`
|
| 62 |
+
###############################################################################
|
| 63 |
+
class Logger(object):
    """Base class providing simple logging helpers for joblib objects."""

    def __init__(self, depth=3, name=None):
        """
        Parameters
        ----------
        depth: int, optional
            The depth of objects printed.
        name: str, optional
            The namespace to log to. If None, defaults to joblib.
        """
        self.depth = depth
        self._name = name or 'joblib'

    def warn(self, msg):
        """Emit *msg* as a warning on this object's logging namespace."""
        logger = logging.getLogger(self._name)
        logger.warning("[%s]: %s" % (self, msg))

    def info(self, msg):
        """Emit *msg* at INFO level on the root logger."""
        logging.info("[%s]: %s" % (self, msg))

    def debug(self, msg):
        """Emit *msg* at DEBUG level on this object's logging namespace."""
        # XXX: This conflicts with the debug flag used in children class
        logger = logging.getLogger(self._name)
        logger.debug("[%s]: %s" % (self, msg))

    def format(self, obj, indent=0):
        """Return the formatted representation of the object."""
        return pformat(obj, indent=indent, depth=self.depth)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
###############################################################################
|
| 95 |
+
# class `PrintTime`
|
| 96 |
+
###############################################################################
|
| 97 |
+
class PrintTime(object):
    """ Print and log messages while keeping track of time.
    """

    def __init__(self, logfile=None, logdir=None):
        # Purpose: record the start time and optionally open/rotate a log
        # file. 'logfile' is a path to the log file; 'logdir' is a directory
        # in which a 'joblib.log' file is created. At most one may be given.
        if logfile is not None and logdir is not None:
            raise ValueError('Cannot specify both logfile and logdir')
        # XXX: Need argument docstring
        self.last_time = time.time()
        self.start_time = self.last_time
        if logdir is not None:
            logfile = os.path.join(logdir, 'joblib.log')
        self.logfile = logfile
        if logfile is not None:
            mkdirp(os.path.dirname(logfile))
            if os.path.exists(logfile):
                # Rotate the logs
                # Shift joblib.log.1 .. joblib.log.8 up by one; failures
                # (e.g. missing files) are deliberately ignored.
                for i in range(1, 9):
                    try:
                        shutil.move(logfile + '.%i' % i,
                                    logfile + '.%i' % (i + 1))
                    except:  # noqa: E722
                        "No reason failing here"
                # Use a copy rather than a move, so that a process
                # monitoring this file does not get lost.
                try:
                    shutil.copy(logfile, logfile + '.1')
                except:  # noqa: E722
                    "No reason failing here"
            try:
                # Truncate and stamp the fresh log file. Note: the 'logfile'
                # name is rebound to the file object inside the 'with'.
                with open(logfile, 'w') as logfile:
                    logfile.write('\nLogging joblib python script\n')
                    logfile.write('\n---%s---\n' % time.ctime(self.last_time))
            except:  # noqa: E722
                """ Multiprocessing writing to files can create race
                    conditions. Rather fail silently than crash the
                    computation.
                """
                # XXX: We actually need a debug flag to disable this
                # silent failure.

    def __call__(self, msg='', total=False):
        """ Print the time elapsed between the last call and the current
            call, with an optional message.
        """
        # When total=True, report time since construction instead of time
        # since the previous call.
        if not total:
            time_lapse = time.time() - self.last_time
            full_msg = "%s: %s" % (msg, format_time(time_lapse))
        else:
            # FIXME: Too much logic duplicated
            time_lapse = time.time() - self.start_time
            full_msg = "%s: %.2fs, %.1f min" % (msg, time_lapse,
                                                time_lapse / 60)
        print(full_msg, file=sys.stderr)
        if self.logfile is not None:
            # Append the same message to the log file, best-effort.
            try:
                with open(self.logfile, 'a') as f:
                    print(full_msg, file=f)
            except:  # noqa: E722
                """ Multiprocessing writing to files can create race
                    conditions. Rather fail silently than crash the
                    calculation.
                """
                # XXX: We actually need a debug flag to disable this
                # silent failure.
        self.last_time = time.time()
|