Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +1 -0
- evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/INSTALLER +1 -0
- evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/METADATA +441 -0
- evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/RECORD +32 -0
- evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/REQUESTED +0 -0
- evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/WHEEL +5 -0
- evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt +27 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/__init__.py +24 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/_version.py +4 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/easter.py +89 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/relativedelta.py +599 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/rrule.py +1737 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__init__.py +12 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/tzwin.py +2 -0
- evalkit_internvl/lib/python3.10/site-packages/dateutil/utils.py +71 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_api.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_exceptions.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_models.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_ssl.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_synchronization.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_trace.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_utils.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__init__.py +39 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/connection.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/http11.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/http2.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/interfaces.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/connection.py +215 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/connection_pool.py +356 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/http11.py +331 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/http2.py +589 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/http_proxy.py +350 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/interfaces.py +135 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/socks_proxy.py +340 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__init__.py +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/anyio.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/auto.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/base.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/mock.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/sync.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/trio.cpython-310.pyc +0 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/anyio.py +145 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/auto.py +52 -0
- evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/base.py +103 -0
.gitattributes
CHANGED
|
@@ -1645,3 +1645,4 @@ evalkit_internvl/lib/python3.10/site-packages/sympy/solvers/__pycache__/solveset
|
|
| 1645 |
evalkit_internvl/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/single.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1646 |
evalkit_internvl/lib/python3.10/site-packages/sympy/polys/benchmarks/__pycache__/bench_solvers.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1647 |
evalkit_internvl/lib/python3.10/site-packages/tiktoken/_tiktoken.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 1645 |
evalkit_internvl/lib/python3.10/site-packages/sympy/solvers/ode/__pycache__/single.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1646 |
evalkit_internvl/lib/python3.10/site-packages/sympy/polys/benchmarks/__pycache__/bench_solvers.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1647 |
evalkit_internvl/lib/python3.10/site-packages/tiktoken/_tiktoken.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1648 |
+
evalkit_internvl/lib/python3.10/site-packages/sympy/solvers/tests/__pycache__/test_solveset.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/METADATA
ADDED
|
@@ -0,0 +1,441 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: colorama
|
| 3 |
+
Version: 0.4.6
|
| 4 |
+
Summary: Cross-platform colored terminal text.
|
| 5 |
+
Project-URL: Homepage, https://github.com/tartley/colorama
|
| 6 |
+
Author-email: Jonathan Hartley <tartley@tartley.com>
|
| 7 |
+
License-File: LICENSE.txt
|
| 8 |
+
Keywords: ansi,color,colour,crossplatform,terminal,text,windows,xplatform
|
| 9 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 10 |
+
Classifier: Environment :: Console
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 13 |
+
Classifier: Operating System :: OS Independent
|
| 14 |
+
Classifier: Programming Language :: Python
|
| 15 |
+
Classifier: Programming Language :: Python :: 2
|
| 16 |
+
Classifier: Programming Language :: Python :: 2.7
|
| 17 |
+
Classifier: Programming Language :: Python :: 3
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 22 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 23 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 24 |
+
Classifier: Topic :: Terminals
|
| 25 |
+
Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7
|
| 26 |
+
Description-Content-Type: text/x-rst
|
| 27 |
+
|
| 28 |
+
.. image:: https://img.shields.io/pypi/v/colorama.svg
|
| 29 |
+
:target: https://pypi.org/project/colorama/
|
| 30 |
+
:alt: Latest Version
|
| 31 |
+
|
| 32 |
+
.. image:: https://img.shields.io/pypi/pyversions/colorama.svg
|
| 33 |
+
:target: https://pypi.org/project/colorama/
|
| 34 |
+
:alt: Supported Python versions
|
| 35 |
+
|
| 36 |
+
.. image:: https://github.com/tartley/colorama/actions/workflows/test.yml/badge.svg
|
| 37 |
+
:target: https://github.com/tartley/colorama/actions/workflows/test.yml
|
| 38 |
+
:alt: Build Status
|
| 39 |
+
|
| 40 |
+
Colorama
|
| 41 |
+
========
|
| 42 |
+
|
| 43 |
+
Makes ANSI escape character sequences (for producing colored terminal text and
|
| 44 |
+
cursor positioning) work under MS Windows.
|
| 45 |
+
|
| 46 |
+
.. |donate| image:: https://www.paypalobjects.com/en_US/i/btn/btn_donate_SM.gif
|
| 47 |
+
:target: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=2MZ9D2GMLYCUJ&item_name=Colorama¤cy_code=USD
|
| 48 |
+
:alt: Donate with Paypal
|
| 49 |
+
|
| 50 |
+
`PyPI for releases <https://pypi.org/project/colorama/>`_ |
|
| 51 |
+
`Github for source <https://github.com/tartley/colorama>`_ |
|
| 52 |
+
`Colorama for enterprise on Tidelift <https://github.com/tartley/colorama/blob/master/ENTERPRISE.md>`_
|
| 53 |
+
|
| 54 |
+
If you find Colorama useful, please |donate| to the authors. Thank you!
|
| 55 |
+
|
| 56 |
+
Installation
|
| 57 |
+
------------
|
| 58 |
+
|
| 59 |
+
Tested on CPython 2.7, 3.7, 3.8, 3.9 and 3.10 and Pypy 2.7 and 3.8.
|
| 60 |
+
|
| 61 |
+
No requirements other than the standard library.
|
| 62 |
+
|
| 63 |
+
.. code-block:: bash
|
| 64 |
+
|
| 65 |
+
pip install colorama
|
| 66 |
+
# or
|
| 67 |
+
conda install -c anaconda colorama
|
| 68 |
+
|
| 69 |
+
Description
|
| 70 |
+
-----------
|
| 71 |
+
|
| 72 |
+
ANSI escape character sequences have long been used to produce colored terminal
|
| 73 |
+
text and cursor positioning on Unix and Macs. Colorama makes this work on
|
| 74 |
+
Windows, too, by wrapping ``stdout``, stripping ANSI sequences it finds (which
|
| 75 |
+
would appear as gobbledygook in the output), and converting them into the
|
| 76 |
+
appropriate win32 calls to modify the state of the terminal. On other platforms,
|
| 77 |
+
Colorama does nothing.
|
| 78 |
+
|
| 79 |
+
This has the upshot of providing a simple cross-platform API for printing
|
| 80 |
+
colored terminal text from Python, and has the happy side-effect that existing
|
| 81 |
+
applications or libraries which use ANSI sequences to produce colored output on
|
| 82 |
+
Linux or Macs can now also work on Windows, simply by calling
|
| 83 |
+
``colorama.just_fix_windows_console()`` (since v0.4.6) or ``colorama.init()``
|
| 84 |
+
(all versions, but may have other side-effects – see below).
|
| 85 |
+
|
| 86 |
+
An alternative approach is to install ``ansi.sys`` on Windows machines, which
|
| 87 |
+
provides the same behaviour for all applications running in terminals. Colorama
|
| 88 |
+
is intended for situations where that isn't easy (e.g., maybe your app doesn't
|
| 89 |
+
have an installer.)
|
| 90 |
+
|
| 91 |
+
Demo scripts in the source code repository print some colored text using
|
| 92 |
+
ANSI sequences. Compare their output under Gnome-terminal's built in ANSI
|
| 93 |
+
handling, versus on Windows Command-Prompt using Colorama:
|
| 94 |
+
|
| 95 |
+
.. image:: https://github.com/tartley/colorama/raw/master/screenshots/ubuntu-demo.png
|
| 96 |
+
:width: 661
|
| 97 |
+
:height: 357
|
| 98 |
+
:alt: ANSI sequences on Ubuntu under gnome-terminal.
|
| 99 |
+
|
| 100 |
+
.. image:: https://github.com/tartley/colorama/raw/master/screenshots/windows-demo.png
|
| 101 |
+
:width: 668
|
| 102 |
+
:height: 325
|
| 103 |
+
:alt: Same ANSI sequences on Windows, using Colorama.
|
| 104 |
+
|
| 105 |
+
These screenshots show that, on Windows, Colorama does not support ANSI 'dim
|
| 106 |
+
text'; it looks the same as 'normal text'.
|
| 107 |
+
|
| 108 |
+
Usage
|
| 109 |
+
-----
|
| 110 |
+
|
| 111 |
+
Initialisation
|
| 112 |
+
..............
|
| 113 |
+
|
| 114 |
+
If the only thing you want from Colorama is to get ANSI escapes to work on
|
| 115 |
+
Windows, then run:
|
| 116 |
+
|
| 117 |
+
.. code-block:: python
|
| 118 |
+
|
| 119 |
+
from colorama import just_fix_windows_console
|
| 120 |
+
just_fix_windows_console()
|
| 121 |
+
|
| 122 |
+
If you're on a recent version of Windows 10 or better, and your stdout/stderr
|
| 123 |
+
are pointing to a Windows console, then this will flip the magic configuration
|
| 124 |
+
switch to enable Windows' built-in ANSI support.
|
| 125 |
+
|
| 126 |
+
If you're on an older version of Windows, and your stdout/stderr are pointing to
|
| 127 |
+
a Windows console, then this will wrap ``sys.stdout`` and/or ``sys.stderr`` in a
|
| 128 |
+
magic file object that intercepts ANSI escape sequences and issues the
|
| 129 |
+
appropriate Win32 calls to emulate them.
|
| 130 |
+
|
| 131 |
+
In all other circumstances, it does nothing whatsoever. Basically the idea is
|
| 132 |
+
that this makes Windows act like Unix with respect to ANSI escape handling.
|
| 133 |
+
|
| 134 |
+
It's safe to call this function multiple times. It's safe to call this function
|
| 135 |
+
on non-Windows platforms, but it won't do anything. It's safe to call this
|
| 136 |
+
function when one or both of your stdout/stderr are redirected to a file – it
|
| 137 |
+
won't do anything to those streams.
|
| 138 |
+
|
| 139 |
+
Alternatively, you can use the older interface with more features (but also more
|
| 140 |
+
potential footguns):
|
| 141 |
+
|
| 142 |
+
.. code-block:: python
|
| 143 |
+
|
| 144 |
+
from colorama import init
|
| 145 |
+
init()
|
| 146 |
+
|
| 147 |
+
This does the same thing as ``just_fix_windows_console``, except for the
|
| 148 |
+
following differences:
|
| 149 |
+
|
| 150 |
+
- It's not safe to call ``init`` multiple times; you can end up with multiple
|
| 151 |
+
layers of wrapping and broken ANSI support.
|
| 152 |
+
|
| 153 |
+
- Colorama will apply a heuristic to guess whether stdout/stderr support ANSI,
|
| 154 |
+
and if it thinks they don't, then it will wrap ``sys.stdout`` and
|
| 155 |
+
``sys.stderr`` in a magic file object that strips out ANSI escape sequences
|
| 156 |
+
before printing them. This happens on all platforms, and can be convenient if
|
| 157 |
+
you want to write your code to emit ANSI escape sequences unconditionally, and
|
| 158 |
+
let Colorama decide whether they should actually be output. But note that
|
| 159 |
+
Colorama's heuristic is not particularly clever.
|
| 160 |
+
|
| 161 |
+
- ``init`` also accepts explicit keyword args to enable/disable various
|
| 162 |
+
functionality – see below.
|
| 163 |
+
|
| 164 |
+
To stop using Colorama before your program exits, simply call ``deinit()``.
|
| 165 |
+
This will restore ``stdout`` and ``stderr`` to their original values, so that
|
| 166 |
+
Colorama is disabled. To resume using Colorama again, call ``reinit()``; it is
|
| 167 |
+
cheaper than calling ``init()`` again (but does the same thing).
|
| 168 |
+
|
| 169 |
+
Most users should depend on ``colorama >= 0.4.6``, and use
|
| 170 |
+
``just_fix_windows_console``. The old ``init`` interface will be supported
|
| 171 |
+
indefinitely for backwards compatibility, but we don't plan to fix any issues
|
| 172 |
+
with it, also for backwards compatibility.
|
| 173 |
+
|
| 174 |
+
Colored Output
|
| 175 |
+
..............
|
| 176 |
+
|
| 177 |
+
Cross-platform printing of colored text can then be done using Colorama's
|
| 178 |
+
constant shorthand for ANSI escape sequences. These are deliberately
|
| 179 |
+
rudimentary, see below.
|
| 180 |
+
|
| 181 |
+
.. code-block:: python
|
| 182 |
+
|
| 183 |
+
from colorama import Fore, Back, Style
|
| 184 |
+
print(Fore.RED + 'some red text')
|
| 185 |
+
print(Back.GREEN + 'and with a green background')
|
| 186 |
+
print(Style.DIM + 'and in dim text')
|
| 187 |
+
print(Style.RESET_ALL)
|
| 188 |
+
print('back to normal now')
|
| 189 |
+
|
| 190 |
+
...or simply by manually printing ANSI sequences from your own code:
|
| 191 |
+
|
| 192 |
+
.. code-block:: python
|
| 193 |
+
|
| 194 |
+
print('\033[31m' + 'some red text')
|
| 195 |
+
print('\033[39m') # and reset to default color
|
| 196 |
+
|
| 197 |
+
...or, Colorama can be used in conjunction with existing ANSI libraries
|
| 198 |
+
such as the venerable `Termcolor <https://pypi.org/project/termcolor/>`_
|
| 199 |
+
the fabulous `Blessings <https://pypi.org/project/blessings/>`_,
|
| 200 |
+
or the incredible `_Rich <https://pypi.org/project/rich/>`_.
|
| 201 |
+
|
| 202 |
+
If you wish Colorama's Fore, Back and Style constants were more capable,
|
| 203 |
+
then consider using one of the above highly capable libraries to generate
|
| 204 |
+
colors, etc, and use Colorama just for its primary purpose: to convert
|
| 205 |
+
those ANSI sequences to also work on Windows:
|
| 206 |
+
|
| 207 |
+
SIMILARLY, do not send PRs adding the generation of new ANSI types to Colorama.
|
| 208 |
+
We are only interested in converting ANSI codes to win32 API calls, not
|
| 209 |
+
shortcuts like the above to generate ANSI characters.
|
| 210 |
+
|
| 211 |
+
.. code-block:: python
|
| 212 |
+
|
| 213 |
+
from colorama import just_fix_windows_console
|
| 214 |
+
from termcolor import colored
|
| 215 |
+
|
| 216 |
+
# use Colorama to make Termcolor work on Windows too
|
| 217 |
+
just_fix_windows_console()
|
| 218 |
+
|
| 219 |
+
# then use Termcolor for all colored text output
|
| 220 |
+
print(colored('Hello, World!', 'green', 'on_red'))
|
| 221 |
+
|
| 222 |
+
Available formatting constants are::
|
| 223 |
+
|
| 224 |
+
Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
|
| 225 |
+
Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
|
| 226 |
+
Style: DIM, NORMAL, BRIGHT, RESET_ALL
|
| 227 |
+
|
| 228 |
+
``Style.RESET_ALL`` resets foreground, background, and brightness. Colorama will
|
| 229 |
+
perform this reset automatically on program exit.
|
| 230 |
+
|
| 231 |
+
These are fairly well supported, but not part of the standard::
|
| 232 |
+
|
| 233 |
+
Fore: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX
|
| 234 |
+
Back: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX
|
| 235 |
+
|
| 236 |
+
Cursor Positioning
|
| 237 |
+
..................
|
| 238 |
+
|
| 239 |
+
ANSI codes to reposition the cursor are supported. See ``demos/demo06.py`` for
|
| 240 |
+
an example of how to generate them.
|
| 241 |
+
|
| 242 |
+
Init Keyword Args
|
| 243 |
+
.................
|
| 244 |
+
|
| 245 |
+
``init()`` accepts some ``**kwargs`` to override default behaviour.
|
| 246 |
+
|
| 247 |
+
init(autoreset=False):
|
| 248 |
+
If you find yourself repeatedly sending reset sequences to turn off color
|
| 249 |
+
changes at the end of every print, then ``init(autoreset=True)`` will
|
| 250 |
+
automate that:
|
| 251 |
+
|
| 252 |
+
.. code-block:: python
|
| 253 |
+
|
| 254 |
+
from colorama import init
|
| 255 |
+
init(autoreset=True)
|
| 256 |
+
print(Fore.RED + 'some red text')
|
| 257 |
+
print('automatically back to default color again')
|
| 258 |
+
|
| 259 |
+
init(strip=None):
|
| 260 |
+
Pass ``True`` or ``False`` to override whether ANSI codes should be
|
| 261 |
+
stripped from the output. The default behaviour is to strip if on Windows
|
| 262 |
+
or if output is redirected (not a tty).
|
| 263 |
+
|
| 264 |
+
init(convert=None):
|
| 265 |
+
Pass ``True`` or ``False`` to override whether to convert ANSI codes in the
|
| 266 |
+
output into win32 calls. The default behaviour is to convert if on Windows
|
| 267 |
+
and output is to a tty (terminal).
|
| 268 |
+
|
| 269 |
+
init(wrap=True):
|
| 270 |
+
On Windows, Colorama works by replacing ``sys.stdout`` and ``sys.stderr``
|
| 271 |
+
with proxy objects, which override the ``.write()`` method to do their work.
|
| 272 |
+
If this wrapping causes you problems, then this can be disabled by passing
|
| 273 |
+
``init(wrap=False)``. The default behaviour is to wrap if ``autoreset`` or
|
| 274 |
+
``strip`` or ``convert`` are True.
|
| 275 |
+
|
| 276 |
+
When wrapping is disabled, colored printing on non-Windows platforms will
|
| 277 |
+
continue to work as normal. To do cross-platform colored output, you can
|
| 278 |
+
use Colorama's ``AnsiToWin32`` proxy directly:
|
| 279 |
+
|
| 280 |
+
.. code-block:: python
|
| 281 |
+
|
| 282 |
+
import sys
|
| 283 |
+
from colorama import init, AnsiToWin32
|
| 284 |
+
init(wrap=False)
|
| 285 |
+
stream = AnsiToWin32(sys.stderr).stream
|
| 286 |
+
|
| 287 |
+
# Python 2
|
| 288 |
+
print >>stream, Fore.BLUE + 'blue text on stderr'
|
| 289 |
+
|
| 290 |
+
# Python 3
|
| 291 |
+
print(Fore.BLUE + 'blue text on stderr', file=stream)
|
| 292 |
+
|
| 293 |
+
Recognised ANSI Sequences
|
| 294 |
+
.........................
|
| 295 |
+
|
| 296 |
+
ANSI sequences generally take the form::
|
| 297 |
+
|
| 298 |
+
ESC [ <param> ; <param> ... <command>
|
| 299 |
+
|
| 300 |
+
Where ``<param>`` is an integer, and ``<command>`` is a single letter. Zero or
|
| 301 |
+
more params are passed to a ``<command>``. If no params are passed, it is
|
| 302 |
+
generally synonymous with passing a single zero. No spaces exist in the
|
| 303 |
+
sequence; they have been inserted here simply to read more easily.
|
| 304 |
+
|
| 305 |
+
The only ANSI sequences that Colorama converts into win32 calls are::
|
| 306 |
+
|
| 307 |
+
ESC [ 0 m # reset all (colors and brightness)
|
| 308 |
+
ESC [ 1 m # bright
|
| 309 |
+
ESC [ 2 m # dim (looks same as normal brightness)
|
| 310 |
+
ESC [ 22 m # normal brightness
|
| 311 |
+
|
| 312 |
+
# FOREGROUND:
|
| 313 |
+
ESC [ 30 m # black
|
| 314 |
+
ESC [ 31 m # red
|
| 315 |
+
ESC [ 32 m # green
|
| 316 |
+
ESC [ 33 m # yellow
|
| 317 |
+
ESC [ 34 m # blue
|
| 318 |
+
ESC [ 35 m # magenta
|
| 319 |
+
ESC [ 36 m # cyan
|
| 320 |
+
ESC [ 37 m # white
|
| 321 |
+
ESC [ 39 m # reset
|
| 322 |
+
|
| 323 |
+
# BACKGROUND
|
| 324 |
+
ESC [ 40 m # black
|
| 325 |
+
ESC [ 41 m # red
|
| 326 |
+
ESC [ 42 m # green
|
| 327 |
+
ESC [ 43 m # yellow
|
| 328 |
+
ESC [ 44 m # blue
|
| 329 |
+
ESC [ 45 m # magenta
|
| 330 |
+
ESC [ 46 m # cyan
|
| 331 |
+
ESC [ 47 m # white
|
| 332 |
+
ESC [ 49 m # reset
|
| 333 |
+
|
| 334 |
+
# cursor positioning
|
| 335 |
+
ESC [ y;x H # position cursor at x across, y down
|
| 336 |
+
ESC [ y;x f # position cursor at x across, y down
|
| 337 |
+
ESC [ n A # move cursor n lines up
|
| 338 |
+
ESC [ n B # move cursor n lines down
|
| 339 |
+
ESC [ n C # move cursor n characters forward
|
| 340 |
+
ESC [ n D # move cursor n characters backward
|
| 341 |
+
|
| 342 |
+
# clear the screen
|
| 343 |
+
ESC [ mode J # clear the screen
|
| 344 |
+
|
| 345 |
+
# clear the line
|
| 346 |
+
ESC [ mode K # clear the line
|
| 347 |
+
|
| 348 |
+
Multiple numeric params to the ``'m'`` command can be combined into a single
|
| 349 |
+
sequence::
|
| 350 |
+
|
| 351 |
+
ESC [ 36 ; 45 ; 1 m # bright cyan text on magenta background
|
| 352 |
+
|
| 353 |
+
All other ANSI sequences of the form ``ESC [ <param> ; <param> ... <command>``
|
| 354 |
+
are silently stripped from the output on Windows.
|
| 355 |
+
|
| 356 |
+
Any other form of ANSI sequence, such as single-character codes or alternative
|
| 357 |
+
initial characters, are not recognised or stripped. It would be cool to add
|
| 358 |
+
them though. Let me know if it would be useful for you, via the Issues on
|
| 359 |
+
GitHub.
|
| 360 |
+
|
| 361 |
+
Status & Known Problems
|
| 362 |
+
-----------------------
|
| 363 |
+
|
| 364 |
+
I've personally only tested it on Windows XP (CMD, Console2), Ubuntu
|
| 365 |
+
(gnome-terminal, xterm), and OS X.
|
| 366 |
+
|
| 367 |
+
Some valid ANSI sequences aren't recognised.
|
| 368 |
+
|
| 369 |
+
If you're hacking on the code, see `README-hacking.md`_. ESPECIALLY, see the
|
| 370 |
+
explanation there of why we do not want PRs that allow Colorama to generate new
|
| 371 |
+
types of ANSI codes.
|
| 372 |
+
|
| 373 |
+
See outstanding issues and wish-list:
|
| 374 |
+
https://github.com/tartley/colorama/issues
|
| 375 |
+
|
| 376 |
+
If anything doesn't work for you, or doesn't do what you expected or hoped for,
|
| 377 |
+
I'd love to hear about it on that issues list, would be delighted by patches,
|
| 378 |
+
and would be happy to grant commit access to anyone who submits a working patch
|
| 379 |
+
or two.
|
| 380 |
+
|
| 381 |
+
.. _README-hacking.md: README-hacking.md
|
| 382 |
+
|
| 383 |
+
License
|
| 384 |
+
-------
|
| 385 |
+
|
| 386 |
+
Copyright Jonathan Hartley & Arnon Yaari, 2013-2020. BSD 3-Clause license; see
|
| 387 |
+
LICENSE file.
|
| 388 |
+
|
| 389 |
+
Professional support
|
| 390 |
+
--------------------
|
| 391 |
+
|
| 392 |
+
.. |tideliftlogo| image:: https://cdn2.hubspot.net/hubfs/4008838/website/logos/logos_for_download/Tidelift_primary-shorthand-logo.png
|
| 393 |
+
:alt: Tidelift
|
| 394 |
+
:target: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme
|
| 395 |
+
|
| 396 |
+
.. list-table::
|
| 397 |
+
:widths: 10 100
|
| 398 |
+
|
| 399 |
+
* - |tideliftlogo|
|
| 400 |
+
- Professional support for colorama is available as part of the
|
| 401 |
+
`Tidelift Subscription`_.
|
| 402 |
+
Tidelift gives software development teams a single source for purchasing
|
| 403 |
+
and maintaining their software, with professional grade assurances from
|
| 404 |
+
the experts who know it best, while seamlessly integrating with existing
|
| 405 |
+
tools.
|
| 406 |
+
|
| 407 |
+
.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme
|
| 408 |
+
|
| 409 |
+
Thanks
|
| 410 |
+
------
|
| 411 |
+
|
| 412 |
+
See the CHANGELOG for more thanks!
|
| 413 |
+
|
| 414 |
+
* Marc Schlaich (schlamar) for a ``setup.py`` fix for Python2.5.
|
| 415 |
+
* Marc Abramowitz, reported & fixed a crash on exit with closed ``stdout``,
|
| 416 |
+
providing a solution to issue #7's setuptools/distutils debate,
|
| 417 |
+
and other fixes.
|
| 418 |
+
* User 'eryksun', for guidance on correctly instantiating ``ctypes.windll``.
|
| 419 |
+
* Matthew McCormick for politely pointing out a longstanding crash on non-Win.
|
| 420 |
+
* Ben Hoyt, for a magnificent fix under 64-bit Windows.
|
| 421 |
+
* Jesse at Empty Square for submitting a fix for examples in the README.
|
| 422 |
+
* User 'jamessp', an observant documentation fix for cursor positioning.
|
| 423 |
+
* User 'vaal1239', Dave Mckee & Lackner Kristof for a tiny but much-needed Win7
|
| 424 |
+
fix.
|
| 425 |
+
* Julien Stuyck, for wisely suggesting Python3 compatible updates to README.
|
| 426 |
+
* Daniel Griffith for multiple fabulous patches.
|
| 427 |
+
* Oscar Lesta for a valuable fix to stop ANSI chars being sent to non-tty
|
| 428 |
+
output.
|
| 429 |
+
* Roger Binns, for many suggestions, valuable feedback, & bug reports.
|
| 430 |
+
* Tim Golden for thought and much appreciated feedback on the initial idea.
|
| 431 |
+
* User 'Zearin' for updates to the README file.
|
| 432 |
+
* John Szakmeister for adding support for light colors
|
| 433 |
+
* Charles Merriam for adding documentation to demos
|
| 434 |
+
* Jurko for a fix on 64-bit Windows CPython2.5 w/o ctypes
|
| 435 |
+
* Florian Bruhin for a fix when stdout or stderr are None
|
| 436 |
+
* Thomas Weininger for fixing ValueError on Windows
|
| 437 |
+
* Remi Rampin for better Github integration and fixes to the README file
|
| 438 |
+
* Simeon Visser for closing a file handle using 'with' and updating classifiers
|
| 439 |
+
to include Python 3.3 and 3.4
|
| 440 |
+
* Andy Neff for fixing RESET of LIGHT_EX colors.
|
| 441 |
+
* Jonathan Hartley for the initial idea and implementation.
|
evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/RECORD
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
colorama-0.4.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
colorama-0.4.6.dist-info/METADATA,sha256=e67SnrUMOym9sz_4TjF3vxvAV4T3aF7NyqRHHH3YEMw,17158
|
| 3 |
+
colorama-0.4.6.dist-info/RECORD,,
|
| 4 |
+
colorama-0.4.6.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 5 |
+
colorama-0.4.6.dist-info/WHEEL,sha256=cdcF4Fbd0FPtw2EMIOwH-3rSOTUdTCeOSXRMD1iLUb8,105
|
| 6 |
+
colorama-0.4.6.dist-info/licenses/LICENSE.txt,sha256=ysNcAmhuXQSlpxQL-zs25zrtSWZW6JEQLkKIhteTAxg,1491
|
| 7 |
+
colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266
|
| 8 |
+
colorama/__pycache__/__init__.cpython-310.pyc,,
|
| 9 |
+
colorama/__pycache__/ansi.cpython-310.pyc,,
|
| 10 |
+
colorama/__pycache__/ansitowin32.cpython-310.pyc,,
|
| 11 |
+
colorama/__pycache__/initialise.cpython-310.pyc,,
|
| 12 |
+
colorama/__pycache__/win32.cpython-310.pyc,,
|
| 13 |
+
colorama/__pycache__/winterm.cpython-310.pyc,,
|
| 14 |
+
colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522
|
| 15 |
+
colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128
|
| 16 |
+
colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325
|
| 17 |
+
colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75
|
| 18 |
+
colorama/tests/__pycache__/__init__.cpython-310.pyc,,
|
| 19 |
+
colorama/tests/__pycache__/ansi_test.cpython-310.pyc,,
|
| 20 |
+
colorama/tests/__pycache__/ansitowin32_test.cpython-310.pyc,,
|
| 21 |
+
colorama/tests/__pycache__/initialise_test.cpython-310.pyc,,
|
| 22 |
+
colorama/tests/__pycache__/isatty_test.cpython-310.pyc,,
|
| 23 |
+
colorama/tests/__pycache__/utils.cpython-310.pyc,,
|
| 24 |
+
colorama/tests/__pycache__/winterm_test.cpython-310.pyc,,
|
| 25 |
+
colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839
|
| 26 |
+
colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678
|
| 27 |
+
colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741
|
| 28 |
+
colorama/tests/isatty_test.py,sha256=Pg26LRpv0yQDB5Ac-sxgVXG7hsA1NYvapFgApZfYzZg,1866
|
| 29 |
+
colorama/tests/utils.py,sha256=1IIRylG39z5-dzq09R_ngufxyPZxgldNbrxKxUGwGKE,1079
|
| 30 |
+
colorama/tests/winterm_test.py,sha256=qoWFPEjym5gm2RuMwpf3pOis3a5r_PJZFCzK254JL8A,3709
|
| 31 |
+
colorama/win32.py,sha256=YQOKwMTwtGBbsY4dL5HYTvwTeP9wIQra5MvPNddpxZs,6181
|
| 32 |
+
colorama/winterm.py,sha256=XCQFDHjPi6AHYNdZwy0tA02H-Jh48Jp-HvCjeLeLp3U,7134
|
evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.11.1
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py2-none-any
|
| 5 |
+
Tag: py3-none-any
|
evalkit_internvl/lib/python3.10/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2010 Jonathan Hartley
|
| 2 |
+
All rights reserved.
|
| 3 |
+
|
| 4 |
+
Redistribution and use in source and binary forms, with or without
|
| 5 |
+
modification, are permitted provided that the following conditions are met:
|
| 6 |
+
|
| 7 |
+
* Redistributions of source code must retain the above copyright notice, this
|
| 8 |
+
list of conditions and the following disclaimer.
|
| 9 |
+
|
| 10 |
+
* Redistributions in binary form must reproduce the above copyright notice,
|
| 11 |
+
this list of conditions and the following disclaimer in the documentation
|
| 12 |
+
and/or other materials provided with the distribution.
|
| 13 |
+
|
| 14 |
+
* Neither the name of the copyright holders, nor those of its contributors
|
| 15 |
+
may be used to endorse or promote products derived from this software without
|
| 16 |
+
specific prior written permission.
|
| 17 |
+
|
| 18 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
| 19 |
+
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 20 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 21 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
| 22 |
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
| 23 |
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 24 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 25 |
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
| 26 |
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 27 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/__init__.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
try:
|
| 5 |
+
from ._version import version as __version__
|
| 6 |
+
except ImportError:
|
| 7 |
+
__version__ = 'unknown'
|
| 8 |
+
|
| 9 |
+
__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz',
|
| 10 |
+
'utils', 'zoneinfo']
|
| 11 |
+
|
| 12 |
+
def __getattr__(name):
|
| 13 |
+
import importlib
|
| 14 |
+
|
| 15 |
+
if name in __all__:
|
| 16 |
+
return importlib.import_module("." + name, __name__)
|
| 17 |
+
raise AttributeError(
|
| 18 |
+
"module {!r} has not attribute {!r}".format(__name__, name)
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def __dir__():
|
| 23 |
+
# __dir__ should include all the lazy-importable modules as well.
|
| 24 |
+
return [x for x in globals() if x not in sys.modules] + __all__
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/_version.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# file generated by setuptools_scm
|
| 2 |
+
# don't change, don't track in version control
|
| 3 |
+
__version__ = version = '2.9.0.post0'
|
| 4 |
+
__version_tuple__ = version_tuple = (2, 9, 0)
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/easter.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
This module offers a generic Easter computing method for any given year, using
|
| 4 |
+
Western, Orthodox or Julian algorithms.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import datetime
|
| 8 |
+
|
| 9 |
+
__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"]
|
| 10 |
+
|
| 11 |
+
EASTER_JULIAN = 1
|
| 12 |
+
EASTER_ORTHODOX = 2
|
| 13 |
+
EASTER_WESTERN = 3
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def easter(year, method=EASTER_WESTERN):
|
| 17 |
+
"""
|
| 18 |
+
This method was ported from the work done by GM Arts,
|
| 19 |
+
on top of the algorithm by Claus Tondering, which was
|
| 20 |
+
based in part on the algorithm of Ouding (1940), as
|
| 21 |
+
quoted in "Explanatory Supplement to the Astronomical
|
| 22 |
+
Almanac", P. Kenneth Seidelmann, editor.
|
| 23 |
+
|
| 24 |
+
This algorithm implements three different Easter
|
| 25 |
+
calculation methods:
|
| 26 |
+
|
| 27 |
+
1. Original calculation in Julian calendar, valid in
|
| 28 |
+
dates after 326 AD
|
| 29 |
+
2. Original method, with date converted to Gregorian
|
| 30 |
+
calendar, valid in years 1583 to 4099
|
| 31 |
+
3. Revised method, in Gregorian calendar, valid in
|
| 32 |
+
years 1583 to 4099 as well
|
| 33 |
+
|
| 34 |
+
These methods are represented by the constants:
|
| 35 |
+
|
| 36 |
+
* ``EASTER_JULIAN = 1``
|
| 37 |
+
* ``EASTER_ORTHODOX = 2``
|
| 38 |
+
* ``EASTER_WESTERN = 3``
|
| 39 |
+
|
| 40 |
+
The default method is method 3.
|
| 41 |
+
|
| 42 |
+
More about the algorithm may be found at:
|
| 43 |
+
|
| 44 |
+
`GM Arts: Easter Algorithms <http://www.gmarts.org/index.php?go=415>`_
|
| 45 |
+
|
| 46 |
+
and
|
| 47 |
+
|
| 48 |
+
`The Calendar FAQ: Easter <https://www.tondering.dk/claus/cal/easter.php>`_
|
| 49 |
+
|
| 50 |
+
"""
|
| 51 |
+
|
| 52 |
+
if not (1 <= method <= 3):
|
| 53 |
+
raise ValueError("invalid method")
|
| 54 |
+
|
| 55 |
+
# g - Golden year - 1
|
| 56 |
+
# c - Century
|
| 57 |
+
# h - (23 - Epact) mod 30
|
| 58 |
+
# i - Number of days from March 21 to Paschal Full Moon
|
| 59 |
+
# j - Weekday for PFM (0=Sunday, etc)
|
| 60 |
+
# p - Number of days from March 21 to Sunday on or before PFM
|
| 61 |
+
# (-6 to 28 methods 1 & 3, to 56 for method 2)
|
| 62 |
+
# e - Extra days to add for method 2 (converting Julian
|
| 63 |
+
# date to Gregorian date)
|
| 64 |
+
|
| 65 |
+
y = year
|
| 66 |
+
g = y % 19
|
| 67 |
+
e = 0
|
| 68 |
+
if method < 3:
|
| 69 |
+
# Old method
|
| 70 |
+
i = (19*g + 15) % 30
|
| 71 |
+
j = (y + y//4 + i) % 7
|
| 72 |
+
if method == 2:
|
| 73 |
+
# Extra dates to convert Julian to Gregorian date
|
| 74 |
+
e = 10
|
| 75 |
+
if y > 1600:
|
| 76 |
+
e = e + y//100 - 16 - (y//100 - 16)//4
|
| 77 |
+
else:
|
| 78 |
+
# New method
|
| 79 |
+
c = y//100
|
| 80 |
+
h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30
|
| 81 |
+
i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11))
|
| 82 |
+
j = (y + y//4 + i + 2 - c + c//4) % 7
|
| 83 |
+
|
| 84 |
+
# p can be from -6 to 56 corresponding to dates 22 March to 23 May
|
| 85 |
+
# (later dates apply to method 2, although 23 May never actually occurs)
|
| 86 |
+
p = i - j + e
|
| 87 |
+
d = 1 + (p + 27 + (p + 6)//40) % 31
|
| 88 |
+
m = 3 + (p + 26)//30
|
| 89 |
+
return datetime.date(int(y), int(m), int(d))
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/relativedelta.py
ADDED
|
@@ -0,0 +1,599 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
import datetime
|
| 3 |
+
import calendar
|
| 4 |
+
|
| 5 |
+
import operator
|
| 6 |
+
from math import copysign
|
| 7 |
+
|
| 8 |
+
from six import integer_types
|
| 9 |
+
from warnings import warn
|
| 10 |
+
|
| 11 |
+
from ._common import weekday
|
| 12 |
+
|
| 13 |
+
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7))
|
| 14 |
+
|
| 15 |
+
__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class relativedelta(object):
|
| 19 |
+
"""
|
| 20 |
+
The relativedelta type is designed to be applied to an existing datetime and
|
| 21 |
+
can replace specific components of that datetime, or represents an interval
|
| 22 |
+
of time.
|
| 23 |
+
|
| 24 |
+
It is based on the specification of the excellent work done by M.-A. Lemburg
|
| 25 |
+
in his
|
| 26 |
+
`mx.DateTime <https://www.egenix.com/products/python/mxBase/mxDateTime/>`_ extension.
|
| 27 |
+
However, notice that this type does *NOT* implement the same algorithm as
|
| 28 |
+
his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.
|
| 29 |
+
|
| 30 |
+
There are two different ways to build a relativedelta instance. The
|
| 31 |
+
first one is passing it two date/datetime classes::
|
| 32 |
+
|
| 33 |
+
relativedelta(datetime1, datetime2)
|
| 34 |
+
|
| 35 |
+
The second one is passing it any number of the following keyword arguments::
|
| 36 |
+
|
| 37 |
+
relativedelta(arg1=x,arg2=y,arg3=z...)
|
| 38 |
+
|
| 39 |
+
year, month, day, hour, minute, second, microsecond:
|
| 40 |
+
Absolute information (argument is singular); adding or subtracting a
|
| 41 |
+
relativedelta with absolute information does not perform an arithmetic
|
| 42 |
+
operation, but rather REPLACES the corresponding value in the
|
| 43 |
+
original datetime with the value(s) in relativedelta.
|
| 44 |
+
|
| 45 |
+
years, months, weeks, days, hours, minutes, seconds, microseconds:
|
| 46 |
+
Relative information, may be negative (argument is plural); adding
|
| 47 |
+
or subtracting a relativedelta with relative information performs
|
| 48 |
+
the corresponding arithmetic operation on the original datetime value
|
| 49 |
+
with the information in the relativedelta.
|
| 50 |
+
|
| 51 |
+
weekday:
|
| 52 |
+
One of the weekday instances (MO, TU, etc) available in the
|
| 53 |
+
relativedelta module. These instances may receive a parameter N,
|
| 54 |
+
specifying the Nth weekday, which could be positive or negative
|
| 55 |
+
(like MO(+1) or MO(-2)). Not specifying it is the same as specifying
|
| 56 |
+
+1. You can also use an integer, where 0=MO. This argument is always
|
| 57 |
+
relative e.g. if the calculated date is already Monday, using MO(1)
|
| 58 |
+
or MO(-1) won't change the day. To effectively make it absolute, use
|
| 59 |
+
it in combination with the day argument (e.g. day=1, MO(1) for first
|
| 60 |
+
Monday of the month).
|
| 61 |
+
|
| 62 |
+
leapdays:
|
| 63 |
+
Will add given days to the date found, if year is a leap
|
| 64 |
+
year, and the date found is post 28 of february.
|
| 65 |
+
|
| 66 |
+
yearday, nlyearday:
|
| 67 |
+
Set the yearday or the non-leap year day (jump leap days).
|
| 68 |
+
These are converted to day/month/leapdays information.
|
| 69 |
+
|
| 70 |
+
There are relative and absolute forms of the keyword
|
| 71 |
+
arguments. The plural is relative, and the singular is
|
| 72 |
+
absolute. For each argument in the order below, the absolute form
|
| 73 |
+
is applied first (by setting each attribute to that value) and
|
| 74 |
+
then the relative form (by adding the value to the attribute).
|
| 75 |
+
|
| 76 |
+
The order of attributes considered when this relativedelta is
|
| 77 |
+
added to a datetime is:
|
| 78 |
+
|
| 79 |
+
1. Year
|
| 80 |
+
2. Month
|
| 81 |
+
3. Day
|
| 82 |
+
4. Hours
|
| 83 |
+
5. Minutes
|
| 84 |
+
6. Seconds
|
| 85 |
+
7. Microseconds
|
| 86 |
+
|
| 87 |
+
Finally, weekday is applied, using the rule described above.
|
| 88 |
+
|
| 89 |
+
For example
|
| 90 |
+
|
| 91 |
+
>>> from datetime import datetime
|
| 92 |
+
>>> from dateutil.relativedelta import relativedelta, MO
|
| 93 |
+
>>> dt = datetime(2018, 4, 9, 13, 37, 0)
|
| 94 |
+
>>> delta = relativedelta(hours=25, day=1, weekday=MO(1))
|
| 95 |
+
>>> dt + delta
|
| 96 |
+
datetime.datetime(2018, 4, 2, 14, 37)
|
| 97 |
+
|
| 98 |
+
First, the day is set to 1 (the first of the month), then 25 hours
|
| 99 |
+
are added, to get to the 2nd day and 14th hour, finally the
|
| 100 |
+
weekday is applied, but since the 2nd is already a Monday there is
|
| 101 |
+
no effect.
|
| 102 |
+
|
| 103 |
+
"""
|
| 104 |
+
|
| 105 |
+
def __init__(self, dt1=None, dt2=None,
|
| 106 |
+
years=0, months=0, days=0, leapdays=0, weeks=0,
|
| 107 |
+
hours=0, minutes=0, seconds=0, microseconds=0,
|
| 108 |
+
year=None, month=None, day=None, weekday=None,
|
| 109 |
+
yearday=None, nlyearday=None,
|
| 110 |
+
hour=None, minute=None, second=None, microsecond=None):
|
| 111 |
+
|
| 112 |
+
if dt1 and dt2:
|
| 113 |
+
# datetime is a subclass of date. So both must be date
|
| 114 |
+
if not (isinstance(dt1, datetime.date) and
|
| 115 |
+
isinstance(dt2, datetime.date)):
|
| 116 |
+
raise TypeError("relativedelta only diffs datetime/date")
|
| 117 |
+
|
| 118 |
+
# We allow two dates, or two datetimes, so we coerce them to be
|
| 119 |
+
# of the same type
|
| 120 |
+
if (isinstance(dt1, datetime.datetime) !=
|
| 121 |
+
isinstance(dt2, datetime.datetime)):
|
| 122 |
+
if not isinstance(dt1, datetime.datetime):
|
| 123 |
+
dt1 = datetime.datetime.fromordinal(dt1.toordinal())
|
| 124 |
+
elif not isinstance(dt2, datetime.datetime):
|
| 125 |
+
dt2 = datetime.datetime.fromordinal(dt2.toordinal())
|
| 126 |
+
|
| 127 |
+
self.years = 0
|
| 128 |
+
self.months = 0
|
| 129 |
+
self.days = 0
|
| 130 |
+
self.leapdays = 0
|
| 131 |
+
self.hours = 0
|
| 132 |
+
self.minutes = 0
|
| 133 |
+
self.seconds = 0
|
| 134 |
+
self.microseconds = 0
|
| 135 |
+
self.year = None
|
| 136 |
+
self.month = None
|
| 137 |
+
self.day = None
|
| 138 |
+
self.weekday = None
|
| 139 |
+
self.hour = None
|
| 140 |
+
self.minute = None
|
| 141 |
+
self.second = None
|
| 142 |
+
self.microsecond = None
|
| 143 |
+
self._has_time = 0
|
| 144 |
+
|
| 145 |
+
# Get year / month delta between the two
|
| 146 |
+
months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month)
|
| 147 |
+
self._set_months(months)
|
| 148 |
+
|
| 149 |
+
# Remove the year/month delta so the timedelta is just well-defined
|
| 150 |
+
# time units (seconds, days and microseconds)
|
| 151 |
+
dtm = self.__radd__(dt2)
|
| 152 |
+
|
| 153 |
+
# If we've overshot our target, make an adjustment
|
| 154 |
+
if dt1 < dt2:
|
| 155 |
+
compare = operator.gt
|
| 156 |
+
increment = 1
|
| 157 |
+
else:
|
| 158 |
+
compare = operator.lt
|
| 159 |
+
increment = -1
|
| 160 |
+
|
| 161 |
+
while compare(dt1, dtm):
|
| 162 |
+
months += increment
|
| 163 |
+
self._set_months(months)
|
| 164 |
+
dtm = self.__radd__(dt2)
|
| 165 |
+
|
| 166 |
+
# Get the timedelta between the "months-adjusted" date and dt1
|
| 167 |
+
delta = dt1 - dtm
|
| 168 |
+
self.seconds = delta.seconds + delta.days * 86400
|
| 169 |
+
self.microseconds = delta.microseconds
|
| 170 |
+
else:
|
| 171 |
+
# Check for non-integer values in integer-only quantities
|
| 172 |
+
if any(x is not None and x != int(x) for x in (years, months)):
|
| 173 |
+
raise ValueError("Non-integer years and months are "
|
| 174 |
+
"ambiguous and not currently supported.")
|
| 175 |
+
|
| 176 |
+
# Relative information
|
| 177 |
+
self.years = int(years)
|
| 178 |
+
self.months = int(months)
|
| 179 |
+
self.days = days + weeks * 7
|
| 180 |
+
self.leapdays = leapdays
|
| 181 |
+
self.hours = hours
|
| 182 |
+
self.minutes = minutes
|
| 183 |
+
self.seconds = seconds
|
| 184 |
+
self.microseconds = microseconds
|
| 185 |
+
|
| 186 |
+
# Absolute information
|
| 187 |
+
self.year = year
|
| 188 |
+
self.month = month
|
| 189 |
+
self.day = day
|
| 190 |
+
self.hour = hour
|
| 191 |
+
self.minute = minute
|
| 192 |
+
self.second = second
|
| 193 |
+
self.microsecond = microsecond
|
| 194 |
+
|
| 195 |
+
if any(x is not None and int(x) != x
|
| 196 |
+
for x in (year, month, day, hour,
|
| 197 |
+
minute, second, microsecond)):
|
| 198 |
+
# For now we'll deprecate floats - later it'll be an error.
|
| 199 |
+
warn("Non-integer value passed as absolute information. " +
|
| 200 |
+
"This is not a well-defined condition and will raise " +
|
| 201 |
+
"errors in future versions.", DeprecationWarning)
|
| 202 |
+
|
| 203 |
+
if isinstance(weekday, integer_types):
|
| 204 |
+
self.weekday = weekdays[weekday]
|
| 205 |
+
else:
|
| 206 |
+
self.weekday = weekday
|
| 207 |
+
|
| 208 |
+
yday = 0
|
| 209 |
+
if nlyearday:
|
| 210 |
+
yday = nlyearday
|
| 211 |
+
elif yearday:
|
| 212 |
+
yday = yearday
|
| 213 |
+
if yearday > 59:
|
| 214 |
+
self.leapdays = -1
|
| 215 |
+
if yday:
|
| 216 |
+
ydayidx = [31, 59, 90, 120, 151, 181, 212,
|
| 217 |
+
243, 273, 304, 334, 366]
|
| 218 |
+
for idx, ydays in enumerate(ydayidx):
|
| 219 |
+
if yday <= ydays:
|
| 220 |
+
self.month = idx+1
|
| 221 |
+
if idx == 0:
|
| 222 |
+
self.day = yday
|
| 223 |
+
else:
|
| 224 |
+
self.day = yday-ydayidx[idx-1]
|
| 225 |
+
break
|
| 226 |
+
else:
|
| 227 |
+
raise ValueError("invalid year day (%d)" % yday)
|
| 228 |
+
|
| 229 |
+
self._fix()
|
| 230 |
+
|
| 231 |
+
def _fix(self):
|
| 232 |
+
if abs(self.microseconds) > 999999:
|
| 233 |
+
s = _sign(self.microseconds)
|
| 234 |
+
div, mod = divmod(self.microseconds * s, 1000000)
|
| 235 |
+
self.microseconds = mod * s
|
| 236 |
+
self.seconds += div * s
|
| 237 |
+
if abs(self.seconds) > 59:
|
| 238 |
+
s = _sign(self.seconds)
|
| 239 |
+
div, mod = divmod(self.seconds * s, 60)
|
| 240 |
+
self.seconds = mod * s
|
| 241 |
+
self.minutes += div * s
|
| 242 |
+
if abs(self.minutes) > 59:
|
| 243 |
+
s = _sign(self.minutes)
|
| 244 |
+
div, mod = divmod(self.minutes * s, 60)
|
| 245 |
+
self.minutes = mod * s
|
| 246 |
+
self.hours += div * s
|
| 247 |
+
if abs(self.hours) > 23:
|
| 248 |
+
s = _sign(self.hours)
|
| 249 |
+
div, mod = divmod(self.hours * s, 24)
|
| 250 |
+
self.hours = mod * s
|
| 251 |
+
self.days += div * s
|
| 252 |
+
if abs(self.months) > 11:
|
| 253 |
+
s = _sign(self.months)
|
| 254 |
+
div, mod = divmod(self.months * s, 12)
|
| 255 |
+
self.months = mod * s
|
| 256 |
+
self.years += div * s
|
| 257 |
+
if (self.hours or self.minutes or self.seconds or self.microseconds
|
| 258 |
+
or self.hour is not None or self.minute is not None or
|
| 259 |
+
self.second is not None or self.microsecond is not None):
|
| 260 |
+
self._has_time = 1
|
| 261 |
+
else:
|
| 262 |
+
self._has_time = 0
|
| 263 |
+
|
| 264 |
+
@property
|
| 265 |
+
def weeks(self):
|
| 266 |
+
return int(self.days / 7.0)
|
| 267 |
+
|
| 268 |
+
@weeks.setter
|
| 269 |
+
def weeks(self, value):
|
| 270 |
+
self.days = self.days - (self.weeks * 7) + value * 7
|
| 271 |
+
|
| 272 |
+
def _set_months(self, months):
|
| 273 |
+
self.months = months
|
| 274 |
+
if abs(self.months) > 11:
|
| 275 |
+
s = _sign(self.months)
|
| 276 |
+
div, mod = divmod(self.months * s, 12)
|
| 277 |
+
self.months = mod * s
|
| 278 |
+
self.years = div * s
|
| 279 |
+
else:
|
| 280 |
+
self.years = 0
|
| 281 |
+
|
| 282 |
+
def normalized(self):
|
| 283 |
+
"""
|
| 284 |
+
Return a version of this object represented entirely using integer
|
| 285 |
+
values for the relative attributes.
|
| 286 |
+
|
| 287 |
+
>>> relativedelta(days=1.5, hours=2).normalized()
|
| 288 |
+
relativedelta(days=+1, hours=+14)
|
| 289 |
+
|
| 290 |
+
:return:
|
| 291 |
+
Returns a :class:`dateutil.relativedelta.relativedelta` object.
|
| 292 |
+
"""
|
| 293 |
+
# Cascade remainders down (rounding each to roughly nearest microsecond)
|
| 294 |
+
days = int(self.days)
|
| 295 |
+
|
| 296 |
+
hours_f = round(self.hours + 24 * (self.days - days), 11)
|
| 297 |
+
hours = int(hours_f)
|
| 298 |
+
|
| 299 |
+
minutes_f = round(self.minutes + 60 * (hours_f - hours), 10)
|
| 300 |
+
minutes = int(minutes_f)
|
| 301 |
+
|
| 302 |
+
seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8)
|
| 303 |
+
seconds = int(seconds_f)
|
| 304 |
+
|
| 305 |
+
microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds))
|
| 306 |
+
|
| 307 |
+
# Constructor carries overflow back up with call to _fix()
|
| 308 |
+
return self.__class__(years=self.years, months=self.months,
|
| 309 |
+
days=days, hours=hours, minutes=minutes,
|
| 310 |
+
seconds=seconds, microseconds=microseconds,
|
| 311 |
+
leapdays=self.leapdays, year=self.year,
|
| 312 |
+
month=self.month, day=self.day,
|
| 313 |
+
weekday=self.weekday, hour=self.hour,
|
| 314 |
+
minute=self.minute, second=self.second,
|
| 315 |
+
microsecond=self.microsecond)
|
| 316 |
+
|
| 317 |
+
def __add__(self, other):
|
| 318 |
+
if isinstance(other, relativedelta):
|
| 319 |
+
return self.__class__(years=other.years + self.years,
|
| 320 |
+
months=other.months + self.months,
|
| 321 |
+
days=other.days + self.days,
|
| 322 |
+
hours=other.hours + self.hours,
|
| 323 |
+
minutes=other.minutes + self.minutes,
|
| 324 |
+
seconds=other.seconds + self.seconds,
|
| 325 |
+
microseconds=(other.microseconds +
|
| 326 |
+
self.microseconds),
|
| 327 |
+
leapdays=other.leapdays or self.leapdays,
|
| 328 |
+
year=(other.year if other.year is not None
|
| 329 |
+
else self.year),
|
| 330 |
+
month=(other.month if other.month is not None
|
| 331 |
+
else self.month),
|
| 332 |
+
day=(other.day if other.day is not None
|
| 333 |
+
else self.day),
|
| 334 |
+
weekday=(other.weekday if other.weekday is not None
|
| 335 |
+
else self.weekday),
|
| 336 |
+
hour=(other.hour if other.hour is not None
|
| 337 |
+
else self.hour),
|
| 338 |
+
minute=(other.minute if other.minute is not None
|
| 339 |
+
else self.minute),
|
| 340 |
+
second=(other.second if other.second is not None
|
| 341 |
+
else self.second),
|
| 342 |
+
microsecond=(other.microsecond if other.microsecond
|
| 343 |
+
is not None else
|
| 344 |
+
self.microsecond))
|
| 345 |
+
if isinstance(other, datetime.timedelta):
|
| 346 |
+
return self.__class__(years=self.years,
|
| 347 |
+
months=self.months,
|
| 348 |
+
days=self.days + other.days,
|
| 349 |
+
hours=self.hours,
|
| 350 |
+
minutes=self.minutes,
|
| 351 |
+
seconds=self.seconds + other.seconds,
|
| 352 |
+
microseconds=self.microseconds + other.microseconds,
|
| 353 |
+
leapdays=self.leapdays,
|
| 354 |
+
year=self.year,
|
| 355 |
+
month=self.month,
|
| 356 |
+
day=self.day,
|
| 357 |
+
weekday=self.weekday,
|
| 358 |
+
hour=self.hour,
|
| 359 |
+
minute=self.minute,
|
| 360 |
+
second=self.second,
|
| 361 |
+
microsecond=self.microsecond)
|
| 362 |
+
if not isinstance(other, datetime.date):
|
| 363 |
+
return NotImplemented
|
| 364 |
+
elif self._has_time and not isinstance(other, datetime.datetime):
|
| 365 |
+
other = datetime.datetime.fromordinal(other.toordinal())
|
| 366 |
+
year = (self.year or other.year)+self.years
|
| 367 |
+
month = self.month or other.month
|
| 368 |
+
if self.months:
|
| 369 |
+
assert 1 <= abs(self.months) <= 12
|
| 370 |
+
month += self.months
|
| 371 |
+
if month > 12:
|
| 372 |
+
year += 1
|
| 373 |
+
month -= 12
|
| 374 |
+
elif month < 1:
|
| 375 |
+
year -= 1
|
| 376 |
+
month += 12
|
| 377 |
+
day = min(calendar.monthrange(year, month)[1],
|
| 378 |
+
self.day or other.day)
|
| 379 |
+
repl = {"year": year, "month": month, "day": day}
|
| 380 |
+
for attr in ["hour", "minute", "second", "microsecond"]:
|
| 381 |
+
value = getattr(self, attr)
|
| 382 |
+
if value is not None:
|
| 383 |
+
repl[attr] = value
|
| 384 |
+
days = self.days
|
| 385 |
+
if self.leapdays and month > 2 and calendar.isleap(year):
|
| 386 |
+
days += self.leapdays
|
| 387 |
+
ret = (other.replace(**repl)
|
| 388 |
+
+ datetime.timedelta(days=days,
|
| 389 |
+
hours=self.hours,
|
| 390 |
+
minutes=self.minutes,
|
| 391 |
+
seconds=self.seconds,
|
| 392 |
+
microseconds=self.microseconds))
|
| 393 |
+
if self.weekday:
|
| 394 |
+
weekday, nth = self.weekday.weekday, self.weekday.n or 1
|
| 395 |
+
jumpdays = (abs(nth) - 1) * 7
|
| 396 |
+
if nth > 0:
|
| 397 |
+
jumpdays += (7 - ret.weekday() + weekday) % 7
|
| 398 |
+
else:
|
| 399 |
+
jumpdays += (ret.weekday() - weekday) % 7
|
| 400 |
+
jumpdays *= -1
|
| 401 |
+
ret += datetime.timedelta(days=jumpdays)
|
| 402 |
+
return ret
|
| 403 |
+
|
| 404 |
+
def __radd__(self, other):
|
| 405 |
+
return self.__add__(other)
|
| 406 |
+
|
| 407 |
+
def __rsub__(self, other):
|
| 408 |
+
return self.__neg__().__radd__(other)
|
| 409 |
+
|
| 410 |
+
def __sub__(self, other):
|
| 411 |
+
if not isinstance(other, relativedelta):
|
| 412 |
+
return NotImplemented # In case the other object defines __rsub__
|
| 413 |
+
return self.__class__(years=self.years - other.years,
|
| 414 |
+
months=self.months - other.months,
|
| 415 |
+
days=self.days - other.days,
|
| 416 |
+
hours=self.hours - other.hours,
|
| 417 |
+
minutes=self.minutes - other.minutes,
|
| 418 |
+
seconds=self.seconds - other.seconds,
|
| 419 |
+
microseconds=self.microseconds - other.microseconds,
|
| 420 |
+
leapdays=self.leapdays or other.leapdays,
|
| 421 |
+
year=(self.year if self.year is not None
|
| 422 |
+
else other.year),
|
| 423 |
+
month=(self.month if self.month is not None else
|
| 424 |
+
other.month),
|
| 425 |
+
day=(self.day if self.day is not None else
|
| 426 |
+
other.day),
|
| 427 |
+
weekday=(self.weekday if self.weekday is not None else
|
| 428 |
+
other.weekday),
|
| 429 |
+
hour=(self.hour if self.hour is not None else
|
| 430 |
+
other.hour),
|
| 431 |
+
minute=(self.minute if self.minute is not None else
|
| 432 |
+
other.minute),
|
| 433 |
+
second=(self.second if self.second is not None else
|
| 434 |
+
other.second),
|
| 435 |
+
microsecond=(self.microsecond if self.microsecond
|
| 436 |
+
is not None else
|
| 437 |
+
other.microsecond))
|
| 438 |
+
|
| 439 |
+
def __abs__(self):
|
| 440 |
+
return self.__class__(years=abs(self.years),
|
| 441 |
+
months=abs(self.months),
|
| 442 |
+
days=abs(self.days),
|
| 443 |
+
hours=abs(self.hours),
|
| 444 |
+
minutes=abs(self.minutes),
|
| 445 |
+
seconds=abs(self.seconds),
|
| 446 |
+
microseconds=abs(self.microseconds),
|
| 447 |
+
leapdays=self.leapdays,
|
| 448 |
+
year=self.year,
|
| 449 |
+
month=self.month,
|
| 450 |
+
day=self.day,
|
| 451 |
+
weekday=self.weekday,
|
| 452 |
+
hour=self.hour,
|
| 453 |
+
minute=self.minute,
|
| 454 |
+
second=self.second,
|
| 455 |
+
microsecond=self.microsecond)
|
| 456 |
+
|
| 457 |
+
def __neg__(self):
|
| 458 |
+
return self.__class__(years=-self.years,
|
| 459 |
+
months=-self.months,
|
| 460 |
+
days=-self.days,
|
| 461 |
+
hours=-self.hours,
|
| 462 |
+
minutes=-self.minutes,
|
| 463 |
+
seconds=-self.seconds,
|
| 464 |
+
microseconds=-self.microseconds,
|
| 465 |
+
leapdays=self.leapdays,
|
| 466 |
+
year=self.year,
|
| 467 |
+
month=self.month,
|
| 468 |
+
day=self.day,
|
| 469 |
+
weekday=self.weekday,
|
| 470 |
+
hour=self.hour,
|
| 471 |
+
minute=self.minute,
|
| 472 |
+
second=self.second,
|
| 473 |
+
microsecond=self.microsecond)
|
| 474 |
+
|
| 475 |
+
def __bool__(self):
|
| 476 |
+
return not (not self.years and
|
| 477 |
+
not self.months and
|
| 478 |
+
not self.days and
|
| 479 |
+
not self.hours and
|
| 480 |
+
not self.minutes and
|
| 481 |
+
not self.seconds and
|
| 482 |
+
not self.microseconds and
|
| 483 |
+
not self.leapdays and
|
| 484 |
+
self.year is None and
|
| 485 |
+
self.month is None and
|
| 486 |
+
self.day is None and
|
| 487 |
+
self.weekday is None and
|
| 488 |
+
self.hour is None and
|
| 489 |
+
self.minute is None and
|
| 490 |
+
self.second is None and
|
| 491 |
+
self.microsecond is None)
|
| 492 |
+
# Compatibility with Python 2.x
|
| 493 |
+
__nonzero__ = __bool__
|
| 494 |
+
|
| 495 |
+
def __mul__(self, other):
|
| 496 |
+
try:
|
| 497 |
+
f = float(other)
|
| 498 |
+
except TypeError:
|
| 499 |
+
return NotImplemented
|
| 500 |
+
|
| 501 |
+
return self.__class__(years=int(self.years * f),
|
| 502 |
+
months=int(self.months * f),
|
| 503 |
+
days=int(self.days * f),
|
| 504 |
+
hours=int(self.hours * f),
|
| 505 |
+
minutes=int(self.minutes * f),
|
| 506 |
+
seconds=int(self.seconds * f),
|
| 507 |
+
microseconds=int(self.microseconds * f),
|
| 508 |
+
leapdays=self.leapdays,
|
| 509 |
+
year=self.year,
|
| 510 |
+
month=self.month,
|
| 511 |
+
day=self.day,
|
| 512 |
+
weekday=self.weekday,
|
| 513 |
+
hour=self.hour,
|
| 514 |
+
minute=self.minute,
|
| 515 |
+
second=self.second,
|
| 516 |
+
microsecond=self.microsecond)
|
| 517 |
+
|
| 518 |
+
__rmul__ = __mul__
|
| 519 |
+
|
| 520 |
+
def __eq__(self, other):
    """Structural equality against another relativedelta.

    Weekdays are compared specially: an ordinal of None is treated as
    equivalent to 1 (e.g. MO and MO(+1) compare equal). All other
    defining fields must match exactly.
    """
    if not isinstance(other, relativedelta):
        return NotImplemented
    if self.weekday or other.weekday:
        # One side has a weekday and the other does not: unequal.
        if not self.weekday or not other.weekday:
            return False
        if self.weekday.weekday != other.weekday.weekday:
            return False
        n1, n2 = self.weekday.n, other.weekday.n
        # n of None and n of 1 are interchangeable; any other mismatch
        # makes the weekdays (and hence the deltas) unequal.
        if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)):
            return False
    # Note: weekday was already handled above and is excluded here.
    fields = ('years', 'months', 'days', 'hours', 'minutes', 'seconds',
              'microseconds', 'leapdays', 'year', 'month', 'day',
              'hour', 'minute', 'second', 'microsecond')
    return all(getattr(self, name) == getattr(other, name)
               for name in fields)
|
| 546 |
+
|
| 547 |
+
def __hash__(self):
|
| 548 |
+
return hash((
|
| 549 |
+
self.weekday,
|
| 550 |
+
self.years,
|
| 551 |
+
self.months,
|
| 552 |
+
self.days,
|
| 553 |
+
self.hours,
|
| 554 |
+
self.minutes,
|
| 555 |
+
self.seconds,
|
| 556 |
+
self.microseconds,
|
| 557 |
+
self.leapdays,
|
| 558 |
+
self.year,
|
| 559 |
+
self.month,
|
| 560 |
+
self.day,
|
| 561 |
+
self.hour,
|
| 562 |
+
self.minute,
|
| 563 |
+
self.second,
|
| 564 |
+
self.microsecond,
|
| 565 |
+
))
|
| 566 |
+
|
| 567 |
+
def __ne__(self, other):
|
| 568 |
+
return not self.__eq__(other)
|
| 569 |
+
|
| 570 |
+
def __div__(self, other):
|
| 571 |
+
try:
|
| 572 |
+
reciprocal = 1 / float(other)
|
| 573 |
+
except TypeError:
|
| 574 |
+
return NotImplemented
|
| 575 |
+
|
| 576 |
+
return self.__mul__(reciprocal)
|
| 577 |
+
|
| 578 |
+
__truediv__ = __div__
|
| 579 |
+
|
| 580 |
+
def __repr__(self):
|
| 581 |
+
l = []
|
| 582 |
+
for attr in ["years", "months", "days", "leapdays",
|
| 583 |
+
"hours", "minutes", "seconds", "microseconds"]:
|
| 584 |
+
value = getattr(self, attr)
|
| 585 |
+
if value:
|
| 586 |
+
l.append("{attr}={value:+g}".format(attr=attr, value=value))
|
| 587 |
+
for attr in ["year", "month", "day", "weekday",
|
| 588 |
+
"hour", "minute", "second", "microsecond"]:
|
| 589 |
+
value = getattr(self, attr)
|
| 590 |
+
if value is not None:
|
| 591 |
+
l.append("{attr}={value}".format(attr=attr, value=repr(value)))
|
| 592 |
+
return "{classname}({attrs})".format(classname=self.__class__.__name__,
|
| 593 |
+
attrs=", ".join(l))
|
| 594 |
+
|
| 595 |
+
|
| 596 |
+
def _sign(x):
|
| 597 |
+
return int(copysign(1, x))
|
| 598 |
+
|
| 599 |
+
# vim:ts=4:sw=4:et
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/rrule.py
ADDED
|
@@ -0,0 +1,1737 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
The rrule module offers a small, complete, and very fast, implementation of
|
| 4 |
+
the recurrence rules documented in the
|
| 5 |
+
`iCalendar RFC <https://tools.ietf.org/html/rfc5545>`_,
|
| 6 |
+
including support for caching of results.
|
| 7 |
+
"""
|
| 8 |
+
import calendar
|
| 9 |
+
import datetime
|
| 10 |
+
import heapq
|
| 11 |
+
import itertools
|
| 12 |
+
import re
|
| 13 |
+
import sys
|
| 14 |
+
from functools import wraps
|
| 15 |
+
# For warning about deprecation of until and count
|
| 16 |
+
from warnings import warn
|
| 17 |
+
|
| 18 |
+
from six import advance_iterator, integer_types
|
| 19 |
+
|
| 20 |
+
from six.moves import _thread, range
|
| 21 |
+
|
| 22 |
+
from ._common import weekday as weekdaybase
|
| 23 |
+
|
| 24 |
+
try:
|
| 25 |
+
from math import gcd
|
| 26 |
+
except ImportError:
|
| 27 |
+
from fractions import gcd
|
| 28 |
+
|
| 29 |
+
__all__ = ["rrule", "rruleset", "rrulestr",
|
| 30 |
+
"YEARLY", "MONTHLY", "WEEKLY", "DAILY",
|
| 31 |
+
"HOURLY", "MINUTELY", "SECONDLY",
|
| 32 |
+
"MO", "TU", "WE", "TH", "FR", "SA", "SU"]
|
| 33 |
+
|
| 34 |
+
# Every mask is 7 days longer to handle cross-year weekly periods.
|
| 35 |
+
M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 +
|
| 36 |
+
[7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7)
|
| 37 |
+
M365MASK = list(M366MASK)
|
| 38 |
+
M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32))
|
| 39 |
+
MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
|
| 40 |
+
MDAY365MASK = list(MDAY366MASK)
|
| 41 |
+
M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0))
|
| 42 |
+
NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
|
| 43 |
+
NMDAY365MASK = list(NMDAY366MASK)
|
| 44 |
+
M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366)
|
| 45 |
+
M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365)
|
| 46 |
+
WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55
|
| 47 |
+
del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31]
|
| 48 |
+
MDAY365MASK = tuple(MDAY365MASK)
|
| 49 |
+
M365MASK = tuple(M365MASK)
|
| 50 |
+
|
| 51 |
+
FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY']
|
| 52 |
+
|
| 53 |
+
(YEARLY,
|
| 54 |
+
MONTHLY,
|
| 55 |
+
WEEKLY,
|
| 56 |
+
DAILY,
|
| 57 |
+
HOURLY,
|
| 58 |
+
MINUTELY,
|
| 59 |
+
SECONDLY) = list(range(7))
|
| 60 |
+
|
| 61 |
+
# Imported on demand.
|
| 62 |
+
easter = None
|
| 63 |
+
parser = None
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class weekday(weekdaybase):
    """A weekday specialized for recurrence rules.

    Identical to the base weekday except that an ordinal of zero is
    rejected: "the 0th Monday of the month" has no meaning in the RFC.
    """

    def __init__(self, wkday, n=None):
        # n selects the nth occurrence within the period (may be
        # negative to count from the end); zero is nonsensical.
        if n == 0:
            raise ValueError("Can't create weekday with n==0")

        super(weekday, self).__init__(wkday, n)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7))
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def _invalidates_cache(f):
|
| 81 |
+
"""
|
| 82 |
+
Decorator for rruleset methods which may invalidate the
|
| 83 |
+
cached length.
|
| 84 |
+
"""
|
| 85 |
+
@wraps(f)
|
| 86 |
+
def inner_func(self, *args, **kwargs):
|
| 87 |
+
rv = f(self, *args, **kwargs)
|
| 88 |
+
self._invalidate_cache()
|
| 89 |
+
return rv
|
| 90 |
+
|
| 91 |
+
return inner_func
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class rrulebase(object):
    """Shared iteration and caching machinery for recurrence containers.

    Subclasses supply ``_iter()`` yielding occurrences in ascending
    order; this base adds optional result caching plus the query
    helpers (``before``, ``after``, ``xafter``, ``between``, ``count``,
    indexing/slicing and membership tests).
    """

    def __init__(self, cache=False):
        # With caching on, occurrences produced by _iter() accumulate
        # in self._cache; the lock serializes concurrent cache fills.
        if cache:
            self._cache = []
            self._cache_lock = _thread.allocate_lock()
            self._invalidate_cache()
        else:
            self._cache = None
            self._cache_complete = False
        # Total number of occurrences; filled in lazily (see count()).
        self._len = None

    def __iter__(self):
        if self._cache_complete:
            # Fully materialized: iterate the stored list directly.
            return iter(self._cache)
        elif self._cache is None:
            # Caching disabled: defer to the subclass generator.
            return self._iter()
        else:
            # Caching enabled but incomplete: fill as we go.
            return self._iter_cached()

    def _invalidate_cache(self):
        # Reset cached results and restart the underlying generator.
        if self._cache is not None:
            self._cache = []
            self._cache_complete = False
            self._cache_gen = self._iter()

            # A previous fill may have left the lock held; free it so
            # the next _iter_cached() can acquire it.
            if self._cache_lock.locked():
                self._cache_lock.release()

        self._len = None

    def _iter_cached(self):
        # Yield from the cache, extending it (10 items at a time) from
        # the shared generator whenever we reach its current end.
        i = 0
        gen = self._cache_gen
        cache = self._cache
        acquire = self._cache_lock.acquire
        release = self._cache_lock.release
        while gen:
            if i == len(cache):
                acquire()
                # Another consumer may have exhausted the generator
                # while we were waiting for the lock.
                if self._cache_complete:
                    break
                try:
                    for j in range(10):
                        cache.append(advance_iterator(gen))
                except StopIteration:
                    self._cache_gen = gen = None
                    self._cache_complete = True
                    break
                release()
            yield cache[i]
            i += 1
        # Generator finished (here or elsewhere): drain the remainder
        # of the now-complete cache. self._len is presumably set by the
        # subclass _iter on exhaustion — not visible here; confirm.
        while i < self._len:
            yield cache[i]
            i += 1

    def __getitem__(self, item):
        if self._cache_complete:
            return self._cache[item]
        elif isinstance(item, slice):
            if item.step and item.step < 0:
                # Negative step requires the full materialized list.
                return list(iter(self))[item]
            else:
                return list(itertools.islice(self,
                                             item.start or 0,
                                             item.stop or sys.maxsize,
                                             item.step or 1))
        elif item >= 0:
            # Advance item+1 times; IndexError if exhausted first.
            gen = iter(self)
            try:
                for i in range(item+1):
                    res = advance_iterator(gen)
            except StopIteration:
                raise IndexError
            return res
        else:
            # Negative index: needs the whole sequence.
            return list(iter(self))[item]

    def __contains__(self, item):
        if self._cache_complete:
            return item in self._cache
        else:
            # Occurrences are generated in ascending order, so we can
            # stop as soon as we pass the candidate.
            for i in self:
                if i == item:
                    return True
                elif i > item:
                    return False
            return False

    # __len__() introduces a large performance penalty.
    def count(self):
        """ Returns the number of recurrences in this set. It will have to go
        through the whole recurrence, if this hasn't been done before. """
        if self._len is None:
            # Exhaust the iterator; self._len is populated as a side
            # effect (presumably by the subclass _iter — confirm).
            for x in self:
                pass
        return self._len

    def before(self, dt, inc=False):
        """ Returns the last recurrence before the given datetime instance. The
        inc keyword defines what happens if dt is an occurrence. With
        inc=True, if dt itself is an occurrence, it will be returned. """
        if self._cache_complete:
            gen = self._cache
        else:
            gen = self
        last = None
        if inc:
            # Keep the last occurrence <= dt.
            for i in gen:
                if i > dt:
                    break
                last = i
        else:
            # Keep the last occurrence strictly < dt.
            for i in gen:
                if i >= dt:
                    break
                last = i
        return last

    def after(self, dt, inc=False):
        """ Returns the first recurrence after the given datetime instance. The
        inc keyword defines what happens if dt is an occurrence. With
        inc=True, if dt itself is an occurrence, it will be returned. """
        if self._cache_complete:
            gen = self._cache
        else:
            gen = self
        if inc:
            for i in gen:
                if i >= dt:
                    return i
        else:
            for i in gen:
                if i > dt:
                    return i
        return None

    def xafter(self, dt, count=None, inc=False):
        """
        Generator which yields up to `count` recurrences after the given
        datetime instance, equivalent to `after`.

        :param dt:
            The datetime at which to start generating recurrences.

        :param count:
            The maximum number of recurrences to generate. If `None` (default),
            dates are generated until the recurrence rule is exhausted.

        :param inc:
            If `dt` is an instance of the rule and `inc` is `True`, it is
            included in the output.

        :yields: Yields a sequence of `datetime` objects.
        """

        if self._cache_complete:
            gen = self._cache
        else:
            gen = self

        # Select the comparison function
        if inc:
            comp = lambda dc, dtc: dc >= dtc
        else:
            comp = lambda dc, dtc: dc > dtc

        # Generate dates
        n = 0
        for d in gen:
            if comp(d, dt):
                if count is not None:
                    n += 1
                    if n > count:
                        break

                yield d

    def between(self, after, before, inc=False, count=1):
        """ Returns all the occurrences of the rrule between after and before.
        The inc keyword defines what happens if after and/or before are
        themselves occurrences. With inc=True, they will be included in the
        list, if they are found in the recurrence set. """
        # NOTE(review): the ``count`` parameter is accepted but never
        # used by this implementation.
        if self._cache_complete:
            gen = self._cache
        else:
            gen = self
        started = False
        l = []
        if inc:
            # Inclusive bounds: [after, before].
            for i in gen:
                if i > before:
                    break
                elif not started:
                    if i >= after:
                        started = True
                        l.append(i)
                else:
                    l.append(i)
        else:
            # Exclusive bounds: (after, before).
            for i in gen:
                if i >= before:
                    break
                elif not started:
                    if i > after:
                        started = True
                        l.append(i)
                else:
                    l.append(i)
        return l
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
class rrule(rrulebase):
|
| 306 |
+
"""
|
| 307 |
+
That's the base of the rrule operation. It accepts all the keywords
|
| 308 |
+
defined in the RFC as its constructor parameters (except byday,
|
| 309 |
+
which was renamed to byweekday) and more. The constructor prototype is::
|
| 310 |
+
|
| 311 |
+
rrule(freq)
|
| 312 |
+
|
| 313 |
+
Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY,
|
| 314 |
+
or SECONDLY.
|
| 315 |
+
|
| 316 |
+
.. note::
|
| 317 |
+
Per RFC section 3.3.10, recurrence instances falling on invalid dates
|
| 318 |
+
and times are ignored rather than coerced:
|
| 319 |
+
|
| 320 |
+
Recurrence rules may generate recurrence instances with an invalid
|
| 321 |
+
date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM
|
| 322 |
+
on a day where the local time is moved forward by an hour at 1:00
|
| 323 |
+
AM). Such recurrence instances MUST be ignored and MUST NOT be
|
| 324 |
+
counted as part of the recurrence set.
|
| 325 |
+
|
| 326 |
+
This can lead to possibly surprising behavior when, for example, the
|
| 327 |
+
start date occurs at the end of the month:
|
| 328 |
+
|
| 329 |
+
>>> from dateutil.rrule import rrule, MONTHLY
|
| 330 |
+
>>> from datetime import datetime
|
| 331 |
+
>>> start_date = datetime(2014, 12, 31)
|
| 332 |
+
>>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date))
|
| 333 |
+
... # doctest: +NORMALIZE_WHITESPACE
|
| 334 |
+
[datetime.datetime(2014, 12, 31, 0, 0),
|
| 335 |
+
datetime.datetime(2015, 1, 31, 0, 0),
|
| 336 |
+
datetime.datetime(2015, 3, 31, 0, 0),
|
| 337 |
+
datetime.datetime(2015, 5, 31, 0, 0)]
|
| 338 |
+
|
| 339 |
+
Additionally, it supports the following keyword arguments:
|
| 340 |
+
|
| 341 |
+
:param dtstart:
|
| 342 |
+
The recurrence start. Besides being the base for the recurrence,
|
| 343 |
+
missing parameters in the final recurrence instances will also be
|
| 344 |
+
extracted from this date. If not given, datetime.now() will be used
|
| 345 |
+
instead.
|
| 346 |
+
:param interval:
|
| 347 |
+
The interval between each freq iteration. For example, when using
|
| 348 |
+
YEARLY, an interval of 2 means once every two years, but with HOURLY,
|
| 349 |
+
it means once every two hours. The default interval is 1.
|
| 350 |
+
:param wkst:
|
| 351 |
+
The week start day. Must be one of the MO, TU, WE constants, or an
|
| 352 |
+
integer, specifying the first day of the week. This will affect
|
| 353 |
+
recurrences based on weekly periods. The default week start is got
|
| 354 |
+
from calendar.firstweekday(), and may be modified by
|
| 355 |
+
calendar.setfirstweekday().
|
| 356 |
+
:param count:
|
| 357 |
+
If given, this determines how many occurrences will be generated.
|
| 358 |
+
|
| 359 |
+
.. note::
|
| 360 |
+
As of version 2.5.0, the use of the keyword ``until`` in conjunction
|
| 361 |
+
with ``count`` is deprecated, to make sure ``dateutil`` is fully
|
| 362 |
+
compliant with `RFC-5545 Sec. 3.3.10 <https://tools.ietf.org/
|
| 363 |
+
html/rfc5545#section-3.3.10>`_. Therefore, ``until`` and ``count``
|
| 364 |
+
**must not** occur in the same call to ``rrule``.
|
| 365 |
+
:param until:
|
| 366 |
+
If given, this must be a datetime instance specifying the upper-bound
|
| 367 |
+
limit of the recurrence. The last recurrence in the rule is the greatest
|
| 368 |
+
datetime that is less than or equal to the value specified in the
|
| 369 |
+
``until`` parameter.
|
| 370 |
+
|
| 371 |
+
.. note::
|
| 372 |
+
As of version 2.5.0, the use of the keyword ``until`` in conjunction
|
| 373 |
+
with ``count`` is deprecated, to make sure ``dateutil`` is fully
|
| 374 |
+
compliant with `RFC-5545 Sec. 3.3.10 <https://tools.ietf.org/
|
| 375 |
+
html/rfc5545#section-3.3.10>`_. Therefore, ``until`` and ``count``
|
| 376 |
+
**must not** occur in the same call to ``rrule``.
|
| 377 |
+
:param bysetpos:
|
| 378 |
+
If given, it must be either an integer, or a sequence of integers,
|
| 379 |
+
positive or negative. Each given integer will specify an occurrence
|
| 380 |
+
number, corresponding to the nth occurrence of the rule inside the
|
| 381 |
+
frequency period. For example, a bysetpos of -1 if combined with a
|
| 382 |
+
MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will
|
| 383 |
+
result in the last work day of every month.
|
| 384 |
+
:param bymonth:
|
| 385 |
+
If given, it must be either an integer, or a sequence of integers,
|
| 386 |
+
meaning the months to apply the recurrence to.
|
| 387 |
+
:param bymonthday:
|
| 388 |
+
If given, it must be either an integer, or a sequence of integers,
|
| 389 |
+
meaning the month days to apply the recurrence to.
|
| 390 |
+
:param byyearday:
|
| 391 |
+
If given, it must be either an integer, or a sequence of integers,
|
| 392 |
+
meaning the year days to apply the recurrence to.
|
| 393 |
+
:param byeaster:
|
| 394 |
+
If given, it must be either an integer, or a sequence of integers,
|
| 395 |
+
positive or negative. Each integer will define an offset from the
|
| 396 |
+
Easter Sunday. Passing the offset 0 to byeaster will yield the Easter
|
| 397 |
+
Sunday itself. This is an extension to the RFC specification.
|
| 398 |
+
:param byweekno:
|
| 399 |
+
If given, it must be either an integer, or a sequence of integers,
|
| 400 |
+
meaning the week numbers to apply the recurrence to. Week numbers
|
| 401 |
+
have the meaning described in ISO8601, that is, the first week of
|
| 402 |
+
the year is that containing at least four days of the new year.
|
| 403 |
+
:param byweekday:
|
| 404 |
+
If given, it must be either an integer (0 == MO), a sequence of
|
| 405 |
+
integers, one of the weekday constants (MO, TU, etc), or a sequence
|
| 406 |
+
of these constants. When given, these variables will define the
|
| 407 |
+
weekdays where the recurrence will be applied. It's also possible to
|
| 408 |
+
use an argument n for the weekday instances, which will mean the nth
|
| 409 |
+
occurrence of this weekday in the period. For example, with MONTHLY,
|
| 410 |
+
or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the
|
| 411 |
+
first friday of the month where the recurrence happens. Notice that in
|
| 412 |
+
the RFC documentation, this is specified as BYDAY, but was renamed to
|
| 413 |
+
avoid the ambiguity of that keyword.
|
| 414 |
+
:param byhour:
|
| 415 |
+
If given, it must be either an integer, or a sequence of integers,
|
| 416 |
+
meaning the hours to apply the recurrence to.
|
| 417 |
+
:param byminute:
|
| 418 |
+
If given, it must be either an integer, or a sequence of integers,
|
| 419 |
+
meaning the minutes to apply the recurrence to.
|
| 420 |
+
:param bysecond:
|
| 421 |
+
If given, it must be either an integer, or a sequence of integers,
|
| 422 |
+
meaning the seconds to apply the recurrence to.
|
| 423 |
+
:param cache:
|
| 424 |
+
If given, it must be a boolean value specifying to enable or disable
|
| 425 |
+
caching of results. If you will use the same rrule instance multiple
|
| 426 |
+
times, enabling caching will improve the performance considerably.
|
| 427 |
+
"""
|
| 428 |
+
def __init__(self, freq, dtstart=None,
|
| 429 |
+
interval=1, wkst=None, count=None, until=None, bysetpos=None,
|
| 430 |
+
bymonth=None, bymonthday=None, byyearday=None, byeaster=None,
|
| 431 |
+
byweekno=None, byweekday=None,
|
| 432 |
+
byhour=None, byminute=None, bysecond=None,
|
| 433 |
+
cache=False):
|
| 434 |
+
super(rrule, self).__init__(cache)
|
| 435 |
+
global easter
|
| 436 |
+
if not dtstart:
|
| 437 |
+
if until and until.tzinfo:
|
| 438 |
+
dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0)
|
| 439 |
+
else:
|
| 440 |
+
dtstart = datetime.datetime.now().replace(microsecond=0)
|
| 441 |
+
elif not isinstance(dtstart, datetime.datetime):
|
| 442 |
+
dtstart = datetime.datetime.fromordinal(dtstart.toordinal())
|
| 443 |
+
else:
|
| 444 |
+
dtstart = dtstart.replace(microsecond=0)
|
| 445 |
+
self._dtstart = dtstart
|
| 446 |
+
self._tzinfo = dtstart.tzinfo
|
| 447 |
+
self._freq = freq
|
| 448 |
+
self._interval = interval
|
| 449 |
+
self._count = count
|
| 450 |
+
|
| 451 |
+
# Cache the original byxxx rules, if they are provided, as the _byxxx
|
| 452 |
+
# attributes do not necessarily map to the inputs, and this can be
|
| 453 |
+
# a problem in generating the strings. Only store things if they've
|
| 454 |
+
# been supplied (the string retrieval will just use .get())
|
| 455 |
+
self._original_rule = {}
|
| 456 |
+
|
| 457 |
+
if until and not isinstance(until, datetime.datetime):
|
| 458 |
+
until = datetime.datetime.fromordinal(until.toordinal())
|
| 459 |
+
self._until = until
|
| 460 |
+
|
| 461 |
+
if self._dtstart and self._until:
|
| 462 |
+
if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None):
|
| 463 |
+
# According to RFC5545 Section 3.3.10:
|
| 464 |
+
# https://tools.ietf.org/html/rfc5545#section-3.3.10
|
| 465 |
+
#
|
| 466 |
+
# > If the "DTSTART" property is specified as a date with UTC
|
| 467 |
+
# > time or a date with local time and time zone reference,
|
| 468 |
+
# > then the UNTIL rule part MUST be specified as a date with
|
| 469 |
+
# > UTC time.
|
| 470 |
+
raise ValueError(
|
| 471 |
+
'RRULE UNTIL values must be specified in UTC when DTSTART '
|
| 472 |
+
'is timezone-aware'
|
| 473 |
+
)
|
| 474 |
+
|
| 475 |
+
if count is not None and until:
|
| 476 |
+
warn("Using both 'count' and 'until' is inconsistent with RFC 5545"
|
| 477 |
+
" and has been deprecated in dateutil. Future versions will "
|
| 478 |
+
"raise an error.", DeprecationWarning)
|
| 479 |
+
|
| 480 |
+
if wkst is None:
|
| 481 |
+
self._wkst = calendar.firstweekday()
|
| 482 |
+
elif isinstance(wkst, integer_types):
|
| 483 |
+
self._wkst = wkst
|
| 484 |
+
else:
|
| 485 |
+
self._wkst = wkst.weekday
|
| 486 |
+
|
| 487 |
+
if bysetpos is None:
|
| 488 |
+
self._bysetpos = None
|
| 489 |
+
elif isinstance(bysetpos, integer_types):
|
| 490 |
+
if bysetpos == 0 or not (-366 <= bysetpos <= 366):
|
| 491 |
+
raise ValueError("bysetpos must be between 1 and 366, "
|
| 492 |
+
"or between -366 and -1")
|
| 493 |
+
self._bysetpos = (bysetpos,)
|
| 494 |
+
else:
|
| 495 |
+
self._bysetpos = tuple(bysetpos)
|
| 496 |
+
for pos in self._bysetpos:
|
| 497 |
+
if pos == 0 or not (-366 <= pos <= 366):
|
| 498 |
+
raise ValueError("bysetpos must be between 1 and 366, "
|
| 499 |
+
"or between -366 and -1")
|
| 500 |
+
|
| 501 |
+
if self._bysetpos:
|
| 502 |
+
self._original_rule['bysetpos'] = self._bysetpos
|
| 503 |
+
|
| 504 |
+
if (byweekno is None and byyearday is None and bymonthday is None and
|
| 505 |
+
byweekday is None and byeaster is None):
|
| 506 |
+
if freq == YEARLY:
|
| 507 |
+
if bymonth is None:
|
| 508 |
+
bymonth = dtstart.month
|
| 509 |
+
self._original_rule['bymonth'] = None
|
| 510 |
+
bymonthday = dtstart.day
|
| 511 |
+
self._original_rule['bymonthday'] = None
|
| 512 |
+
elif freq == MONTHLY:
|
| 513 |
+
bymonthday = dtstart.day
|
| 514 |
+
self._original_rule['bymonthday'] = None
|
| 515 |
+
elif freq == WEEKLY:
|
| 516 |
+
byweekday = dtstart.weekday()
|
| 517 |
+
self._original_rule['byweekday'] = None
|
| 518 |
+
|
| 519 |
+
# bymonth
|
| 520 |
+
if bymonth is None:
|
| 521 |
+
self._bymonth = None
|
| 522 |
+
else:
|
| 523 |
+
if isinstance(bymonth, integer_types):
|
| 524 |
+
bymonth = (bymonth,)
|
| 525 |
+
|
| 526 |
+
self._bymonth = tuple(sorted(set(bymonth)))
|
| 527 |
+
|
| 528 |
+
if 'bymonth' not in self._original_rule:
|
| 529 |
+
self._original_rule['bymonth'] = self._bymonth
|
| 530 |
+
|
| 531 |
+
# byyearday
|
| 532 |
+
if byyearday is None:
|
| 533 |
+
self._byyearday = None
|
| 534 |
+
else:
|
| 535 |
+
if isinstance(byyearday, integer_types):
|
| 536 |
+
byyearday = (byyearday,)
|
| 537 |
+
|
| 538 |
+
self._byyearday = tuple(sorted(set(byyearday)))
|
| 539 |
+
self._original_rule['byyearday'] = self._byyearday
|
| 540 |
+
|
| 541 |
+
# byeaster
|
| 542 |
+
if byeaster is not None:
|
| 543 |
+
if not easter:
|
| 544 |
+
from dateutil import easter
|
| 545 |
+
if isinstance(byeaster, integer_types):
|
| 546 |
+
self._byeaster = (byeaster,)
|
| 547 |
+
else:
|
| 548 |
+
self._byeaster = tuple(sorted(byeaster))
|
| 549 |
+
|
| 550 |
+
self._original_rule['byeaster'] = self._byeaster
|
| 551 |
+
else:
|
| 552 |
+
self._byeaster = None
|
| 553 |
+
|
| 554 |
+
# bymonthday
|
| 555 |
+
if bymonthday is None:
|
| 556 |
+
self._bymonthday = ()
|
| 557 |
+
self._bynmonthday = ()
|
| 558 |
+
else:
|
| 559 |
+
if isinstance(bymonthday, integer_types):
|
| 560 |
+
bymonthday = (bymonthday,)
|
| 561 |
+
|
| 562 |
+
bymonthday = set(bymonthday) # Ensure it's unique
|
| 563 |
+
|
| 564 |
+
self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0))
|
| 565 |
+
self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0))
|
| 566 |
+
|
| 567 |
+
# Storing positive numbers first, then negative numbers
|
| 568 |
+
if 'bymonthday' not in self._original_rule:
|
| 569 |
+
self._original_rule['bymonthday'] = tuple(
|
| 570 |
+
itertools.chain(self._bymonthday, self._bynmonthday))
|
| 571 |
+
|
| 572 |
+
# byweekno
|
| 573 |
+
if byweekno is None:
|
| 574 |
+
self._byweekno = None
|
| 575 |
+
else:
|
| 576 |
+
if isinstance(byweekno, integer_types):
|
| 577 |
+
byweekno = (byweekno,)
|
| 578 |
+
|
| 579 |
+
self._byweekno = tuple(sorted(set(byweekno)))
|
| 580 |
+
|
| 581 |
+
self._original_rule['byweekno'] = self._byweekno
|
| 582 |
+
|
| 583 |
+
# byweekday / bynweekday
|
| 584 |
+
if byweekday is None:
|
| 585 |
+
self._byweekday = None
|
| 586 |
+
self._bynweekday = None
|
| 587 |
+
else:
|
| 588 |
+
# If it's one of the valid non-sequence types, convert to a
|
| 589 |
+
# single-element sequence before the iterator that builds the
|
| 590 |
+
# byweekday set.
|
| 591 |
+
if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"):
|
| 592 |
+
byweekday = (byweekday,)
|
| 593 |
+
|
| 594 |
+
self._byweekday = set()
|
| 595 |
+
self._bynweekday = set()
|
| 596 |
+
for wday in byweekday:
|
| 597 |
+
if isinstance(wday, integer_types):
|
| 598 |
+
self._byweekday.add(wday)
|
| 599 |
+
elif not wday.n or freq > MONTHLY:
|
| 600 |
+
self._byweekday.add(wday.weekday)
|
| 601 |
+
else:
|
| 602 |
+
self._bynweekday.add((wday.weekday, wday.n))
|
| 603 |
+
|
| 604 |
+
if not self._byweekday:
|
| 605 |
+
self._byweekday = None
|
| 606 |
+
elif not self._bynweekday:
|
| 607 |
+
self._bynweekday = None
|
| 608 |
+
|
| 609 |
+
if self._byweekday is not None:
|
| 610 |
+
self._byweekday = tuple(sorted(self._byweekday))
|
| 611 |
+
orig_byweekday = [weekday(x) for x in self._byweekday]
|
| 612 |
+
else:
|
| 613 |
+
orig_byweekday = ()
|
| 614 |
+
|
| 615 |
+
if self._bynweekday is not None:
|
| 616 |
+
self._bynweekday = tuple(sorted(self._bynweekday))
|
| 617 |
+
orig_bynweekday = [weekday(*x) for x in self._bynweekday]
|
| 618 |
+
else:
|
| 619 |
+
orig_bynweekday = ()
|
| 620 |
+
|
| 621 |
+
if 'byweekday' not in self._original_rule:
|
| 622 |
+
self._original_rule['byweekday'] = tuple(itertools.chain(
|
| 623 |
+
orig_byweekday, orig_bynweekday))
|
| 624 |
+
|
| 625 |
+
# byhour
|
| 626 |
+
if byhour is None:
|
| 627 |
+
if freq < HOURLY:
|
| 628 |
+
self._byhour = {dtstart.hour}
|
| 629 |
+
else:
|
| 630 |
+
self._byhour = None
|
| 631 |
+
else:
|
| 632 |
+
if isinstance(byhour, integer_types):
|
| 633 |
+
byhour = (byhour,)
|
| 634 |
+
|
| 635 |
+
if freq == HOURLY:
|
| 636 |
+
self._byhour = self.__construct_byset(start=dtstart.hour,
|
| 637 |
+
byxxx=byhour,
|
| 638 |
+
base=24)
|
| 639 |
+
else:
|
| 640 |
+
self._byhour = set(byhour)
|
| 641 |
+
|
| 642 |
+
self._byhour = tuple(sorted(self._byhour))
|
| 643 |
+
self._original_rule['byhour'] = self._byhour
|
| 644 |
+
|
| 645 |
+
# byminute
|
| 646 |
+
if byminute is None:
|
| 647 |
+
if freq < MINUTELY:
|
| 648 |
+
self._byminute = {dtstart.minute}
|
| 649 |
+
else:
|
| 650 |
+
self._byminute = None
|
| 651 |
+
else:
|
| 652 |
+
if isinstance(byminute, integer_types):
|
| 653 |
+
byminute = (byminute,)
|
| 654 |
+
|
| 655 |
+
if freq == MINUTELY:
|
| 656 |
+
self._byminute = self.__construct_byset(start=dtstart.minute,
|
| 657 |
+
byxxx=byminute,
|
| 658 |
+
base=60)
|
| 659 |
+
else:
|
| 660 |
+
self._byminute = set(byminute)
|
| 661 |
+
|
| 662 |
+
self._byminute = tuple(sorted(self._byminute))
|
| 663 |
+
self._original_rule['byminute'] = self._byminute
|
| 664 |
+
|
| 665 |
+
# bysecond
|
| 666 |
+
if bysecond is None:
|
| 667 |
+
if freq < SECONDLY:
|
| 668 |
+
self._bysecond = ((dtstart.second,))
|
| 669 |
+
else:
|
| 670 |
+
self._bysecond = None
|
| 671 |
+
else:
|
| 672 |
+
if isinstance(bysecond, integer_types):
|
| 673 |
+
bysecond = (bysecond,)
|
| 674 |
+
|
| 675 |
+
self._bysecond = set(bysecond)
|
| 676 |
+
|
| 677 |
+
if freq == SECONDLY:
|
| 678 |
+
self._bysecond = self.__construct_byset(start=dtstart.second,
|
| 679 |
+
byxxx=bysecond,
|
| 680 |
+
base=60)
|
| 681 |
+
else:
|
| 682 |
+
self._bysecond = set(bysecond)
|
| 683 |
+
|
| 684 |
+
self._bysecond = tuple(sorted(self._bysecond))
|
| 685 |
+
self._original_rule['bysecond'] = self._bysecond
|
| 686 |
+
|
| 687 |
+
if self._freq >= HOURLY:
|
| 688 |
+
self._timeset = None
|
| 689 |
+
else:
|
| 690 |
+
self._timeset = []
|
| 691 |
+
for hour in self._byhour:
|
| 692 |
+
for minute in self._byminute:
|
| 693 |
+
for second in self._bysecond:
|
| 694 |
+
self._timeset.append(
|
| 695 |
+
datetime.time(hour, minute, second,
|
| 696 |
+
tzinfo=self._tzinfo))
|
| 697 |
+
self._timeset.sort()
|
| 698 |
+
self._timeset = tuple(self._timeset)
|
| 699 |
+
|
| 700 |
+
def __str__(self):
|
| 701 |
+
"""
|
| 702 |
+
Output a string that would generate this RRULE if passed to rrulestr.
|
| 703 |
+
This is mostly compatible with RFC5545, except for the
|
| 704 |
+
dateutil-specific extension BYEASTER.
|
| 705 |
+
"""
|
| 706 |
+
|
| 707 |
+
output = []
|
| 708 |
+
h, m, s = [None] * 3
|
| 709 |
+
if self._dtstart:
|
| 710 |
+
output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S'))
|
| 711 |
+
h, m, s = self._dtstart.timetuple()[3:6]
|
| 712 |
+
|
| 713 |
+
parts = ['FREQ=' + FREQNAMES[self._freq]]
|
| 714 |
+
if self._interval != 1:
|
| 715 |
+
parts.append('INTERVAL=' + str(self._interval))
|
| 716 |
+
|
| 717 |
+
if self._wkst:
|
| 718 |
+
parts.append('WKST=' + repr(weekday(self._wkst))[0:2])
|
| 719 |
+
|
| 720 |
+
if self._count is not None:
|
| 721 |
+
parts.append('COUNT=' + str(self._count))
|
| 722 |
+
|
| 723 |
+
if self._until:
|
| 724 |
+
parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S'))
|
| 725 |
+
|
| 726 |
+
if self._original_rule.get('byweekday') is not None:
|
| 727 |
+
# The str() method on weekday objects doesn't generate
|
| 728 |
+
# RFC5545-compliant strings, so we should modify that.
|
| 729 |
+
original_rule = dict(self._original_rule)
|
| 730 |
+
wday_strings = []
|
| 731 |
+
for wday in original_rule['byweekday']:
|
| 732 |
+
if wday.n:
|
| 733 |
+
wday_strings.append('{n:+d}{wday}'.format(
|
| 734 |
+
n=wday.n,
|
| 735 |
+
wday=repr(wday)[0:2]))
|
| 736 |
+
else:
|
| 737 |
+
wday_strings.append(repr(wday))
|
| 738 |
+
|
| 739 |
+
original_rule['byweekday'] = wday_strings
|
| 740 |
+
else:
|
| 741 |
+
original_rule = self._original_rule
|
| 742 |
+
|
| 743 |
+
partfmt = '{name}={vals}'
|
| 744 |
+
for name, key in [('BYSETPOS', 'bysetpos'),
|
| 745 |
+
('BYMONTH', 'bymonth'),
|
| 746 |
+
('BYMONTHDAY', 'bymonthday'),
|
| 747 |
+
('BYYEARDAY', 'byyearday'),
|
| 748 |
+
('BYWEEKNO', 'byweekno'),
|
| 749 |
+
('BYDAY', 'byweekday'),
|
| 750 |
+
('BYHOUR', 'byhour'),
|
| 751 |
+
('BYMINUTE', 'byminute'),
|
| 752 |
+
('BYSECOND', 'bysecond'),
|
| 753 |
+
('BYEASTER', 'byeaster')]:
|
| 754 |
+
value = original_rule.get(key)
|
| 755 |
+
if value:
|
| 756 |
+
parts.append(partfmt.format(name=name, vals=(','.join(str(v)
|
| 757 |
+
for v in value))))
|
| 758 |
+
|
| 759 |
+
output.append('RRULE:' + ';'.join(parts))
|
| 760 |
+
return '\n'.join(output)
|
| 761 |
+
|
| 762 |
+
def replace(self, **kwargs):
|
| 763 |
+
"""Return new rrule with same attributes except for those attributes given new
|
| 764 |
+
values by whichever keyword arguments are specified."""
|
| 765 |
+
new_kwargs = {"interval": self._interval,
|
| 766 |
+
"count": self._count,
|
| 767 |
+
"dtstart": self._dtstart,
|
| 768 |
+
"freq": self._freq,
|
| 769 |
+
"until": self._until,
|
| 770 |
+
"wkst": self._wkst,
|
| 771 |
+
"cache": False if self._cache is None else True }
|
| 772 |
+
new_kwargs.update(self._original_rule)
|
| 773 |
+
new_kwargs.update(kwargs)
|
| 774 |
+
return rrule(**new_kwargs)
|
| 775 |
+
|
| 776 |
+
def _iter(self):
|
| 777 |
+
year, month, day, hour, minute, second, weekday, yearday, _ = \
|
| 778 |
+
self._dtstart.timetuple()
|
| 779 |
+
|
| 780 |
+
# Some local variables to speed things up a bit
|
| 781 |
+
freq = self._freq
|
| 782 |
+
interval = self._interval
|
| 783 |
+
wkst = self._wkst
|
| 784 |
+
until = self._until
|
| 785 |
+
bymonth = self._bymonth
|
| 786 |
+
byweekno = self._byweekno
|
| 787 |
+
byyearday = self._byyearday
|
| 788 |
+
byweekday = self._byweekday
|
| 789 |
+
byeaster = self._byeaster
|
| 790 |
+
bymonthday = self._bymonthday
|
| 791 |
+
bynmonthday = self._bynmonthday
|
| 792 |
+
bysetpos = self._bysetpos
|
| 793 |
+
byhour = self._byhour
|
| 794 |
+
byminute = self._byminute
|
| 795 |
+
bysecond = self._bysecond
|
| 796 |
+
|
| 797 |
+
ii = _iterinfo(self)
|
| 798 |
+
ii.rebuild(year, month)
|
| 799 |
+
|
| 800 |
+
getdayset = {YEARLY: ii.ydayset,
|
| 801 |
+
MONTHLY: ii.mdayset,
|
| 802 |
+
WEEKLY: ii.wdayset,
|
| 803 |
+
DAILY: ii.ddayset,
|
| 804 |
+
HOURLY: ii.ddayset,
|
| 805 |
+
MINUTELY: ii.ddayset,
|
| 806 |
+
SECONDLY: ii.ddayset}[freq]
|
| 807 |
+
|
| 808 |
+
if freq < HOURLY:
|
| 809 |
+
timeset = self._timeset
|
| 810 |
+
else:
|
| 811 |
+
gettimeset = {HOURLY: ii.htimeset,
|
| 812 |
+
MINUTELY: ii.mtimeset,
|
| 813 |
+
SECONDLY: ii.stimeset}[freq]
|
| 814 |
+
if ((freq >= HOURLY and
|
| 815 |
+
self._byhour and hour not in self._byhour) or
|
| 816 |
+
(freq >= MINUTELY and
|
| 817 |
+
self._byminute and minute not in self._byminute) or
|
| 818 |
+
(freq >= SECONDLY and
|
| 819 |
+
self._bysecond and second not in self._bysecond)):
|
| 820 |
+
timeset = ()
|
| 821 |
+
else:
|
| 822 |
+
timeset = gettimeset(hour, minute, second)
|
| 823 |
+
|
| 824 |
+
total = 0
|
| 825 |
+
count = self._count
|
| 826 |
+
while True:
|
| 827 |
+
# Get dayset with the right frequency
|
| 828 |
+
dayset, start, end = getdayset(year, month, day)
|
| 829 |
+
|
| 830 |
+
# Do the "hard" work ;-)
|
| 831 |
+
filtered = False
|
| 832 |
+
for i in dayset[start:end]:
|
| 833 |
+
if ((bymonth and ii.mmask[i] not in bymonth) or
|
| 834 |
+
(byweekno and not ii.wnomask[i]) or
|
| 835 |
+
(byweekday and ii.wdaymask[i] not in byweekday) or
|
| 836 |
+
(ii.nwdaymask and not ii.nwdaymask[i]) or
|
| 837 |
+
(byeaster and not ii.eastermask[i]) or
|
| 838 |
+
((bymonthday or bynmonthday) and
|
| 839 |
+
ii.mdaymask[i] not in bymonthday and
|
| 840 |
+
ii.nmdaymask[i] not in bynmonthday) or
|
| 841 |
+
(byyearday and
|
| 842 |
+
((i < ii.yearlen and i+1 not in byyearday and
|
| 843 |
+
-ii.yearlen+i not in byyearday) or
|
| 844 |
+
(i >= ii.yearlen and i+1-ii.yearlen not in byyearday and
|
| 845 |
+
-ii.nextyearlen+i-ii.yearlen not in byyearday)))):
|
| 846 |
+
dayset[i] = None
|
| 847 |
+
filtered = True
|
| 848 |
+
|
| 849 |
+
# Output results
|
| 850 |
+
if bysetpos and timeset:
|
| 851 |
+
poslist = []
|
| 852 |
+
for pos in bysetpos:
|
| 853 |
+
if pos < 0:
|
| 854 |
+
daypos, timepos = divmod(pos, len(timeset))
|
| 855 |
+
else:
|
| 856 |
+
daypos, timepos = divmod(pos-1, len(timeset))
|
| 857 |
+
try:
|
| 858 |
+
i = [x for x in dayset[start:end]
|
| 859 |
+
if x is not None][daypos]
|
| 860 |
+
time = timeset[timepos]
|
| 861 |
+
except IndexError:
|
| 862 |
+
pass
|
| 863 |
+
else:
|
| 864 |
+
date = datetime.date.fromordinal(ii.yearordinal+i)
|
| 865 |
+
res = datetime.datetime.combine(date, time)
|
| 866 |
+
if res not in poslist:
|
| 867 |
+
poslist.append(res)
|
| 868 |
+
poslist.sort()
|
| 869 |
+
for res in poslist:
|
| 870 |
+
if until and res > until:
|
| 871 |
+
self._len = total
|
| 872 |
+
return
|
| 873 |
+
elif res >= self._dtstart:
|
| 874 |
+
if count is not None:
|
| 875 |
+
count -= 1
|
| 876 |
+
if count < 0:
|
| 877 |
+
self._len = total
|
| 878 |
+
return
|
| 879 |
+
total += 1
|
| 880 |
+
yield res
|
| 881 |
+
else:
|
| 882 |
+
for i in dayset[start:end]:
|
| 883 |
+
if i is not None:
|
| 884 |
+
date = datetime.date.fromordinal(ii.yearordinal + i)
|
| 885 |
+
for time in timeset:
|
| 886 |
+
res = datetime.datetime.combine(date, time)
|
| 887 |
+
if until and res > until:
|
| 888 |
+
self._len = total
|
| 889 |
+
return
|
| 890 |
+
elif res >= self._dtstart:
|
| 891 |
+
if count is not None:
|
| 892 |
+
count -= 1
|
| 893 |
+
if count < 0:
|
| 894 |
+
self._len = total
|
| 895 |
+
return
|
| 896 |
+
|
| 897 |
+
total += 1
|
| 898 |
+
yield res
|
| 899 |
+
|
| 900 |
+
# Handle frequency and interval
|
| 901 |
+
fixday = False
|
| 902 |
+
if freq == YEARLY:
|
| 903 |
+
year += interval
|
| 904 |
+
if year > datetime.MAXYEAR:
|
| 905 |
+
self._len = total
|
| 906 |
+
return
|
| 907 |
+
ii.rebuild(year, month)
|
| 908 |
+
elif freq == MONTHLY:
|
| 909 |
+
month += interval
|
| 910 |
+
if month > 12:
|
| 911 |
+
div, mod = divmod(month, 12)
|
| 912 |
+
month = mod
|
| 913 |
+
year += div
|
| 914 |
+
if month == 0:
|
| 915 |
+
month = 12
|
| 916 |
+
year -= 1
|
| 917 |
+
if year > datetime.MAXYEAR:
|
| 918 |
+
self._len = total
|
| 919 |
+
return
|
| 920 |
+
ii.rebuild(year, month)
|
| 921 |
+
elif freq == WEEKLY:
|
| 922 |
+
if wkst > weekday:
|
| 923 |
+
day += -(weekday+1+(6-wkst))+self._interval*7
|
| 924 |
+
else:
|
| 925 |
+
day += -(weekday-wkst)+self._interval*7
|
| 926 |
+
weekday = wkst
|
| 927 |
+
fixday = True
|
| 928 |
+
elif freq == DAILY:
|
| 929 |
+
day += interval
|
| 930 |
+
fixday = True
|
| 931 |
+
elif freq == HOURLY:
|
| 932 |
+
if filtered:
|
| 933 |
+
# Jump to one iteration before next day
|
| 934 |
+
hour += ((23-hour)//interval)*interval
|
| 935 |
+
|
| 936 |
+
if byhour:
|
| 937 |
+
ndays, hour = self.__mod_distance(value=hour,
|
| 938 |
+
byxxx=self._byhour,
|
| 939 |
+
base=24)
|
| 940 |
+
else:
|
| 941 |
+
ndays, hour = divmod(hour+interval, 24)
|
| 942 |
+
|
| 943 |
+
if ndays:
|
| 944 |
+
day += ndays
|
| 945 |
+
fixday = True
|
| 946 |
+
|
| 947 |
+
timeset = gettimeset(hour, minute, second)
|
| 948 |
+
elif freq == MINUTELY:
|
| 949 |
+
if filtered:
|
| 950 |
+
# Jump to one iteration before next day
|
| 951 |
+
minute += ((1439-(hour*60+minute))//interval)*interval
|
| 952 |
+
|
| 953 |
+
valid = False
|
| 954 |
+
rep_rate = (24*60)
|
| 955 |
+
for j in range(rep_rate // gcd(interval, rep_rate)):
|
| 956 |
+
if byminute:
|
| 957 |
+
nhours, minute = \
|
| 958 |
+
self.__mod_distance(value=minute,
|
| 959 |
+
byxxx=self._byminute,
|
| 960 |
+
base=60)
|
| 961 |
+
else:
|
| 962 |
+
nhours, minute = divmod(minute+interval, 60)
|
| 963 |
+
|
| 964 |
+
div, hour = divmod(hour+nhours, 24)
|
| 965 |
+
if div:
|
| 966 |
+
day += div
|
| 967 |
+
fixday = True
|
| 968 |
+
filtered = False
|
| 969 |
+
|
| 970 |
+
if not byhour or hour in byhour:
|
| 971 |
+
valid = True
|
| 972 |
+
break
|
| 973 |
+
|
| 974 |
+
if not valid:
|
| 975 |
+
raise ValueError('Invalid combination of interval and ' +
|
| 976 |
+
'byhour resulting in empty rule.')
|
| 977 |
+
|
| 978 |
+
timeset = gettimeset(hour, minute, second)
|
| 979 |
+
elif freq == SECONDLY:
|
| 980 |
+
if filtered:
|
| 981 |
+
# Jump to one iteration before next day
|
| 982 |
+
second += (((86399 - (hour * 3600 + minute * 60 + second))
|
| 983 |
+
// interval) * interval)
|
| 984 |
+
|
| 985 |
+
rep_rate = (24 * 3600)
|
| 986 |
+
valid = False
|
| 987 |
+
for j in range(0, rep_rate // gcd(interval, rep_rate)):
|
| 988 |
+
if bysecond:
|
| 989 |
+
nminutes, second = \
|
| 990 |
+
self.__mod_distance(value=second,
|
| 991 |
+
byxxx=self._bysecond,
|
| 992 |
+
base=60)
|
| 993 |
+
else:
|
| 994 |
+
nminutes, second = divmod(second+interval, 60)
|
| 995 |
+
|
| 996 |
+
div, minute = divmod(minute+nminutes, 60)
|
| 997 |
+
if div:
|
| 998 |
+
hour += div
|
| 999 |
+
div, hour = divmod(hour, 24)
|
| 1000 |
+
if div:
|
| 1001 |
+
day += div
|
| 1002 |
+
fixday = True
|
| 1003 |
+
|
| 1004 |
+
if ((not byhour or hour in byhour) and
|
| 1005 |
+
(not byminute or minute in byminute) and
|
| 1006 |
+
(not bysecond or second in bysecond)):
|
| 1007 |
+
valid = True
|
| 1008 |
+
break
|
| 1009 |
+
|
| 1010 |
+
if not valid:
|
| 1011 |
+
raise ValueError('Invalid combination of interval, ' +
|
| 1012 |
+
'byhour and byminute resulting in empty' +
|
| 1013 |
+
' rule.')
|
| 1014 |
+
|
| 1015 |
+
timeset = gettimeset(hour, minute, second)
|
| 1016 |
+
|
| 1017 |
+
if fixday and day > 28:
|
| 1018 |
+
daysinmonth = calendar.monthrange(year, month)[1]
|
| 1019 |
+
if day > daysinmonth:
|
| 1020 |
+
while day > daysinmonth:
|
| 1021 |
+
day -= daysinmonth
|
| 1022 |
+
month += 1
|
| 1023 |
+
if month == 13:
|
| 1024 |
+
month = 1
|
| 1025 |
+
year += 1
|
| 1026 |
+
if year > datetime.MAXYEAR:
|
| 1027 |
+
self._len = total
|
| 1028 |
+
return
|
| 1029 |
+
daysinmonth = calendar.monthrange(year, month)[1]
|
| 1030 |
+
ii.rebuild(year, month)
|
| 1031 |
+
|
| 1032 |
+
def __construct_byset(self, start, byxxx, base):
|
| 1033 |
+
"""
|
| 1034 |
+
If a `BYXXX` sequence is passed to the constructor at the same level as
|
| 1035 |
+
`FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some
|
| 1036 |
+
specifications which cannot be reached given some starting conditions.
|
| 1037 |
+
|
| 1038 |
+
This occurs whenever the interval is not coprime with the base of a
|
| 1039 |
+
given unit and the difference between the starting position and the
|
| 1040 |
+
ending position is not coprime with the greatest common denominator
|
| 1041 |
+
between the interval and the base. For example, with a FREQ of hourly
|
| 1042 |
+
starting at 17:00 and an interval of 4, the only valid values for
|
| 1043 |
+
BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not
|
| 1044 |
+
coprime.
|
| 1045 |
+
|
| 1046 |
+
:param start:
|
| 1047 |
+
Specifies the starting position.
|
| 1048 |
+
:param byxxx:
|
| 1049 |
+
An iterable containing the list of allowed values.
|
| 1050 |
+
:param base:
|
| 1051 |
+
The largest allowable value for the specified frequency (e.g.
|
| 1052 |
+
24 hours, 60 minutes).
|
| 1053 |
+
|
| 1054 |
+
This does not preserve the type of the iterable, returning a set, since
|
| 1055 |
+
the values should be unique and the order is irrelevant, this will
|
| 1056 |
+
speed up later lookups.
|
| 1057 |
+
|
| 1058 |
+
In the event of an empty set, raises a :exception:`ValueError`, as this
|
| 1059 |
+
results in an empty rrule.
|
| 1060 |
+
"""
|
| 1061 |
+
|
| 1062 |
+
cset = set()
|
| 1063 |
+
|
| 1064 |
+
# Support a single byxxx value.
|
| 1065 |
+
if isinstance(byxxx, integer_types):
|
| 1066 |
+
byxxx = (byxxx, )
|
| 1067 |
+
|
| 1068 |
+
for num in byxxx:
|
| 1069 |
+
i_gcd = gcd(self._interval, base)
|
| 1070 |
+
# Use divmod rather than % because we need to wrap negative nums.
|
| 1071 |
+
if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0:
|
| 1072 |
+
cset.add(num)
|
| 1073 |
+
|
| 1074 |
+
if len(cset) == 0:
|
| 1075 |
+
raise ValueError("Invalid rrule byxxx generates an empty set.")
|
| 1076 |
+
|
| 1077 |
+
return cset
|
| 1078 |
+
|
| 1079 |
+
def __mod_distance(self, value, byxxx, base):
|
| 1080 |
+
"""
|
| 1081 |
+
Calculates the next value in a sequence where the `FREQ` parameter is
|
| 1082 |
+
specified along with a `BYXXX` parameter at the same "level"
|
| 1083 |
+
(e.g. `HOURLY` specified with `BYHOUR`).
|
| 1084 |
+
|
| 1085 |
+
:param value:
|
| 1086 |
+
The old value of the component.
|
| 1087 |
+
:param byxxx:
|
| 1088 |
+
The `BYXXX` set, which should have been generated by
|
| 1089 |
+
`rrule._construct_byset`, or something else which checks that a
|
| 1090 |
+
valid rule is present.
|
| 1091 |
+
:param base:
|
| 1092 |
+
The largest allowable value for the specified frequency (e.g.
|
| 1093 |
+
24 hours, 60 minutes).
|
| 1094 |
+
|
| 1095 |
+
If a valid value is not found after `base` iterations (the maximum
|
| 1096 |
+
number before the sequence would start to repeat), this raises a
|
| 1097 |
+
:exception:`ValueError`, as no valid values were found.
|
| 1098 |
+
|
| 1099 |
+
This returns a tuple of `divmod(n*interval, base)`, where `n` is the
|
| 1100 |
+
smallest number of `interval` repetitions until the next specified
|
| 1101 |
+
value in `byxxx` is found.
|
| 1102 |
+
"""
|
| 1103 |
+
accumulator = 0
|
| 1104 |
+
for ii in range(1, base + 1):
|
| 1105 |
+
# Using divmod() over % to account for negative intervals
|
| 1106 |
+
div, value = divmod(value + self._interval, base)
|
| 1107 |
+
accumulator += div
|
| 1108 |
+
if value in byxxx:
|
| 1109 |
+
return (accumulator, value)
|
| 1110 |
+
|
| 1111 |
+
|
| 1112 |
+
class _iterinfo(object):
|
| 1113 |
+
__slots__ = ["rrule", "lastyear", "lastmonth",
|
| 1114 |
+
"yearlen", "nextyearlen", "yearordinal", "yearweekday",
|
| 1115 |
+
"mmask", "mrange", "mdaymask", "nmdaymask",
|
| 1116 |
+
"wdaymask", "wnomask", "nwdaymask", "eastermask"]
|
| 1117 |
+
|
| 1118 |
+
def __init__(self, rrule):
|
| 1119 |
+
for attr in self.__slots__:
|
| 1120 |
+
setattr(self, attr, None)
|
| 1121 |
+
self.rrule = rrule
|
| 1122 |
+
|
| 1123 |
+
def rebuild(self, year, month):
|
| 1124 |
+
# Every mask is 7 days longer to handle cross-year weekly periods.
|
| 1125 |
+
rr = self.rrule
|
| 1126 |
+
if year != self.lastyear:
|
| 1127 |
+
self.yearlen = 365 + calendar.isleap(year)
|
| 1128 |
+
self.nextyearlen = 365 + calendar.isleap(year + 1)
|
| 1129 |
+
firstyday = datetime.date(year, 1, 1)
|
| 1130 |
+
self.yearordinal = firstyday.toordinal()
|
| 1131 |
+
self.yearweekday = firstyday.weekday()
|
| 1132 |
+
|
| 1133 |
+
wday = datetime.date(year, 1, 1).weekday()
|
| 1134 |
+
if self.yearlen == 365:
|
| 1135 |
+
self.mmask = M365MASK
|
| 1136 |
+
self.mdaymask = MDAY365MASK
|
| 1137 |
+
self.nmdaymask = NMDAY365MASK
|
| 1138 |
+
self.wdaymask = WDAYMASK[wday:]
|
| 1139 |
+
self.mrange = M365RANGE
|
| 1140 |
+
else:
|
| 1141 |
+
self.mmask = M366MASK
|
| 1142 |
+
self.mdaymask = MDAY366MASK
|
| 1143 |
+
self.nmdaymask = NMDAY366MASK
|
| 1144 |
+
self.wdaymask = WDAYMASK[wday:]
|
| 1145 |
+
self.mrange = M366RANGE
|
| 1146 |
+
|
| 1147 |
+
if not rr._byweekno:
|
| 1148 |
+
self.wnomask = None
|
| 1149 |
+
else:
|
| 1150 |
+
self.wnomask = [0]*(self.yearlen+7)
|
| 1151 |
+
# no1wkst = firstwkst = self.wdaymask.index(rr._wkst)
|
| 1152 |
+
no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7
|
| 1153 |
+
if no1wkst >= 4:
|
| 1154 |
+
no1wkst = 0
|
| 1155 |
+
# Number of days in the year, plus the days we got
|
| 1156 |
+
# from last year.
|
| 1157 |
+
wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7
|
| 1158 |
+
else:
|
| 1159 |
+
# Number of days in the year, minus the days we
|
| 1160 |
+
# left in last year.
|
| 1161 |
+
wyearlen = self.yearlen-no1wkst
|
| 1162 |
+
div, mod = divmod(wyearlen, 7)
|
| 1163 |
+
numweeks = div+mod//4
|
| 1164 |
+
for n in rr._byweekno:
|
| 1165 |
+
if n < 0:
|
| 1166 |
+
n += numweeks+1
|
| 1167 |
+
if not (0 < n <= numweeks):
|
| 1168 |
+
continue
|
| 1169 |
+
if n > 1:
|
| 1170 |
+
i = no1wkst+(n-1)*7
|
| 1171 |
+
if no1wkst != firstwkst:
|
| 1172 |
+
i -= 7-firstwkst
|
| 1173 |
+
else:
|
| 1174 |
+
i = no1wkst
|
| 1175 |
+
for j in range(7):
|
| 1176 |
+
self.wnomask[i] = 1
|
| 1177 |
+
i += 1
|
| 1178 |
+
if self.wdaymask[i] == rr._wkst:
|
| 1179 |
+
break
|
| 1180 |
+
if 1 in rr._byweekno:
|
| 1181 |
+
# Check week number 1 of next year as well
|
| 1182 |
+
# TODO: Check -numweeks for next year.
|
| 1183 |
+
i = no1wkst+numweeks*7
|
| 1184 |
+
if no1wkst != firstwkst:
|
| 1185 |
+
i -= 7-firstwkst
|
| 1186 |
+
if i < self.yearlen:
|
| 1187 |
+
# If week starts in next year, we
|
| 1188 |
+
# don't care about it.
|
| 1189 |
+
for j in range(7):
|
| 1190 |
+
self.wnomask[i] = 1
|
| 1191 |
+
i += 1
|
| 1192 |
+
if self.wdaymask[i] == rr._wkst:
|
| 1193 |
+
break
|
| 1194 |
+
if no1wkst:
|
| 1195 |
+
# Check last week number of last year as
|
| 1196 |
+
# well. If no1wkst is 0, either the year
|
| 1197 |
+
# started on week start, or week number 1
|
| 1198 |
+
# got days from last year, so there are no
|
| 1199 |
+
# days from last year's last week number in
|
| 1200 |
+
# this year.
|
| 1201 |
+
if -1 not in rr._byweekno:
|
| 1202 |
+
lyearweekday = datetime.date(year-1, 1, 1).weekday()
|
| 1203 |
+
lno1wkst = (7-lyearweekday+rr._wkst) % 7
|
| 1204 |
+
lyearlen = 365+calendar.isleap(year-1)
|
| 1205 |
+
if lno1wkst >= 4:
|
| 1206 |
+
lno1wkst = 0
|
| 1207 |
+
lnumweeks = 52+(lyearlen +
|
| 1208 |
+
(lyearweekday-rr._wkst) % 7) % 7//4
|
| 1209 |
+
else:
|
| 1210 |
+
lnumweeks = 52+(self.yearlen-no1wkst) % 7//4
|
| 1211 |
+
else:
|
| 1212 |
+
lnumweeks = -1
|
| 1213 |
+
if lnumweeks in rr._byweekno:
|
| 1214 |
+
for i in range(no1wkst):
|
| 1215 |
+
self.wnomask[i] = 1
|
| 1216 |
+
|
| 1217 |
+
if (rr._bynweekday and (month != self.lastmonth or
|
| 1218 |
+
year != self.lastyear)):
|
| 1219 |
+
ranges = []
|
| 1220 |
+
if rr._freq == YEARLY:
|
| 1221 |
+
if rr._bymonth:
|
| 1222 |
+
for month in rr._bymonth:
|
| 1223 |
+
ranges.append(self.mrange[month-1:month+1])
|
| 1224 |
+
else:
|
| 1225 |
+
ranges = [(0, self.yearlen)]
|
| 1226 |
+
elif rr._freq == MONTHLY:
|
| 1227 |
+
ranges = [self.mrange[month-1:month+1]]
|
| 1228 |
+
if ranges:
|
| 1229 |
+
# Weekly frequency won't get here, so we may not
|
| 1230 |
+
# care about cross-year weekly periods.
|
| 1231 |
+
self.nwdaymask = [0]*self.yearlen
|
| 1232 |
+
for first, last in ranges:
|
| 1233 |
+
last -= 1
|
| 1234 |
+
for wday, n in rr._bynweekday:
|
| 1235 |
+
if n < 0:
|
| 1236 |
+
i = last+(n+1)*7
|
| 1237 |
+
i -= (self.wdaymask[i]-wday) % 7
|
| 1238 |
+
else:
|
| 1239 |
+
i = first+(n-1)*7
|
| 1240 |
+
i += (7-self.wdaymask[i]+wday) % 7
|
| 1241 |
+
if first <= i <= last:
|
| 1242 |
+
self.nwdaymask[i] = 1
|
| 1243 |
+
|
| 1244 |
+
if rr._byeaster:
|
| 1245 |
+
self.eastermask = [0]*(self.yearlen+7)
|
| 1246 |
+
eyday = easter.easter(year).toordinal()-self.yearordinal
|
| 1247 |
+
for offset in rr._byeaster:
|
| 1248 |
+
self.eastermask[eyday+offset] = 1
|
| 1249 |
+
|
| 1250 |
+
self.lastyear = year
|
| 1251 |
+
self.lastmonth = month
|
| 1252 |
+
|
| 1253 |
+
def ydayset(self, year, month, day):
|
| 1254 |
+
return list(range(self.yearlen)), 0, self.yearlen
|
| 1255 |
+
|
| 1256 |
+
def mdayset(self, year, month, day):
|
| 1257 |
+
dset = [None]*self.yearlen
|
| 1258 |
+
start, end = self.mrange[month-1:month+1]
|
| 1259 |
+
for i in range(start, end):
|
| 1260 |
+
dset[i] = i
|
| 1261 |
+
return dset, start, end
|
| 1262 |
+
|
| 1263 |
+
def wdayset(self, year, month, day):
|
| 1264 |
+
# We need to handle cross-year weeks here.
|
| 1265 |
+
dset = [None]*(self.yearlen+7)
|
| 1266 |
+
i = datetime.date(year, month, day).toordinal()-self.yearordinal
|
| 1267 |
+
start = i
|
| 1268 |
+
for j in range(7):
|
| 1269 |
+
dset[i] = i
|
| 1270 |
+
i += 1
|
| 1271 |
+
# if (not (0 <= i < self.yearlen) or
|
| 1272 |
+
# self.wdaymask[i] == self.rrule._wkst):
|
| 1273 |
+
# This will cross the year boundary, if necessary.
|
| 1274 |
+
if self.wdaymask[i] == self.rrule._wkst:
|
| 1275 |
+
break
|
| 1276 |
+
return dset, start, i
|
| 1277 |
+
|
| 1278 |
+
def ddayset(self, year, month, day):
|
| 1279 |
+
dset = [None] * self.yearlen
|
| 1280 |
+
i = datetime.date(year, month, day).toordinal() - self.yearordinal
|
| 1281 |
+
dset[i] = i
|
| 1282 |
+
return dset, i, i + 1
|
| 1283 |
+
|
| 1284 |
+
def htimeset(self, hour, minute, second):
|
| 1285 |
+
tset = []
|
| 1286 |
+
rr = self.rrule
|
| 1287 |
+
for minute in rr._byminute:
|
| 1288 |
+
for second in rr._bysecond:
|
| 1289 |
+
tset.append(datetime.time(hour, minute, second,
|
| 1290 |
+
tzinfo=rr._tzinfo))
|
| 1291 |
+
tset.sort()
|
| 1292 |
+
return tset
|
| 1293 |
+
|
| 1294 |
+
def mtimeset(self, hour, minute, second):
|
| 1295 |
+
tset = []
|
| 1296 |
+
rr = self.rrule
|
| 1297 |
+
for second in rr._bysecond:
|
| 1298 |
+
tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo))
|
| 1299 |
+
tset.sort()
|
| 1300 |
+
return tset
|
| 1301 |
+
|
| 1302 |
+
def stimeset(self, hour, minute, second):
|
| 1303 |
+
return (datetime.time(hour, minute, second,
|
| 1304 |
+
tzinfo=self.rrule._tzinfo),)
|
| 1305 |
+
|
| 1306 |
+
|
| 1307 |
+
class rruleset(rrulebase):
|
| 1308 |
+
""" The rruleset type allows more complex recurrence setups, mixing
|
| 1309 |
+
multiple rules, dates, exclusion rules, and exclusion dates. The type
|
| 1310 |
+
constructor takes the following keyword arguments:
|
| 1311 |
+
|
| 1312 |
+
:param cache: If True, caching of results will be enabled, improving
|
| 1313 |
+
performance of multiple queries considerably. """
|
| 1314 |
+
|
| 1315 |
+
class _genitem(object):
|
| 1316 |
+
def __init__(self, genlist, gen):
|
| 1317 |
+
try:
|
| 1318 |
+
self.dt = advance_iterator(gen)
|
| 1319 |
+
genlist.append(self)
|
| 1320 |
+
except StopIteration:
|
| 1321 |
+
pass
|
| 1322 |
+
self.genlist = genlist
|
| 1323 |
+
self.gen = gen
|
| 1324 |
+
|
| 1325 |
+
def __next__(self):
|
| 1326 |
+
try:
|
| 1327 |
+
self.dt = advance_iterator(self.gen)
|
| 1328 |
+
except StopIteration:
|
| 1329 |
+
if self.genlist[0] is self:
|
| 1330 |
+
heapq.heappop(self.genlist)
|
| 1331 |
+
else:
|
| 1332 |
+
self.genlist.remove(self)
|
| 1333 |
+
heapq.heapify(self.genlist)
|
| 1334 |
+
|
| 1335 |
+
next = __next__
|
| 1336 |
+
|
| 1337 |
+
def __lt__(self, other):
|
| 1338 |
+
return self.dt < other.dt
|
| 1339 |
+
|
| 1340 |
+
def __gt__(self, other):
|
| 1341 |
+
return self.dt > other.dt
|
| 1342 |
+
|
| 1343 |
+
def __eq__(self, other):
|
| 1344 |
+
return self.dt == other.dt
|
| 1345 |
+
|
| 1346 |
+
def __ne__(self, other):
|
| 1347 |
+
return self.dt != other.dt
|
| 1348 |
+
|
| 1349 |
+
def __init__(self, cache=False):
|
| 1350 |
+
super(rruleset, self).__init__(cache)
|
| 1351 |
+
self._rrule = []
|
| 1352 |
+
self._rdate = []
|
| 1353 |
+
self._exrule = []
|
| 1354 |
+
self._exdate = []
|
| 1355 |
+
|
| 1356 |
+
@_invalidates_cache
|
| 1357 |
+
def rrule(self, rrule):
|
| 1358 |
+
""" Include the given :py:class:`rrule` instance in the recurrence set
|
| 1359 |
+
generation. """
|
| 1360 |
+
self._rrule.append(rrule)
|
| 1361 |
+
|
| 1362 |
+
@_invalidates_cache
|
| 1363 |
+
def rdate(self, rdate):
|
| 1364 |
+
""" Include the given :py:class:`datetime` instance in the recurrence
|
| 1365 |
+
set generation. """
|
| 1366 |
+
self._rdate.append(rdate)
|
| 1367 |
+
|
| 1368 |
+
@_invalidates_cache
|
| 1369 |
+
def exrule(self, exrule):
|
| 1370 |
+
""" Include the given rrule instance in the recurrence set exclusion
|
| 1371 |
+
list. Dates which are part of the given recurrence rules will not
|
| 1372 |
+
be generated, even if some inclusive rrule or rdate matches them.
|
| 1373 |
+
"""
|
| 1374 |
+
self._exrule.append(exrule)
|
| 1375 |
+
|
| 1376 |
+
@_invalidates_cache
|
| 1377 |
+
def exdate(self, exdate):
|
| 1378 |
+
""" Include the given datetime instance in the recurrence set
|
| 1379 |
+
exclusion list. Dates included that way will not be generated,
|
| 1380 |
+
even if some inclusive rrule or rdate matches them. """
|
| 1381 |
+
self._exdate.append(exdate)
|
| 1382 |
+
|
| 1383 |
+
def _iter(self):
|
| 1384 |
+
rlist = []
|
| 1385 |
+
self._rdate.sort()
|
| 1386 |
+
self._genitem(rlist, iter(self._rdate))
|
| 1387 |
+
for gen in [iter(x) for x in self._rrule]:
|
| 1388 |
+
self._genitem(rlist, gen)
|
| 1389 |
+
exlist = []
|
| 1390 |
+
self._exdate.sort()
|
| 1391 |
+
self._genitem(exlist, iter(self._exdate))
|
| 1392 |
+
for gen in [iter(x) for x in self._exrule]:
|
| 1393 |
+
self._genitem(exlist, gen)
|
| 1394 |
+
lastdt = None
|
| 1395 |
+
total = 0
|
| 1396 |
+
heapq.heapify(rlist)
|
| 1397 |
+
heapq.heapify(exlist)
|
| 1398 |
+
while rlist:
|
| 1399 |
+
ritem = rlist[0]
|
| 1400 |
+
if not lastdt or lastdt != ritem.dt:
|
| 1401 |
+
while exlist and exlist[0] < ritem:
|
| 1402 |
+
exitem = exlist[0]
|
| 1403 |
+
advance_iterator(exitem)
|
| 1404 |
+
if exlist and exlist[0] is exitem:
|
| 1405 |
+
heapq.heapreplace(exlist, exitem)
|
| 1406 |
+
if not exlist or ritem != exlist[0]:
|
| 1407 |
+
total += 1
|
| 1408 |
+
yield ritem.dt
|
| 1409 |
+
lastdt = ritem.dt
|
| 1410 |
+
advance_iterator(ritem)
|
| 1411 |
+
if rlist and rlist[0] is ritem:
|
| 1412 |
+
heapq.heapreplace(rlist, ritem)
|
| 1413 |
+
self._len = total
|
| 1414 |
+
|
| 1415 |
+
|
| 1416 |
+
|
| 1417 |
+
|
| 1418 |
+
class _rrulestr(object):
|
| 1419 |
+
""" Parses a string representation of a recurrence rule or set of
|
| 1420 |
+
recurrence rules.
|
| 1421 |
+
|
| 1422 |
+
:param s:
|
| 1423 |
+
Required, a string defining one or more recurrence rules.
|
| 1424 |
+
|
| 1425 |
+
:param dtstart:
|
| 1426 |
+
If given, used as the default recurrence start if not specified in the
|
| 1427 |
+
rule string.
|
| 1428 |
+
|
| 1429 |
+
:param cache:
|
| 1430 |
+
If set ``True`` caching of results will be enabled, improving
|
| 1431 |
+
performance of multiple queries considerably.
|
| 1432 |
+
|
| 1433 |
+
:param unfold:
|
| 1434 |
+
If set ``True`` indicates that a rule string is split over more
|
| 1435 |
+
than one line and should be joined before processing.
|
| 1436 |
+
|
| 1437 |
+
:param forceset:
|
| 1438 |
+
If set ``True`` forces a :class:`dateutil.rrule.rruleset` to
|
| 1439 |
+
be returned.
|
| 1440 |
+
|
| 1441 |
+
:param compatible:
|
| 1442 |
+
If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``.
|
| 1443 |
+
|
| 1444 |
+
:param ignoretz:
|
| 1445 |
+
If set ``True``, time zones in parsed strings are ignored and a naive
|
| 1446 |
+
:class:`datetime.datetime` object is returned.
|
| 1447 |
+
|
| 1448 |
+
:param tzids:
|
| 1449 |
+
If given, a callable or mapping used to retrieve a
|
| 1450 |
+
:class:`datetime.tzinfo` from a string representation.
|
| 1451 |
+
Defaults to :func:`dateutil.tz.gettz`.
|
| 1452 |
+
|
| 1453 |
+
:param tzinfos:
|
| 1454 |
+
Additional time zone names / aliases which may be present in a string
|
| 1455 |
+
representation. See :func:`dateutil.parser.parse` for more
|
| 1456 |
+
information.
|
| 1457 |
+
|
| 1458 |
+
:return:
|
| 1459 |
+
Returns a :class:`dateutil.rrule.rruleset` or
|
| 1460 |
+
:class:`dateutil.rrule.rrule`
|
| 1461 |
+
"""
|
| 1462 |
+
|
| 1463 |
+
_freq_map = {"YEARLY": YEARLY,
|
| 1464 |
+
"MONTHLY": MONTHLY,
|
| 1465 |
+
"WEEKLY": WEEKLY,
|
| 1466 |
+
"DAILY": DAILY,
|
| 1467 |
+
"HOURLY": HOURLY,
|
| 1468 |
+
"MINUTELY": MINUTELY,
|
| 1469 |
+
"SECONDLY": SECONDLY}
|
| 1470 |
+
|
| 1471 |
+
_weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3,
|
| 1472 |
+
"FR": 4, "SA": 5, "SU": 6}
|
| 1473 |
+
|
| 1474 |
+
def _handle_int(self, rrkwargs, name, value, **kwargs):
|
| 1475 |
+
rrkwargs[name.lower()] = int(value)
|
| 1476 |
+
|
| 1477 |
+
def _handle_int_list(self, rrkwargs, name, value, **kwargs):
|
| 1478 |
+
rrkwargs[name.lower()] = [int(x) for x in value.split(',')]
|
| 1479 |
+
|
| 1480 |
+
_handle_INTERVAL = _handle_int
|
| 1481 |
+
_handle_COUNT = _handle_int
|
| 1482 |
+
_handle_BYSETPOS = _handle_int_list
|
| 1483 |
+
_handle_BYMONTH = _handle_int_list
|
| 1484 |
+
_handle_BYMONTHDAY = _handle_int_list
|
| 1485 |
+
_handle_BYYEARDAY = _handle_int_list
|
| 1486 |
+
_handle_BYEASTER = _handle_int_list
|
| 1487 |
+
_handle_BYWEEKNO = _handle_int_list
|
| 1488 |
+
_handle_BYHOUR = _handle_int_list
|
| 1489 |
+
_handle_BYMINUTE = _handle_int_list
|
| 1490 |
+
_handle_BYSECOND = _handle_int_list
|
| 1491 |
+
|
| 1492 |
+
def _handle_FREQ(self, rrkwargs, name, value, **kwargs):
|
| 1493 |
+
rrkwargs["freq"] = self._freq_map[value]
|
| 1494 |
+
|
| 1495 |
+
def _handle_UNTIL(self, rrkwargs, name, value, **kwargs):
|
| 1496 |
+
global parser
|
| 1497 |
+
if not parser:
|
| 1498 |
+
from dateutil import parser
|
| 1499 |
+
try:
|
| 1500 |
+
rrkwargs["until"] = parser.parse(value,
|
| 1501 |
+
ignoretz=kwargs.get("ignoretz"),
|
| 1502 |
+
tzinfos=kwargs.get("tzinfos"))
|
| 1503 |
+
except ValueError:
|
| 1504 |
+
raise ValueError("invalid until date")
|
| 1505 |
+
|
| 1506 |
+
def _handle_WKST(self, rrkwargs, name, value, **kwargs):
|
| 1507 |
+
rrkwargs["wkst"] = self._weekday_map[value]
|
| 1508 |
+
|
| 1509 |
+
def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs):
|
| 1510 |
+
"""
|
| 1511 |
+
Two ways to specify this: +1MO or MO(+1)
|
| 1512 |
+
"""
|
| 1513 |
+
l = []
|
| 1514 |
+
for wday in value.split(','):
|
| 1515 |
+
if '(' in wday:
|
| 1516 |
+
# If it's of the form TH(+1), etc.
|
| 1517 |
+
splt = wday.split('(')
|
| 1518 |
+
w = splt[0]
|
| 1519 |
+
n = int(splt[1][:-1])
|
| 1520 |
+
elif len(wday):
|
| 1521 |
+
# If it's of the form +1MO
|
| 1522 |
+
for i in range(len(wday)):
|
| 1523 |
+
if wday[i] not in '+-0123456789':
|
| 1524 |
+
break
|
| 1525 |
+
n = wday[:i] or None
|
| 1526 |
+
w = wday[i:]
|
| 1527 |
+
if n:
|
| 1528 |
+
n = int(n)
|
| 1529 |
+
else:
|
| 1530 |
+
raise ValueError("Invalid (empty) BYDAY specification.")
|
| 1531 |
+
|
| 1532 |
+
l.append(weekdays[self._weekday_map[w]](n))
|
| 1533 |
+
rrkwargs["byweekday"] = l
|
| 1534 |
+
|
| 1535 |
+
_handle_BYDAY = _handle_BYWEEKDAY
|
| 1536 |
+
|
| 1537 |
+
def _parse_rfc_rrule(self, line,
|
| 1538 |
+
dtstart=None,
|
| 1539 |
+
cache=False,
|
| 1540 |
+
ignoretz=False,
|
| 1541 |
+
tzinfos=None):
|
| 1542 |
+
if line.find(':') != -1:
|
| 1543 |
+
name, value = line.split(':')
|
| 1544 |
+
if name != "RRULE":
|
| 1545 |
+
raise ValueError("unknown parameter name")
|
| 1546 |
+
else:
|
| 1547 |
+
value = line
|
| 1548 |
+
rrkwargs = {}
|
| 1549 |
+
for pair in value.split(';'):
|
| 1550 |
+
name, value = pair.split('=')
|
| 1551 |
+
name = name.upper()
|
| 1552 |
+
value = value.upper()
|
| 1553 |
+
try:
|
| 1554 |
+
getattr(self, "_handle_"+name)(rrkwargs, name, value,
|
| 1555 |
+
ignoretz=ignoretz,
|
| 1556 |
+
tzinfos=tzinfos)
|
| 1557 |
+
except AttributeError:
|
| 1558 |
+
raise ValueError("unknown parameter '%s'" % name)
|
| 1559 |
+
except (KeyError, ValueError):
|
| 1560 |
+
raise ValueError("invalid '%s': %s" % (name, value))
|
| 1561 |
+
return rrule(dtstart=dtstart, cache=cache, **rrkwargs)
|
| 1562 |
+
|
| 1563 |
+
def _parse_date_value(self, date_value, parms, rule_tzids,
|
| 1564 |
+
ignoretz, tzids, tzinfos):
|
| 1565 |
+
global parser
|
| 1566 |
+
if not parser:
|
| 1567 |
+
from dateutil import parser
|
| 1568 |
+
|
| 1569 |
+
datevals = []
|
| 1570 |
+
value_found = False
|
| 1571 |
+
TZID = None
|
| 1572 |
+
|
| 1573 |
+
for parm in parms:
|
| 1574 |
+
if parm.startswith("TZID="):
|
| 1575 |
+
try:
|
| 1576 |
+
tzkey = rule_tzids[parm.split('TZID=')[-1]]
|
| 1577 |
+
except KeyError:
|
| 1578 |
+
continue
|
| 1579 |
+
if tzids is None:
|
| 1580 |
+
from . import tz
|
| 1581 |
+
tzlookup = tz.gettz
|
| 1582 |
+
elif callable(tzids):
|
| 1583 |
+
tzlookup = tzids
|
| 1584 |
+
else:
|
| 1585 |
+
tzlookup = getattr(tzids, 'get', None)
|
| 1586 |
+
if tzlookup is None:
|
| 1587 |
+
msg = ('tzids must be a callable, mapping, or None, '
|
| 1588 |
+
'not %s' % tzids)
|
| 1589 |
+
raise ValueError(msg)
|
| 1590 |
+
|
| 1591 |
+
TZID = tzlookup(tzkey)
|
| 1592 |
+
continue
|
| 1593 |
+
|
| 1594 |
+
# RFC 5445 3.8.2.4: The VALUE parameter is optional, but may be found
|
| 1595 |
+
# only once.
|
| 1596 |
+
if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}:
|
| 1597 |
+
raise ValueError("unsupported parm: " + parm)
|
| 1598 |
+
else:
|
| 1599 |
+
if value_found:
|
| 1600 |
+
msg = ("Duplicate value parameter found in: " + parm)
|
| 1601 |
+
raise ValueError(msg)
|
| 1602 |
+
value_found = True
|
| 1603 |
+
|
| 1604 |
+
for datestr in date_value.split(','):
|
| 1605 |
+
date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos)
|
| 1606 |
+
if TZID is not None:
|
| 1607 |
+
if date.tzinfo is None:
|
| 1608 |
+
date = date.replace(tzinfo=TZID)
|
| 1609 |
+
else:
|
| 1610 |
+
raise ValueError('DTSTART/EXDATE specifies multiple timezone')
|
| 1611 |
+
datevals.append(date)
|
| 1612 |
+
|
| 1613 |
+
return datevals
|
| 1614 |
+
|
| 1615 |
+
def _parse_rfc(self, s,
|
| 1616 |
+
dtstart=None,
|
| 1617 |
+
cache=False,
|
| 1618 |
+
unfold=False,
|
| 1619 |
+
forceset=False,
|
| 1620 |
+
compatible=False,
|
| 1621 |
+
ignoretz=False,
|
| 1622 |
+
tzids=None,
|
| 1623 |
+
tzinfos=None):
|
| 1624 |
+
global parser
|
| 1625 |
+
if compatible:
|
| 1626 |
+
forceset = True
|
| 1627 |
+
unfold = True
|
| 1628 |
+
|
| 1629 |
+
TZID_NAMES = dict(map(
|
| 1630 |
+
lambda x: (x.upper(), x),
|
| 1631 |
+
re.findall('TZID=(?P<name>[^:]+):', s)
|
| 1632 |
+
))
|
| 1633 |
+
s = s.upper()
|
| 1634 |
+
if not s.strip():
|
| 1635 |
+
raise ValueError("empty string")
|
| 1636 |
+
if unfold:
|
| 1637 |
+
lines = s.splitlines()
|
| 1638 |
+
i = 0
|
| 1639 |
+
while i < len(lines):
|
| 1640 |
+
line = lines[i].rstrip()
|
| 1641 |
+
if not line:
|
| 1642 |
+
del lines[i]
|
| 1643 |
+
elif i > 0 and line[0] == " ":
|
| 1644 |
+
lines[i-1] += line[1:]
|
| 1645 |
+
del lines[i]
|
| 1646 |
+
else:
|
| 1647 |
+
i += 1
|
| 1648 |
+
else:
|
| 1649 |
+
lines = s.split()
|
| 1650 |
+
if (not forceset and len(lines) == 1 and (s.find(':') == -1 or
|
| 1651 |
+
s.startswith('RRULE:'))):
|
| 1652 |
+
return self._parse_rfc_rrule(lines[0], cache=cache,
|
| 1653 |
+
dtstart=dtstart, ignoretz=ignoretz,
|
| 1654 |
+
tzinfos=tzinfos)
|
| 1655 |
+
else:
|
| 1656 |
+
rrulevals = []
|
| 1657 |
+
rdatevals = []
|
| 1658 |
+
exrulevals = []
|
| 1659 |
+
exdatevals = []
|
| 1660 |
+
for line in lines:
|
| 1661 |
+
if not line:
|
| 1662 |
+
continue
|
| 1663 |
+
if line.find(':') == -1:
|
| 1664 |
+
name = "RRULE"
|
| 1665 |
+
value = line
|
| 1666 |
+
else:
|
| 1667 |
+
name, value = line.split(':', 1)
|
| 1668 |
+
parms = name.split(';')
|
| 1669 |
+
if not parms:
|
| 1670 |
+
raise ValueError("empty property name")
|
| 1671 |
+
name = parms[0]
|
| 1672 |
+
parms = parms[1:]
|
| 1673 |
+
if name == "RRULE":
|
| 1674 |
+
for parm in parms:
|
| 1675 |
+
raise ValueError("unsupported RRULE parm: "+parm)
|
| 1676 |
+
rrulevals.append(value)
|
| 1677 |
+
elif name == "RDATE":
|
| 1678 |
+
for parm in parms:
|
| 1679 |
+
if parm != "VALUE=DATE-TIME":
|
| 1680 |
+
raise ValueError("unsupported RDATE parm: "+parm)
|
| 1681 |
+
rdatevals.append(value)
|
| 1682 |
+
elif name == "EXRULE":
|
| 1683 |
+
for parm in parms:
|
| 1684 |
+
raise ValueError("unsupported EXRULE parm: "+parm)
|
| 1685 |
+
exrulevals.append(value)
|
| 1686 |
+
elif name == "EXDATE":
|
| 1687 |
+
exdatevals.extend(
|
| 1688 |
+
self._parse_date_value(value, parms,
|
| 1689 |
+
TZID_NAMES, ignoretz,
|
| 1690 |
+
tzids, tzinfos)
|
| 1691 |
+
)
|
| 1692 |
+
elif name == "DTSTART":
|
| 1693 |
+
dtvals = self._parse_date_value(value, parms, TZID_NAMES,
|
| 1694 |
+
ignoretz, tzids, tzinfos)
|
| 1695 |
+
if len(dtvals) != 1:
|
| 1696 |
+
raise ValueError("Multiple DTSTART values specified:" +
|
| 1697 |
+
value)
|
| 1698 |
+
dtstart = dtvals[0]
|
| 1699 |
+
else:
|
| 1700 |
+
raise ValueError("unsupported property: "+name)
|
| 1701 |
+
if (forceset or len(rrulevals) > 1 or rdatevals
|
| 1702 |
+
or exrulevals or exdatevals):
|
| 1703 |
+
if not parser and (rdatevals or exdatevals):
|
| 1704 |
+
from dateutil import parser
|
| 1705 |
+
rset = rruleset(cache=cache)
|
| 1706 |
+
for value in rrulevals:
|
| 1707 |
+
rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart,
|
| 1708 |
+
ignoretz=ignoretz,
|
| 1709 |
+
tzinfos=tzinfos))
|
| 1710 |
+
for value in rdatevals:
|
| 1711 |
+
for datestr in value.split(','):
|
| 1712 |
+
rset.rdate(parser.parse(datestr,
|
| 1713 |
+
ignoretz=ignoretz,
|
| 1714 |
+
tzinfos=tzinfos))
|
| 1715 |
+
for value in exrulevals:
|
| 1716 |
+
rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart,
|
| 1717 |
+
ignoretz=ignoretz,
|
| 1718 |
+
tzinfos=tzinfos))
|
| 1719 |
+
for value in exdatevals:
|
| 1720 |
+
rset.exdate(value)
|
| 1721 |
+
if compatible and dtstart:
|
| 1722 |
+
rset.rdate(dtstart)
|
| 1723 |
+
return rset
|
| 1724 |
+
else:
|
| 1725 |
+
return self._parse_rfc_rrule(rrulevals[0],
|
| 1726 |
+
dtstart=dtstart,
|
| 1727 |
+
cache=cache,
|
| 1728 |
+
ignoretz=ignoretz,
|
| 1729 |
+
tzinfos=tzinfos)
|
| 1730 |
+
|
| 1731 |
+
def __call__(self, s, **kwargs):
|
| 1732 |
+
return self._parse_rfc(s, **kwargs)
|
| 1733 |
+
|
| 1734 |
+
|
| 1735 |
+
rrulestr = _rrulestr()
|
| 1736 |
+
|
| 1737 |
+
# vim:ts=4:sw=4:et
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tz/__init__.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
from .tz import *
|
| 3 |
+
from .tz import __doc__
|
| 4 |
+
|
| 5 |
+
__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
|
| 6 |
+
"tzstr", "tzical", "tzwin", "tzwinlocal", "gettz",
|
| 7 |
+
"enfold", "datetime_ambiguous", "datetime_exists",
|
| 8 |
+
"resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class DeprecatedTzFormatWarning(Warning):
|
| 12 |
+
"""Warning raised when time zones are parsed from deprecated formats."""
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/tzwin.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# tzwin has moved to dateutil.tz.win
|
| 2 |
+
from .tz.win import *
|
evalkit_internvl/lib/python3.10/site-packages/dateutil/utils.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
This module offers general convenience and utility functions for dealing with
|
| 4 |
+
datetimes.
|
| 5 |
+
|
| 6 |
+
.. versionadded:: 2.7.0
|
| 7 |
+
"""
|
| 8 |
+
from __future__ import unicode_literals
|
| 9 |
+
|
| 10 |
+
from datetime import datetime, time
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def today(tzinfo=None):
|
| 14 |
+
"""
|
| 15 |
+
Returns a :py:class:`datetime` representing the current day at midnight
|
| 16 |
+
|
| 17 |
+
:param tzinfo:
|
| 18 |
+
The time zone to attach (also used to determine the current day).
|
| 19 |
+
|
| 20 |
+
:return:
|
| 21 |
+
A :py:class:`datetime.datetime` object representing the current day
|
| 22 |
+
at midnight.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
dt = datetime.now(tzinfo)
|
| 26 |
+
return datetime.combine(dt.date(), time(0, tzinfo=tzinfo))
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def default_tzinfo(dt, tzinfo):
|
| 30 |
+
"""
|
| 31 |
+
Sets the ``tzinfo`` parameter on naive datetimes only
|
| 32 |
+
|
| 33 |
+
This is useful for example when you are provided a datetime that may have
|
| 34 |
+
either an implicit or explicit time zone, such as when parsing a time zone
|
| 35 |
+
string.
|
| 36 |
+
|
| 37 |
+
.. doctest::
|
| 38 |
+
|
| 39 |
+
>>> from dateutil.tz import tzoffset
|
| 40 |
+
>>> from dateutil.parser import parse
|
| 41 |
+
>>> from dateutil.utils import default_tzinfo
|
| 42 |
+
>>> dflt_tz = tzoffset("EST", -18000)
|
| 43 |
+
>>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz))
|
| 44 |
+
2014-01-01 12:30:00+00:00
|
| 45 |
+
>>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz))
|
| 46 |
+
2014-01-01 12:30:00-05:00
|
| 47 |
+
|
| 48 |
+
:param dt:
|
| 49 |
+
The datetime on which to replace the time zone
|
| 50 |
+
|
| 51 |
+
:param tzinfo:
|
| 52 |
+
The :py:class:`datetime.tzinfo` subclass instance to assign to
|
| 53 |
+
``dt`` if (and only if) it is naive.
|
| 54 |
+
|
| 55 |
+
:return:
|
| 56 |
+
Returns an aware :py:class:`datetime.datetime`.
|
| 57 |
+
"""
|
| 58 |
+
if dt.tzinfo is not None:
|
| 59 |
+
return dt
|
| 60 |
+
else:
|
| 61 |
+
return dt.replace(tzinfo=tzinfo)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def within_delta(dt1, dt2, delta):
|
| 65 |
+
"""
|
| 66 |
+
Useful for comparing two datetimes that may have a negligible difference
|
| 67 |
+
to be considered equal.
|
| 68 |
+
"""
|
| 69 |
+
delta = abs(delta)
|
| 70 |
+
difference = dt1 - dt2
|
| 71 |
+
return -delta <= difference <= delta
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (2.75 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_api.cpython-310.pyc
ADDED
|
Binary file (3.27 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_exceptions.cpython-310.pyc
ADDED
|
Binary file (2.41 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_models.cpython-310.pyc
ADDED
|
Binary file (15.7 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_ssl.cpython-310.pyc
ADDED
|
Binary file (435 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_synchronization.cpython-310.pyc
ADDED
|
Binary file (7.85 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_trace.cpython-310.pyc
ADDED
|
Binary file (3.54 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/__pycache__/_utils.cpython-310.pyc
ADDED
|
Binary file (875 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__init__.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .connection import AsyncHTTPConnection
|
| 2 |
+
from .connection_pool import AsyncConnectionPool
|
| 3 |
+
from .http11 import AsyncHTTP11Connection
|
| 4 |
+
from .http_proxy import AsyncHTTPProxy
|
| 5 |
+
from .interfaces import AsyncConnectionInterface
|
| 6 |
+
|
| 7 |
+
try:
|
| 8 |
+
from .http2 import AsyncHTTP2Connection
|
| 9 |
+
except ImportError: # pragma: nocover
|
| 10 |
+
|
| 11 |
+
class AsyncHTTP2Connection: # type: ignore
|
| 12 |
+
def __init__(self, *args, **kwargs) -> None: # type: ignore
|
| 13 |
+
raise RuntimeError(
|
| 14 |
+
"Attempted to use http2 support, but the `h2` package is not "
|
| 15 |
+
"installed. Use 'pip install httpcore[http2]'."
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
try:
|
| 20 |
+
from .socks_proxy import AsyncSOCKSProxy
|
| 21 |
+
except ImportError: # pragma: nocover
|
| 22 |
+
|
| 23 |
+
class AsyncSOCKSProxy: # type: ignore
|
| 24 |
+
def __init__(self, *args, **kwargs) -> None: # type: ignore
|
| 25 |
+
raise RuntimeError(
|
| 26 |
+
"Attempted to use SOCKS support, but the `socksio` package is not "
|
| 27 |
+
"installed. Use 'pip install httpcore[socks]'."
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
__all__ = [
|
| 32 |
+
"AsyncHTTPConnection",
|
| 33 |
+
"AsyncConnectionPool",
|
| 34 |
+
"AsyncHTTPProxy",
|
| 35 |
+
"AsyncHTTP11Connection",
|
| 36 |
+
"AsyncHTTP2Connection",
|
| 37 |
+
"AsyncConnectionInterface",
|
| 38 |
+
"AsyncSOCKSProxy",
|
| 39 |
+
]
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.43 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/connection.cpython-310.pyc
ADDED
|
Binary file (6.53 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-310.pyc
ADDED
|
Binary file (11.5 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/http11.cpython-310.pyc
ADDED
|
Binary file (9.73 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/http2.cpython-310.pyc
ADDED
|
Binary file (16.4 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-310.pyc
ADDED
|
Binary file (12.2 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/interfaces.cpython-310.pyc
ADDED
|
Binary file (4.42 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-310.pyc
ADDED
|
Binary file (10.1 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/connection.py
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import itertools
|
| 2 |
+
import logging
|
| 3 |
+
import ssl
|
| 4 |
+
from types import TracebackType
|
| 5 |
+
from typing import Iterable, Iterator, Optional, Type
|
| 6 |
+
|
| 7 |
+
from .._backends.auto import AutoBackend
|
| 8 |
+
from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream
|
| 9 |
+
from .._exceptions import ConnectError, ConnectionNotAvailable, ConnectTimeout
|
| 10 |
+
from .._models import Origin, Request, Response
|
| 11 |
+
from .._ssl import default_ssl_context
|
| 12 |
+
from .._synchronization import AsyncLock
|
| 13 |
+
from .._trace import Trace
|
| 14 |
+
from .http11 import AsyncHTTP11Connection
|
| 15 |
+
from .interfaces import AsyncConnectionInterface
|
| 16 |
+
|
| 17 |
+
RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc.
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
logger = logging.getLogger("httpcore.connection")
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def exponential_backoff(factor: float) -> Iterator[float]:
|
| 24 |
+
yield 0
|
| 25 |
+
for n in itertools.count(2):
|
| 26 |
+
yield factor * (2 ** (n - 2))
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class AsyncHTTPConnection(AsyncConnectionInterface):
|
| 30 |
+
def __init__(
|
| 31 |
+
self,
|
| 32 |
+
origin: Origin,
|
| 33 |
+
ssl_context: Optional[ssl.SSLContext] = None,
|
| 34 |
+
keepalive_expiry: Optional[float] = None,
|
| 35 |
+
http1: bool = True,
|
| 36 |
+
http2: bool = False,
|
| 37 |
+
retries: int = 0,
|
| 38 |
+
local_address: Optional[str] = None,
|
| 39 |
+
uds: Optional[str] = None,
|
| 40 |
+
network_backend: Optional[AsyncNetworkBackend] = None,
|
| 41 |
+
socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
|
| 42 |
+
) -> None:
|
| 43 |
+
self._origin = origin
|
| 44 |
+
self._ssl_context = ssl_context
|
| 45 |
+
self._keepalive_expiry = keepalive_expiry
|
| 46 |
+
self._http1 = http1
|
| 47 |
+
self._http2 = http2
|
| 48 |
+
self._retries = retries
|
| 49 |
+
self._local_address = local_address
|
| 50 |
+
self._uds = uds
|
| 51 |
+
|
| 52 |
+
self._network_backend: AsyncNetworkBackend = (
|
| 53 |
+
AutoBackend() if network_backend is None else network_backend
|
| 54 |
+
)
|
| 55 |
+
self._connection: Optional[AsyncConnectionInterface] = None
|
| 56 |
+
self._connect_failed: bool = False
|
| 57 |
+
self._request_lock = AsyncLock()
|
| 58 |
+
self._socket_options = socket_options
|
| 59 |
+
|
| 60 |
+
async def handle_async_request(self, request: Request) -> Response:
|
| 61 |
+
if not self.can_handle_request(request.url.origin):
|
| 62 |
+
raise RuntimeError(
|
| 63 |
+
f"Attempted to send request to {request.url.origin} on connection to {self._origin}"
|
| 64 |
+
)
|
| 65 |
+
|
| 66 |
+
async with self._request_lock:
|
| 67 |
+
if self._connection is None:
|
| 68 |
+
try:
|
| 69 |
+
stream = await self._connect(request)
|
| 70 |
+
|
| 71 |
+
ssl_object = stream.get_extra_info("ssl_object")
|
| 72 |
+
http2_negotiated = (
|
| 73 |
+
ssl_object is not None
|
| 74 |
+
and ssl_object.selected_alpn_protocol() == "h2"
|
| 75 |
+
)
|
| 76 |
+
if http2_negotiated or (self._http2 and not self._http1):
|
| 77 |
+
from .http2 import AsyncHTTP2Connection
|
| 78 |
+
|
| 79 |
+
self._connection = AsyncHTTP2Connection(
|
| 80 |
+
origin=self._origin,
|
| 81 |
+
stream=stream,
|
| 82 |
+
keepalive_expiry=self._keepalive_expiry,
|
| 83 |
+
)
|
| 84 |
+
else:
|
| 85 |
+
self._connection = AsyncHTTP11Connection(
|
| 86 |
+
origin=self._origin,
|
| 87 |
+
stream=stream,
|
| 88 |
+
keepalive_expiry=self._keepalive_expiry,
|
| 89 |
+
)
|
| 90 |
+
except Exception as exc:
|
| 91 |
+
self._connect_failed = True
|
| 92 |
+
raise exc
|
| 93 |
+
elif not self._connection.is_available():
|
| 94 |
+
raise ConnectionNotAvailable()
|
| 95 |
+
|
| 96 |
+
return await self._connection.handle_async_request(request)
|
| 97 |
+
|
| 98 |
+
    async def _connect(self, request: Request) -> AsyncNetworkStream:
        """Establish the network stream for `request`, retrying on failure.

        Opens either a TCP connection or a Unix domain socket (when
        `self._uds` is set), then upgrades to TLS for "https" origins.
        `ConnectError` / `ConnectTimeout` are retried up to `self._retries`
        times with exponential backoff; other exceptions propagate.
        """
        timeouts = request.extensions.get("timeout", {})
        sni_hostname = request.extensions.get("sni_hostname", None)
        timeout = timeouts.get("connect", None)

        retries_left = self._retries
        delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR)

        while True:
            try:
                if self._uds is None:
                    kwargs = {
                        "host": self._origin.host.decode("ascii"),
                        "port": self._origin.port,
                        "local_address": self._local_address,
                        "timeout": timeout,
                        "socket_options": self._socket_options,
                    }
                    async with Trace("connect_tcp", logger, request, kwargs) as trace:
                        stream = await self._network_backend.connect_tcp(**kwargs)
                        trace.return_value = stream
                else:
                    kwargs = {
                        "path": self._uds,
                        "timeout": timeout,
                        "socket_options": self._socket_options,
                    }
                    async with Trace(
                        "connect_unix_socket", logger, request, kwargs
                    ) as trace:
                        stream = await self._network_backend.connect_unix_socket(
                            **kwargs
                        )
                        trace.return_value = stream

                if self._origin.scheme == b"https":
                    # Fall back to the library default SSL context when the
                    # caller did not supply one.
                    ssl_context = (
                        default_ssl_context()
                        if self._ssl_context is None
                        else self._ssl_context
                    )
                    # Only offer "h2" via ALPN when HTTP/2 support is enabled.
                    alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"]
                    ssl_context.set_alpn_protocols(alpn_protocols)

                    kwargs = {
                        "ssl_context": ssl_context,
                        # Explicit SNI hostname (if given) wins over the origin host.
                        "server_hostname": sni_hostname
                        or self._origin.host.decode("ascii"),
                        "timeout": timeout,
                    }
                    async with Trace("start_tls", logger, request, kwargs) as trace:
                        stream = await stream.start_tls(**kwargs)
                        trace.return_value = stream
                return stream
            except (ConnectError, ConnectTimeout):
                if retries_left <= 0:
                    raise
                retries_left -= 1
                delay = next(delays)
                async with Trace("retry", logger, request, kwargs) as trace:
                    await self._network_backend.sleep(delay)
|
| 159 |
+
|
| 160 |
+
def can_handle_request(self, origin: Origin) -> bool:
|
| 161 |
+
return origin == self._origin
|
| 162 |
+
|
| 163 |
+
    async def aclose(self) -> None:
        """Close the underlying connection, if one has been established.

        A no-op when no connection was ever created (e.g. connect failed).
        """
        if self._connection is not None:
            async with Trace("close", logger, None, {}):
                await self._connection.aclose()
|
| 167 |
+
|
| 168 |
+
def is_available(self) -> bool:
|
| 169 |
+
if self._connection is None:
|
| 170 |
+
# If HTTP/2 support is enabled, and the resulting connection could
|
| 171 |
+
# end up as HTTP/2 then we should indicate the connection as being
|
| 172 |
+
# available to service multiple requests.
|
| 173 |
+
return (
|
| 174 |
+
self._http2
|
| 175 |
+
and (self._origin.scheme == b"https" or not self._http1)
|
| 176 |
+
and not self._connect_failed
|
| 177 |
+
)
|
| 178 |
+
return self._connection.is_available()
|
| 179 |
+
|
| 180 |
+
def has_expired(self) -> bool:
|
| 181 |
+
if self._connection is None:
|
| 182 |
+
return self._connect_failed
|
| 183 |
+
return self._connection.has_expired()
|
| 184 |
+
|
| 185 |
+
def is_idle(self) -> bool:
|
| 186 |
+
if self._connection is None:
|
| 187 |
+
return self._connect_failed
|
| 188 |
+
return self._connection.is_idle()
|
| 189 |
+
|
| 190 |
+
def is_closed(self) -> bool:
|
| 191 |
+
if self._connection is None:
|
| 192 |
+
return self._connect_failed
|
| 193 |
+
return self._connection.is_closed()
|
| 194 |
+
|
| 195 |
+
def info(self) -> str:
|
| 196 |
+
if self._connection is None:
|
| 197 |
+
return "CONNECTION FAILED" if self._connect_failed else "CONNECTING"
|
| 198 |
+
return self._connection.info()
|
| 199 |
+
|
| 200 |
+
    def __repr__(self) -> str:
        # e.g. "<AsyncHTTPConnection [CONNECTING]>"
        return f"<{self.__class__.__name__} [{self.info()}]>"
|
| 202 |
+
|
| 203 |
+
# These context managers are not used in the standard flow, but are
|
| 204 |
+
# useful for testing or working with connection instances directly.
|
| 205 |
+
|
| 206 |
+
    async def __aenter__(self) -> "AsyncHTTPConnection":
        # Entering the context simply hands back the connection itself.
        return self
|
| 208 |
+
|
| 209 |
+
    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        traceback: Optional[TracebackType] = None,
    ) -> None:
        # Closing is unconditional: the connection is unusable after exit.
        await self.aclose()
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/connection_pool.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import ssl
|
| 2 |
+
import sys
|
| 3 |
+
from types import TracebackType
|
| 4 |
+
from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type
|
| 5 |
+
|
| 6 |
+
from .._backends.auto import AutoBackend
|
| 7 |
+
from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend
|
| 8 |
+
from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol
|
| 9 |
+
from .._models import Origin, Request, Response
|
| 10 |
+
from .._synchronization import AsyncEvent, AsyncLock, AsyncShieldCancellation
|
| 11 |
+
from .connection import AsyncHTTPConnection
|
| 12 |
+
from .interfaces import AsyncConnectionInterface, AsyncRequestInterface
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class RequestStatus:
    """Tracks a queued request while it waits for a pooled connection.

    `set_connection()` / `unset_connection()` are called by the pool while
    holding its lock; `wait_for_connection()` is awaited by the requesting
    task.
    """

    def __init__(self, request: Request):
        self.request = request
        # The connection assigned to this request, once one is acquired.
        self.connection: Optional[AsyncConnectionInterface] = None
        self._connection_acquired = AsyncEvent()

    def set_connection(self, connection: AsyncConnectionInterface) -> None:
        """Assign a connection and wake any task blocked in `wait_for_connection`."""
        assert self.connection is None
        self.connection = connection
        self._connection_acquired.set()

    def unset_connection(self) -> None:
        """Return this request to the queued state (e.g. to retry on a new connection)."""
        assert self.connection is not None
        self.connection = None
        # Replace the event so a later `set_connection` can signal again.
        self._connection_acquired = AsyncEvent()

    async def wait_for_connection(
        self, timeout: Optional[float] = None
    ) -> AsyncConnectionInterface:
        """Block until a connection has been assigned, then return it."""
        if self.connection is None:
            await self._connection_acquired.wait(timeout=timeout)
        assert self.connection is not None
        return self.connection
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class AsyncConnectionPool(AsyncRequestInterface):
    """
    A connection pool for making HTTP requests.
    """

    def __init__(
        self,
        ssl_context: Optional[ssl.SSLContext] = None,
        max_connections: Optional[int] = 10,
        max_keepalive_connections: Optional[int] = None,
        keepalive_expiry: Optional[float] = None,
        http1: bool = True,
        http2: bool = False,
        retries: int = 0,
        local_address: Optional[str] = None,
        uds: Optional[str] = None,
        network_backend: Optional[AsyncNetworkBackend] = None,
        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
    ) -> None:
        """
        A connection pool for making HTTP requests.

        Parameters:
            ssl_context: An SSL context to use for verifying connections.
                If not specified, the default `httpcore.default_ssl_context()`
                will be used.
            max_connections: The maximum number of concurrent HTTP connections that
                the pool should allow. Any attempt to send a request on a pool that
                would exceed this amount will block until a connection is available.
            max_keepalive_connections: The maximum number of idle HTTP connections
                that will be maintained in the pool.
            keepalive_expiry: The duration in seconds that an idle HTTP connection
                may be maintained for before being expired from the pool.
            http1: A boolean indicating if HTTP/1.1 requests should be supported
                by the connection pool. Defaults to True.
            http2: A boolean indicating if HTTP/2 requests should be supported by
                the connection pool. Defaults to False.
            retries: The maximum number of retries when trying to establish a
                connection.
            local_address: Local address to connect from. Can also be used to connect
                using a particular address family. Using `local_address="0.0.0.0"`
                will connect using an `AF_INET` address (IPv4), while using
                `local_address="::"` will connect using an `AF_INET6` address (IPv6).
            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
            network_backend: A backend instance to use for handling network I/O.
            socket_options: Socket options that have to be included
                in the TCP socket when the connection was established.
        """
        self._ssl_context = ssl_context

        # `None` means "no limit"; represent that as sys.maxsize internally.
        self._max_connections = (
            sys.maxsize if max_connections is None else max_connections
        )
        self._max_keepalive_connections = (
            sys.maxsize
            if max_keepalive_connections is None
            else max_keepalive_connections
        )
        # The keep-alive limit can never meaningfully exceed the connection limit.
        self._max_keepalive_connections = min(
            self._max_connections, self._max_keepalive_connections
        )

        self._keepalive_expiry = keepalive_expiry
        self._http1 = http1
        self._http2 = http2
        self._retries = retries
        self._local_address = local_address
        self._uds = uds

        # Connections currently owned by the pool, most recently used first.
        self._pool: List[AsyncConnectionInterface] = []
        # In-flight and queued requests, in arrival order.
        self._requests: List[RequestStatus] = []
        self._pool_lock = AsyncLock()
        self._network_backend = (
            AutoBackend() if network_backend is None else network_backend
        )
        self._socket_options = socket_options

    def create_connection(self, origin: Origin) -> AsyncConnectionInterface:
        # Factory hook: subclasses may override to create different connection types.
        return AsyncHTTPConnection(
            origin=origin,
            ssl_context=self._ssl_context,
            keepalive_expiry=self._keepalive_expiry,
            http1=self._http1,
            http2=self._http2,
            retries=self._retries,
            local_address=self._local_address,
            uds=self._uds,
            network_backend=self._network_backend,
            socket_options=self._socket_options,
        )

    @property
    def connections(self) -> List[AsyncConnectionInterface]:
        """
        Return a list of the connections currently in the pool.

        For example:

        ```python
        >>> pool.connections
        [
            <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>,
            <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]> ,
            <AsyncHTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>,
        ]
        ```
        """
        return list(self._pool)

    async def _attempt_to_acquire_connection(self, status: RequestStatus) -> bool:
        """
        Attempt to provide a connection that can handle the given origin.

        Must be called while holding `self._pool_lock`.  Returns True when a
        connection was assigned to `status`.
        """
        origin = status.request.url.origin

        # If there are queued requests in front of us, then don't acquire a
        # connection. We handle requests strictly in order.
        waiting = [s for s in self._requests if s.connection is None]
        if waiting and waiting[0] is not status:
            return False

        # Reuse an existing connection if one is currently available.
        for idx, connection in enumerate(self._pool):
            if connection.can_handle_request(origin) and connection.is_available():
                # Move the reused connection to the head of the pool (MRU order).
                self._pool.pop(idx)
                self._pool.insert(0, connection)
                status.set_connection(connection)
                return True

        # If the pool is currently full, attempt to close one idle connection.
        if len(self._pool) >= self._max_connections:
            # Scan from the least-recently-used end of the pool.
            for idx, connection in reversed(list(enumerate(self._pool))):
                if connection.is_idle():
                    await connection.aclose()
                    self._pool.pop(idx)
                    break

        # If the pool is still full, then we cannot acquire a connection.
        if len(self._pool) >= self._max_connections:
            return False

        # Otherwise create a new connection.
        connection = self.create_connection(origin)
        self._pool.insert(0, connection)
        status.set_connection(connection)
        return True

    async def _close_expired_connections(self) -> None:
        """
        Clean up the connection pool by closing off any connections that have expired.
        """
        # Close any connections that have expired their keep-alive time.
        # Iterate in reverse so that pop(idx) does not shift unvisited items.
        for idx, connection in reversed(list(enumerate(self._pool))):
            if connection.has_expired():
                await connection.aclose()
                self._pool.pop(idx)

        # If the pool size exceeds the maximum number of allowed keep-alive connections,
        # then close off idle connections as required.
        pool_size = len(self._pool)
        for idx, connection in reversed(list(enumerate(self._pool))):
            if connection.is_idle() and pool_size > self._max_keepalive_connections:
                await connection.aclose()
                self._pool.pop(idx)
                pool_size -= 1

    async def handle_async_request(self, request: Request) -> Response:
        """
        Send an HTTP request, and return an HTTP response.

        This is the core implementation that is called into by `.request()` or `.stream()`.
        """
        scheme = request.url.scheme.decode()
        if scheme == "":
            raise UnsupportedProtocol(
                "Request URL is missing an 'http://' or 'https://' protocol."
            )
        if scheme not in ("http", "https", "ws", "wss"):
            raise UnsupportedProtocol(
                f"Request URL has an unsupported protocol '{scheme}://'."
            )

        status = RequestStatus(request)

        async with self._pool_lock:
            self._requests.append(status)
            await self._close_expired_connections()
            await self._attempt_to_acquire_connection(status)

        while True:
            timeouts = request.extensions.get("timeout", {})
            timeout = timeouts.get("pool", None)
            try:
                connection = await status.wait_for_connection(timeout=timeout)
            except BaseException as exc:
                # If we timeout here, or if the task is cancelled, then make
                # sure to remove the request from the queue before bubbling
                # up the exception.
                async with self._pool_lock:
                    # Ensure only remove when task exists.
                    if status in self._requests:
                        self._requests.remove(status)
                raise exc

            try:
                response = await connection.handle_async_request(request)
            except ConnectionNotAvailable:
                # The ConnectionNotAvailable exception is a special case, that
                # indicates we need to retry the request on a new connection.
                #
                # The most common case where this can occur is when multiple
                # requests are queued waiting for a single connection, which
                # might end up as an HTTP/2 connection, but which actually ends
                # up as HTTP/1.1.
                async with self._pool_lock:
                    # Maintain our position in the request queue, but reset the
                    # status so that the request becomes queued again.
                    status.unset_connection()
                    await self._attempt_to_acquire_connection(status)
            except BaseException as exc:
                # Shield the cleanup notification from cancellation so the
                # pool bookkeeping always runs.
                with AsyncShieldCancellation():
                    await self.response_closed(status)
                raise exc
            else:
                break

        # When we return the response, we wrap the stream in a special class
        # that handles notifying the connection pool once the response
        # has been released.
        assert isinstance(response.stream, AsyncIterable)
        return Response(
            status=response.status,
            headers=response.headers,
            content=ConnectionPoolByteStream(response.stream, self, status),
            extensions=response.extensions,
        )

    async def response_closed(self, status: RequestStatus) -> None:
        """
        This method acts as a callback once the request/response cycle is complete.

        It is called into from the `ConnectionPoolByteStream.aclose()` method.
        """
        assert status.connection is not None
        connection = status.connection

        async with self._pool_lock:
            # Update the state of the connection pool.
            if status in self._requests:
                self._requests.remove(status)

            if connection.is_closed() and connection in self._pool:
                self._pool.remove(connection)

            # Since we've had a response closed, it's possible we'll now be able
            # to service one or more requests that are currently pending.
            for status in self._requests:
                if status.connection is None:
                    acquired = await self._attempt_to_acquire_connection(status)
                    # If we could not acquire a connection for a queued request
                    # then we don't need to check anymore requests that are
                    # queued later behind it.
                    if not acquired:
                        break

            # Housekeeping.
            await self._close_expired_connections()

    async def aclose(self) -> None:
        """
        Close any connections in the pool.
        """
        async with self._pool_lock:
            for connection in self._pool:
                await connection.aclose()
            self._pool = []
            self._requests = []

    async def __aenter__(self) -> "AsyncConnectionPool":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        traceback: Optional[TracebackType] = None,
    ) -> None:
        await self.aclose()
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
class ConnectionPoolByteStream:
    """
    A wrapper around the response byte stream, that additionally handles
    notifying the connection pool when the response has been closed.
    """

    def __init__(
        self,
        stream: AsyncIterable[bytes],
        pool: AsyncConnectionPool,
        status: RequestStatus,
    ) -> None:
        self._stream = stream
        self._pool = pool
        self._status = status

    async def __aiter__(self) -> AsyncIterator[bytes]:
        # Pass the body through unchanged; cleanup happens in `aclose()`.
        async for part in self._stream:
            yield part

    async def aclose(self) -> None:
        try:
            if hasattr(self._stream, "aclose"):
                await self._stream.aclose()
        finally:
            # Always notify the pool, even if closing the stream raised,
            # and shield the notification from task cancellation.
            with AsyncShieldCancellation():
                await self._pool.response_closed(self._status)
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/http11.py
ADDED
|
@@ -0,0 +1,331 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import enum
|
| 2 |
+
import logging
|
| 3 |
+
import time
|
| 4 |
+
from types import TracebackType
|
| 5 |
+
from typing import (
|
| 6 |
+
AsyncIterable,
|
| 7 |
+
AsyncIterator,
|
| 8 |
+
List,
|
| 9 |
+
Optional,
|
| 10 |
+
Tuple,
|
| 11 |
+
Type,
|
| 12 |
+
Union,
|
| 13 |
+
cast,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
import h11
|
| 17 |
+
|
| 18 |
+
from .._backends.base import AsyncNetworkStream
|
| 19 |
+
from .._exceptions import (
|
| 20 |
+
ConnectionNotAvailable,
|
| 21 |
+
LocalProtocolError,
|
| 22 |
+
RemoteProtocolError,
|
| 23 |
+
map_exceptions,
|
| 24 |
+
)
|
| 25 |
+
from .._models import Origin, Request, Response
|
| 26 |
+
from .._synchronization import AsyncLock, AsyncShieldCancellation
|
| 27 |
+
from .._trace import Trace
|
| 28 |
+
from .interfaces import AsyncConnectionInterface
|
| 29 |
+
|
| 30 |
+
logger = logging.getLogger("httpcore.http11")
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# A subset of `h11.Event` types supported by `_send_event`
|
| 34 |
+
H11SendEvent = Union[
|
| 35 |
+
h11.Request,
|
| 36 |
+
h11.Data,
|
| 37 |
+
h11.EndOfMessage,
|
| 38 |
+
]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class HTTPConnectionState(enum.IntEnum):
    # Lifecycle of an HTTP/1.1 connection:
    # NEW -> ACTIVE (request in flight) -> IDLE (reusable) -> CLOSED.
    NEW = 0
    ACTIVE = 1
    IDLE = 2
    CLOSED = 3
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class AsyncHTTP11Connection(AsyncConnectionInterface):
|
| 49 |
+
READ_NUM_BYTES = 64 * 1024
|
| 50 |
+
MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024
|
| 51 |
+
|
| 52 |
+
    def __init__(
        self,
        origin: Origin,
        stream: AsyncNetworkStream,
        keepalive_expiry: Optional[float] = None,
    ) -> None:
        self._origin = origin
        self._network_stream = stream
        # How long (seconds) an idle connection may linger before expiring.
        self._keepalive_expiry: Optional[float] = keepalive_expiry
        # Monotonic deadline, set when the connection becomes IDLE.
        self._expire_at: Optional[float] = None
        self._state = HTTPConnectionState.NEW
        self._state_lock = AsyncLock()
        self._request_count = 0
        # h11 drives the HTTP/1.1 wire-protocol state machine for us.
        self._h11_state = h11.Connection(
            our_role=h11.CLIENT,
            max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
        )
|
| 69 |
+
|
| 70 |
+
    async def handle_async_request(self, request: Request) -> Response:
        """Send `request` over this HTTP/1.1 connection and return the response.

        Raises `RuntimeError` if the request targets a different origin, and
        `ConnectionNotAvailable` if the connection is not NEW or IDLE.
        """
        if not self.can_handle_request(request.url.origin):
            raise RuntimeError(
                f"Attempted to send request to {request.url.origin} on connection "
                f"to {self._origin}"
            )

        async with self._state_lock:
            if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
                self._request_count += 1
                self._state = HTTPConnectionState.ACTIVE
                self._expire_at = None
            else:
                raise ConnectionNotAvailable()

        try:
            kwargs = {"request": request}
            async with Trace("send_request_headers", logger, request, kwargs) as trace:
                await self._send_request_headers(**kwargs)
            async with Trace("send_request_body", logger, request, kwargs) as trace:
                await self._send_request_body(**kwargs)
            async with Trace(
                "receive_response_headers", logger, request, kwargs
            ) as trace:
                (
                    http_version,
                    status,
                    reason_phrase,
                    headers,
                ) = await self._receive_response_headers(**kwargs)
                trace.return_value = (
                    http_version,
                    status,
                    reason_phrase,
                    headers,
                )

            # The body is streamed lazily: iterating the returned content
            # drives `_receive_response_body`.
            return Response(
                status=status,
                headers=headers,
                content=HTTP11ConnectionByteStream(self, request),
                extensions={
                    "http_version": http_version,
                    "reason_phrase": reason_phrase,
                    "network_stream": self._network_stream,
                },
            )
        except BaseException as exc:
            # On any failure, recycle or tear down the connection before
            # re-raising, shielding the cleanup from task cancellation.
            with AsyncShieldCancellation():
                async with Trace("response_closed", logger, request) as trace:
                    await self._response_closed()
            raise exc
|
| 122 |
+
|
| 123 |
+
# Sending the request...
|
| 124 |
+
|
| 125 |
+
    async def _send_request_headers(self, request: Request) -> None:
        """Serialize and send the request line and headers."""
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("write", None)

        # h11 validates the request; translate its error into ours.
        with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
            event = h11.Request(
                method=request.method,
                target=request.url.target,
                headers=request.headers,
            )
        await self._send_event(event, timeout=timeout)
|
| 136 |
+
|
| 137 |
+
    async def _send_request_body(self, request: Request) -> None:
        """Stream the request body chunk by chunk, then signal end-of-message."""
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("write", None)

        assert isinstance(request.stream, AsyncIterable)
        async for chunk in request.stream:
            event = h11.Data(data=chunk)
            await self._send_event(event, timeout=timeout)

        await self._send_event(h11.EndOfMessage(), timeout=timeout)
|
| 147 |
+
|
| 148 |
+
    async def _send_event(
        self, event: h11.Event, timeout: Optional[float] = None
    ) -> None:
        """Serialize a single h11 event and write it to the network stream."""
        bytes_to_send = self._h11_state.send(event)
        # `send` may return None when there is nothing to put on the wire.
        if bytes_to_send is not None:
            await self._network_stream.write(bytes_to_send, timeout=timeout)
|
| 154 |
+
|
| 155 |
+
# Receiving the response...
|
| 156 |
+
|
| 157 |
+
    async def _receive_response_headers(
        self, request: Request
    ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]:
        """Read events until the response headers arrive.

        Returns `(http_version, status_code, reason_phrase, headers)`.
        A "101 Switching Protocols" informational response is surfaced like
        a final response (e.g. for WebSocket upgrades).
        """
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("read", None)

        while True:
            event = await self._receive_event(timeout=timeout)
            if isinstance(event, h11.Response):
                break
            if (
                isinstance(event, h11.InformationalResponse)
                and event.status_code == 101
            ):
                break

        http_version = b"HTTP/" + event.http_version

        # h11 version 0.11+ supports a `raw_items` interface to get the
        # raw header casing, rather than the enforced lowercase headers.
        headers = event.headers.raw_items()

        return http_version, event.status_code, event.reason, headers
|
| 180 |
+
|
| 181 |
+
    async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]:
        """Yield response body chunks until end-of-message (or h11 pauses)."""
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("read", None)

        while True:
            event = await self._receive_event(timeout=timeout)
            if isinstance(event, h11.Data):
                yield bytes(event.data)
            elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
                break
|
| 191 |
+
|
| 192 |
+
    async def _receive_event(
        self, timeout: Optional[float] = None
    ) -> Union[h11.Event, Type[h11.PAUSED]]:
        """Return the next h11 event, reading from the network as required."""
        while True:
            with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
                event = self._h11_state.next_event()

            if event is h11.NEED_DATA:
                data = await self._network_stream.read(
                    self.READ_NUM_BYTES, timeout=timeout
                )

                # If we feed this case through h11 we'll raise an exception like:
                #
                #     httpcore.RemoteProtocolError: can't handle event type
                #     ConnectionClosed when role=SERVER and state=SEND_RESPONSE
                #
                # Which is accurate, but not very informative from an end-user
                # perspective. Instead we handle this case distinctly and treat
                # it as a ConnectError.
                if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
                    msg = "Server disconnected without sending a response."
                    raise RemoteProtocolError(msg)

                self._h11_state.receive_data(data)
            else:
                # mypy fails to narrow the type in the above if statement above
                return cast(Union[h11.Event, Type[h11.PAUSED]], event)
|
| 220 |
+
|
| 221 |
+
    async def _response_closed(self) -> None:
        """Recycle the connection as IDLE after a clean exchange, else close it."""
        async with self._state_lock:
            if (
                self._h11_state.our_state is h11.DONE
                and self._h11_state.their_state is h11.DONE
            ):
                self._state = HTTPConnectionState.IDLE
                # Reset h11 so the connection can serve another request.
                self._h11_state.start_next_cycle()
                if self._keepalive_expiry is not None:
                    now = time.monotonic()
                    self._expire_at = now + self._keepalive_expiry
            else:
                # The exchange did not complete cleanly; the connection
                # cannot be safely reused.
                await self.aclose()
|
| 234 |
+
|
| 235 |
+
# Once the connection is no longer required...

async def aclose(self) -> None:
    """
    Unconditionally close the network stream and mark this connection CLOSED.

    Note that this method unilaterally closes the connection, and does
    not have any kind of locking in place around it.
    """
    self._state = HTTPConnectionState.CLOSED
    await self._network_stream.aclose()
|
| 242 |
+
|
| 243 |
+
# The AsyncConnectionInterface methods provide information about the state of
# the connection, allowing for a connection pooling implementation to
# determine when to reuse and when to close the connection...

def can_handle_request(self, origin: Origin) -> bool:
    """Return True if this connection serves requests for `origin`."""
    return self._origin == origin
|
| 249 |
+
|
| 250 |
+
def is_available(self) -> bool:
    """Return True if the pool may acquire this connection for a request."""
    # Note that HTTP/1.1 connections in the "NEW" state are not treated as
    # being "available". The control flow which created the connection will
    # be able to send an outgoing request, but the connection will not be
    # acquired from the connection pool for any other request.
    return self._state == HTTPConnectionState.IDLE
|
| 256 |
+
|
| 257 |
+
def has_expired(self) -> bool:
    """
    Return True once this connection should no longer be reused, either
    because its keep-alive deadline passed or the server disconnected.
    """
    # Keep-alive expiry: the idle deadline has passed.
    if self._expire_at is not None and time.monotonic() > self._expire_at:
        return True

    # If the HTTP connection is idle but the socket is readable, then the
    # only valid state is that the socket is about to return b"", indicating
    # a server-initiated disconnect.
    return (
        self._state == HTTPConnectionState.IDLE
        and self._network_stream.get_extra_info("is_readable")
    )
|
| 270 |
+
|
| 271 |
+
def is_idle(self) -> bool:
    """Return True if the connection is idle (open and reusable)."""
    return self._state == HTTPConnectionState.IDLE
|
| 273 |
+
|
| 274 |
+
def is_closed(self) -> bool:
    """Return True if the connection has been closed."""
    return self._state == HTTPConnectionState.CLOSED
|
| 276 |
+
|
| 277 |
+
def info(self) -> str:
    """Return a short human-readable summary of the connection state."""
    return (
        f"{str(self._origin)!r}, HTTP/1.1, {self._state.name}, "
        f"Request Count: {self._request_count}"
    )
|
| 283 |
+
|
| 284 |
+
def __repr__(self) -> str:
    """Debug representation including origin, state and request count."""
    origin = str(self._origin)
    return (
        f"<{self.__class__.__name__} [{origin!r}, {self._state.name}, "
        f"Request Count: {self._request_count}]>"
    )
|
| 291 |
+
|
| 292 |
+
# These context managers are not used in the standard flow, but are
# useful for testing or working with connection instances directly.

async def __aenter__(self) -> "AsyncHTTP11Connection":
    """Enter the `async with` block; no setup is required."""
    return self
|
| 297 |
+
|
| 298 |
+
async def __aexit__(
    self,
    exc_type: Optional[Type[BaseException]] = None,
    exc_value: Optional[BaseException] = None,
    traceback: Optional[TracebackType] = None,
) -> None:
    """Close the connection when leaving the `async with` block."""
    await self.aclose()
|
| 305 |
+
|
| 306 |
+
|
| 307 |
+
class HTTP11ConnectionByteStream:
    """
    Async-iterable wrapper around an HTTP/1.1 response body.

    Iterating yields the raw body chunks. Closing — explicitly or because
    iteration raised — notifies the owning connection via
    `_response_closed()` so it can be recycled or shut down.
    """

    def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None:
        self._connection = connection
        self._request = request
        self._closed = False

    async def __aiter__(self) -> AsyncIterator[bytes]:
        kwargs = {"request": self._request}
        try:
            async with Trace("receive_response_body", logger, self._request, kwargs):
                async for chunk in self._connection._receive_response_body(**kwargs):
                    yield chunk
        except BaseException as exc:
            # If we get an exception while streaming the response,
            # we want to close the response (and possibly the connection)
            # before raising that exception.
            with AsyncShieldCancellation():
                await self.aclose()
            raise exc

    async def aclose(self) -> None:
        # Idempotent: only the first call notifies the connection.
        if not self._closed:
            self._closed = True
            async with Trace("response_closed", logger, self._request):
                await self._connection._response_closed()
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/http2.py
ADDED
|
@@ -0,0 +1,589 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import enum
|
| 2 |
+
import logging
|
| 3 |
+
import time
|
| 4 |
+
import types
|
| 5 |
+
import typing
|
| 6 |
+
|
| 7 |
+
import h2.config
|
| 8 |
+
import h2.connection
|
| 9 |
+
import h2.events
|
| 10 |
+
import h2.exceptions
|
| 11 |
+
import h2.settings
|
| 12 |
+
|
| 13 |
+
from .._backends.base import AsyncNetworkStream
|
| 14 |
+
from .._exceptions import (
|
| 15 |
+
ConnectionNotAvailable,
|
| 16 |
+
LocalProtocolError,
|
| 17 |
+
RemoteProtocolError,
|
| 18 |
+
)
|
| 19 |
+
from .._models import Origin, Request, Response
|
| 20 |
+
from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation
|
| 21 |
+
from .._trace import Trace
|
| 22 |
+
from .interfaces import AsyncConnectionInterface
|
| 23 |
+
|
| 24 |
+
# Module-level logger used by the Trace helpers below.
logger = logging.getLogger("httpcore.http2")
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def has_body_headers(request: Request) -> bool:
    """
    Return True if the request headers indicate that a request body follows
    (i.e. a Content-Length or Transfer-Encoding header is present).
    """
    for name, _ in request.headers:
        if name.lower() in (b"content-length", b"transfer-encoding"):
            return True
    return False
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class HTTPConnectionState(enum.IntEnum):
    """Coarse lifecycle state of an HTTP/2 connection."""

    ACTIVE = 1  # At least one request stream is in flight.
    IDLE = 2  # Open and available for new requests.
    CLOSED = 3  # Shut down; must not be reused.
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class AsyncHTTP2Connection(AsyncConnectionInterface):
    """
    A single HTTP/2 connection to an origin, multiplexing concurrent
    request/response streams over one underlying network stream.
    """

    # Size of each network read.
    READ_NUM_BYTES = 64 * 1024
    # h2 configuration with inbound header validation disabled.
    CONFIG = h2.config.H2Configuration(validate_inbound_headers=False)

    def __init__(
        self,
        origin: Origin,
        stream: AsyncNetworkStream,
        keepalive_expiry: typing.Optional[float] = None,
    ):
        self._origin = origin
        self._network_stream = stream
        self._keepalive_expiry: typing.Optional[float] = keepalive_expiry
        self._h2_state = h2.connection.H2Connection(config=self.CONFIG)
        self._state = HTTPConnectionState.IDLE
        self._expire_at: typing.Optional[float] = None
        self._request_count = 0
        self._init_lock = AsyncLock()
        self._state_lock = AsyncLock()
        self._read_lock = AsyncLock()
        self._write_lock = AsyncLock()
        self._sent_connection_init = False
        self._used_all_stream_ids = False
        self._connection_error = False

        # Mapping from stream ID to response stream events.
        self._events: typing.Dict[
            int,
            typing.Union[
                h2.events.ResponseReceived,
                h2.events.DataReceived,
                h2.events.StreamEnded,
                h2.events.StreamReset,
            ],
        ] = {}

        # Connection terminated events are stored as state since
        # we need to handle them for all streams.
        self._connection_terminated: typing.Optional[
            h2.events.ConnectionTerminated
        ] = None

        # First read/write error seen; re-raised on any later I/O so a
        # single failure promptly fails all pending streams.
        self._read_exception: typing.Optional[Exception] = None
        self._write_exception: typing.Optional[Exception] = None
|
| 84 |
+
|
| 85 |
+
async def handle_async_request(self, request: Request) -> Response:
    """
    Send `request` on a new HTTP/2 stream and return the `Response` with a
    lazily-consumed body stream.

    Raises `ConnectionNotAvailable` if the connection cannot accept the
    request, and protocol errors if h2 rejects the exchange.
    """
    if not self.can_handle_request(request.url.origin):
        # This cannot occur in normal operation, since the connection pool
        # will only send requests on connections that handle them.
        # It's in place simply for resilience as a guard against incorrect
        # usage, for anyone working directly with httpcore connections.
        raise RuntimeError(
            f"Attempted to send request to {request.url.origin} on connection "
            f"to {self._origin}"
        )

    async with self._state_lock:
        if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE):
            self._request_count += 1
            self._expire_at = None
            self._state = HTTPConnectionState.ACTIVE
        else:
            raise ConnectionNotAvailable()

    async with self._init_lock:
        if not self._sent_connection_init:
            try:
                kwargs = {"request": request}
                async with Trace("send_connection_init", logger, request, kwargs):
                    await self._send_connection_init(**kwargs)
            except BaseException as exc:
                with AsyncShieldCancellation():
                    await self.aclose()
                raise exc

            self._sent_connection_init = True

            # Initially start with just 1 until the remote server provides
            # its max_concurrent_streams value
            self._max_streams = 1

            local_settings_max_streams = (
                self._h2_state.local_settings.max_concurrent_streams
            )
            self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams)

            # Hold back all permits beyond the initial allowance of 1;
            # they are released as the server raises the limit.
            for _ in range(local_settings_max_streams - self._max_streams):
                await self._max_streams_semaphore.acquire()

    await self._max_streams_semaphore.acquire()

    try:
        stream_id = self._h2_state.get_next_available_stream_id()
        self._events[stream_id] = []
    except h2.exceptions.NoAvailableStreamIDError:  # pragma: nocover
        self._used_all_stream_ids = True
        self._request_count -= 1
        raise ConnectionNotAvailable()

    try:
        kwargs = {"request": request, "stream_id": stream_id}
        async with Trace("send_request_headers", logger, request, kwargs):
            await self._send_request_headers(request=request, stream_id=stream_id)
        async with Trace("send_request_body", logger, request, kwargs):
            await self._send_request_body(request=request, stream_id=stream_id)
        async with Trace(
            "receive_response_headers", logger, request, kwargs
        ) as trace:
            status, headers = await self._receive_response(
                request=request, stream_id=stream_id
            )
            trace.return_value = (status, headers)

        return Response(
            status=status,
            headers=headers,
            content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id),
            extensions={
                "http_version": b"HTTP/2",
                "network_stream": self._network_stream,
                "stream_id": stream_id,
            },
        )
    except BaseException as exc:  # noqa: PIE786
        with AsyncShieldCancellation():
            kwargs = {"stream_id": stream_id}
            async with Trace("response_closed", logger, request, kwargs):
                await self._response_closed(stream_id=stream_id)

        if isinstance(exc, h2.exceptions.ProtocolError):
            # One case where h2 can raise a protocol error is when a
            # closed frame has been seen by the state machine.
            #
            # This happens when one stream is reading, and encounters
            # a GOAWAY event. Other flows of control may then raise
            # a protocol error at any point they interact with the 'h2_state'.
            #
            # In this case we'll have stored the event, and should raise
            # it as a RemoteProtocolError.
            if self._connection_terminated:  # pragma: nocover
                raise RemoteProtocolError(self._connection_terminated)
            # If h2 raises a protocol error in some other state then we
            # must somehow have made a protocol violation.
            raise LocalProtocolError(exc)  # pragma: nocover

        raise exc
|
| 186 |
+
|
| 187 |
+
async def _send_connection_init(self, request: Request) -> None:
    """
    The HTTP/2 connection requires some initial setup before we can start
    using individual request/response streams on it.
    """
    # Need to set these manually here instead of manipulating via
    # __setitem__() otherwise the H2Connection will emit SettingsUpdate
    # frames in addition to sending the undesired defaults.
    self._h2_state.local_settings = h2.settings.Settings(
        client=True,
        initial_values={
            # Disable PUSH_PROMISE frames from the server since we don't do anything
            # with them for now. Maybe when we support caching?
            h2.settings.SettingCodes.ENABLE_PUSH: 0,
            # These two are taken from h2 for safe defaults
            h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100,
            h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536,
        },
    )

    # Some websites (*cough* Yahoo *cough*) balk at this setting being
    # present in the initial handshake since it's not defined in the original
    # RFC despite the RFC mandating ignoring settings you don't know about.
    del self._h2_state.local_settings[
        h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL
    ]

    self._h2_state.initiate_connection()
    # Open up the connection-level flow-control window.
    self._h2_state.increment_flow_control_window(2**24)
    await self._write_outgoing_data(request)
|
| 217 |
+
|
| 218 |
+
# Sending the request...

async def _send_request_headers(self, request: Request, stream_id: int) -> None:
    """
    Send the request headers to a given stream ID.
    """
    end_stream = not has_body_headers(request)

    # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'.
    # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require
    # HTTP/1.1 style headers, and map them appropriately if we end up on
    # an HTTP/2 connection.
    authority = [v for k, v in request.headers if k.lower() == b"host"][0]

    pseudo_headers = [
        (b":method", request.method),
        (b":authority", authority),
        (b":scheme", request.url.scheme),
        (b":path", request.url.target),
    ]
    excluded = (
        b"host",
        b"transfer-encoding",
    )
    regular_headers = [
        (k.lower(), v) for k, v in request.headers if k.lower() not in excluded
    ]

    self._h2_state.send_headers(
        stream_id, pseudo_headers + regular_headers, end_stream=end_stream
    )
    # Open up the per-stream flow-control window.
    self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id)
    await self._write_outgoing_data(request)
|
| 250 |
+
|
| 251 |
+
async def _send_request_body(self, request: Request, stream_id: int) -> None:
    """
    Iterate over the request body sending it to a given stream ID.
    """
    if not has_body_headers(request):
        return

    # The stream must be async-iterable to be sent on an async connection.
    assert isinstance(request.stream, typing.AsyncIterable)
    async for data in request.stream:
        await self._send_stream_data(request, stream_id, data)
    await self._send_end_stream(request, stream_id)
|
| 262 |
+
|
| 263 |
+
async def _send_stream_data(
    self, request: Request, stream_id: int, data: bytes
) -> None:
    """
    Send a single chunk of data in one or more data frames.
    """
    remaining = data
    while remaining:
        # Bounded by both the flow-control window and the max frame size;
        # may block until the peer grants more flow.
        allowed = await self._wait_for_outgoing_flow(request, stream_id)
        size = min(len(remaining), allowed)
        frame, remaining = remaining[:size], remaining[size:]
        self._h2_state.send_data(stream_id, frame)
        await self._write_outgoing_data(request)
|
| 275 |
+
|
| 276 |
+
async def _send_end_stream(self, request: Request, stream_id: int) -> None:
    """
    Send an empty data frame on on a given stream ID with the END_STREAM flag set.
    """
    self._h2_state.end_stream(stream_id)
    await self._write_outgoing_data(request)
|
| 282 |
+
|
| 283 |
+
# Receiving the response...

async def _receive_response(
    self, request: Request, stream_id: int
) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]:
    """
    Return the response status code and headers for a given stream ID.
    """
    # Consume stream events until the response headers arrive.
    while True:
        event = await self._receive_stream_event(request, stream_id)
        if isinstance(event, h2.events.ResponseReceived):
            break

    status_code = 200
    headers = []
    for name, value in event.headers:
        if name == b":status":
            status_code = int(value.decode("ascii", errors="ignore"))
        elif not name.startswith(b":"):
            # Drop any remaining pseudo-headers; forward everything else.
            headers.append((name, value))

    return (status_code, headers)
|
| 305 |
+
|
| 306 |
+
async def _receive_response_body(
    self, request: Request, stream_id: int
) -> typing.AsyncIterator[bytes]:
    """
    Iterator that returns the bytes of the response body for a given stream ID.
    """
    while True:
        event = await self._receive_stream_event(request, stream_id)
        if isinstance(event, h2.events.DataReceived):
            # Acknowledge the received data back to the server, so that the
            # flow-control window re-opens and it can keep sending.
            amount = event.flow_controlled_length
            self._h2_state.acknowledge_received_data(amount, stream_id)
            await self._write_outgoing_data(request)
            yield event.data
        elif isinstance(event, h2.events.StreamEnded):
            break
|
| 321 |
+
|
| 322 |
+
async def _receive_stream_event(
    self, request: Request, stream_id: int
) -> typing.Union[
    h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded
]:
    """
    Return the next available event for a given stream ID.

    Will read more data from the network if required.
    """
    while not self._events.get(stream_id):
        await self._receive_events(request, stream_id)
    event = self._events[stream_id].pop(0)
    # A reset from the peer is surfaced to the caller as a protocol error.
    if isinstance(event, h2.events.StreamReset):
        raise RemoteProtocolError(event)
    return event
|
| 338 |
+
|
| 339 |
+
async def _receive_events(
    self, request: Request, stream_id: typing.Optional[int] = None
) -> None:
    """
    Read some data from the network until we see one or more events
    for a given stream ID.
    """
    async with self._read_lock:
        if self._connection_terminated is not None:
            last_stream_id = self._connection_terminated.last_stream_id
            if stream_id and last_stream_id and stream_id > last_stream_id:
                # The stream was refused by a GOAWAY; undo the request
                # count bump and let the caller retry elsewhere.
                self._request_count -= 1
                raise ConnectionNotAvailable()
            raise RemoteProtocolError(self._connection_terminated)

        # This conditional is a bit icky. We don't want to block reading if we've
        # actually got an event to return for a given stream. We need to do that
        # check *within* the atomic read lock. Though it also need to be optional,
        # because when we call it from `_wait_for_outgoing_flow` we *do* want to
        # block until we've available flow control, event when we have events
        # pending for the stream ID we're attempting to send on.
        if stream_id is None or not self._events.get(stream_id):
            events = await self._read_incoming_data(request)
            for event in events:
                if isinstance(event, h2.events.RemoteSettingsChanged):
                    async with Trace(
                        "receive_remote_settings", logger, request
                    ) as trace:
                        await self._receive_remote_settings_change(event)
                        trace.return_value = event

                elif isinstance(
                    event,
                    (
                        h2.events.ResponseReceived,
                        h2.events.DataReceived,
                        h2.events.StreamEnded,
                        h2.events.StreamReset,
                    ),
                ):
                    # Only queue events for streams we still track;
                    # events for closed streams are dropped.
                    if event.stream_id in self._events:
                        self._events[event.stream_id].append(event)

                elif isinstance(event, h2.events.ConnectionTerminated):
                    self._connection_terminated = event

    await self._write_outgoing_data(request)
|
| 386 |
+
|
| 387 |
+
async def _receive_remote_settings_change(self, event: h2.events.Event) -> None:
    """
    Adjust the stream-concurrency semaphore when the server changes its
    MAX_CONCURRENT_STREAMS setting, capped by our own local setting.
    """
    max_concurrent_streams = event.changed_settings.get(
        h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS
    )
    if max_concurrent_streams:
        new_max_streams = min(
            max_concurrent_streams.new_value,
            self._h2_state.local_settings.max_concurrent_streams,
        )
        if new_max_streams and new_max_streams != self._max_streams:
            while new_max_streams > self._max_streams:
                # Grant additional capacity one permit at a time.
                await self._max_streams_semaphore.release()
                self._max_streams += 1
            while new_max_streams < self._max_streams:
                # Reclaim capacity; may wait for in-flight streams to finish.
                await self._max_streams_semaphore.acquire()
                self._max_streams -= 1
|
| 403 |
+
|
| 404 |
+
async def _response_closed(self, stream_id: int) -> None:
    """
    Called once a response stream is fully consumed or abandoned.

    Releases the stream's concurrency permit, drops its event queue, and
    either recycles the connection to IDLE or closes it down.
    """
    await self._max_streams_semaphore.release()
    del self._events[stream_id]
    async with self._state_lock:
        if self._connection_terminated and not self._events:
            await self.aclose()

        elif self._state == HTTPConnectionState.ACTIVE and not self._events:
            self._state = HTTPConnectionState.IDLE
            if self._keepalive_expiry is not None:
                now = time.monotonic()
                self._expire_at = now + self._keepalive_expiry
            if self._used_all_stream_ids:  # pragma: nocover
                await self.aclose()
|
| 418 |
+
|
| 419 |
+
async def aclose(self) -> None:
    """
    Unconditionally close the HTTP/2 state machine and network stream.

    Note that this method unilaterally closes the connection, and does
    not have any kind of locking in place around it.
    """
    self._h2_state.close_connection()
    self._state = HTTPConnectionState.CLOSED
    await self._network_stream.aclose()
|
| 425 |
+
|
| 426 |
+
# Wrappers around network read/write operations...

async def _read_incoming_data(
    self, request: Request
) -> typing.List[h2.events.Event]:
    """
    Read one chunk from the network and feed it through h2, returning the
    resulting events. Raises on EOF or any network error, and latches the
    error so future reads fail fast.
    """
    timeouts = request.extensions.get("timeout", {})
    timeout = timeouts.get("read", None)

    if self._read_exception is not None:
        raise self._read_exception  # pragma: nocover

    try:
        data = await self._network_stream.read(self.READ_NUM_BYTES, timeout)
        if data == b"":
            raise RemoteProtocolError("Server disconnected")
    except Exception as exc:
        # If we get a network error we should:
        #
        # 1. Save the exception and just raise it immediately on any future reads.
        #    (For example, this means that a single read timeout or disconnect will
        #    immediately close all pending streams. Without requiring multiple
        #    sequential timeouts.)
        # 2. Mark the connection as errored, so that we don't accept any other
        #    incoming requests.
        self._read_exception = exc
        self._connection_error = True
        raise exc

    events: typing.List[h2.events.Event] = self._h2_state.receive_data(data)

    return events
|
| 457 |
+
|
| 458 |
+
async def _write_outgoing_data(self, request: Request) -> None:
    """
    Flush any pending outgoing bytes from the h2 state machine to the
    network. Latches the first write error so future writes fail fast.
    """
    timeouts = request.extensions.get("timeout", {})
    timeout = timeouts.get("write", None)

    async with self._write_lock:
        data_to_send = self._h2_state.data_to_send()

        if self._write_exception is not None:
            raise self._write_exception  # pragma: nocover

        try:
            await self._network_stream.write(data_to_send, timeout)
        except Exception as exc:  # pragma: nocover
            # If we get a network error we should:
            #
            # 1. Save the exception and just raise it immediately on any future write.
            #    (For example, this means that a single write timeout or disconnect will
            #    immediately close all pending streams. Without requiring multiple
            #    sequential timeouts.)
            # 2. Mark the connection as errored, so that we don't accept any other
            #    incoming requests.
            self._write_exception = exc
            self._connection_error = True
            raise exc
|
| 482 |
+
|
| 483 |
+
# Flow control...

async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int:
    """
    Returns the maximum allowable outgoing flow for a given stream.

    If the allowable flow is zero, then waits on the network until
    WindowUpdated frames have increased the flow rate.
    https://tools.ietf.org/html/rfc7540#section-6.9
    """
    while True:
        window: int = self._h2_state.local_flow_control_window(stream_id)
        frame_limit: int = self._h2_state.max_outbound_frame_size
        available = min(window, frame_limit)
        if available != 0:
            return available
        # No credit right now: keep reading from the network until a
        # WINDOW_UPDATE (or settings change) frees up some flow.
        await self._receive_events(request)
|
| 502 |
+
|
| 503 |
+
# Interface for connection pooling...

def can_handle_request(self, origin: Origin) -> bool:
    """Return True if this connection serves requests for `origin`."""
    return self._origin == origin
|
| 507 |
+
|
| 508 |
+
def is_available(self) -> bool:
    """
    Return True if this connection can accept another request.

    HTTP/2 multiplexes streams, so unlike HTTP/1.1 an ACTIVE connection
    is still available as long as it is not closed, errored, or out of
    stream IDs.
    """
    return (
        self._state != HTTPConnectionState.CLOSED
        and not self._connection_error
        and not self._used_all_stream_ids
        and not (
            self._h2_state.state_machine.state
            == h2.connection.ConnectionState.CLOSED
        )
    )
|
| 518 |
+
|
| 519 |
+
def has_expired(self) -> bool:
    """Return True once the keep-alive expiry deadline has passed."""
    if self._expire_at is None:
        return False
    return time.monotonic() > self._expire_at
|
| 522 |
+
|
| 523 |
+
def is_idle(self) -> bool:
    """Return True if the connection is idle (open and reusable)."""
    return self._state == HTTPConnectionState.IDLE
|
| 525 |
+
|
| 526 |
+
def is_closed(self) -> bool:
    """Return True if the connection has been closed."""
    return self._state == HTTPConnectionState.CLOSED
|
| 528 |
+
|
| 529 |
+
def info(self) -> str:
    """Return a short human-readable summary of the connection state."""
    return (
        f"{str(self._origin)!r}, HTTP/2, {self._state.name}, "
        f"Request Count: {self._request_count}"
    )
|
| 535 |
+
|
| 536 |
+
def __repr__(self) -> str:
    """Debug representation including origin, state and request count."""
    origin = str(self._origin)
    return (
        f"<{self.__class__.__name__} [{origin!r}, {self._state.name}, "
        f"Request Count: {self._request_count}]>"
    )
|
| 543 |
+
|
| 544 |
+
# These context managers are not used in the standard flow, but are
# useful for testing or working with connection instances directly.

async def __aenter__(self) -> "AsyncHTTP2Connection":
    """Enter the `async with` block; no setup is required."""
    return self
|
| 549 |
+
|
| 550 |
+
async def __aexit__(
    self,
    exc_type: typing.Optional[typing.Type[BaseException]] = None,
    exc_value: typing.Optional[BaseException] = None,
    traceback: typing.Optional[types.TracebackType] = None,
) -> None:
    """Close the connection when leaving the `async with` block."""
    await self.aclose()
|
| 557 |
+
|
| 558 |
+
|
| 559 |
+
class HTTP2ConnectionByteStream:
    """
    Async-iterable wrapper around an HTTP/2 response body for one stream.

    Iterating yields the raw body chunks. Closing — explicitly or because
    iteration raised — notifies the owning connection via
    `_response_closed()` so the stream's resources are released.
    """

    def __init__(
        self, connection: AsyncHTTP2Connection, request: Request, stream_id: int
    ) -> None:
        self._connection = connection
        self._request = request
        self._stream_id = stream_id
        self._closed = False

    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
        kwargs = {"request": self._request, "stream_id": self._stream_id}
        try:
            async with Trace("receive_response_body", logger, self._request, kwargs):
                async for chunk in self._connection._receive_response_body(
                    request=self._request, stream_id=self._stream_id
                ):
                    yield chunk
        except BaseException as exc:
            # If we get an exception while streaming the response,
            # we want to close the response (and possibly the connection)
            # before raising that exception.
            with AsyncShieldCancellation():
                await self.aclose()
            raise exc

    async def aclose(self) -> None:
        # Idempotent: only the first call notifies the connection.
        if not self._closed:
            self._closed = True
            kwargs = {"stream_id": self._stream_id}
            async with Trace("response_closed", logger, self._request, kwargs):
                await self._connection._response_closed(stream_id=self._stream_id)
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/http_proxy.py
ADDED
|
@@ -0,0 +1,350 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import ssl
|
| 3 |
+
from base64 import b64encode
|
| 4 |
+
from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union
|
| 5 |
+
|
| 6 |
+
from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend
|
| 7 |
+
from .._exceptions import ProxyError
|
| 8 |
+
from .._models import (
|
| 9 |
+
URL,
|
| 10 |
+
Origin,
|
| 11 |
+
Request,
|
| 12 |
+
Response,
|
| 13 |
+
enforce_bytes,
|
| 14 |
+
enforce_headers,
|
| 15 |
+
enforce_url,
|
| 16 |
+
)
|
| 17 |
+
from .._ssl import default_ssl_context
|
| 18 |
+
from .._synchronization import AsyncLock
|
| 19 |
+
from .._trace import Trace
|
| 20 |
+
from .connection import AsyncHTTPConnection
|
| 21 |
+
from .connection_pool import AsyncConnectionPool
|
| 22 |
+
from .http11 import AsyncHTTP11Connection
|
| 23 |
+
from .interfaces import AsyncConnectionInterface
|
| 24 |
+
|
| 25 |
+
HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]]
|
| 26 |
+
HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
logger = logging.getLogger("httpcore.proxy")
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def merge_headers(
    default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
    override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
) -> List[Tuple[bytes, bytes]]:
    """
    Combine two header sequences into one list.

    Defaults come first, followed by the overrides. Any default whose key
    (case-insensitively) also appears in the overrides is dropped, so the
    override wins.
    """
    defaults = list(default_headers) if default_headers is not None else []
    overrides = list(override_headers) if override_headers is not None else []
    overridden_keys = {key.lower() for key, _ in overrides}
    merged = [item for item in defaults if item[0].lower() not in overridden_keys]
    merged.extend(overrides)
    return merged
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def build_auth_header(username: bytes, password: bytes) -> bytes:
    """Build an HTTP Basic auth credential value (`Basic <base64>`)."""
    credentials = b"%b:%b" % (username, password)
    return b"Basic " + b64encode(credentials)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class AsyncHTTPProxy(AsyncConnectionPool):
    """
    A connection pool that sends requests via an HTTP proxy.
    """

    def __init__(
        self,
        proxy_url: Union[URL, bytes, str],
        proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None,
        proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
        ssl_context: Optional[ssl.SSLContext] = None,
        max_connections: Optional[int] = 10,
        max_keepalive_connections: Optional[int] = None,
        keepalive_expiry: Optional[float] = None,
        http1: bool = True,
        http2: bool = False,
        retries: int = 0,
        local_address: Optional[str] = None,
        uds: Optional[str] = None,
        network_backend: Optional[AsyncNetworkBackend] = None,
        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
    ) -> None:
        """
        A connection pool for making HTTP requests.

        Parameters:
            proxy_url: The URL to use when connecting to the proxy server.
                For example `"http://127.0.0.1:8080/"`.
            proxy_auth: Any proxy authentication as a two-tuple of
                (username, password). May be either bytes or ascii-only str.
            proxy_headers: Any HTTP headers to use for the proxy requests.
                For example `{"Proxy-Authorization": "Basic <username>:<password>"}`.
            ssl_context: An SSL context to use for verifying connections.
                If not specified, the default `httpcore.default_ssl_context()`
                will be used.
            max_connections: The maximum number of concurrent HTTP connections that
                the pool should allow. Any attempt to send a request on a pool that
                would exceed this amount will block until a connection is available.
            max_keepalive_connections: The maximum number of idle HTTP connections
                that will be maintained in the pool.
            keepalive_expiry: The duration in seconds that an idle HTTP connection
                may be maintained for before being expired from the pool.
            http1: A boolean indicating if HTTP/1.1 requests should be supported
                by the connection pool. Defaults to True.
            http2: A boolean indicating if HTTP/2 requests should be supported by
                the connection pool. Defaults to False.
            retries: The maximum number of retries when trying to establish
                a connection.
            local_address: Local address to connect from. Can also be used to
                connect using a particular address family. Using
                `local_address="0.0.0.0"` will connect using an `AF_INET` address
                (IPv4), while using `local_address="::"` will connect using an
                `AF_INET6` address (IPv6).
            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
            network_backend: A backend instance to use for handling network I/O.
            socket_options: Socket options to apply when opening connections.
        """
        super().__init__(
            ssl_context=ssl_context,
            max_connections=max_connections,
            max_keepalive_connections=max_keepalive_connections,
            keepalive_expiry=keepalive_expiry,
            http1=http1,
            http2=http2,
            network_backend=network_backend,
            retries=retries,
            local_address=local_address,
            uds=uds,
            socket_options=socket_options,
        )
        self._ssl_context = ssl_context
        self._proxy_url = enforce_url(proxy_url, name="proxy_url")
        self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers")
        if proxy_auth is not None:
            username = enforce_bytes(proxy_auth[0], name="proxy_auth")
            password = enforce_bytes(proxy_auth[1], name="proxy_auth")
            authorization = build_auth_header(username, password)
            # Prepend so explicit credentials take priority over any
            # Proxy-Authorization entry passed in via proxy_headers.
            self._proxy_headers = [
                (b"Proxy-Authorization", authorization)
            ] + self._proxy_headers

    def create_connection(self, origin: Origin) -> AsyncConnectionInterface:
        # Plain `http` origins are forwarded through the proxy; anything
        # else (i.e. `https`) goes through a CONNECT tunnel so TLS can
        # terminate at the remote origin rather than at the proxy.
        if origin.scheme == b"http":
            return AsyncForwardHTTPConnection(
                proxy_origin=self._proxy_url.origin,
                proxy_headers=self._proxy_headers,
                remote_origin=origin,
                keepalive_expiry=self._keepalive_expiry,
                network_backend=self._network_backend,
            )
        return AsyncTunnelHTTPConnection(
            proxy_origin=self._proxy_url.origin,
            proxy_headers=self._proxy_headers,
            remote_origin=origin,
            ssl_context=self._ssl_context,
            keepalive_expiry=self._keepalive_expiry,
            http1=self._http1,
            http2=self._http2,
            network_backend=self._network_backend,
        )
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
class AsyncForwardHTTPConnection(AsyncConnectionInterface):
    """
    Sends plain-HTTP requests through a proxy by forwarding them, with the
    absolute URL as the request target, over a connection to the proxy origin.
    """

    def __init__(
        self,
        proxy_origin: Origin,
        remote_origin: Origin,
        proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
        keepalive_expiry: Optional[float] = None,
        network_backend: Optional[AsyncNetworkBackend] = None,
        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
    ) -> None:
        self._connection = AsyncHTTPConnection(
            origin=proxy_origin,
            keepalive_expiry=keepalive_expiry,
            network_backend=network_backend,
            socket_options=socket_options,
        )
        self._proxy_origin = proxy_origin
        self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers")
        self._remote_origin = remote_origin

    async def handle_async_request(self, request: Request) -> Response:
        # Rebuild the request so it is addressed to the proxy origin with the
        # full absolute URL as the target, as forward proxying requires.
        combined_headers = merge_headers(self._proxy_headers, request.headers)
        proxy_url = URL(
            scheme=self._proxy_origin.scheme,
            host=self._proxy_origin.host,
            port=self._proxy_origin.port,
            target=bytes(request.url),
        )
        forwarded_request = Request(
            method=request.method,
            url=proxy_url,
            headers=combined_headers,
            content=request.stream,
            extensions=request.extensions,
        )
        return await self._connection.handle_async_request(forwarded_request)

    def can_handle_request(self, origin: Origin) -> bool:
        return origin == self._remote_origin

    async def aclose(self) -> None:
        await self._connection.aclose()

    # State queries all delegate to the underlying proxy connection.

    def info(self) -> str:
        return self._connection.info()

    def is_available(self) -> bool:
        return self._connection.is_available()

    def has_expired(self) -> bool:
        return self._connection.has_expired()

    def is_idle(self) -> bool:
        return self._connection.is_idle()

    def is_closed(self) -> bool:
        return self._connection.is_closed()

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} [{self.info()}]>"
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
class AsyncTunnelHTTPConnection(AsyncConnectionInterface):
    """
    Sends HTTPS requests through a proxy using an HTTP CONNECT tunnel.

    On the first request, issues `CONNECT host:port` to the proxy, upgrades
    the resulting raw stream to TLS against the remote origin, then replaces
    the internal connection with an HTTP/1.1 or HTTP/2 connection depending
    on what ALPN negotiated. Subsequent requests reuse the tunnel.
    """

    def __init__(
        self,
        proxy_origin: Origin,
        remote_origin: Origin,
        ssl_context: Optional[ssl.SSLContext] = None,
        proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
        keepalive_expiry: Optional[float] = None,
        http1: bool = True,
        http2: bool = False,
        network_backend: Optional[AsyncNetworkBackend] = None,
        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
    ) -> None:
        # Initially a connection to the proxy itself; swapped out for a
        # connection over the established tunnel once CONNECT succeeds.
        self._connection: AsyncConnectionInterface = AsyncHTTPConnection(
            origin=proxy_origin,
            keepalive_expiry=keepalive_expiry,
            network_backend=network_backend,
            socket_options=socket_options,
        )
        self._proxy_origin = proxy_origin
        self._remote_origin = remote_origin
        self._ssl_context = ssl_context
        self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers")
        self._keepalive_expiry = keepalive_expiry
        self._http1 = http1
        self._http2 = http2
        # Serializes tunnel setup so concurrent first requests don't race.
        self._connect_lock = AsyncLock()
        self._connected = False

    async def handle_async_request(self, request: Request) -> Response:
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("connect", None)

        async with self._connect_lock:
            if not self._connected:
                target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port)

                connect_url = URL(
                    scheme=self._proxy_origin.scheme,
                    host=self._proxy_origin.host,
                    port=self._proxy_origin.port,
                    target=target,
                )
                connect_headers = merge_headers(
                    [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers
                )
                connect_request = Request(
                    method=b"CONNECT",
                    url=connect_url,
                    headers=connect_headers,
                    extensions=request.extensions,
                )
                connect_response = await self._connection.handle_async_request(
                    connect_request
                )

                # Any non-2xx response means the proxy refused the tunnel.
                if connect_response.status < 200 or connect_response.status > 299:
                    reason_bytes = connect_response.extensions.get("reason_phrase", b"")
                    reason_str = reason_bytes.decode("ascii", errors="ignore")
                    msg = "%d %s" % (connect_response.status, reason_str)
                    await self._connection.aclose()
                    raise ProxyError(msg)

                stream = connect_response.extensions["network_stream"]

                # Upgrade the stream to SSL
                ssl_context = (
                    default_ssl_context()
                    if self._ssl_context is None
                    else self._ssl_context
                )
                # Only offer "h2" via ALPN when HTTP/2 support is enabled.
                alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"]
                ssl_context.set_alpn_protocols(alpn_protocols)

                kwargs = {
                    "ssl_context": ssl_context,
                    # TLS must verify against the remote host, not the proxy.
                    "server_hostname": self._remote_origin.host.decode("ascii"),
                    "timeout": timeout,
                }
                async with Trace("start_tls", logger, request, kwargs) as trace:
                    stream = await stream.start_tls(**kwargs)
                    trace.return_value = stream

                # Determine if we should be using HTTP/1.1 or HTTP/2
                ssl_object = stream.get_extra_info("ssl_object")
                http2_negotiated = (
                    ssl_object is not None
                    and ssl_object.selected_alpn_protocol() == "h2"
                )

                # Create the HTTP/1.1 or HTTP/2 connection
                if http2_negotiated or (self._http2 and not self._http1):
                    # Imported lazily so the h2 dependency is optional.
                    from .http2 import AsyncHTTP2Connection

                    self._connection = AsyncHTTP2Connection(
                        origin=self._remote_origin,
                        stream=stream,
                        keepalive_expiry=self._keepalive_expiry,
                    )
                else:
                    self._connection = AsyncHTTP11Connection(
                        origin=self._remote_origin,
                        stream=stream,
                        keepalive_expiry=self._keepalive_expiry,
                    )

                self._connected = True
        return await self._connection.handle_async_request(request)

    def can_handle_request(self, origin: Origin) -> bool:
        return origin == self._remote_origin

    async def aclose(self) -> None:
        await self._connection.aclose()

    def info(self) -> str:
        return self._connection.info()

    def is_available(self) -> bool:
        return self._connection.is_available()

    def has_expired(self) -> bool:
        return self._connection.has_expired()

    def is_idle(self) -> bool:
        return self._connection.is_idle()

    def is_closed(self) -> bool:
        return self._connection.is_closed()

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} [{self.info()}]>"
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/interfaces.py
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from contextlib import asynccontextmanager
|
| 2 |
+
from typing import AsyncIterator, Optional, Union
|
| 3 |
+
|
| 4 |
+
from .._models import (
|
| 5 |
+
URL,
|
| 6 |
+
Extensions,
|
| 7 |
+
HeaderTypes,
|
| 8 |
+
Origin,
|
| 9 |
+
Request,
|
| 10 |
+
Response,
|
| 11 |
+
enforce_bytes,
|
| 12 |
+
enforce_headers,
|
| 13 |
+
enforce_url,
|
| 14 |
+
include_request_headers,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class AsyncRequestInterface:
    """
    High-level request API implemented on top of `handle_async_request()`,
    which subclasses must provide.
    """

    async def request(
        self,
        method: Union[bytes, str],
        url: Union[URL, bytes, str],
        *,
        headers: HeaderTypes = None,
        content: Union[bytes, AsyncIterator[bytes], None] = None,
        extensions: Optional[Extensions] = None,
    ) -> Response:
        """
        Send a request and return the response with its body fully read.

        The response is always closed before returning, even if reading
        the body raises.
        """
        # Strict type checking on our parameters.
        method = enforce_bytes(method, name="method")
        url = enforce_url(url, name="url")
        headers = enforce_headers(headers, name="headers")

        # Include Host header, and optionally Content-Length or Transfer-Encoding.
        headers = include_request_headers(headers, url=url, content=content)

        request = Request(
            method=method,
            url=url,
            headers=headers,
            content=content,
            extensions=extensions,
        )
        response = await self.handle_async_request(request)
        try:
            await response.aread()
        finally:
            await response.aclose()
        return response

    @asynccontextmanager
    async def stream(
        self,
        method: Union[bytes, str],
        url: Union[URL, bytes, str],
        *,
        headers: HeaderTypes = None,
        content: Union[bytes, AsyncIterator[bytes], None] = None,
        extensions: Optional[Extensions] = None,
    ) -> AsyncIterator[Response]:
        """
        Send a request, yielding the response for streaming consumption.

        The body is not pre-read; the response is closed when the
        `async with` block exits.
        """
        # Strict type checking on our parameters.
        method = enforce_bytes(method, name="method")
        url = enforce_url(url, name="url")
        headers = enforce_headers(headers, name="headers")

        # Include Host header, and optionally Content-Length or Transfer-Encoding.
        headers = include_request_headers(headers, url=url, content=content)

        request = Request(
            method=method,
            url=url,
            headers=headers,
            content=content,
            extensions=extensions,
        )
        response = await self.handle_async_request(request)
        try:
            yield response
        finally:
            await response.aclose()

    async def handle_async_request(self, request: Request) -> Response:
        """Subclass hook: send `request` and return the raw response."""
        raise NotImplementedError()  # pragma: nocover
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
class AsyncConnectionInterface(AsyncRequestInterface):
    """
    Interface for a single connection, adding lifecycle and state queries
    that the connection pool uses to manage its members.
    """

    async def aclose(self) -> None:
        """Close the connection, releasing any underlying network resources."""
        raise NotImplementedError()  # pragma: nocover

    def info(self) -> str:
        """Return a short human-readable description of the connection."""
        raise NotImplementedError()  # pragma: nocover

    def can_handle_request(self, origin: Origin) -> bool:
        """Return `True` if this connection can serve requests to `origin`."""
        raise NotImplementedError()  # pragma: nocover

    def is_available(self) -> bool:
        """
        Return `True` if the connection is currently able to accept an
        outgoing request.

        An HTTP/1.1 connection will only be available if it is currently idle.

        An HTTP/2 connection will be available so long as the stream ID space is
        not yet exhausted, and the connection is not in an error state.

        While the connection is being established we may not yet know if it is going
        to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
        treated as being available, but might ultimately raise `NewConnectionRequired`
        required exceptions if multiple requests are attempted over a connection
        that ends up being established as HTTP/1.1.
        """
        raise NotImplementedError()  # pragma: nocover

    def has_expired(self) -> bool:
        """
        Return `True` if the connection is in a state where it should be closed.

        This either means that the connection is idle and it has passed the
        expiry time on its keep-alive, or that server has sent an EOF.
        """
        raise NotImplementedError()  # pragma: nocover

    def is_idle(self) -> bool:
        """
        Return `True` if the connection is currently idle.
        """
        raise NotImplementedError()  # pragma: nocover

    def is_closed(self) -> bool:
        """
        Return `True` if the connection has been closed.

        Used when a response is closed to determine if the connection may be
        returned to the connection pool or not.
        """
        raise NotImplementedError()  # pragma: nocover
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_async/socks_proxy.py
ADDED
|
@@ -0,0 +1,340 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import ssl
|
| 3 |
+
import typing
|
| 4 |
+
|
| 5 |
+
from socksio import socks5
|
| 6 |
+
|
| 7 |
+
from .._backends.auto import AutoBackend
|
| 8 |
+
from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream
|
| 9 |
+
from .._exceptions import ConnectionNotAvailable, ProxyError
|
| 10 |
+
from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url
|
| 11 |
+
from .._ssl import default_ssl_context
|
| 12 |
+
from .._synchronization import AsyncLock
|
| 13 |
+
from .._trace import Trace
|
| 14 |
+
from .connection_pool import AsyncConnectionPool
|
| 15 |
+
from .http11 import AsyncHTTP11Connection
|
| 16 |
+
from .interfaces import AsyncConnectionInterface
|
| 17 |
+
|
| 18 |
+
logger = logging.getLogger("httpcore.socks")
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
AUTH_METHODS = {
|
| 22 |
+
b"\x00": "NO AUTHENTICATION REQUIRED",
|
| 23 |
+
b"\x01": "GSSAPI",
|
| 24 |
+
b"\x02": "USERNAME/PASSWORD",
|
| 25 |
+
b"\xff": "NO ACCEPTABLE METHODS",
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
REPLY_CODES = {
|
| 29 |
+
b"\x00": "Succeeded",
|
| 30 |
+
b"\x01": "General SOCKS server failure",
|
| 31 |
+
b"\x02": "Connection not allowed by ruleset",
|
| 32 |
+
b"\x03": "Network unreachable",
|
| 33 |
+
b"\x04": "Host unreachable",
|
| 34 |
+
b"\x05": "Connection refused",
|
| 35 |
+
b"\x06": "TTL expired",
|
| 36 |
+
b"\x07": "Command not supported",
|
| 37 |
+
b"\x08": "Address type not supported",
|
| 38 |
+
}
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
async def _init_socks5_connection(
    stream: AsyncNetworkStream,
    *,
    host: bytes,
    port: int,
    auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
) -> None:
    """
    Complete the SOCKS5 handshake (RFC 1928) on a freshly opened stream.

    Negotiates the authentication method, optionally performs
    username/password authentication (RFC 1929), then issues a CONNECT
    for `(host, port)`.

    Raises:
        ProxyError: If the server rejects the offered auth method, the
            credentials, or the CONNECT request.
    """
    conn = socks5.SOCKS5Connection()

    # Auth method request
    auth_method = (
        socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
        if auth is None
        else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
    )
    conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method]))
    outgoing_bytes = conn.data_to_send()
    await stream.write(outgoing_bytes)

    # Auth method response
    incoming_bytes = await stream.read(max_bytes=4096)
    response = conn.receive_data(incoming_bytes)
    assert isinstance(response, socks5.SOCKS5AuthReply)
    if response.method != auth_method:
        # NOTE(review): AUTH_METHODS is keyed by raw bytes; these lookups
        # assume SOCKS5AuthMethod values compare/hash equal to bytes — confirm
        # against the socksio enum definition.
        requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
        responded = AUTH_METHODS.get(response.method, "UNKNOWN")
        raise ProxyError(
            f"Requested {requested} from proxy server, but got {responded}."
        )

    if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
        # Username/password request
        assert auth is not None
        username, password = auth
        conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password))
        outgoing_bytes = conn.data_to_send()
        await stream.write(outgoing_bytes)

        # Username/password response
        incoming_bytes = await stream.read(max_bytes=4096)
        response = conn.receive_data(incoming_bytes)
        assert isinstance(response, socks5.SOCKS5UsernamePasswordReply)
        if not response.success:
            raise ProxyError("Invalid username/password")

    # Connect request
    conn.send(
        socks5.SOCKS5CommandRequest.from_address(
            socks5.SOCKS5Command.CONNECT, (host, port)
        )
    )
    outgoing_bytes = conn.data_to_send()
    await stream.write(outgoing_bytes)

    # Connect response
    incoming_bytes = await stream.read(max_bytes=4096)
    response = conn.receive_data(incoming_bytes)
    assert isinstance(response, socks5.SOCKS5Reply)
    if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED:
        # Fix: the fallback string was misspelled "UNKOWN"; now consistent
        # with the "UNKNOWN" defaults used for AUTH_METHODS above.
        reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
        raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class AsyncSOCKSProxy(AsyncConnectionPool):
    """
    A connection pool that sends requests via a SOCKS5 proxy.
    """

    def __init__(
        self,
        proxy_url: typing.Union[URL, bytes, str],
        proxy_auth: typing.Optional[
            typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]]
        ] = None,
        ssl_context: typing.Optional[ssl.SSLContext] = None,
        max_connections: typing.Optional[int] = 10,
        max_keepalive_connections: typing.Optional[int] = None,
        keepalive_expiry: typing.Optional[float] = None,
        http1: bool = True,
        http2: bool = False,
        retries: int = 0,
        network_backend: typing.Optional[AsyncNetworkBackend] = None,
    ) -> None:
        """
        A connection pool for making HTTP requests via a SOCKS5 proxy.

        Parameters:
            proxy_url: The URL of the SOCKS5 proxy server to connect through.
                For example `"socks5://127.0.0.1:1080/"`.
            proxy_auth: An optional `(username, password)` tuple used for
                username/password authentication against the proxy server.
            ssl_context: An SSL context to use for verifying connections.
                If not specified, the default `httpcore.default_ssl_context()`
                will be used.
            max_connections: The maximum number of concurrent HTTP connections that
                the pool should allow. Any attempt to send a request on a pool that
                would exceed this amount will block until a connection is available.
            max_keepalive_connections: The maximum number of idle HTTP connections
                that will be maintained in the pool.
            keepalive_expiry: The duration in seconds that an idle HTTP connection
                may be maintained for before being expired from the pool.
            http1: A boolean indicating if HTTP/1.1 requests should be supported
                by the connection pool. Defaults to True.
            http2: A boolean indicating if HTTP/2 requests should be supported by
                the connection pool. Defaults to False.
            retries: The maximum number of retries when trying to establish
                a connection.
            network_backend: A backend instance to use for handling network I/O.
        """
        super().__init__(
            ssl_context=ssl_context,
            max_connections=max_connections,
            max_keepalive_connections=max_keepalive_connections,
            keepalive_expiry=keepalive_expiry,
            http1=http1,
            http2=http2,
            network_backend=network_backend,
            retries=retries,
        )
        self._ssl_context = ssl_context
        self._proxy_url = enforce_url(proxy_url, name="proxy_url")
        if proxy_auth is not None:
            # Normalise auth credentials to bytes up front, so each new
            # connection can use them directly.
            username, password = proxy_auth
            username_bytes = enforce_bytes(username, name="proxy_auth")
            password_bytes = enforce_bytes(password, name="proxy_auth")
            self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = (
                username_bytes,
                password_bytes,
            )
        else:
            self._proxy_auth = None

    def create_connection(self, origin: Origin) -> AsyncConnectionInterface:
        """Create a new SOCKS5-tunnelled connection to the given origin."""
        return AsyncSocks5Connection(
            proxy_origin=self._proxy_url.origin,
            remote_origin=origin,
            proxy_auth=self._proxy_auth,
            ssl_context=self._ssl_context,
            keepalive_expiry=self._keepalive_expiry,
            http1=self._http1,
            http2=self._http2,
            network_backend=self._network_backend,
        )
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
class AsyncSocks5Connection(AsyncConnectionInterface):
    """
    A single connection to a remote origin, tunnelled through a SOCKS5 proxy.

    The TCP connection to the proxy, the SOCKS5 handshake, the optional TLS
    upgrade, and the HTTP/1.1-vs-HTTP/2 selection are all performed lazily,
    on the first call to `handle_async_request()`.
    """

    def __init__(
        self,
        proxy_origin: Origin,
        remote_origin: Origin,
        proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
        ssl_context: typing.Optional[ssl.SSLContext] = None,
        keepalive_expiry: typing.Optional[float] = None,
        http1: bool = True,
        http2: bool = False,
        network_backend: typing.Optional[AsyncNetworkBackend] = None,
    ) -> None:
        self._proxy_origin = proxy_origin
        self._remote_origin = remote_origin
        self._proxy_auth = proxy_auth
        self._ssl_context = ssl_context
        self._keepalive_expiry = keepalive_expiry
        self._http1 = http1
        self._http2 = http2

        self._network_backend: AsyncNetworkBackend = (
            AutoBackend() if network_backend is None else network_backend
        )
        # Serialises connection setup so that concurrent requests do not each
        # attempt their own proxy handshake.
        self._connect_lock = AsyncLock()
        # The underlying HTTP/1.1 or HTTP/2 connection, once established.
        self._connection: typing.Optional[AsyncConnectionInterface] = None
        self._connect_failed = False

    async def handle_async_request(self, request: Request) -> Response:
        """
        Establish the tunnelled connection on first use, then delegate the
        request to the underlying HTTP/1.1 or HTTP/2 connection.
        """
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("connect", None)

        async with self._connect_lock:
            if self._connection is None:
                try:
                    # Connect to the proxy
                    kwargs = {
                        "host": self._proxy_origin.host.decode("ascii"),
                        "port": self._proxy_origin.port,
                        "timeout": timeout,
                    }
                    with Trace("connect_tcp", logger, request, kwargs) as trace:
                        stream = await self._network_backend.connect_tcp(**kwargs)
                        trace.return_value = stream

                    # Connect to the remote host using socks5
                    kwargs = {
                        "stream": stream,
                        "host": self._remote_origin.host.decode("ascii"),
                        "port": self._remote_origin.port,
                        "auth": self._proxy_auth,
                    }
                    with Trace(
                        "setup_socks5_connection", logger, request, kwargs
                    ) as trace:
                        await _init_socks5_connection(**kwargs)
                        trace.return_value = stream

                    # Upgrade the stream to SSL
                    if self._remote_origin.scheme == b"https":
                        ssl_context = (
                            default_ssl_context()
                            if self._ssl_context is None
                            else self._ssl_context
                        )
                        # Only offer "h2" via ALPN when HTTP/2 is enabled.
                        alpn_protocols = (
                            ["http/1.1", "h2"] if self._http2 else ["http/1.1"]
                        )
                        ssl_context.set_alpn_protocols(alpn_protocols)

                        kwargs = {
                            "ssl_context": ssl_context,
                            "server_hostname": self._remote_origin.host.decode("ascii"),
                            "timeout": timeout,
                        }
                        async with Trace("start_tls", logger, request, kwargs) as trace:
                            stream = await stream.start_tls(**kwargs)
                            trace.return_value = stream

                        # Determine if we should be using HTTP/1.1 or HTTP/2
                        ssl_object = stream.get_extra_info("ssl_object")
                        http2_negotiated = (
                            ssl_object is not None
                            and ssl_object.selected_alpn_protocol() == "h2"
                        )

                    # Create the HTTP/1.1 or HTTP/2 connection
                    if http2_negotiated or (
                        self._http2 and not self._http1
                    ):  # pragma: nocover
                        # Import lazily so HTTP/2 support stays optional.
                        from .http2 import AsyncHTTP2Connection

                        self._connection = AsyncHTTP2Connection(
                            origin=self._remote_origin,
                            stream=stream,
                            keepalive_expiry=self._keepalive_expiry,
                        )
                    else:
                        self._connection = AsyncHTTP11Connection(
                            origin=self._remote_origin,
                            stream=stream,
                            keepalive_expiry=self._keepalive_expiry,
                        )
                except Exception as exc:
                    # Remember the failure so is_available()/has_expired()
                    # report this connection as unusable.
                    self._connect_failed = True
                    raise exc
            elif not self._connection.is_available():  # pragma: nocover
                raise ConnectionNotAvailable()

        return await self._connection.handle_async_request(request)

    def can_handle_request(self, origin: Origin) -> bool:
        """Only the origin this connection was created for can be served."""
        return origin == self._remote_origin

    async def aclose(self) -> None:
        if self._connection is not None:
            await self._connection.aclose()

    def is_available(self) -> bool:
        if self._connection is None:  # pragma: nocover
            # If HTTP/2 support is enabled, and the resulting connection could
            # end up as HTTP/2 then we should indicate the connection as being
            # available to service multiple requests.
            return (
                self._http2
                and (self._remote_origin.scheme == b"https" or not self._http1)
                and not self._connect_failed
            )
        return self._connection.is_available()

    def has_expired(self) -> bool:
        if self._connection is None:  # pragma: nocover
            return self._connect_failed
        return self._connection.has_expired()

    def is_idle(self) -> bool:
        if self._connection is None:  # pragma: nocover
            return self._connect_failed
        return self._connection.is_idle()

    def is_closed(self) -> bool:
        if self._connection is None:  # pragma: nocover
            return self._connect_failed
        return self._connection.is_closed()

    def info(self) -> str:
        if self._connection is None:  # pragma: nocover
            return "CONNECTION FAILED" if self._connect_failed else "CONNECTING"
        return self._connection.info()

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} [{self.info()}]>"
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__init__.py
ADDED
|
File without changes
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (180 Bytes). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/anyio.cpython-310.pyc
ADDED
|
Binary file (4.79 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/auto.cpython-310.pyc
ADDED
|
Binary file (1.79 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (3.88 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/mock.cpython-310.pyc
ADDED
|
Binary file (5.39 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/sync.cpython-310.pyc
ADDED
|
Binary file (4.14 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/__pycache__/trio.cpython-310.pyc
ADDED
|
Binary file (5.14 kB). View file
|
|
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/anyio.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import ssl
|
| 2 |
+
import typing
|
| 3 |
+
|
| 4 |
+
import anyio
|
| 5 |
+
|
| 6 |
+
from .._exceptions import (
|
| 7 |
+
ConnectError,
|
| 8 |
+
ConnectTimeout,
|
| 9 |
+
ReadError,
|
| 10 |
+
ReadTimeout,
|
| 11 |
+
WriteError,
|
| 12 |
+
WriteTimeout,
|
| 13 |
+
map_exceptions,
|
| 14 |
+
)
|
| 15 |
+
from .._utils import is_socket_readable
|
| 16 |
+
from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class AnyIOStream(AsyncNetworkStream):
    """
    An `AsyncNetworkStream` implementation that wraps an anyio byte stream,
    translating anyio/timeout exceptions into httpcore's exception types.
    """

    def __init__(self, stream: anyio.abc.ByteStream) -> None:
        self._stream = stream

    async def read(
        self, max_bytes: int, timeout: typing.Optional[float] = None
    ) -> bytes:
        """Read up to `max_bytes` bytes, returning `b""` on end-of-stream."""
        exc_map = {
            TimeoutError: ReadTimeout,
            anyio.BrokenResourceError: ReadError,
            anyio.ClosedResourceError: ReadError,
        }
        with map_exceptions(exc_map):
            with anyio.fail_after(timeout):
                try:
                    return await self._stream.receive(max_bytes=max_bytes)
                except anyio.EndOfStream:  # pragma: nocover
                    # A clean EOF is not an error; signal it with empty bytes.
                    return b""

    async def write(
        self, buffer: bytes, timeout: typing.Optional[float] = None
    ) -> None:
        """Write `buffer` to the stream; an empty buffer is a no-op."""
        if not buffer:
            return

        exc_map = {
            TimeoutError: WriteTimeout,
            anyio.BrokenResourceError: WriteError,
            anyio.ClosedResourceError: WriteError,
        }
        with map_exceptions(exc_map):
            with anyio.fail_after(timeout):
                await self._stream.send(item=buffer)

    async def aclose(self) -> None:
        await self._stream.aclose()

    async def start_tls(
        self,
        ssl_context: ssl.SSLContext,
        server_hostname: typing.Optional[str] = None,
        timeout: typing.Optional[float] = None,
    ) -> AsyncNetworkStream:
        """Perform a TLS handshake over this stream and return the wrapped stream."""
        exc_map = {
            TimeoutError: ConnectTimeout,
            anyio.BrokenResourceError: ConnectError,
        }
        with map_exceptions(exc_map):
            try:
                with anyio.fail_after(timeout):
                    ssl_stream = await anyio.streams.tls.TLSStream.wrap(
                        self._stream,
                        ssl_context=ssl_context,
                        hostname=server_hostname,
                        standard_compatible=False,
                        server_side=False,
                    )
            except Exception as exc:  # pragma: nocover
                # The stream is unusable after a failed handshake; close it
                # before propagating the error.
                await self.aclose()
                raise exc
        return AnyIOStream(ssl_stream)

    def get_extra_info(self, info: str) -> typing.Any:
        """Expose a few well-known attributes of the underlying socket/TLS layer."""
        if info == "ssl_object":
            return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None)
        if info == "client_addr":
            return self._stream.extra(anyio.abc.SocketAttribute.local_address, None)
        if info == "server_addr":
            return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None)
        if info == "socket":
            return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None)
        if info == "is_readable":
            sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None)
            return is_socket_readable(sock)
        return None
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class AnyIOBackend(AsyncNetworkBackend):
    """
    An `AsyncNetworkBackend` implemented on top of anyio, producing
    `AnyIOStream` instances for TCP and Unix-socket connections.
    """

    async def connect_tcp(
        self,
        host: str,
        port: int,
        timeout: typing.Optional[float] = None,
        local_address: typing.Optional[str] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> AsyncNetworkStream:
        """Open a TCP connection, applying any given socket options."""
        if socket_options is None:
            socket_options = []  # pragma: no cover
        exc_map = {
            TimeoutError: ConnectTimeout,
            OSError: ConnectError,
            anyio.BrokenResourceError: ConnectError,
        }
        with map_exceptions(exc_map):
            with anyio.fail_after(timeout):
                stream: anyio.abc.ByteStream = await anyio.connect_tcp(
                    remote_host=host,
                    remote_port=port,
                    local_host=local_address,
                )
            # By default TCP sockets opened in `asyncio` include TCP_NODELAY.
            for option in socket_options:
                stream._raw_socket.setsockopt(*option)  # type: ignore[attr-defined] # pragma: no cover
        return AnyIOStream(stream)

    async def connect_unix_socket(
        self,
        path: str,
        timeout: typing.Optional[float] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> AsyncNetworkStream:  # pragma: nocover
        """Open a Unix domain socket connection, applying any given socket options."""
        if socket_options is None:
            socket_options = []
        exc_map = {
            TimeoutError: ConnectTimeout,
            OSError: ConnectError,
            anyio.BrokenResourceError: ConnectError,
        }
        with map_exceptions(exc_map):
            with anyio.fail_after(timeout):
                stream: anyio.abc.ByteStream = await anyio.connect_unix(path)
            for option in socket_options:
                stream._raw_socket.setsockopt(*option)  # type: ignore[attr-defined] # pragma: no cover
        return AnyIOStream(stream)

    async def sleep(self, seconds: float) -> None:
        await anyio.sleep(seconds)  # pragma: nocover
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/auto.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import typing
|
| 2 |
+
from typing import Optional
|
| 3 |
+
|
| 4 |
+
import sniffio
|
| 5 |
+
|
| 6 |
+
from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class AutoBackend(AsyncNetworkBackend):
    """
    A backend that defers picking a concrete implementation until first use,
    selecting trio or anyio based on the async library currently running.
    """

    async def _init_backend(self) -> None:
        # Resolve the concrete backend exactly once.
        if hasattr(self, "_backend"):
            return

        if sniffio.current_async_library() == "trio":
            from .trio import TrioBackend

            self._backend: AsyncNetworkBackend = TrioBackend()
        else:
            from .anyio import AnyIOBackend

            self._backend = AnyIOBackend()

    async def connect_tcp(
        self,
        host: str,
        port: int,
        timeout: Optional[float] = None,
        local_address: Optional[str] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> AsyncNetworkStream:
        """Delegate a TCP connection to the resolved backend."""
        await self._init_backend()
        return await self._backend.connect_tcp(
            host,
            port,
            timeout=timeout,
            local_address=local_address,
            socket_options=socket_options,
        )

    async def connect_unix_socket(
        self,
        path: str,
        timeout: Optional[float] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> AsyncNetworkStream:  # pragma: nocover
        """Delegate a Unix-socket connection to the resolved backend."""
        await self._init_backend()
        return await self._backend.connect_unix_socket(
            path, timeout=timeout, socket_options=socket_options
        )

    async def sleep(self, seconds: float) -> None:  # pragma: nocover
        """Delegate an async sleep to the resolved backend."""
        await self._init_backend()
        return await self._backend.sleep(seconds)
|
evalkit_internvl/lib/python3.10/site-packages/httpcore/_backends/base.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import ssl
|
| 2 |
+
import time
|
| 3 |
+
import typing
|
| 4 |
+
|
| 5 |
+
# The argument tuples accepted by `socket.setsockopt`, i.e.
# (level, optname, value: int), (level, optname, value: buffer),
# and (level, optname, None, optlen). Backends apply these with
# `sock.setsockopt(*option)`.
SOCKET_OPTION = typing.Union[
    typing.Tuple[int, int, int],
    typing.Tuple[int, int, typing.Union[bytes, bytearray]],
    typing.Tuple[int, int, None, int],
]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class NetworkStream:
    """
    The interface for a synchronous byte stream.

    Concrete network backends provide implementations of this interface.
    """

    def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes:
        """Read at most `max_bytes` bytes from the stream."""
        raise NotImplementedError()  # pragma: nocover

    def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None:
        """Write the entirety of `buffer` to the stream."""
        raise NotImplementedError()  # pragma: nocover

    def close(self) -> None:
        """Close the stream."""
        raise NotImplementedError()  # pragma: nocover

    def start_tls(
        self,
        ssl_context: ssl.SSLContext,
        server_hostname: typing.Optional[str] = None,
        timeout: typing.Optional[float] = None,
    ) -> "NetworkStream":
        """Upgrade the stream to TLS and return the wrapped stream."""
        raise NotImplementedError()  # pragma: nocover

    def get_extra_info(self, info: str) -> typing.Any:
        """Return backend-specific stream information, or None if unavailable."""
        return None  # pragma: nocover
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class NetworkBackend:
    """
    The interface for a synchronous network backend, responsible for opening
    connections and returning `NetworkStream` instances.
    """

    def connect_tcp(
        self,
        host: str,
        port: int,
        timeout: typing.Optional[float] = None,
        local_address: typing.Optional[str] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> NetworkStream:
        """Open a TCP connection to `host:port`."""
        raise NotImplementedError()  # pragma: nocover

    def connect_unix_socket(
        self,
        path: str,
        timeout: typing.Optional[float] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> NetworkStream:
        """Open a connection to the Unix domain socket at `path`."""
        raise NotImplementedError()  # pragma: nocover

    def sleep(self, seconds: float) -> None:
        """Sleep for the given duration; the default blocks the thread."""
        time.sleep(seconds)  # pragma: nocover
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class AsyncNetworkStream:
    """
    The interface for an asynchronous byte stream.

    Concrete async network backends provide implementations of this interface.
    """

    async def read(
        self, max_bytes: int, timeout: typing.Optional[float] = None
    ) -> bytes:
        """Read at most `max_bytes` bytes from the stream."""
        raise NotImplementedError()  # pragma: nocover

    async def write(
        self, buffer: bytes, timeout: typing.Optional[float] = None
    ) -> None:
        """Write the entirety of `buffer` to the stream."""
        raise NotImplementedError()  # pragma: nocover

    async def aclose(self) -> None:
        """Close the stream."""
        raise NotImplementedError()  # pragma: nocover

    async def start_tls(
        self,
        ssl_context: ssl.SSLContext,
        server_hostname: typing.Optional[str] = None,
        timeout: typing.Optional[float] = None,
    ) -> "AsyncNetworkStream":
        """Upgrade the stream to TLS and return the wrapped stream."""
        raise NotImplementedError()  # pragma: nocover

    def get_extra_info(self, info: str) -> typing.Any:
        """Return backend-specific stream information, or None if unavailable."""
        return None  # pragma: nocover
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class AsyncNetworkBackend:
    """
    The interface for an asynchronous network backend, responsible for opening
    connections and returning `AsyncNetworkStream` instances.
    """

    async def connect_tcp(
        self,
        host: str,
        port: int,
        timeout: typing.Optional[float] = None,
        local_address: typing.Optional[str] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> AsyncNetworkStream:
        """Open a TCP connection to `host:port`."""
        raise NotImplementedError()  # pragma: nocover

    async def connect_unix_socket(
        self,
        path: str,
        timeout: typing.Optional[float] = None,
        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
    ) -> AsyncNetworkStream:
        """Open a connection to the Unix domain socket at `path`."""
        raise NotImplementedError()  # pragma: nocover

    async def sleep(self, seconds: float) -> None:
        """Sleep asynchronously for the given duration."""
        raise NotImplementedError()  # pragma: nocover
|