Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes. See the raw diff.
- deepseek/lib/python3.10/site-packages/certifi/core.py +114 -0
- deepseek/lib/python3.10/site-packages/diskcache/__pycache__/__init__.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/site-packages/diskcache/__pycache__/djangocache.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/site-packages/diskcache/__pycache__/fanout.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/site-packages/diskcache/__pycache__/persistent.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/site-packages/diskcache/__pycache__/recipes.cpython-310.pyc +0 -0
- deepseek/lib/python3.10/site-packages/diskcache/core.py +2452 -0
- deepseek/lib/python3.10/site-packages/diskcache/fanout.py +687 -0
- deepseek/lib/python3.10/site-packages/diskcache/persistent.py +1245 -0
- deepseek/lib/python3.10/site-packages/diskcache/recipes.py +488 -0
- deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/LICENSE +24 -0
- deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/LICENSES.third-party +507 -0
- deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/METADATA +84 -0
- deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/RECORD +0 -0
- deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/WHEEL +6 -0
- deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/top_level.txt +1 -0
- deepseek/lib/python3.10/site-packages/numpy/random/tests/data/pcg64-testset-1.csv +1001 -0
- deepseek/lib/python3.10/site-packages/pytz/__init__.py +1554 -0
- deepseek/lib/python3.10/site-packages/pytz/exceptions.py +59 -0
- deepseek/lib/python3.10/site-packages/pytz/lazy.py +172 -0
- deepseek/lib/python3.10/site-packages/pytz/reference.py +140 -0
- deepseek/lib/python3.10/site-packages/pytz/tzfile.py +133 -0
- deepseek/lib/python3.10/site-packages/pytz/tzinfo.py +580 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Cuba +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/EST +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/EST5EDT +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Egypt +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/GMT-0 +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Jamaica +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Japan +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Kwajalein +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/MST7MDT +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/NZ +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Portugal +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/ROC +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/UCT +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/W-SU +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/WET +0 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/leapseconds +79 -0
- deepseek/lib/python3.10/site-packages/pytz/zoneinfo/zonenow.tab +299 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/__init__.py +19 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/__init__.py +38 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/functional.py +644 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__init__.py +131 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/__init__.cpython-310.pyc +0 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/activation.cpython-310.pyc +0 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/batchnorm.cpython-310.pyc +0 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/conv.cpython-310.pyc +0 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/dropout.cpython-310.pyc +0 -0
- deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/embedding_ops.cpython-310.pyc +0 -0
deepseek/lib/python3.10/site-packages/certifi/core.py
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
certifi.py
~~~~~~~~~~

This module returns the installation location of cacert.pem or its contents.
"""
import sys
import atexit


def exit_cacert_ctx() -> None:
    # Registered with atexit: closes the module-held resource context so any
    # temporary extraction of cacert.pem is cleaned up at interpreter exit.
    _CACERT_CTX.__exit__(None, None, None)  # type: ignore[union-attr]


if sys.version_info >= (3, 11):

    from importlib.resources import as_file, files

    _CACERT_CTX = None
    _CACERT_PATH = None

    def where() -> str:
        """Return the filesystem path of the bundled cacert.pem.

        Extraction (needed when running from a zipimport) happens lazily on
        the first call and the result is cached at module scope, so repeated
        calls never re-extract the file.
        """
        global _CACERT_CTX
        global _CACERT_PATH
        if _CACERT_PATH is not None:
            return _CACERT_PATH
        # importlib.resources hands back a context manager, not a bare path:
        # entering it may extract the resource to a temporary file and
        # exiting it performs the cleanup.  In the common on-filesystem case
        # __exit__() is a no-op.  We enter once, remember the path, and keep
        # the context object referenced at module scope so that garbage
        # collection cannot trigger the cleanup early; atexit closes it at
        # interpreter shutdown.
        _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
        _CACERT_PATH = str(_CACERT_CTX.__enter__())
        atexit.register(exit_cacert_ctx)
        return _CACERT_PATH

    def contents() -> str:
        """Return the certificate bundle as text."""
        return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")

elif sys.version_info >= (3, 7):

    from importlib.resources import path as get_path, read_text

    _CACERT_CTX = None
    _CACERT_PATH = None

    def where() -> str:
        """Return the filesystem path of the bundled cacert.pem.

        Same lazy-extraction strategy as the 3.11+ branch, built on the
        older ``importlib.resources.path`` API.
        """
        global _CACERT_CTX
        global _CACERT_PATH
        if _CACERT_PATH is not None:
            return _CACERT_PATH
        # ``path`` yields a context manager whose __enter__ produces the
        # real filesystem location (extracting if necessary) and whose
        # __exit__ cleans up.  Hold both the context and the computed path
        # at module scope and defer the cleanup to atexit so the path stays
        # valid for the life of the process.
        _CACERT_CTX = get_path("certifi", "cacert.pem")
        _CACERT_PATH = str(_CACERT_CTX.__enter__())
        atexit.register(exit_cacert_ctx)
        return _CACERT_PATH

    def contents() -> str:
        """Return the certificate bundle as text."""
        return read_text("certifi", "cacert.pem", encoding="ascii")

else:
    import os
    import types
    from typing import Union

    Package = Union[types.ModuleType, str]
    Resource = Union[str, "os.PathLike"]

    # Fallback for Python versions prior to 3.7 that lack the
    # importlib.resources module.  It leans on the filesystem-based where()
    # below, so it will not help in environments (e.g. PyOxidizer) that do
    # not set __file__ on modules.
    def read_text(
        package: Package,
        resource: Resource,
        encoding: str = 'utf-8',
        errors: str = 'strict'
    ) -> str:
        with open(where(), encoding=encoding) as data:
            return data.read()

    # Without importlib.resources, assume the package lives on the
    # filesystem and munge the path directly.
    def where() -> str:
        f = os.path.dirname(__file__)

        return os.path.join(f, "cacert.pem")

    def contents() -> str:
        return read_text("certifi", "cacert.pem", encoding="ascii")
|
deepseek/lib/python3.10/site-packages/diskcache/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.22 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/diskcache/__pycache__/djangocache.cpython-310.pyc
ADDED
|
Binary file (15.7 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/diskcache/__pycache__/fanout.cpython-310.pyc
ADDED
|
Binary file (23.9 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/diskcache/__pycache__/persistent.cpython-310.pyc
ADDED
|
Binary file (35.3 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/diskcache/__pycache__/recipes.cpython-310.pyc
ADDED
|
Binary file (15 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/diskcache/core.py
ADDED
|
@@ -0,0 +1,2452 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Core disk and file backed cache API.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import codecs
|
| 5 |
+
import contextlib as cl
|
| 6 |
+
import errno
|
| 7 |
+
import functools as ft
|
| 8 |
+
import io
|
| 9 |
+
import json
|
| 10 |
+
import os
|
| 11 |
+
import os.path as op
|
| 12 |
+
import pickle
|
| 13 |
+
import pickletools
|
| 14 |
+
import sqlite3
|
| 15 |
+
import struct
|
| 16 |
+
import tempfile
|
| 17 |
+
import threading
|
| 18 |
+
import time
|
| 19 |
+
import warnings
|
| 20 |
+
import zlib
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def full_name(func):
|
| 24 |
+
"""Return full name of `func` by adding the module and function name."""
|
| 25 |
+
return func.__module__ + '.' + func.__qualname__
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class Constant(tuple):
    """Immutable one-element tuple that displays as its bare name.

    Used for sentinel values (e.g. ``ENOVAL``) that must be unique,
    hashable, and print cleanly in debugging output.
    """

    def __new__(cls, name):
        # Store the name as the sole tuple element; tuple immutability
        # makes instances safe to share across threads and modules.
        return super().__new__(cls, (name,))

    def __repr__(self):
        # Render just the stored name, without quotes or parentheses.
        return '%s' % self[0]
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# Filename of the SQLite database inside a cache directory.
DBNAME = 'cache.db'
# Sentinel returned/compared when a key has no value ("error, no value").
ENOVAL = Constant('ENOVAL')
# Sentinel for an unspecified argument (distinct from None).
UNKNOWN = Constant('UNKNOWN')

# Storage modes recorded per row in the Cache table.
MODE_NONE = 0
MODE_RAW = 1
MODE_BINARY = 2
MODE_TEXT = 3
MODE_PICKLE = 4

# Default cache settings, persisted in the database on first use.
DEFAULT_SETTINGS = {
    'statistics': 0,  # False
    'tag_index': 0,  # False
    'eviction_policy': 'least-recently-stored',
    'size_limit': 2**30,  # 1gb
    'cull_limit': 10,
    'sqlite_auto_vacuum': 1,  # FULL
    'sqlite_cache_size': 2**13,  # 8,192 pages
    'sqlite_journal_mode': 'wal',
    'sqlite_mmap_size': 2**26,  # 64mb
    'sqlite_synchronous': 1,  # NORMAL
    'disk_min_file_size': 2**15,  # 32kb
    'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL,
}

# Initial bookkeeping counters stored alongside the settings.
METADATA = {
    'count': 0,
    'size': 0,
    'hits': 0,
    'misses': 0,
}

# Per-policy SQL fragments: 'init' creates the supporting index,
# 'get' is the column update applied on access, and 'cull' selects
# eviction candidates in policy order.
EVICTION_POLICY = {
    'none': {
        'init': None,
        'get': None,
        'cull': None,
    },
    'least-recently-stored': {
        'init': (
            'CREATE INDEX IF NOT EXISTS Cache_store_time ON'
            ' Cache (store_time)'
        ),
        'get': None,
        'cull': 'SELECT {fields} FROM Cache ORDER BY store_time LIMIT ?',
    },
    'least-recently-used': {
        'init': (
            'CREATE INDEX IF NOT EXISTS Cache_access_time ON'
            ' Cache (access_time)'
        ),
        'get': 'access_time = {now}',
        'cull': 'SELECT {fields} FROM Cache ORDER BY access_time LIMIT ?',
    },
    'least-frequently-used': {
        'init': (
            'CREATE INDEX IF NOT EXISTS Cache_access_count ON'
            ' Cache (access_count)'
        ),
        'get': 'access_count = access_count + 1',
        'cull': 'SELECT {fields} FROM Cache ORDER BY access_count LIMIT ?',
    },
}
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class Disk:
|
| 104 |
+
"""Cache key and value serialization for SQLite database and files."""
|
| 105 |
+
|
| 106 |
+
def __init__(self, directory, min_file_size=0, pickle_protocol=0):
|
| 107 |
+
"""Initialize disk instance.
|
| 108 |
+
|
| 109 |
+
:param str directory: directory path
|
| 110 |
+
:param int min_file_size: minimum size for file use
|
| 111 |
+
:param int pickle_protocol: pickle protocol for serialization
|
| 112 |
+
|
| 113 |
+
"""
|
| 114 |
+
self._directory = directory
|
| 115 |
+
self.min_file_size = min_file_size
|
| 116 |
+
self.pickle_protocol = pickle_protocol
|
| 117 |
+
|
| 118 |
+
def hash(self, key):
|
| 119 |
+
"""Compute portable hash for `key`.
|
| 120 |
+
|
| 121 |
+
:param key: key to hash
|
| 122 |
+
:return: hash value
|
| 123 |
+
|
| 124 |
+
"""
|
| 125 |
+
mask = 0xFFFFFFFF
|
| 126 |
+
disk_key, _ = self.put(key)
|
| 127 |
+
type_disk_key = type(disk_key)
|
| 128 |
+
|
| 129 |
+
if type_disk_key is sqlite3.Binary:
|
| 130 |
+
return zlib.adler32(disk_key) & mask
|
| 131 |
+
elif type_disk_key is str:
|
| 132 |
+
return zlib.adler32(disk_key.encode('utf-8')) & mask # noqa
|
| 133 |
+
elif type_disk_key is int:
|
| 134 |
+
return disk_key % mask
|
| 135 |
+
else:
|
| 136 |
+
assert type_disk_key is float
|
| 137 |
+
return zlib.adler32(struct.pack('!d', disk_key)) & mask
|
| 138 |
+
|
| 139 |
+
def put(self, key):
|
| 140 |
+
"""Convert `key` to fields key and raw for Cache table.
|
| 141 |
+
|
| 142 |
+
:param key: key to convert
|
| 143 |
+
:return: (database key, raw boolean) pair
|
| 144 |
+
|
| 145 |
+
"""
|
| 146 |
+
# pylint: disable=unidiomatic-typecheck
|
| 147 |
+
type_key = type(key)
|
| 148 |
+
|
| 149 |
+
if type_key is bytes:
|
| 150 |
+
return sqlite3.Binary(key), True
|
| 151 |
+
elif (
|
| 152 |
+
(type_key is str)
|
| 153 |
+
or (
|
| 154 |
+
type_key is int
|
| 155 |
+
and -9223372036854775808 <= key <= 9223372036854775807
|
| 156 |
+
)
|
| 157 |
+
or (type_key is float)
|
| 158 |
+
):
|
| 159 |
+
return key, True
|
| 160 |
+
else:
|
| 161 |
+
data = pickle.dumps(key, protocol=self.pickle_protocol)
|
| 162 |
+
result = pickletools.optimize(data)
|
| 163 |
+
return sqlite3.Binary(result), False
|
| 164 |
+
|
| 165 |
+
def get(self, key, raw):
|
| 166 |
+
"""Convert fields `key` and `raw` from Cache table to key.
|
| 167 |
+
|
| 168 |
+
:param key: database key to convert
|
| 169 |
+
:param bool raw: flag indicating raw database storage
|
| 170 |
+
:return: corresponding Python key
|
| 171 |
+
|
| 172 |
+
"""
|
| 173 |
+
# pylint: disable=unidiomatic-typecheck
|
| 174 |
+
if raw:
|
| 175 |
+
return bytes(key) if type(key) is sqlite3.Binary else key
|
| 176 |
+
else:
|
| 177 |
+
return pickle.load(io.BytesIO(key))
|
| 178 |
+
|
| 179 |
+
def store(self, value, read, key=UNKNOWN):
    """Convert `value` to fields size, mode, filename, and value for Cache
    table.

    Small values are kept inline in the database (MODE_RAW or
    MODE_PICKLE); values of at least `min_file_size` bytes are written
    to a randomly named file and only the filename is stored.

    :param value: value to convert
    :param bool read: True when value is file-like object
    :param key: key for item (default UNKNOWN)
    :return: (size, mode, filename, value) tuple for Cache table

    """
    # pylint: disable=unidiomatic-typecheck
    type_value = type(value)
    min_file_size = self.min_file_size

    if (
        (type_value is str and len(value) < min_file_size)
        or (
            type_value is int
            and -9223372036854775808 <= value <= 9223372036854775807
        )
        or (type_value is float)
    ):
        # Small strings, 64-bit integers, and floats are stored inline;
        # size 0 indicates no file space is used.
        return 0, MODE_RAW, None, value
    elif type_value is bytes:
        if len(value) < min_file_size:
            return 0, MODE_RAW, None, sqlite3.Binary(value)
        else:
            # Large bytes go to a file; mode 'xb' fails rather than
            # silently overwriting if the random name already exists.
            filename, full_path = self.filename(key, value)
            self._write(full_path, io.BytesIO(value), 'xb')
            return len(value), MODE_BINARY, filename, None
    elif type_value is str:
        # Large strings are written as UTF-8 text; the recorded size is
        # the on-disk (encoded) byte count, not len(value).
        filename, full_path = self.filename(key, value)
        self._write(full_path, io.StringIO(value), 'x', 'UTF-8')
        size = op.getsize(full_path)
        return size, MODE_TEXT, filename, None
    elif read:
        # File-like object: stream to disk in 4 MiB chunks.
        reader = ft.partial(value.read, 2**22)
        filename, full_path = self.filename(key, value)
        iterator = iter(reader, b'')
        size = self._write(full_path, iterator, 'xb')
        return size, MODE_BINARY, filename, None
    else:
        # Fallback: pickle everything else (including ints outside the
        # 64-bit range); small pickles stay inline in the database.
        result = pickle.dumps(value, protocol=self.pickle_protocol)

        if len(result) < min_file_size:
            return 0, MODE_PICKLE, None, sqlite3.Binary(result)
        else:
            filename, full_path = self.filename(key, value)
            self._write(full_path, io.BytesIO(result), 'xb')
            return len(result), MODE_PICKLE, filename, None
|
| 229 |
+
|
| 230 |
+
def _write(self, full_path, iterator, mode, encoding=None):
    # Write chunks from `iterator` to `full_path`, creating parent
    # directories as needed, and return the total length written
    # (bytes for binary modes, characters for text modes).  Retries up
    # to 10 times because a concurrent cache in another process may
    # delete the directory between makedirs() and open().
    full_dir, _ = op.split(full_path)

    for count in range(1, 11):
        # makedirs may fail if the directory already exists (or was
        # just created by another process); the open() below is the
        # real test, so the error is suppressed here.
        with cl.suppress(OSError):
            os.makedirs(full_dir)

        try:
            # Another cache may have deleted the directory before
            # the file could be opened.
            writer = open(full_path, mode, encoding=encoding)
        except OSError:
            if count == 10:
                # Give up after 10 tries to open the file.
                raise
            continue

        with writer:
            size = 0
            for chunk in iterator:
                size += len(chunk)
                writer.write(chunk)
            return size
|
| 253 |
+
|
| 254 |
+
def fetch(self, mode, filename, value, read):
    """Convert fields `mode`, `filename`, and `value` from Cache table to
    value.

    :param int mode: value mode raw, binary, text, or pickle
    :param str filename: filename of corresponding value
    :param value: database value
    :param bool read: when True, return an open file handle
    :return: corresponding Python value
    :raises: IOError if the value cannot be read

    """
    # pylint: disable=unidiomatic-typecheck,consider-using-with
    if mode == MODE_RAW:
        if type(value) is sqlite3.Binary:
            return bytes(value)
        return value

    if mode == MODE_BINARY:
        full_path = op.join(self._directory, filename)
        if read:
            return open(full_path, 'rb')
        with open(full_path, 'rb') as reader:
            return reader.read()

    if mode == MODE_TEXT:
        full_path = op.join(self._directory, filename)
        with open(full_path, 'r', encoding='UTF-8') as reader:
            return reader.read()

    if mode == MODE_PICKLE:
        if value is not None:
            return pickle.load(io.BytesIO(value))
        with open(op.join(self._directory, filename), 'rb') as reader:
            return pickle.load(reader)

    # Any other mode (e.g. MODE_NONE) falls through, returning None.
|
| 285 |
+
|
| 286 |
+
def filename(self, key=UNKNOWN, value=UNKNOWN):
    """Return filename and full-path tuple for file storage.

    Filename will be a randomly generated 28 character hexadecimal string
    with ".val" suffixed. Two levels of sub-directories will be used to
    reduce the size of directories. On older filesystems, lookups in
    directories with many files may be slow.

    The default implementation ignores the `key` and `value` parameters.

    In some scenarios, for example :meth:`Cache.push
    <diskcache.Cache.push>`, the `key` or `value` may not be known when the
    item is stored in the cache.

    :param key: key for item (default UNKNOWN)
    :param value: value for item (default UNKNOWN)

    """
    # pylint: disable=unused-argument
    random_hex = codecs.encode(os.urandom(16), 'hex').decode('utf-8')
    # First four hex digits pick the two sub-directory levels; the
    # remaining 28 digits form the file name.
    filename = op.join(
        random_hex[:2], random_hex[2:4], random_hex[4:] + '.val'
    )
    return filename, op.join(self._directory, filename)
|
| 311 |
+
|
| 312 |
+
def remove(self, file_path):
    """Remove a file given by `file_path`.

    This method is cross-thread and cross-process safe. If an OSError
    occurs, it is suppressed.

    :param str file_path: relative path to file

    """
    target = op.join(self._directory, file_path)
    parent_dir = op.dirname(target)

    # Two caches may race to delete the same file or directory; the
    # loser's OSError is harmless, so suppress it.

    with cl.suppress(OSError):
        os.remove(target)

    with cl.suppress(OSError):
        os.removedirs(parent_dir)
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
class JSONDisk(Disk):
    """Cache key and value using JSON serialization with zlib compression."""

    def __init__(self, directory, compress_level=1, **kwargs):
        """Initialize JSON disk instance.

        Keys and values are compressed using the zlib library. The
        `compress_level` is an integer from 0 to 9 controlling the level of
        compression; 1 is fastest and produces the least compression, 9 is
        slowest and produces the most compression, and 0 is no compression.

        :param str directory: directory path
        :param int compress_level: zlib compression level (default 1)
        :param kwargs: super class arguments

        """
        self.compress_level = compress_level
        super().__init__(directory, **kwargs)

    def put(self, key):
        """Serialize `key` to compressed JSON before storing."""
        payload = json.dumps(key).encode('utf-8')
        return super().put(zlib.compress(payload, self.compress_level))

    def get(self, key, raw):
        """Recover the original key from its compressed JSON form."""
        compressed = super().get(key, raw)
        return json.loads(zlib.decompress(compressed).decode('utf-8'))

    def store(self, value, read, key=UNKNOWN):
        """Serialize `value` to compressed JSON unless it is file-like."""
        if not read:
            payload = json.dumps(value).encode('utf-8')
            value = zlib.compress(payload, self.compress_level)
        return super().store(value, read, key=key)

    def fetch(self, mode, filename, value, read):
        """Recover the original value, decoding JSON unless `read` is set."""
        fetched = super().fetch(mode, filename, value, read)
        if read:
            return fetched
        return json.loads(zlib.decompress(fetched).decode('utf-8'))
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
class Timeout(Exception):
    """Raised when the SQLite database timeout expires."""
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
class UnknownFileWarning(UserWarning):
    """Warning issued by Cache.check when it finds an unknown file."""
|
| 381 |
+
|
| 382 |
+
|
| 383 |
+
class EmptyDirWarning(UserWarning):
    """Warning issued by Cache.check when it finds an empty directory."""
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
def args_to_key(base, args, kwargs, typed, ignore):
    """Create cache key out of function arguments.

    :param tuple base: base of key
    :param tuple args: function arguments
    :param dict kwargs: function keyword arguments
    :param bool typed: include types in cache key
    :param set ignore: positional or keyword args to ignore
    :return: cache key tuple

    """
    # Drop ignored positional arguments; None separates args from kwargs.
    args = tuple(
        value for position, value in enumerate(args) if position not in ignore
    )
    key = base + args + (None,)
    sorted_items = []

    if kwargs:
        filtered = {
            name: value
            for name, value in kwargs.items()
            if name not in ignore
        }
        # Sort keyword arguments so key construction is order-independent.
        sorted_items = sorted(filtered.items())
        for pair in sorted_items:
            key += pair

    if typed:
        key += tuple(type(value) for value in args)
        key += tuple(type(value) for _, value in sorted_items)

    return key
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
class Cache:
|
| 418 |
+
"""Disk and file backed cache."""
|
| 419 |
+
|
| 420 |
+
def __init__(self, directory=None, timeout=60, disk=Disk, **settings):
    """Initialize cache instance.

    :param str directory: cache directory (a temporary directory is
        created when None; the directory is created if missing)
    :param float timeout: SQLite connection timeout
    :param disk: Disk type or subclass for serialization
    :param settings: any of DEFAULT_SETTINGS

    """
    try:
        assert issubclass(disk, Disk)
    except (TypeError, AssertionError):
        # TypeError covers non-class arguments to issubclass.
        raise ValueError('disk must subclass diskcache.Disk') from None

    if directory is None:
        directory = tempfile.mkdtemp(prefix='diskcache-')
    directory = str(directory)
    directory = op.expanduser(directory)
    directory = op.expandvars(directory)

    self._directory = directory
    self._timeout = 0  # Manually handle retries during initialization.
    self._local = threading.local()  # per-thread SQLite connection
    self._txn_id = None  # ident of the thread holding an open transaction

    if not op.isdir(directory):
        try:
            os.makedirs(directory, 0o755)
        except OSError as error:
            # EEXIST means another process created it concurrently.
            if error.errno != errno.EEXIST:
                raise EnvironmentError(
                    error.errno,
                    'Cache directory "%s" does not exist'
                    ' and could not be created' % self._directory,
                ) from None

    sql = self._sql_retry

    # Setup Settings table.

    try:
        current_settings = dict(
            sql('SELECT key, value FROM Settings').fetchall()
        )
    except sqlite3.OperationalError:
        # Fresh database: the Settings table does not exist yet.
        current_settings = {}

    # Precedence: explicit `settings` kwargs, then values already stored
    # in the database, then DEFAULT_SETTINGS.
    sets = DEFAULT_SETTINGS.copy()
    sets.update(current_settings)
    sets.update(settings)

    for key in METADATA:
        sets.pop(key, None)

    # Chance to set pragmas before any tables are created.

    for key, value in sorted(sets.items()):
        if key.startswith('sqlite_'):
            self.reset(key, value, update=False)

    sql(
        'CREATE TABLE IF NOT EXISTS Settings ('
        ' key TEXT NOT NULL UNIQUE,'
        ' value)'
    )

    # Setup Disk object (must happen after settings initialized).

    kwargs = {
        key[5:]: value
        for key, value in sets.items()
        if key.startswith('disk_')
    }
    self._disk = disk(directory, **kwargs)

    # Set cached attributes: updates settings and sets pragmas.

    for key, value in sets.items():
        query = 'INSERT OR REPLACE INTO Settings VALUES (?, ?)'
        sql(query, (key, value))
        self.reset(key, value)

    for key, value in METADATA.items():
        query = 'INSERT OR IGNORE INTO Settings VALUES (?, ?)'
        sql(query, (key, value))
        self.reset(key)

    ((self._page_size,),) = sql('PRAGMA page_size').fetchall()

    # Setup Cache table.

    sql(
        'CREATE TABLE IF NOT EXISTS Cache ('
        ' rowid INTEGER PRIMARY KEY,'
        ' key BLOB,'
        ' raw INTEGER,'
        ' store_time REAL,'
        ' expire_time REAL,'
        ' access_time REAL,'
        ' access_count INTEGER DEFAULT 0,'
        ' tag BLOB,'
        ' size INTEGER DEFAULT 0,'
        ' mode INTEGER DEFAULT 0,'
        ' filename TEXT,'
        ' value BLOB)'
    )

    sql(
        'CREATE UNIQUE INDEX IF NOT EXISTS Cache_key_raw ON'
        ' Cache(key, raw)'
    )

    sql(
        'CREATE INDEX IF NOT EXISTS Cache_expire_time ON'
        ' Cache (expire_time)'
    )

    query = EVICTION_POLICY[self.eviction_policy]['init']

    if query is not None:
        sql(query)

    # Use triggers to keep Metadata updated.

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_count_insert'
        ' AFTER INSERT ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings SET value = value + 1'
        ' WHERE key = "count"; END'
    )

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_count_delete'
        ' AFTER DELETE ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings SET value = value - 1'
        ' WHERE key = "count"; END'
    )

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_size_insert'
        ' AFTER INSERT ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings SET value = value + NEW.size'
        ' WHERE key = "size"; END'
    )

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_size_update'
        ' AFTER UPDATE ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings'
        ' SET value = value + NEW.size - OLD.size'
        ' WHERE key = "size"; END'
    )

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_size_delete'
        ' AFTER DELETE ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings SET value = value - OLD.size'
        ' WHERE key = "size"; END'
    )

    # Create tag index if requested.

    if self.tag_index:  # pylint: disable=no-member
        self.create_tag_index()
    else:
        self.drop_tag_index()

    # Close and re-open database connection with given timeout.

    self.close()
    self._timeout = timeout
    self._sql  # pylint: disable=pointless-statement
|
| 592 |
+
|
| 593 |
+
@property
|
| 594 |
+
def directory(self):
|
| 595 |
+
"""Cache directory."""
|
| 596 |
+
return self._directory
|
| 597 |
+
|
| 598 |
+
@property
|
| 599 |
+
def timeout(self):
|
| 600 |
+
"""SQLite connection timeout value in seconds."""
|
| 601 |
+
return self._timeout
|
| 602 |
+
|
| 603 |
+
@property
|
| 604 |
+
def disk(self):
|
| 605 |
+
"""Disk used for serialization."""
|
| 606 |
+
return self._disk
|
| 607 |
+
|
| 608 |
+
@property
def _con(self):
    """Thread-local SQLite connection, created lazily."""
    # Check process ID to support process forking. If the process
    # ID changes, close the connection and update the process ID.

    local_pid = getattr(self._local, 'pid', None)
    pid = os.getpid()

    if local_pid != pid:
        self.close()
        self._local.pid = pid

    con = getattr(self._local, 'con', None)

    if con is None:
        # isolation_level=None puts sqlite3 in autocommit mode;
        # transactions are issued explicitly (see _transact).
        con = self._local.con = sqlite3.connect(
            op.join(self._directory, DBNAME),
            timeout=self._timeout,
            isolation_level=None,
        )

        # Some SQLite pragmas work on a per-connection basis so
        # query the Settings table and reset the pragmas. The
        # Settings table may not exist so catch and ignore the
        # OperationalError that may occur.

        try:
            select = 'SELECT key, value FROM Settings'
            settings = con.execute(select).fetchall()
        except sqlite3.OperationalError:
            pass
        else:
            for key, value in settings:
                if key.startswith('sqlite_'):
                    self.reset(key, value, update=False)

    return con
|
| 645 |
+
|
| 646 |
+
@property
def _sql(self):
    """Shortcut for executing statements on the thread-local connection."""
    con = self._con
    return con.execute
|
| 649 |
+
|
| 650 |
+
@property
def _sql_retry(self):
    """Like :attr:`_sql` but retries locked-database errors for 60s."""
    sql = self._sql

    # 2018-11-01 GrantJ - Some SQLite builds/versions handle
    # the SQLITE_BUSY return value and connection parameter
    # "timeout" differently. For a more reliable duration,
    # manually retry the statement for 60 seconds. Only used
    # by statements which modify the database and do not use
    # a transaction (like those in ``__init__`` or ``reset``).
    # See Issue #85 for and tests/issue_85.py for more details.

    def _execute_with_retry(statement, *args, **kwargs):
        first_attempt = time.time()

        while True:
            try:
                return sql(statement, *args, **kwargs)
            except sqlite3.OperationalError as exc:
                if str(exc) != 'database is locked':
                    raise
                if time.time() - first_attempt > 60:
                    raise
                time.sleep(0.001)

    return _execute_with_retry
|
| 676 |
+
|
| 677 |
+
@cl.contextmanager
def transact(self, retry=False):
    """Context manager to perform a transaction by locking the cache.

    While the cache is locked, no other write operation is permitted.
    Transactions should therefore be as short as possible. Read and write
    operations performed in a transaction are atomic. Read operations may
    occur concurrent to a transaction.

    Transactions may be nested and may not be shared between threads.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    >>> cache = Cache()
    >>> with cache.transact():  # Atomically increment two keys.
    ...     _ = cache.incr('total', 123.4)
    ...     _ = cache.incr('count', 1)
    >>> with cache.transact():  # Atomically calculate average.
    ...     average = cache['total'] / cache['count']
    >>> average
    123.4

    :param bool retry: retry if database timeout occurs (default False)
    :return: context manager for use in `with` statement
    :raises Timeout: if database timeout occurs

    """
    # Public wrapper around _transact; hides the internal (sql, cleanup)
    # pair that _transact yields.
    with self._transact(retry=retry):
        yield
|
| 707 |
+
|
| 708 |
+
@cl.contextmanager
def _transact(self, retry=False, filename=None):
    # Internal transaction context manager.  Yields (sql, cleanup) where
    # `cleanup` queues a value-file name for deletion after a successful
    # COMMIT.  `filename`, if given, is a just-written value file that
    # must be removed when the transaction cannot even begin.
    sql = self._sql
    filenames = []
    _disk_remove = self._disk.remove
    tid = threading.get_ident()
    txn_id = self._txn_id

    if tid == txn_id:
        # Nested transaction in the same thread: join the outer one
        # rather than issuing a second BEGIN.
        begin = False
    else:
        while True:
            try:
                sql('BEGIN IMMEDIATE')
                begin = True
                self._txn_id = tid
                break
            except sqlite3.OperationalError:
                # Database is locked by another connection.
                if retry:
                    continue
                if filename is not None:
                    # Transaction never started; discard the orphaned
                    # value file.
                    _disk_remove(filename)
                raise Timeout from None

    try:
        yield sql, filenames.append
    except BaseException:
        if begin:
            assert self._txn_id == tid
            self._txn_id = None
            sql('ROLLBACK')
        raise
    else:
        if begin:
            assert self._txn_id == tid
            self._txn_id = None
            sql('COMMIT')
        # Delete replaced/expired value files only after COMMIT so a
        # rollback never loses referenced data.
        for name in filenames:
            if name is not None:
                _disk_remove(name)
|
| 748 |
+
|
| 749 |
+
def set(self, key, value, expire=None, read=False, tag=None, retry=False):
    """Set `key` and `value` item in cache.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param value: value for item
    :param float expire: seconds until item expires
        (default None, no expiry)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was set
    :raises Timeout: if database timeout occurs

    """
    now = time.time()
    db_key, raw = self._disk.put(key)
    expire_time = None if expire is None else now + expire
    # Serialize the value (possibly writing a file) BEFORE taking the
    # transaction so the database lock is held as briefly as possible.
    size, mode, filename, db_value = self._disk.store(value, read, key=key)
    columns = (expire_time, tag, size, mode, filename, db_value)

    # The order of SELECT, UPDATE, and INSERT is important below.
    #
    # Typical cache usage pattern is:
    #
    # value = cache.get(key)
    # if value is None:
    # value = expensive_calculation()
    # cache.set(key, value)
    #
    # Cache.get does not evict expired keys to avoid writes during lookups.
    # Commonly used/expired keys will therefore remain in the cache making
    # an UPDATE the preferred path.
    #
    # The alternative is to assume the key is not present by first trying
    # to INSERT and then handling the IntegrityError that occurs from
    # violating the UNIQUE constraint. This optimistic approach was
    # rejected based on the common cache usage pattern.
    #
    # INSERT OR REPLACE aka UPSERT is not used because the old filename may
    # need cleanup.

    with self._transact(retry, filename) as (sql, cleanup):
        rows = sql(
            'SELECT rowid, filename FROM Cache'
            ' WHERE key = ? AND raw = ?',
            (db_key, raw),
        ).fetchall()

        if rows:
            # Key exists: update in place and queue the replaced value
            # file (if any) for deletion after COMMIT.
            ((rowid, old_filename),) = rows
            cleanup(old_filename)
            self._row_update(rowid, now, columns)
        else:
            self._row_insert(db_key, raw, now, columns)

        self._cull(now, sql, cleanup)

        return True
|
| 813 |
+
|
| 814 |
+
def __setitem__(self, key, value):
    """Set corresponding `value` for `key` in cache.

    Equivalent to ``cache.set(key, value, retry=True)``.

    :param key: key for item
    :param value: value for item

    """
    # NOTE(review): the previous docstring claimed a return value and a
    # KeyError (copied from __getitem__); this method returns None and
    # retries on timeout instead of raising.
    self.set(key, value, retry=True)
|
| 824 |
+
|
| 825 |
+
def _row_update(self, rowid, now, columns):
|
| 826 |
+
sql = self._sql
|
| 827 |
+
expire_time, tag, size, mode, filename, value = columns
|
| 828 |
+
sql(
|
| 829 |
+
'UPDATE Cache SET'
|
| 830 |
+
' store_time = ?,'
|
| 831 |
+
' expire_time = ?,'
|
| 832 |
+
' access_time = ?,'
|
| 833 |
+
' access_count = ?,'
|
| 834 |
+
' tag = ?,'
|
| 835 |
+
' size = ?,'
|
| 836 |
+
' mode = ?,'
|
| 837 |
+
' filename = ?,'
|
| 838 |
+
' value = ?'
|
| 839 |
+
' WHERE rowid = ?',
|
| 840 |
+
(
|
| 841 |
+
now, # store_time
|
| 842 |
+
expire_time,
|
| 843 |
+
now, # access_time
|
| 844 |
+
0, # access_count
|
| 845 |
+
tag,
|
| 846 |
+
size,
|
| 847 |
+
mode,
|
| 848 |
+
filename,
|
| 849 |
+
value,
|
| 850 |
+
rowid,
|
| 851 |
+
),
|
| 852 |
+
)
|
| 853 |
+
|
| 854 |
+
def _row_insert(self, key, raw, now, columns):
|
| 855 |
+
sql = self._sql
|
| 856 |
+
expire_time, tag, size, mode, filename, value = columns
|
| 857 |
+
sql(
|
| 858 |
+
'INSERT INTO Cache('
|
| 859 |
+
' key, raw, store_time, expire_time, access_time,'
|
| 860 |
+
' access_count, tag, size, mode, filename, value'
|
| 861 |
+
') VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
|
| 862 |
+
(
|
| 863 |
+
key,
|
| 864 |
+
raw,
|
| 865 |
+
now, # store_time
|
| 866 |
+
expire_time,
|
| 867 |
+
now, # access_time
|
| 868 |
+
0, # access_count
|
| 869 |
+
tag,
|
| 870 |
+
size,
|
| 871 |
+
mode,
|
| 872 |
+
filename,
|
| 873 |
+
value,
|
| 874 |
+
),
|
| 875 |
+
)
|
| 876 |
+
|
| 877 |
+
def _cull(self, now, sql, cleanup, limit=None):
    # Evict up to `cull_limit` items: expired rows first, then — if the
    # cache still exceeds size_limit — rows chosen by the eviction
    # policy.  Callers invoke this inside a transaction; `cleanup`
    # queues value files for deletion after COMMIT.
    cull_limit = self.cull_limit if limit is None else limit

    if cull_limit == 0:
        return

    # Evict expired keys.

    select_expired_template = (
        'SELECT %s FROM Cache'
        ' WHERE expire_time IS NOT NULL AND expire_time < ?'
        ' ORDER BY expire_time LIMIT ?'
    )

    # Fetch filenames first (for cleanup), then delete by the same
    # query shape selecting rowids.
    select_expired = select_expired_template % 'filename'
    rows = sql(select_expired, (now, cull_limit)).fetchall()

    if rows:
        delete_expired = 'DELETE FROM Cache WHERE rowid IN (%s)' % (
            select_expired_template % 'rowid'
        )
        sql(delete_expired, (now, cull_limit))

        for (filename,) in rows:
            cleanup(filename)

    cull_limit -= len(rows)

    if cull_limit == 0:
        return

    # Evict keys by policy.

    select_policy = EVICTION_POLICY[self.eviction_policy]['cull']

    # Policy-based eviction only runs when the cache is over its size
    # limit (policy 'none' has no cull query).
    if select_policy is None or self.volume() < self.size_limit:
        return

    select_filename = select_policy.format(fields='filename', now=now)
    rows = sql(select_filename, (cull_limit,)).fetchall()

    if rows:
        delete = 'DELETE FROM Cache WHERE rowid IN (%s)' % (
            select_policy.format(fields='rowid', now=now)
        )
        sql(delete, (cull_limit,))

        for (filename,) in rows:
            cleanup(filename)
|
| 926 |
+
|
| 927 |
+
def touch(self, key, expire=None, retry=False):
    """Touch `key` in cache and update `expire` time.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param float expire: seconds until item expires
        (default None, no expiry)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if key was touched
    :raises Timeout: if database timeout occurs

    """
    now = time.time()
    db_key, raw = self._disk.put(key)
    expire_time = None if expire is None else now + expire

    with self._transact(retry) as (sql, _):
        rows = sql(
            'SELECT rowid, expire_time FROM Cache'
            ' WHERE key = ? AND raw = ?',
            (db_key, raw),
        ).fetchall()

        if not rows:
            return False

        ((rowid, old_expire_time),) = rows

        # An already-expired row is treated as missing.
        if old_expire_time is not None and old_expire_time <= now:
            return False

        sql(
            'UPDATE Cache SET expire_time = ? WHERE rowid = ?',
            (expire_time, rowid),
        )
        return True
|
| 963 |
+
|
| 964 |
+
def add(self, key, value, expire=None, read=False, tag=None, retry=False):
    """Add `key` and `value` item to cache.

    Similar to `set`, but only add to cache if key not present.

    Operation is atomic. Only one concurrent add operation for a given key
    will succeed.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param value: value for item
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was added
    :raises Timeout: if database timeout occurs

    """
    now = time.time()
    db_key, raw = self._disk.put(key)
    expire_time = None if expire is None else now + expire
    # The value is serialized (possibly to a file) before the
    # transaction; if the add loses the race below, the just-written
    # file is queued for cleanup.
    size, mode, filename, db_value = self._disk.store(value, read, key=key)
    columns = (expire_time, tag, size, mode, filename, db_value)

    with self._transact(retry, filename) as (sql, cleanup):
        rows = sql(
            'SELECT rowid, filename, expire_time FROM Cache'
            ' WHERE key = ? AND raw = ?',
            (db_key, raw),
        ).fetchall()

        if rows:
            ((rowid, old_filename, old_expire_time),) = rows

            if old_expire_time is None or old_expire_time > now:
                # Key present and not expired: the add fails; discard
                # the value file written above.
                cleanup(filename)
                return False

            # Key present but expired: overwrite it and queue the old
            # value file for deletion after COMMIT.
            cleanup(old_filename)
            self._row_update(rowid, now, columns)
        else:
            self._row_insert(db_key, raw, now, columns)

        self._cull(now, sql, cleanup)

        return True
|
| 1017 |
+
|
| 1018 |
+
def incr(self, key, delta=1, default=0, retry=False):
    """Increment value by delta for item with key.

    If key is missing and default is None then raise KeyError. Else if key
    is missing and default is not None then use default for value.

    Operation is atomic. All concurrent increment operations will be
    counted individually.

    Assumes value may be stored in a SQLite column. Most builds that target
    machines with 64-bit pointer widths will support 64-bit signed
    integers.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param int delta: amount to increment (default 1)
    :param int default: value if key is missing (default 0)
    :param bool retry: retry if database timeout occurs (default False)
    :return: new value for item
    :raises KeyError: if key is not found and default is None
    :raises Timeout: if database timeout occurs

    """
    now = time.time()
    db_key, raw = self._disk.put(key)
    select = (
        'SELECT rowid, expire_time, filename, value FROM Cache'
        ' WHERE key = ? AND raw = ?'
    )

    # The entire read-modify-write runs inside one transaction so that
    # concurrent increments are each counted individually.
    with self._transact(retry) as (sql, cleanup):
        rows = sql(select, (db_key, raw)).fetchall()

        if not rows:
            # Key missing: seed a fresh row with default + delta.
            if default is None:
                raise KeyError(key)

            value = default + delta
            columns = (None, None) + self._disk.store(
                value, False, key=key
            )
            self._row_insert(db_key, raw, now, columns)
            self._cull(now, sql, cleanup)
            return value

        ((rowid, expire_time, filename, value),) = rows

        if expire_time is not None and expire_time < now:
            # Expired row counts as missing; overwrite it in place and
            # schedule the stale value file for removal.
            if default is None:
                raise KeyError(key)

            value = default + delta
            columns = (None, None) + self._disk.store(
                value, False, key=key
            )
            self._row_update(rowid, now, columns)
            self._cull(now, sql, cleanup)
            cleanup(filename)
            return value

        value += delta

        # Update the stored value and, when the eviction policy tracks
        # accesses (e.g. LRU/LFU), its access bookkeeping column too.
        columns = 'store_time = ?, value = ?'
        update_column = EVICTION_POLICY[self.eviction_policy]['get']

        if update_column is not None:
            columns += ', ' + update_column.format(now=now)

        update = 'UPDATE Cache SET %s WHERE rowid = ?' % columns
        sql(update, (now, value, rowid))

        return value
| 1092 |
+
|
| 1093 |
+
def decr(self, key, delta=1, default=0, retry=False):
    """Decrement value by delta for item with key.

    If key is missing and default is None then raise KeyError. Else if key
    is missing and default is not None then use default for value.

    Operation is atomic. All concurrent decrement operations will be
    counted individually.

    Unlike Memcached, negative values are supported. Value may be
    decremented below zero.

    Assumes value may be stored in a SQLite column. Most builds that target
    machines with 64-bit pointer widths will support 64-bit signed
    integers.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param int delta: amount to decrement (default 1)
    :param int default: value if key is missing (default 0)
    :param bool retry: retry if database timeout occurs (default False)
    :return: new value for item
    :raises KeyError: if key is not found and default is None
    :raises Timeout: if database timeout occurs

    """
    # Decrementing is incrementing by the negated delta; atomicity and
    # default/expiry handling are delegated entirely to `incr`.
    step = -delta
    return self.incr(key, step, default, retry)
|
| 1122 |
+
|
| 1123 |
+
def get(
    self,
    key,
    default=None,
    read=False,
    expire_time=False,
    tag=False,
    retry=False,
):
    """Retrieve value from cache. If `key` is missing, return `default`.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param default: value to return if key is missing (default None)
    :param bool read: if True, return file handle to value
        (default False)
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: value for item or default if key not found
    :raises Timeout: if database timeout occurs

    """
    db_key, raw = self._disk.put(key)
    update_column = EVICTION_POLICY[self.eviction_policy]['get']
    select = (
        'SELECT rowid, expire_time, tag, mode, filename, value'
        ' FROM Cache WHERE key = ? AND raw = ?'
        ' AND (expire_time IS NULL OR expire_time > ?)'
    )

    # Widen default to match the tuple shape of the requested result.
    if expire_time and tag:
        default = (default, None, None)
    elif expire_time or tag:
        default = (default, None)

    if not self.statistics and update_column is None:
        # Fast path, no transaction necessary: neither hit/miss counters
        # nor eviction-policy access columns need updating, so a plain
        # read-only SELECT suffices.

        rows = self._sql(select, (db_key, raw, time.time())).fetchall()

        if not rows:
            return default

        ((rowid, db_expire_time, db_tag, mode, filename, db_value),) = rows

        try:
            value = self._disk.fetch(mode, filename, db_value, read)
        except IOError:
            # Key was deleted before we could retrieve result.
            return default

    else:  # Slow path, transaction required.
        cache_hit = (
            'UPDATE Settings SET value = value + 1 WHERE key = "hits"'
        )
        cache_miss = (
            'UPDATE Settings SET value = value + 1 WHERE key = "misses"'
        )

        with self._transact(retry) as (sql, _):
            rows = sql(select, (db_key, raw, time.time())).fetchall()

            if not rows:
                if self.statistics:
                    sql(cache_miss)
                return default

            (
                (rowid, db_expire_time, db_tag, mode, filename, db_value),
            ) = rows  # noqa: E127

            try:
                value = self._disk.fetch(mode, filename, db_value, read)
            except IOError:
                # Key was deleted before we could retrieve result; a
                # vanished value file also counts as a miss.
                if self.statistics:
                    sql(cache_miss)
                return default

            if self.statistics:
                sql(cache_hit)

            # Bump the eviction policy's access column (e.g. access_time
            # for LRU, access_count for LFU) when one is configured.
            now = time.time()
            update = 'UPDATE Cache SET %s WHERE rowid = ?'

            if update_column is not None:
                sql(update % update_column.format(now=now), (rowid,))

    if expire_time and tag:
        return (value, db_expire_time, db_tag)
    elif expire_time:
        return (value, db_expire_time)
    elif tag:
        return (value, db_tag)
    else:
        return value
|
| 1223 |
+
|
| 1224 |
+
def __getitem__(self, key):
    """Return corresponding value for `key` from cache.

    :param key: key matching item
    :return: corresponding value
    :raises KeyError: if key is not found

    """
    # ENOVAL is a sentinel that cannot collide with stored values, so a
    # missing key is distinguishable from a stored None.
    result = self.get(key, default=ENOVAL, retry=True)
    if result is not ENOVAL:
        return result
    raise KeyError(key)
|
| 1236 |
+
|
| 1237 |
+
def read(self, key, retry=False):
    """Return file handle value corresponding to `key` from cache.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key matching item
    :param bool retry: retry if database timeout occurs (default False)
    :return: file open for reading in binary mode
    :raises KeyError: if key is not found
    :raises Timeout: if database timeout occurs

    """
    # Delegate to `get` with read=True so a file handle is returned; the
    # ENOVAL sentinel distinguishes a missing key from any stored value.
    reader = self.get(key, default=ENOVAL, read=True, retry=retry)
    if reader is ENOVAL:
        raise KeyError(key)
    return reader
|
| 1254 |
+
|
| 1255 |
+
def __contains__(self, key):
    """Return `True` if `key` matching item is found in cache.

    :param key: key matching item
    :return: True if key matching item

    """
    db_key, raw = self._disk.put(key)
    # Only rows that have not expired count as present.
    query = (
        'SELECT rowid FROM Cache'
        ' WHERE key = ? AND raw = ?'
        ' AND (expire_time IS NULL OR expire_time > ?)'
    )
    found = self._sql(query, (db_key, raw, time.time())).fetchall()
    return bool(found)
|
| 1273 |
+
|
| 1274 |
+
def pop(
    self, key, default=None, expire_time=False, tag=False, retry=False
):  # noqa: E501
    """Remove corresponding item for `key` from cache and return value.

    If `key` is missing, return `default`.

    Operation is atomic. Concurrent operations will be serialized.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param default: value to return if key is missing (default None)
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: value for item or default if key not found
    :raises Timeout: if database timeout occurs

    """
    db_key, raw = self._disk.put(key)
    select = (
        'SELECT rowid, expire_time, tag, mode, filename, value'
        ' FROM Cache WHERE key = ? AND raw = ?'
        ' AND (expire_time IS NULL OR expire_time > ?)'
    )

    # Widen default to match the tuple shape of the requested result.
    if expire_time and tag:
        default = default, None, None
    elif expire_time or tag:
        default = default, None

    with self._transact(retry) as (sql, _):
        rows = sql(select, (db_key, raw, time.time())).fetchall()

        if not rows:
            return default

        ((rowid, db_expire_time, db_tag, mode, filename, db_value),) = rows

        sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))

    # Fetch the value and remove any backing file OUTSIDE the transaction
    # to keep the write lock short; the row is already gone, so the file
    # is exclusively ours to read and delete.
    try:
        value = self._disk.fetch(mode, filename, db_value, False)
    except IOError:
        # Key was deleted before we could retrieve result.
        return default
    finally:
        if filename is not None:
            self._disk.remove(filename)

    if expire_time and tag:
        return value, db_expire_time, db_tag
    elif expire_time:
        return value, db_expire_time
    elif tag:
        return value, db_tag
    else:
        return value
|
| 1335 |
+
|
| 1336 |
+
def __delitem__(self, key, retry=True):
    """Delete corresponding item for `key` from cache.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default `True`).

    :param key: key matching item
    :param bool retry: retry if database timeout occurs (default True)
    :raises KeyError: if key is not found
    :raises Timeout: if database timeout occurs

    """
    db_key, raw = self._disk.put(key)

    with self._transact(retry) as (sql, cleanup):
        rows = sql(
            'SELECT rowid, filename FROM Cache'
            ' WHERE key = ? AND raw = ?'
            ' AND (expire_time IS NULL OR expire_time > ?)',
            (db_key, raw, time.time()),
        ).fetchall()

        # Expired rows are treated as already absent.
        if not rows:
            raise KeyError(key)

        ((rowid, filename),) = rows
        sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
        # Defer removal of the backing value file to the transaction's
        # cleanup hook so the row and file go away together.
        cleanup(filename)

    return True
|
| 1366 |
+
|
| 1367 |
+
def delete(self, key, retry=False):
    """Delete corresponding item for `key` from cache.

    Missing keys are ignored.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key matching item
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was deleted
    :raises Timeout: if database timeout occurs

    """
    # pylint: disable=unnecessary-dunder-call
    # Missing keys surface as KeyError from __delitem__; translate that
    # into a False return instead of propagating.
    try:
        deleted = self.__delitem__(key, retry=retry)
    except KeyError:
        return False
    return deleted
|
| 1386 |
+
|
| 1387 |
+
def push(
    self,
    value,
    prefix=None,
    side='back',
    expire=None,
    read=False,
    tag=None,
    retry=False,
):
    """Push `value` onto `side` of queue identified by `prefix` in cache.

    When prefix is None, integer keys are used. Otherwise, string keys are
    used in the format "prefix-integer". Integer starts at 500 trillion.

    Defaults to pushing value on back of queue. Set side to 'front' to push
    value on front of queue. Side must be one of 'back' or 'front'.

    Operation is atomic. Concurrent operations will be serialized.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    See also `Cache.pull`.

    >>> cache = Cache()
    >>> print(cache.push('first value'))
    500000000000000
    >>> cache.get(500000000000000)
    'first value'
    >>> print(cache.push('second value'))
    500000000000001
    >>> print(cache.push('third value', side='front'))
    499999999999999
    >>> cache.push(1234, prefix='userids')
    'userids-500000000000000'

    :param value: value for item
    :param str prefix: key prefix (default None, key is integer)
    :param str side: either 'back' or 'front' (default 'back')
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: key for item in cache
    :raises Timeout: if database timeout occurs

    """
    # Bounds bracket the 15-digit key space so BETWEEN-style comparisons
    # select only this queue's keys.
    if prefix is None:
        min_key = 0
        max_key = 999999999999999
    else:
        min_key = prefix + '-000000000000000'
        max_key = prefix + '-999999999999999'

    now = time.time()
    raw = True
    expire_time = None if expire is None else now + expire
    size, mode, filename, db_value = self._disk.store(value, read)
    columns = (expire_time, tag, size, mode, filename, db_value)
    # Back of queue extends the largest key; front extends the smallest.
    order = {'back': 'DESC', 'front': 'ASC'}
    select = (
        'SELECT key FROM Cache'
        ' WHERE ? < key AND key < ? AND raw = ?'
        ' ORDER BY key %s LIMIT 1'
    ) % order[side]

    with self._transact(retry, filename) as (sql, cleanup):
        rows = sql(select, (min_key, max_key, raw)).fetchall()

        if rows:
            ((key,),) = rows

            if prefix is not None:
                # Strip "prefix-" to recover the integer portion.
                num = int(key[(key.rfind('-') + 1) :])
            else:
                num = key

            if side == 'back':
                num += 1
            else:
                assert side == 'front'
                num -= 1
        else:
            # Empty queue: start in the middle of the key space so both
            # ends have room to grow.
            num = 500000000000000

        if prefix is not None:
            db_key = '{0}-{1:015d}'.format(prefix, num)
        else:
            db_key = num

        self._row_insert(db_key, raw, now, columns)
        self._cull(now, sql, cleanup)

        return db_key
|
| 1486 |
+
|
| 1487 |
+
def pull(
    self,
    prefix=None,
    default=(None, None),
    side='front',
    expire_time=False,
    tag=False,
    retry=False,
):
    """Pull key and value item pair from `side` of queue in cache.

    When prefix is None, integer keys are used. Otherwise, string keys are
    used in the format "prefix-integer". Integer starts at 500 trillion.

    If queue is empty, return default.

    Defaults to pulling key and value item pairs from front of queue. Set
    side to 'back' to pull from back of queue. Side must be one of 'front'
    or 'back'.

    Operation is atomic. Concurrent operations will be serialized.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    See also `Cache.push` and `Cache.get`.

    >>> cache = Cache()
    >>> cache.pull()
    (None, None)
    >>> for letter in 'abc':
    ...     print(cache.push(letter))
    500000000000000
    500000000000001
    500000000000002
    >>> key, value = cache.pull()
    >>> print(key)
    500000000000000
    >>> value
    'a'
    >>> _, value = cache.pull(side='back')
    >>> value
    'c'
    >>> cache.push(1234, 'userids')
    'userids-500000000000000'
    >>> _, value = cache.pull('userids')
    >>> value
    1234

    :param str prefix: key prefix (default None, key is integer)
    :param default: value to return if key is missing
        (default (None, None))
    :param str side: either 'front' or 'back' (default 'front')
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: key and value item pair or default if queue is empty
    :raises Timeout: if database timeout occurs

    """
    # Caution: Nearly identical code exists in Cache.peek
    if prefix is None:
        min_key = 0
        max_key = 999999999999999
    else:
        min_key = prefix + '-000000000000000'
        max_key = prefix + '-999999999999999'

    order = {'front': 'ASC', 'back': 'DESC'}
    select = (
        'SELECT rowid, key, expire_time, tag, mode, filename, value'
        ' FROM Cache WHERE ? < key AND key < ? AND raw = 1'
        ' ORDER BY key %s LIMIT 1'
    ) % order[side]

    # Widen default to match the tuple shape of the requested result.
    if expire_time and tag:
        default = default, None, None
    elif expire_time or tag:
        default = default, None

    # Outer loop retries when the value file vanished; inner loop skips
    # over expired items, deleting them as it goes.
    while True:
        while True:
            with self._transact(retry) as (sql, cleanup):
                rows = sql(select, (min_key, max_key)).fetchall()

                if not rows:
                    return default

                (
                    (rowid, key, db_expire, db_tag, mode, name, db_value),
                ) = rows

                sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))

                if db_expire is not None and db_expire < time.time():
                    cleanup(name)
                else:
                    break

        # Fetch outside the transaction; the row is already deleted, so
        # the backing file is exclusively ours to read and remove.
        try:
            value = self._disk.fetch(mode, name, db_value, False)
        except IOError:
            # Key was deleted before we could retrieve result.
            continue
        finally:
            if name is not None:
                self._disk.remove(name)
        break

    if expire_time and tag:
        return (key, value), db_expire, db_tag
    elif expire_time:
        return (key, value), db_expire
    elif tag:
        return (key, value), db_tag
    else:
        return key, value
|
| 1605 |
+
|
| 1606 |
+
def peek(
    self,
    prefix=None,
    default=(None, None),
    side='front',
    expire_time=False,
    tag=False,
    retry=False,
):
    """Peek at key and value item pair from `side` of queue in cache.

    When prefix is None, integer keys are used. Otherwise, string keys are
    used in the format "prefix-integer". Integer starts at 500 trillion.

    If queue is empty, return default.

    Defaults to peeking at key and value item pairs from front of queue.
    Set side to 'back' to pull from back of queue. Side must be one of
    'front' or 'back'.

    Expired items are deleted from cache. Operation is atomic. Concurrent
    operations will be serialized.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    See also `Cache.pull` and `Cache.push`.

    >>> cache = Cache()
    >>> for letter in 'abc':
    ...     print(cache.push(letter))
    500000000000000
    500000000000001
    500000000000002
    >>> key, value = cache.peek()
    >>> print(key)
    500000000000000
    >>> value
    'a'
    >>> key, value = cache.peek(side='back')
    >>> print(key)
    500000000000002
    >>> value
    'c'

    :param str prefix: key prefix (default None, key is integer)
    :param default: value to return if key is missing
        (default (None, None))
    :param str side: either 'front' or 'back' (default 'front')
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: key and value item pair or default if queue is empty
    :raises Timeout: if database timeout occurs

    """
    # Caution: Nearly identical code exists in Cache.pull
    if prefix is None:
        min_key = 0
        max_key = 999999999999999
    else:
        min_key = prefix + '-000000000000000'
        max_key = prefix + '-999999999999999'

    order = {'front': 'ASC', 'back': 'DESC'}
    select = (
        'SELECT rowid, key, expire_time, tag, mode, filename, value'
        ' FROM Cache WHERE ? < key AND key < ? AND raw = 1'
        ' ORDER BY key %s LIMIT 1'
    ) % order[side]

    # Widen default to match the tuple shape of the requested result.
    if expire_time and tag:
        default = default, None, None
    elif expire_time or tag:
        default = default, None

    # Outer loop retries when the value file vanished; inner loop deletes
    # and skips expired items. Unlike pull, the surviving row is kept.
    while True:
        while True:
            with self._transact(retry) as (sql, cleanup):
                rows = sql(select, (min_key, max_key)).fetchall()

                if not rows:
                    return default

                (
                    (rowid, key, db_expire, db_tag, mode, name, db_value),
                ) = rows

                if db_expire is not None and db_expire < time.time():
                    sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
                    cleanup(name)
                else:
                    break

        try:
            value = self._disk.fetch(mode, name, db_value, False)
        except IOError:
            # Key was deleted before we could retrieve result.
            continue
        break

    if expire_time and tag:
        return (key, value), db_expire, db_tag
    elif expire_time:
        return (key, value), db_expire
    elif tag:
        return (key, value), db_tag
    else:
        return key, value
|
| 1716 |
+
|
| 1717 |
+
def peekitem(self, last=True, expire_time=False, tag=False, retry=False):
    """Peek at key and value item pair in cache based on iteration order.

    Expired items are deleted from cache. Operation is atomic. Concurrent
    operations will be serialized.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    >>> cache = Cache()
    >>> for num, letter in enumerate('abc'):
    ...     cache[letter] = num
    >>> cache.peekitem()
    ('c', 2)
    >>> cache.peekitem(last=False)
    ('a', 0)

    :param bool last: last item in iteration order (default True)
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: key and value item pair
    :raises KeyError: if cache is empty
    :raises Timeout: if database timeout occurs

    """
    # `last` indexes the order tuple: False -> ASC (first), True -> DESC
    # (last); rowid order is the cache's iteration order.
    order = ('ASC', 'DESC')
    select = (
        'SELECT rowid, key, raw, expire_time, tag, mode, filename, value'
        ' FROM Cache ORDER BY rowid %s LIMIT 1'
    ) % order[last]

    # Outer loop retries when the value file vanished; inner loop deletes
    # and skips expired items.
    while True:
        while True:
            with self._transact(retry) as (sql, cleanup):
                rows = sql(select).fetchall()

                if not rows:
                    raise KeyError('dictionary is empty')

                (
                    (
                        rowid,
                        db_key,
                        raw,
                        db_expire,
                        db_tag,
                        mode,
                        name,
                        db_value,
                    ),
                ) = rows

                if db_expire is not None and db_expire < time.time():
                    sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
                    cleanup(name)
                else:
                    break

        # Convert the stored key back to its original Python object.
        key = self._disk.get(db_key, raw)

        try:
            value = self._disk.fetch(mode, name, db_value, False)
        except IOError:
            # Key was deleted before we could retrieve result.
            continue
        break

    if expire_time and tag:
        return (key, value), db_expire, db_tag
    elif expire_time:
        return (key, value), db_expire
    elif tag:
        return (key, value), db_tag
    else:
        return key, value
|
| 1794 |
+
|
| 1795 |
+
def memoize(
    self, name=None, typed=False, expire=None, tag=None, ignore=()
):
    """Memoizing cache decorator.

    Decorator to wrap callable with memoizing function using cache.
    Repeated calls with the same arguments will lookup result in cache and
    avoid function evaluation.

    If name is set to None (default), the callable name will be determined
    automatically.

    When expire is set to zero, function results will not be set in the
    cache. Cache lookups still occur, however. Read
    :doc:`case-study-landing-page-caching` for example usage.

    If typed is set to True, function arguments of different types will be
    cached separately. For example, f(3) and f(3.0) will be treated as
    distinct calls with distinct results.

    The original underlying function is accessible through the __wrapped__
    attribute. This is useful for introspection, for bypassing the cache,
    or for rewrapping the function with a different cache.

    >>> from diskcache import Cache
    >>> cache = Cache()
    >>> @cache.memoize(expire=1, tag='fib')
    ... def fibonacci(number):
    ...     if number == 0:
    ...         return 0
    ...     elif number == 1:
    ...         return 1
    ...     else:
    ...         return fibonacci(number - 1) + fibonacci(number - 2)
    >>> print(fibonacci(100))
    354224848179261915075

    An additional `__cache_key__` attribute can be used to generate the
    cache key used for the given arguments.

    >>> key = fibonacci.__cache_key__(100)
    >>> print(cache[key])
    354224848179261915075

    Remember to call memoize when decorating a callable. If you forget,
    then a TypeError will occur. Note the lack of parenthenses after
    memoize below:

    >>> @cache.memoize
    ... def test():
    ...     pass
    Traceback (most recent call last):
        ...
    TypeError: name cannot be callable

    :param str name: name given for callable (default None, automatic)
    :param bool typed: cache different types separately (default False)
    :param float expire: seconds until arguments expire
        (default None, no expiry)
    :param str tag: text to associate with arguments (default None)
    :param set ignore: positional or keyword args to ignore (default ())
    :return: callable decorator

    """
    # Caution: Nearly identical code exists in DjangoCache.memoize
    # A callable `name` means the decorator was applied without parens:
    # @cache.memoize instead of @cache.memoize() -- fail loudly.
    if callable(name):
        raise TypeError('name cannot be callable')

    def decorator(func):
        """Decorator created by memoize() for callable `func`."""
        # Key prefix: explicit name, or the callable's qualified name.
        base = (full_name(func),) if name is None else (name,)

        @ft.wraps(func)
        def wrapper(*args, **kwargs):
            """Wrapper for callable to cache arguments and return values."""
            key = wrapper.__cache_key__(*args, **kwargs)
            result = self.get(key, default=ENOVAL, retry=True)

            if result is ENOVAL:
                result = func(*args, **kwargs)
                # expire == 0 means lookup-only: never store results.
                if expire is None or expire > 0:
                    self.set(key, result, expire, tag=tag, retry=True)

            return result

        def __cache_key__(*args, **kwargs):
            """Make key for cache given function arguments."""
            return args_to_key(base, args, kwargs, typed, ignore)

        wrapper.__cache_key__ = __cache_key__
        return wrapper

    return decorator
|
| 1889 |
+
|
| 1890 |
+
def check(self, fix=False, retry=False):
    """Check database and file system consistency.

    Intended for use in testing and post-mortem error analysis.

    While checking the Cache table for consistency, a writer lock is held
    on the database. The lock blocks other cache clients from writing to
    the database. For caches with many file references, the lock may be
    held for a long time. For example, local benchmarking shows that a
    cache with 1,000 file references takes ~60ms to check.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param bool fix: correct inconsistencies
    :param bool retry: retry if database timeout occurs (default False)
    :return: list of warnings
    :raises Timeout: if database timeout occurs

    """
    # pylint: disable=access-member-before-definition,W0201
    # All findings are reported as warnings; recording them here lets the
    # method return them as a list instead of printing.
    with warnings.catch_warnings(record=True) as warns:
        sql = self._sql

        # Check integrity of database.

        rows = sql('PRAGMA integrity_check').fetchall()

        if len(rows) != 1 or rows[0][0] != 'ok':
            for (message,) in rows:
                warnings.warn(message)

            if fix:
                sql('VACUUM')

        # Writer lock held for the remainder of the checks.
        with self._transact(retry) as (sql, _):

            # Check Cache.filename against file system.

            filenames = set()
            select = (
                'SELECT rowid, size, filename FROM Cache'
                ' WHERE filename IS NOT NULL'
            )

            rows = sql(select).fetchall()

            for rowid, size, filename in rows:
                full_path = op.join(self._directory, filename)
                filenames.add(full_path)

                if op.exists(full_path):
                    real_size = op.getsize(full_path)

                    if size != real_size:
                        message = 'wrong file size: %s, %d != %d'
                        args = full_path, real_size, size
                        warnings.warn(message % args)

                        if fix:
                            sql(
                                'UPDATE Cache SET size = ?'
                                ' WHERE rowid = ?',
                                (real_size, rowid),
                            )

                    continue

                # Row references a file that no longer exists.
                warnings.warn('file not found: %s' % full_path)

                if fix:
                    sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))

            # Check file system against Cache.filename.

            for dirpath, _, files in os.walk(self._directory):
                paths = [op.join(dirpath, filename) for filename in files]
                error = set(paths) - filenames

                for full_path in error:
                    # Skip the SQLite database files themselves.
                    if DBNAME in full_path:
                        continue

                    message = 'unknown file: %s' % full_path
                    warnings.warn(message, UnknownFileWarning)

                    if fix:
                        os.remove(full_path)

            # Check for empty directories.

            for dirpath, dirs, files in os.walk(self._directory):
                if not (dirs or files):
                    message = 'empty directory: %s' % dirpath
                    warnings.warn(message, EmptyDirWarning)

                    if fix:
                        os.rmdir(dirpath)

            # Check Settings.count against count of Cache rows.

            self.reset('count')
            ((count,),) = sql('SELECT COUNT(key) FROM Cache').fetchall()

            if self.count != count:
                message = 'Settings.count != COUNT(Cache.key); %d != %d'
                warnings.warn(message % (self.count, count))

                if fix:
                    sql(
                        'UPDATE Settings SET value = ? WHERE key = ?',
                        (count, 'count'),
                    )

            # Check Settings.size against sum of Cache.size column.

            self.reset('size')
            select_size = 'SELECT COALESCE(SUM(size), 0) FROM Cache'
            ((size,),) = sql(select_size).fetchall()

            if self.size != size:
                message = 'Settings.size != SUM(Cache.size); %d != %d'
                warnings.warn(message % (self.size, size))

                if fix:
                    sql(
                        'UPDATE Settings SET value = ? WHERE key =?',
                        (size, 'size'),
                    )

        return warns
|
| 2021 |
+
|
| 2022 |
+
def create_tag_index(self):
    """Create tag index on cache database.

    It is better to initialize cache with `tag_index=True` than use this.

    :raises Timeout: if database timeout occurs

    """
    self._sql('CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid)')
    # Record in Settings that the tag index now exists.
    self.reset('tag_index', 1)
|
| 2033 |
+
|
| 2034 |
+
def drop_tag_index(self):
    """Drop tag index on cache database.

    :raises Timeout: if database timeout occurs

    """
    self._sql('DROP INDEX IF EXISTS Cache_tag_rowid')
    # Record in Settings that the tag index is gone.
    self.reset('tag_index', 0)
|
| 2043 |
+
|
| 2044 |
+
def evict(self, tag, retry=False):
    """Remove items with matching `tag` from cache.

    Items are removed iteratively in batches; concurrent writes may occur
    between batches. If a :exc:`Timeout` occurs, the first element of the
    exception's `args` attribute is the number of items removed before the
    exception occurred.

    Raises :exc:`Timeout` error when database timeout occurs and `retry`
    is `False` (default).

    :param str tag: tag identifying items
    :param bool retry: retry if database timeout occurs (default False)
    :return: count of rows removed
    :raises Timeout: if database timeout occurs

    """
    statement = (
        'SELECT rowid, filename FROM Cache'
        ' WHERE tag = ? AND rowid > ?'
        ' ORDER BY rowid LIMIT ?'
    )
    # Parameters: tag filter, rowid cursor (advanced by the helper after
    # each batch), and batch size.
    params = [tag, 0, 100]
    return self._select_delete(statement, params, arg_index=1, retry=retry)
|
| 2070 |
+
|
| 2071 |
+
def expire(self, now=None, retry=False):
    """Remove expired items from cache.

    Items are removed iteratively in batches; concurrent writes may occur
    between batches. If a :exc:`Timeout` occurs, the first element of the
    exception's `args` attribute is the number of items removed before the
    exception occurred.

    Raises :exc:`Timeout` error when database timeout occurs and `retry`
    is `False` (default).

    :param float now: current time (default None, ``time.time()`` used)
    :param bool retry: retry if database timeout occurs (default False)
    :return: count of items removed
    :raises Timeout: if database timeout occurs

    """
    statement = (
        'SELECT rowid, expire_time, filename FROM Cache'
        ' WHERE ? < expire_time AND expire_time < ?'
        ' ORDER BY expire_time LIMIT ?'
    )
    # Parameters: expire_time cursor (advanced by the helper), upper
    # bound (current time), and batch size.
    params = [0, now or time.time(), 100]
    return self._select_delete(statement, params, row_index=1, retry=retry)
|
| 2097 |
+
|
| 2098 |
+
def cull(self, retry=False):
    """Cull items from cache until volume is less than size limit.

    Removing items is an iterative process. In each iteration, a subset of
    items is removed. Concurrent writes may occur between iterations.

    If a :exc:`Timeout` occurs, the first element of the exception's
    `args` attribute will be the number of items removed before the
    exception occurred.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param bool retry: retry if database timeout occurs (default False)
    :return: count of items removed
    :raises Timeout: if database timeout occurs

    """
    now = time.time()

    # Remove expired items.

    count = self.expire(now)

    # Remove items by policy.

    select_policy = EVICTION_POLICY[self.eviction_policy]['cull']

    if select_policy is None:
        # NOTE(review): returns 0 even though `count` expired items may
        # have been removed above — confirm this is intended for
        # policies with no cull statement.
        return 0

    select_filename = select_policy.format(fields='filename', now=now)

    try:
        while self.volume() > self.size_limit:
            with self._transact(retry) as (sql, cleanup):
                # Fetch up to 10 victims chosen by the eviction policy.
                rows = sql(select_filename, (10,)).fetchall()

                if not rows:
                    break

                count += len(rows)
                delete = (
                    'DELETE FROM Cache WHERE rowid IN (%s)'
                    % select_policy.format(fields='rowid', now=now)
                )
                sql(delete, (10,))

                # Remove the victims' value files after the rows are gone.
                for (filename,) in rows:
                    cleanup(filename)
    except Timeout:
        # Report progress made before the timeout.
        raise Timeout(count) from None

    return count
|
| 2152 |
+
|
| 2153 |
+
def clear(self, retry=False):
    """Remove all items from cache.

    Items are removed iteratively in batches; concurrent writes may occur
    between batches. If a :exc:`Timeout` occurs, the first element of the
    exception's `args` attribute is the number of items removed before the
    exception occurred.

    Raises :exc:`Timeout` error when database timeout occurs and `retry`
    is `False` (default).

    :param bool retry: retry if database timeout occurs (default False)
    :return: count of rows removed
    :raises Timeout: if database timeout occurs

    """
    statement = (
        'SELECT rowid, filename FROM Cache'
        ' WHERE rowid > ?'
        ' ORDER BY rowid LIMIT ?'
    )
    # Parameters: rowid cursor (advanced by the helper) and batch size.
    params = [0, 100]
    return self._select_delete(statement, params, retry=retry)
|
| 2178 |
+
|
| 2179 |
+
def _select_delete(
    self, select, args, row_index=0, arg_index=0, retry=False
):
    """Delete rows matching `select` in batches, cleaning up files.

    Each iteration fetches a batch of ``(rowid, ..., filename)`` rows,
    deletes them by rowid, and schedules their value files for removal.
    ``args[arg_index]`` is advanced to ``row[row_index]`` so the next
    batch resumes past the last processed row (keyset pagination).

    :param str select: SQL returning rowid first and filename last
    :param list args: parameters for `select`; mutated in place as cursor
    :param int row_index: row column used to advance the cursor
    :param int arg_index: position in `args` holding the cursor
    :param bool retry: retry if database timeout occurs (default False)
    :return: count of rows removed
    :raises Timeout: if database timeout occurs; args[0] is count so far

    """
    count = 0
    delete = 'DELETE FROM Cache WHERE rowid IN (%s)'

    try:
        while True:
            with self._transact(retry) as (sql, cleanup):
                rows = sql(select, args).fetchall()

                if not rows:
                    break

                count += len(rows)
                # rowid is always the first selected column.
                sql(delete % ','.join(str(row[0]) for row in rows))

                for row in rows:
                    # Advance cursor; clean up file (last column).
                    args[arg_index] = row[row_index]
                    cleanup(row[-1])

    except Timeout:
        # Report progress made before the timeout.
        raise Timeout(count) from None

    return count
|
| 2204 |
+
|
| 2205 |
+
def iterkeys(self, reverse=False):
    """Iterate Cache keys in database sort order.

    >>> cache = Cache()
    >>> for key in [4, 1, 3, 0, 2]:
    ...     cache[key] = key
    >>> list(cache.iterkeys())
    [0, 1, 2, 3, 4]
    >>> list(cache.iterkeys(reverse=True))
    [4, 3, 2, 1, 0]

    :param bool reverse: reverse sort order (default False)
    :return: iterator of Cache keys

    """
    sql = self._sql
    limit = 100
    _disk_get = self._disk.get

    # Keyset pagination on (key, raw): fetch the first row, then repeat
    # batches strictly after/before the last seen (key, raw) pair.
    if reverse:
        select = (
            'SELECT key, raw FROM Cache'
            ' ORDER BY key DESC, raw DESC LIMIT 1'
        )
        iterate = (
            'SELECT key, raw FROM Cache'
            ' WHERE key = ? AND raw < ? OR key < ?'
            ' ORDER BY key DESC, raw DESC LIMIT ?'
        )
    else:
        select = (
            'SELECT key, raw FROM Cache'
            ' ORDER BY key ASC, raw ASC LIMIT 1'
        )
        iterate = (
            'SELECT key, raw FROM Cache'
            ' WHERE key = ? AND raw > ? OR key > ?'
            ' ORDER BY key ASC, raw ASC LIMIT ?'
        )

    row = sql(select).fetchall()

    if row:
        ((key, raw),) = row
    else:
        # Empty cache; generator yields nothing.
        return

    yield _disk_get(key, raw)

    while True:
        rows = sql(iterate, (key, raw, key, limit)).fetchall()

        if not rows:
            break

        # Loop leaves `key`/`raw` bound to the last row for the next page.
        for key, raw in rows:
            yield _disk_get(key, raw)
|
| 2262 |
+
|
| 2263 |
+
def _iter(self, ascending=True):
    """Yield cache keys ordered by rowid (insertion order).

    The generator first yields ``None`` once as a ready signal — callers
    (``__iter__`` / ``__reversed__``) call ``next()`` immediately so the
    MAX(rowid) snapshot is taken eagerly, before iteration begins.

    :param bool ascending: iterate rowids in ascending order

    """
    sql = self._sql
    rows = sql('SELECT MAX(rowid) FROM Cache').fetchall()
    ((max_rowid,),) = rows
    yield  # Signal ready.

    if max_rowid is None:
        # Cache was empty at snapshot time.
        return

    # Only rows that existed at snapshot time are visited.
    bound = max_rowid + 1
    limit = 100
    _disk_get = self._disk.get
    rowid = 0 if ascending else bound
    select = (
        'SELECT rowid, key, raw FROM Cache'
        ' WHERE ? < rowid AND rowid < ?'
        ' ORDER BY rowid %s LIMIT ?'
    ) % ('ASC' if ascending else 'DESC')

    while True:
        # `rowid` is the exclusive cursor; the moving bound depends on
        # direction.
        if ascending:
            args = (rowid, bound, limit)
        else:
            args = (0, rowid, limit)

        rows = sql(select, args).fetchall()

        if not rows:
            break

        for rowid, key, raw in rows:
            yield _disk_get(key, raw)
|
| 2295 |
+
|
| 2296 |
+
def __iter__(self):
    """Iterate keys in cache including expired items."""
    keys = self._iter(ascending=True)
    next(keys)  # Consume the ready signal so setup runs eagerly.
    return keys
|
| 2301 |
+
|
| 2302 |
+
def __reversed__(self):
    """Reverse iterate keys in cache including expired items."""
    keys = self._iter(ascending=False)
    next(keys)  # Consume the ready signal so setup runs eagerly.
    return keys
|
| 2307 |
+
|
| 2308 |
+
def stats(self, enable=True, reset=False):
    """Return cache statistics hits and misses.

    :param bool enable: enable collecting statistics (default True)
    :param bool reset: reset hits and misses to 0 (default False)
    :return: (hits, misses)

    """
    # pylint: disable=E0203,W0201
    hits = self.reset('hits')
    misses = self.reset('misses')

    if reset:
        self.reset('hits', 0)
        self.reset('misses', 0)

    self.reset('statistics', enable)

    return (hits, misses)
|
| 2326 |
+
|
| 2327 |
+
def volume(self):
    """Return estimated total size of cache on disk.

    The estimate is the SQLite database size (pages * page size) plus the
    tracked total of value files (the 'size' setting).

    :return: size in bytes

    """
    ((page_count,),) = self._sql('PRAGMA page_count').fetchall()
    return self.reset('size') + page_count * self._page_size
|
| 2336 |
+
|
| 2337 |
+
def close(self):
    """Close database connection."""
    con = getattr(self._local, 'con', None)

    if con is not None:
        con.close()
        # Drop the thread-local reference; another thread may have
        # removed it already, so ignore AttributeError.
        try:
            delattr(self._local, 'con')
        except AttributeError:
            pass
|
| 2350 |
+
|
| 2351 |
+
def __enter__(self):
    # Touch `_con` so the SQLite connection is created in the current
    # thread before any operation runs inside the `with` block.
    # pylint: disable=unused-variable
    connection = self._con  # noqa
    return self
|
| 2356 |
+
|
| 2357 |
+
def __exit__(self, *exception):
    """Context manager exit; close the database connection.

    Exceptions are not suppressed (implicitly returns None).
    """
    self.close()
|
| 2359 |
+
|
| 2360 |
+
def __len__(self):
    """Count of items in cache including expired items."""
    # The 'count' setting is maintained in the Settings table; reset()
    # with no value reloads it from the database.
    return self.reset('count')
|
| 2363 |
+
|
| 2364 |
+
def __getstate__(self):
    # Pickle support: capture only what __init__ needs to reopen the
    # cache (directory, timeout, and the Disk class — not the instance).
    return (self.directory, self.timeout, type(self.disk))
|
| 2366 |
+
|
| 2367 |
+
def __setstate__(self, state):
    # Unpickle support: reinitialize from (directory, timeout, disk type),
    # reopening the database connection in the new process.
    self.__init__(*state)
|
| 2369 |
+
|
| 2370 |
+
def reset(self, key, value=ENOVAL, update=True):
    """Reset `key` and `value` item from Settings table.

    Use `reset` to update the value of Cache settings correctly. Cache
    settings are stored in the Settings table of the SQLite database. If
    `update` is ``False`` then no attempt is made to update the database.

    If `value` is not given, it is reloaded from the Settings
    table. Otherwise, the Settings table is updated.

    Settings with the ``disk_`` prefix correspond to Disk
    attributes. Updating the value will change the unprefixed attribute on
    the associated Disk instance.

    Settings with the ``sqlite_`` prefix correspond to SQLite
    pragmas. Updating the value will execute the corresponding PRAGMA
    statement.

    SQLite PRAGMA statements may be executed before the Settings table
    exists in the database by setting `update` to ``False``.

    :param str key: Settings key for item
    :param value: value for item (optional)
    :param bool update: update database Settings table (default True)
    :return: updated value for item
    :raises Timeout: if database timeout occurs

    """
    sql = self._sql
    sql_retry = self._sql_retry

    if value is ENOVAL:
        # Read-only path: reload the setting from the database and cache
        # it as an instance attribute.
        select = 'SELECT value FROM Settings WHERE key = ?'
        ((value,),) = sql_retry(select, (key,)).fetchall()
        setattr(self, key, value)
        return value

    if update:
        statement = 'UPDATE Settings SET value = ? WHERE key = ?'
        sql_retry(statement, (value, key))

    if key.startswith('sqlite_'):
        pragma = key[7:]

        # 2016-02-17 GrantJ - PRAGMA and isolation_level=None
        # don't always play nicely together. Retry setting the
        # PRAGMA. I think some PRAGMA statements expect to
        # immediately take an EXCLUSIVE lock on the database. I
        # can't find any documentation for this but without the
        # retry, stress will intermittently fail with multiple
        # processes.

        # 2018-11-05 GrantJ - Avoid setting pragma values that
        # are already set. Pragma settings like auto_vacuum and
        # journal_mode can take a long time or may not work after
        # tables have been created.

        start = time.time()
        while True:
            try:
                try:
                    ((old_value,),) = sql(
                        'PRAGMA %s' % (pragma)
                    ).fetchall()
                    update = old_value != value
                except ValueError:
                    # PRAGMA returned no single value; force the set.
                    update = True
                if update:
                    sql('PRAGMA %s = %s' % (pragma, value)).fetchall()
                break
            except sqlite3.OperationalError as exc:
                if str(exc) != 'database is locked':
                    raise
                diff = time.time() - start
                if diff > 60:
                    # Give up after one minute of lock contention.
                    raise
                time.sleep(0.001)
    elif key.startswith('disk_'):
        # Mirror the setting onto the Disk instance attribute.
        attr = key[5:]
        setattr(self._disk, attr, value)

    setattr(self, key, value)
    return value
|
deepseek/lib/python3.10/site-packages/diskcache/fanout.py
ADDED
|
@@ -0,0 +1,687 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fanout cache automatically shards keys and values."""
|
| 2 |
+
|
| 3 |
+
import contextlib as cl
|
| 4 |
+
import functools
|
| 5 |
+
import itertools as it
|
| 6 |
+
import operator
|
| 7 |
+
import os.path as op
|
| 8 |
+
import sqlite3
|
| 9 |
+
import tempfile
|
| 10 |
+
import time
|
| 11 |
+
|
| 12 |
+
from .core import DEFAULT_SETTINGS, ENOVAL, Cache, Disk, Timeout
|
| 13 |
+
from .persistent import Deque, Index
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class FanoutCache:
|
| 17 |
+
"""Cache that shards keys and values."""
|
| 18 |
+
|
| 19 |
+
def __init__(
    self, directory=None, shards=8, timeout=0.010, disk=Disk, **settings
):
    """Initialize cache instance.

    :param str directory: cache directory
    :param int shards: number of shards to distribute writes
    :param float timeout: SQLite connection timeout
    :param disk: `Disk` instance for serialization
    :param settings: any of `DEFAULT_SETTINGS`

    """
    if directory is None:
        # No directory given: use a fresh temporary directory.
        directory = tempfile.mkdtemp(prefix='diskcache-')
    directory = str(directory)
    directory = op.expanduser(directory)
    directory = op.expandvars(directory)

    # Divide the total size limit evenly among the shards.
    default_size_limit = DEFAULT_SETTINGS['size_limit']
    size_limit = settings.pop('size_limit', default_size_limit) / shards

    self._count = shards
    self._directory = directory
    self._disk = disk
    # One Cache per shard, in numbered subdirectories '000', '001', ...
    self._shards = tuple(
        Cache(
            directory=op.join(directory, '%03d' % num),
            timeout=timeout,
            disk=disk,
            size_limit=size_limit,
            **settings,
        )
        for num in range(shards)
    )
    # Shard selection hashes keys with shard 0's Disk (all shards share
    # the same disk type, so any shard's hash is equivalent).
    self._hash = self._shards[0].disk.hash
    # Lazily-populated registries for cache/deque/index views.
    self._caches = {}
    self._deques = {}
    self._indexes = {}
|
| 57 |
+
|
| 58 |
+
@property
def directory(self):
    """Cache directory.

    :return: directory path containing the shard subdirectories
    """
    return self._directory
|
| 62 |
+
|
| 63 |
+
def __getattr__(self, name):
    # Delegate settings-style attribute reads (any DEFAULT_SETTINGS key,
    # plus 'timeout' and 'disk') to shard 0 — shards are configured
    # identically in __init__, so shard 0 is representative.
    safe_names = {'timeout', 'disk'}
    valid_name = name in DEFAULT_SETTINGS or name in safe_names
    assert valid_name, 'cannot access {} in cache shard'.format(name)
    return getattr(self._shards[0], name)
|
| 68 |
+
|
| 69 |
+
@cl.contextmanager
def transact(self, retry=True):
    """Context manager to perform a transaction by locking the cache.

    While the cache is locked, no other write operation is permitted.
    Transactions should therefore be as short as possible. Read and write
    operations performed in a transaction are atomic. Read operations may
    occur concurrent to a transaction.

    Transactions may be nested and may not be shared between threads.

    Blocks until transactions are held on all cache shards by retrying as
    necessary.

    >>> cache = FanoutCache()
    >>> with cache.transact():  # Atomically increment two keys.
    ...     _ = cache.incr('total', 123.4)
    ...     _ = cache.incr('count', 1)
    >>> with cache.transact():  # Atomically calculate average.
    ...     average = cache['total'] / cache['count']
    >>> average
    123.4

    :return: context manager for use in `with` statement

    """
    # retry=False would deadlock-prone partial locking; only True is
    # supported here.
    assert retry, 'retry must be True in FanoutCache'
    # ExitStack acquires every shard's transaction; all are released
    # (in reverse order) when the outer `with` block exits.
    with cl.ExitStack() as stack:
        for shard in self._shards:
            shard_transaction = shard.transact(retry=True)
            stack.enter_context(shard_transaction)
        yield
|
| 101 |
+
|
| 102 |
+
def set(self, key, value, expire=None, read=False, tag=None, retry=False):
    """Set `key` and `value` item in cache.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    If database timeout occurs then fails silently unless `retry` is set to
    `True` (default `False`).

    :param key: key for item
    :param value: value for item
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool read: read value as raw bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was set

    """
    # Route the key to its shard by hash.
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.set(key, value, expire, read, tag, retry)
    except Timeout:
        return False
|
| 127 |
+
|
| 128 |
+
def __setitem__(self, key, value):
    """Set `key` and `value` item in cache.

    Calls :func:`FanoutCache.set` internally with `retry` set to `True`.

    :param key: key for item
    :param value: value for item

    """
    # Route the key to its shard by hash and delegate to the shard's
    # __setitem__ (which retries on timeout).
    self._shards[self._hash(key) % self._count][key] = value
|
| 140 |
+
|
| 141 |
+
def touch(self, key, expire=None, retry=False):
    """Touch `key` in cache and update `expire` time.

    If database timeout occurs then fails silently unless `retry` is set to
    `True` (default `False`).

    :param key: key for item
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if key was touched

    """
    # Route the key to its shard by hash.
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.touch(key, expire, retry)
    except Timeout:
        return False
|
| 160 |
+
|
| 161 |
+
def add(self, key, value, expire=None, read=False, tag=None, retry=False):
    """Add `key` and `value` item to cache.

    Like `set`, but the item is stored only when `key` is absent. The
    operation is atomic: among concurrent add calls for the same key,
    exactly one succeeds.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    A database timeout is reported as failure (returns False) unless
    `retry` is set to `True` (default `False`).

    :param key: key for item
    :param value: value for item
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was added

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.add(key, value, expire, read, tag, retry)
    except Timeout:
        return False
|
| 191 |
+
|
| 192 |
+
def incr(self, key, delta=1, default=0, retry=False):
    """Increment value by delta for item with key.

    Missing key with ``default=None`` raises KeyError; otherwise the
    default seeds the value. The operation is atomic — concurrent
    increments are each counted.

    Assumes value may be stored in a SQLite column (64-bit signed
    integer on most builds).

    A database timeout is reported as `None` unless `retry` is set to
    `True` (default `False`).

    :param key: key for item
    :param int delta: amount to increment (default 1)
    :param int default: value if key is missing (default 0)
    :param bool retry: retry if database timeout occurs (default False)
    :return: new value for item on success else None
    :raises KeyError: if key is not found and default is None

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.incr(key, delta, default, retry)
    except Timeout:
        return None
|
| 222 |
+
|
| 223 |
+
def decr(self, key, delta=1, default=0, retry=False):
    """Decrement value by delta for item with key.

    Missing key with ``default=None`` raises KeyError; otherwise the
    default seeds the value. The operation is atomic — concurrent
    decrements are each counted.

    Unlike Memcached, negative values are supported: the value may be
    decremented below zero.

    Assumes value may be stored in a SQLite column (64-bit signed
    integer on most builds).

    A database timeout is reported as `None` unless `retry` is set to
    `True` (default `False`).

    :param key: key for item
    :param int delta: amount to decrement (default 1)
    :param int default: value if key is missing (default 0)
    :param bool retry: retry if database timeout occurs (default False)
    :return: new value for item on success else None
    :raises KeyError: if key is not found and default is None

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.decr(key, delta, default, retry)
    except Timeout:
        return None
|
| 256 |
+
|
| 257 |
+
def get(
    self,
    key,
    default=None,
    read=False,
    expire_time=False,
    tag=False,
    retry=False,
):
    """Retrieve value from cache. If `key` is missing, return `default`.

    A database timeout (or SQLite operational error) yields `default`
    unless `retry` is set to `True` (default `False`).

    :param key: key for item
    :param default: return value if key is missing (default None)
    :param bool read: if True, return file handle to value
        (default False)
    :param float expire_time: if True, return expire_time in tuple
        (default False)
    :param tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: value for item if key is found else default

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.get(key, default, read, expire_time, tag, retry)
    except (Timeout, sqlite3.OperationalError):
        return default
|
| 288 |
+
|
| 289 |
+
def __getitem__(self, key):
    """Return corresponding value for `key` from cache (``cache[key]``).

    Equivalent to :func:`FanoutCache.get` with `retry` set to `True`.

    :param key: key for item
    :return: value for item
    :raises KeyError: if key is not found

    """
    shard = self._shards[self._hash(key) % self._count]
    return shard[key]
|
| 302 |
+
|
| 303 |
+
def read(self, key):
    """Return file handle corresponding to `key` from cache.

    :param key: key for item
    :return: file open for reading in binary mode
    :raises KeyError: if key is not found

    """
    value = self.get(key, default=ENOVAL, read=True, retry=True)
    if value is ENOVAL:
        raise KeyError(key)
    return value
|
| 315 |
+
|
| 316 |
+
def __contains__(self, key):
    """Return `True` if `key` matching item is found in cache.

    :param key: key for item
    :return: True if key is found

    """
    shard = self._shards[self._hash(key) % self._count]
    return key in shard
|
| 326 |
+
|
| 327 |
+
def pop(
    self, key, default=None, expire_time=False, tag=False, retry=False
):  # noqa: E501
    """Remove corresponding item for `key` from cache and return value.

    If `key` is missing, return `default`. The operation is atomic;
    concurrent operations are serialized.

    A database timeout yields `default` unless `retry` is set to
    `True` (default `False`).

    :param key: key for item
    :param default: return value if key is missing (default None)
    :param float expire_time: if True, return expire_time in tuple
        (default False)
    :param tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: value for item if key is found else default

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.pop(key, default, expire_time, tag, retry)
    except Timeout:
        return default
|
| 354 |
+
|
| 355 |
+
def delete(self, key, retry=False):
    """Delete corresponding item for `key` from cache.

    Missing keys are ignored. A database timeout is reported as
    failure (returns False) unless `retry` is set to `True`
    (default `False`).

    :param key: key for item
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was deleted

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.delete(key, retry)
    except Timeout:
        return False
|
| 374 |
+
|
| 375 |
+
def __delitem__(self, key):
    """Delete corresponding item for `key` from cache (``del cache[key]``).

    Equivalent to :func:`FanoutCache.delete` with `retry` set to `True`.

    :param key: key for item
    :raises KeyError: if key is not found

    """
    shard = self._shards[self._hash(key) % self._count]
    del shard[key]
|
| 387 |
+
|
| 388 |
+
def check(self, fix=False, retry=False):
    """Check database and file system consistency.

    Intended for use in testing and post-mortem error analysis.

    While checking the cache table for consistency, a writer lock is
    held on the database, blocking other cache clients from writing.
    Caches with many file references may hold the lock for a long time
    (local benchmarking: ~60ms for 1,000 file references).

    If database timeout occurs then fails silently unless `retry` is
    set to `True` (default `False`).

    :param bool fix: correct inconsistencies
    :param bool retry: retry if database timeout occurs (default False)
    :return: list of warnings
    :raises Timeout: if database timeout occurs

    """
    all_warnings = []
    for shard in self._shards:
        all_warnings += shard.check(fix, retry)
    return all_warnings
|
| 410 |
+
|
| 411 |
+
def expire(self, retry=False):
    """Remove expired items from cache.

    If database timeout occurs then fails silently unless `retry` is
    set to `True` (default `False`).

    :param bool retry: retry if database timeout occurs (default False)
    :return: count of items removed

    """
    now = time.time()
    return self._remove('expire', args=(now,), retry=retry)
|
| 422 |
+
|
| 423 |
+
def create_tag_index(self):
    """Create tag index on every shard's cache database.

    Better to initialize cache with `tag_index=True` than use this.

    :raises Timeout: if database timeout occurs

    """
    for shard in self._shards:
        shard.create_tag_index()
|
| 433 |
+
|
| 434 |
+
def drop_tag_index(self):
    """Drop tag index on every shard's cache database.

    :raises Timeout: if database timeout occurs

    """
    for shard in self._shards:
        shard.drop_tag_index()
|
| 442 |
+
|
| 443 |
+
def evict(self, tag, retry=False):
    """Remove items with matching `tag` from cache.

    If database timeout occurs then fails silently unless `retry` is
    set to `True` (default `False`).

    :param str tag: tag identifying items
    :param bool retry: retry if database timeout occurs (default False)
    :return: count of items removed

    """
    return self._remove('evict', args=(tag,), retry=retry)
|
| 455 |
+
|
| 456 |
+
def cull(self, retry=False):
    """Cull items from cache until volume is less than size limit.

    If database timeout occurs then fails silently unless `retry` is
    set to `True` (default `False`).

    :param bool retry: retry if database timeout occurs (default False)
    :return: count of items removed

    """
    return self._remove('cull', retry=retry)
|
| 467 |
+
|
| 468 |
+
def clear(self, retry=False):
    """Remove all items from cache.

    If database timeout occurs then fails silently unless `retry` is
    set to `True` (default `False`).

    :param bool retry: retry if database timeout occurs (default False)
    :return: count of items removed

    """
    return self._remove('clear', retry=retry)
|
| 479 |
+
|
| 480 |
+
def _remove(self, name, args=(), retry=False):
    # Run the removal method `name` (e.g. 'expire', 'evict', 'cull',
    # 'clear') on every shard, summing the removal counts.
    #
    # When retry is False a shard's method may raise Timeout carrying a
    # partial count in args[0]; the partial count is accumulated and the
    # same shard is attempted again until it completes without timeout.
    total = 0
    for shard in self._shards:
        method = getattr(shard, name)
        while True:
            try:
                count = method(*args, retry=retry)
                total += count
            except Timeout as timeout:
                # Partial progress before the timeout; keep it and retry
                # this shard.
                total += timeout.args[0]
            else:
                break
    return total
|
| 493 |
+
|
| 494 |
+
def stats(self, enable=True, reset=False):
    """Return cache statistics hits and misses, summed across shards.

    :param bool enable: enable collecting statistics (default True)
    :param bool reset: reset hits and misses to 0 (default False)
    :return: (hits, misses)

    """
    hits = 0
    misses = 0
    for shard in self._shards:
        shard_hits, shard_misses = shard.stats(enable, reset)
        hits += shard_hits
        misses += shard_misses
    return hits, misses
|
| 506 |
+
|
| 507 |
+
def volume(self):
    """Return estimated total size of cache on disk.

    :return: size in bytes

    """
    total = 0
    for shard in self._shards:
        total += shard.volume()
    return total
|
| 514 |
+
|
| 515 |
+
def close(self):
    """Close each shard's database connection and drop cached
    sub-cache, deque, and index views."""
    for shard in self._shards:
        shard.close()
    for mapping in (self._caches, self._deques, self._indexes):
        mapping.clear()
|
| 522 |
+
|
| 523 |
+
def __enter__(self):
    """Context manager entry; return the cache itself."""
    return self
|
| 525 |
+
|
| 526 |
+
def __exit__(self, *exception):
    """Context manager exit; close the cache. Exceptions propagate."""
    self.close()
|
| 528 |
+
|
| 529 |
+
def __getstate__(self):
    # Pickle support: capture constructor arguments instead of live
    # state (open SQLite connections are not picklable).
    return (self._directory, self._count, self.timeout, type(self.disk))
|
| 531 |
+
|
| 532 |
+
def __setstate__(self, state):
    # Unpickle by re-running __init__ with the captured arguments.
    self.__init__(*state)
|
| 534 |
+
|
| 535 |
+
def __iter__(self):
    """Iterate keys in cache including expired items."""
    return it.chain.from_iterable(iter(shard) for shard in self._shards)
|
| 539 |
+
|
| 540 |
+
def __reversed__(self):
    """Reverse iterate keys in cache including expired items."""
    return it.chain.from_iterable(
        reversed(shard) for shard in reversed(self._shards)
    )
|
| 544 |
+
|
| 545 |
+
def __len__(self):
    """Count of items in cache including expired items."""
    return sum(map(len, self._shards))
|
| 548 |
+
|
| 549 |
+
def reset(self, key, value=ENOVAL):
    """Reset `key` and `value` item from Settings table.

    If `value` is not given, it is reloaded from the Settings
    table. Otherwise, the Settings table is updated.

    Settings attributes on cache objects are lazy-loaded and
    read-only. Use `reset` to update the value.

    Settings with the ``sqlite_`` prefix correspond to SQLite
    pragmas. Updating the value will execute the corresponding PRAGMA
    statement.

    :param str key: Settings key for item
    :param value: value for item (optional)
    :return: updated value for item

    """
    for shard in self._shards:
        while True:
            try:
                result = shard.reset(key, value)
            except Timeout:
                # Retry this shard until it succeeds; the setting must
                # be applied to every shard.
                pass
            else:
                break
    # NOTE(review): assumes at least one shard exists; with zero shards
    # `result` would be unbound here — confirm shard count is always >= 1.
    return result
|
| 576 |
+
|
| 577 |
+
def cache(self, name, timeout=60, disk=None, **settings):
    """Return Cache with given `name` in subdirectory.

    If `disk` is None (default), uses the fanout cache disk.

    >>> fanout_cache = FanoutCache()
    >>> cache = fanout_cache.cache('test')
    >>> cache.set('abc', 123)
    True
    >>> cache.get('abc')
    123
    >>> len(cache)
    1
    >>> cache.delete('abc')
    True

    :param str name: subdirectory name for Cache
    :param float timeout: SQLite connection timeout
    :param disk: Disk type or subclass for serialization
    :param settings: any of DEFAULT_SETTINGS
    :return: Cache with given name

    """
    _caches = self._caches

    try:
        # Return the memoized Cache when it already exists.
        return _caches[name]
    except KeyError:
        # '/' in the name maps to nested subdirectories.
        parts = name.split('/')
        directory = op.join(self._directory, 'cache', *parts)
        temp = Cache(
            directory=directory,
            timeout=timeout,
            # BUG FIX: previously passed the `Disk` class here instead of
            # the caller-supplied `disk` argument, silently ignoring a
            # custom disk.
            disk=self._disk if disk is None else disk,
            **settings,
        )
        _caches[name] = temp
        return temp
|
| 615 |
+
|
| 616 |
+
def deque(self, name, maxlen=None):
    """Return Deque with given `name` in subdirectory.

    >>> cache = FanoutCache()
    >>> deque = cache.deque('test')
    >>> deque.extend('abc')
    >>> deque.popleft()
    'a'
    >>> deque.pop()
    'c'
    >>> len(deque)
    1

    :param str name: subdirectory name for Deque
    :param maxlen: max length (default None, no max)
    :return: Deque with given name

    """
    try:
        # Return the memoized Deque when it already exists.
        return self._deques[name]
    except KeyError:
        directory = op.join(self._directory, 'deque', *name.split('/'))
        backing = Cache(
            directory=directory,
            disk=self._disk,
            eviction_policy='none',
        )
        result = Deque.fromcache(backing, maxlen=maxlen)
        self._deques[name] = result
        return result
|
| 649 |
+
|
| 650 |
+
def index(self, name):
    """Return Index with given `name` in subdirectory.

    >>> cache = FanoutCache()
    >>> index = cache.index('test')
    >>> index['abc'] = 123
    >>> index['def'] = 456
    >>> index['ghi'] = 789
    >>> index.popitem()
    ('ghi', 789)
    >>> del index['abc']
    >>> len(index)
    1
    >>> index['def']
    456

    :param str name: subdirectory name for Index
    :return: Index with given name

    """
    try:
        # Return the memoized Index when it already exists.
        return self._indexes[name]
    except KeyError:
        directory = op.join(self._directory, 'index', *name.split('/'))
        backing = Cache(
            directory=directory,
            disk=self._disk,
            eviction_policy='none',
        )
        result = Index.fromcache(backing)
        self._indexes[name] = result
        return result
|
| 685 |
+
|
| 686 |
+
|
| 687 |
+
# Share the memoize decorator implementation with Cache; FanoutCache
# reuses it unchanged.
FanoutCache.memoize = Cache.memoize  # type: ignore
|
deepseek/lib/python3.10/site-packages/diskcache/persistent.py
ADDED
|
@@ -0,0 +1,1245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Persistent Data Types
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import operator as op
|
| 5 |
+
from collections import OrderedDict
|
| 6 |
+
from collections.abc import (
|
| 7 |
+
ItemsView,
|
| 8 |
+
KeysView,
|
| 9 |
+
MutableMapping,
|
| 10 |
+
Sequence,
|
| 11 |
+
ValuesView,
|
| 12 |
+
)
|
| 13 |
+
from contextlib import contextmanager
|
| 14 |
+
from shutil import rmtree
|
| 15 |
+
|
| 16 |
+
from .core import ENOVAL, Cache
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def _make_compare(seq_op, doc):
    """Build a rich-comparison method with Sequence semantics."""

    def compare(self, that):
        """Compare method for deque and sequence."""
        if not isinstance(that, Sequence):
            return NotImplemented

        size_self = len(self)
        size_that = len(that)

        # Unequal lengths settle equality checks immediately.
        if size_self != size_that:
            if seq_op is op.eq:
                return False
            if seq_op is op.ne:
                return True

        # Lexicographic comparison: the first differing pair decides.
        for left, right in zip(self, that):
            if left != right:
                return seq_op(left, right)

        # All shared items equal; fall back to comparing lengths.
        return seq_op(size_self, size_that)

    compare.__name__ = '__{0}__'.format(seq_op.__name__)
    doc_str = 'Return True if and only if deque is {0} `that`.'
    compare.__doc__ = doc_str.format(doc)

    return compare
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class Deque(Sequence):
|
| 50 |
+
"""Persistent sequence with double-ended queue semantics.
|
| 51 |
+
|
| 52 |
+
Double-ended queue is an ordered collection with optimized access at its
|
| 53 |
+
endpoints.
|
| 54 |
+
|
| 55 |
+
Items are serialized to disk. Deque may be initialized from directory path
|
| 56 |
+
where items are stored.
|
| 57 |
+
|
| 58 |
+
>>> deque = Deque()
|
| 59 |
+
>>> deque += range(5)
|
| 60 |
+
>>> list(deque)
|
| 61 |
+
[0, 1, 2, 3, 4]
|
| 62 |
+
>>> for value in range(5):
|
| 63 |
+
... deque.appendleft(-value)
|
| 64 |
+
>>> len(deque)
|
| 65 |
+
10
|
| 66 |
+
>>> list(deque)
|
| 67 |
+
[-4, -3, -2, -1, 0, 0, 1, 2, 3, 4]
|
| 68 |
+
>>> deque.pop()
|
| 69 |
+
4
|
| 70 |
+
>>> deque.popleft()
|
| 71 |
+
-4
|
| 72 |
+
>>> deque.reverse()
|
| 73 |
+
>>> list(deque)
|
| 74 |
+
[3, 2, 1, 0, 0, -1, -2, -3]
|
| 75 |
+
|
| 76 |
+
"""
|
| 77 |
+
|
| 78 |
+
def __init__(self, iterable=(), directory=None, maxlen=None):
    """Initialize deque instance.

    If directory is None then temporary directory created. The directory
    will *not* be automatically removed.

    :param iterable: iterable of items to append to deque
    :param directory: deque directory (default None)
    :param maxlen: max length (default None, no max)

    """
    self._cache = Cache(directory, eviction_policy='none')
    if maxlen is None:
        self._maxlen = float('inf')
    else:
        self._maxlen = maxlen
    self._extend(iterable)
|
| 91 |
+
|
| 92 |
+
@classmethod
def fromcache(cls, cache, iterable=(), maxlen=None):
    """Initialize deque using `cache`.

    >>> cache = Cache()
    >>> deque = Deque.fromcache(cache, [5, 6, 7, 8])
    >>> deque.cache is cache
    True
    >>> len(deque)
    4
    >>> 7 in deque
    True
    >>> deque.popleft()
    5

    :param Cache cache: cache to use
    :param iterable: iterable of items
    :param maxlen: max length (default None, no max)
    :return: initialized Deque

    """
    # pylint: disable=no-member,protected-access
    # Bypass __init__ so the caller-provided cache is adopted rather
    # than a new one being created.
    self = cls.__new__(cls)
    self._cache = cache
    self._maxlen = float('inf') if maxlen is None else maxlen
    self._extend(iterable)
    return self
|
| 118 |
+
|
| 119 |
+
@property
def cache(self):
    """Cache used by deque (read-only)."""
    return self._cache
|
| 123 |
+
|
| 124 |
+
@property
def directory(self):
    """Directory path where deque is stored (read-only)."""
    return self._cache.directory
|
| 128 |
+
|
| 129 |
+
@property
def maxlen(self):
    """Max length of the deque."""
    return self._maxlen

@maxlen.setter
def maxlen(self, value):
    """Set max length of the deque.

    Pops items from left while length greater than max.

    >>> deque = Deque()
    >>> deque.extendleft('abcde')
    >>> deque.maxlen = 3
    >>> list(deque)
    ['c', 'd', 'e']

    :param value: max length

    """
    self._maxlen = value
    # Trim within a single cache transaction, retrying on timeout.
    with self._cache.transact(retry=True):
        while len(self._cache) > self._maxlen:
            self._popleft()
|
| 153 |
+
|
| 154 |
+
def _index(self, index, func):
|
| 155 |
+
len_self = len(self)
|
| 156 |
+
|
| 157 |
+
if index >= 0:
|
| 158 |
+
if index >= len_self:
|
| 159 |
+
raise IndexError('deque index out of range')
|
| 160 |
+
|
| 161 |
+
for key in self._cache.iterkeys():
|
| 162 |
+
if index == 0:
|
| 163 |
+
try:
|
| 164 |
+
return func(key)
|
| 165 |
+
except KeyError:
|
| 166 |
+
continue
|
| 167 |
+
index -= 1
|
| 168 |
+
else:
|
| 169 |
+
if index < -len_self:
|
| 170 |
+
raise IndexError('deque index out of range')
|
| 171 |
+
|
| 172 |
+
index += 1
|
| 173 |
+
|
| 174 |
+
for key in self._cache.iterkeys(reverse=True):
|
| 175 |
+
if index == 0:
|
| 176 |
+
try:
|
| 177 |
+
return func(key)
|
| 178 |
+
except KeyError:
|
| 179 |
+
continue
|
| 180 |
+
index += 1
|
| 181 |
+
|
| 182 |
+
raise IndexError('deque index out of range')
|
| 183 |
+
|
| 184 |
+
def __getitem__(self, index):
|
| 185 |
+
"""deque.__getitem__(index) <==> deque[index]
|
| 186 |
+
|
| 187 |
+
Return corresponding item for `index` in deque.
|
| 188 |
+
|
| 189 |
+
See also `Deque.peekleft` and `Deque.peek` for indexing deque at index
|
| 190 |
+
``0`` or ``-1``.
|
| 191 |
+
|
| 192 |
+
>>> deque = Deque()
|
| 193 |
+
>>> deque.extend('abcde')
|
| 194 |
+
>>> deque[1]
|
| 195 |
+
'b'
|
| 196 |
+
>>> deque[-2]
|
| 197 |
+
'd'
|
| 198 |
+
|
| 199 |
+
:param int index: index of item
|
| 200 |
+
:return: corresponding item
|
| 201 |
+
:raises IndexError: if index out of range
|
| 202 |
+
|
| 203 |
+
"""
|
| 204 |
+
return self._index(index, self._cache.__getitem__)
|
| 205 |
+
|
| 206 |
+
def __setitem__(self, index, value):
|
| 207 |
+
"""deque.__setitem__(index, value) <==> deque[index] = value
|
| 208 |
+
|
| 209 |
+
Store `value` in deque at `index`.
|
| 210 |
+
|
| 211 |
+
>>> deque = Deque()
|
| 212 |
+
>>> deque.extend([None] * 3)
|
| 213 |
+
>>> deque[0] = 'a'
|
| 214 |
+
>>> deque[1] = 'b'
|
| 215 |
+
>>> deque[-1] = 'c'
|
| 216 |
+
>>> ''.join(deque)
|
| 217 |
+
'abc'
|
| 218 |
+
|
| 219 |
+
:param int index: index of value
|
| 220 |
+
:param value: value to store
|
| 221 |
+
:raises IndexError: if index out of range
|
| 222 |
+
|
| 223 |
+
"""
|
| 224 |
+
|
| 225 |
+
def _set_value(key):
|
| 226 |
+
return self._cache.__setitem__(key, value)
|
| 227 |
+
|
| 228 |
+
self._index(index, _set_value)
|
| 229 |
+
|
| 230 |
+
def __delitem__(self, index):
|
| 231 |
+
"""deque.__delitem__(index) <==> del deque[index]
|
| 232 |
+
|
| 233 |
+
Delete item in deque at `index`.
|
| 234 |
+
|
| 235 |
+
>>> deque = Deque()
|
| 236 |
+
>>> deque.extend([None] * 3)
|
| 237 |
+
>>> del deque[0]
|
| 238 |
+
>>> del deque[1]
|
| 239 |
+
>>> del deque[-1]
|
| 240 |
+
>>> len(deque)
|
| 241 |
+
0
|
| 242 |
+
|
| 243 |
+
:param int index: index of item
|
| 244 |
+
:raises IndexError: if index out of range
|
| 245 |
+
|
| 246 |
+
"""
|
| 247 |
+
self._index(index, self._cache.__delitem__)
|
| 248 |
+
|
| 249 |
+
def __repr__(self):
|
| 250 |
+
"""deque.__repr__() <==> repr(deque)
|
| 251 |
+
|
| 252 |
+
Return string with printable representation of deque.
|
| 253 |
+
|
| 254 |
+
"""
|
| 255 |
+
name = type(self).__name__
|
| 256 |
+
return '{0}(directory={1!r})'.format(name, self.directory)
|
| 257 |
+
|
| 258 |
+
__eq__ = _make_compare(op.eq, 'equal to')
|
| 259 |
+
__ne__ = _make_compare(op.ne, 'not equal to')
|
| 260 |
+
__lt__ = _make_compare(op.lt, 'less than')
|
| 261 |
+
__gt__ = _make_compare(op.gt, 'greater than')
|
| 262 |
+
__le__ = _make_compare(op.le, 'less than or equal to')
|
| 263 |
+
__ge__ = _make_compare(op.ge, 'greater than or equal to')
|
| 264 |
+
|
| 265 |
+
def __iadd__(self, iterable):
|
| 266 |
+
"""deque.__iadd__(iterable) <==> deque += iterable
|
| 267 |
+
|
| 268 |
+
Extend back side of deque with items from iterable.
|
| 269 |
+
|
| 270 |
+
:param iterable: iterable of items to append to deque
|
| 271 |
+
:return: deque with added items
|
| 272 |
+
|
| 273 |
+
"""
|
| 274 |
+
self._extend(iterable)
|
| 275 |
+
return self
|
| 276 |
+
|
| 277 |
+
def __iter__(self):
|
| 278 |
+
"""deque.__iter__() <==> iter(deque)
|
| 279 |
+
|
| 280 |
+
Return iterator of deque from front to back.
|
| 281 |
+
|
| 282 |
+
"""
|
| 283 |
+
_cache = self._cache
|
| 284 |
+
|
| 285 |
+
for key in _cache.iterkeys():
|
| 286 |
+
try:
|
| 287 |
+
yield _cache[key]
|
| 288 |
+
except KeyError:
|
| 289 |
+
pass
|
| 290 |
+
|
| 291 |
+
def __len__(self):
|
| 292 |
+
"""deque.__len__() <==> len(deque)
|
| 293 |
+
|
| 294 |
+
Return length of deque.
|
| 295 |
+
|
| 296 |
+
"""
|
| 297 |
+
return len(self._cache)
|
| 298 |
+
|
| 299 |
+
def __reversed__(self):
|
| 300 |
+
"""deque.__reversed__() <==> reversed(deque)
|
| 301 |
+
|
| 302 |
+
Return iterator of deque from back to front.
|
| 303 |
+
|
| 304 |
+
>>> deque = Deque()
|
| 305 |
+
>>> deque.extend('abcd')
|
| 306 |
+
>>> iterator = reversed(deque)
|
| 307 |
+
>>> next(iterator)
|
| 308 |
+
'd'
|
| 309 |
+
>>> list(iterator)
|
| 310 |
+
['c', 'b', 'a']
|
| 311 |
+
|
| 312 |
+
"""
|
| 313 |
+
_cache = self._cache
|
| 314 |
+
|
| 315 |
+
for key in _cache.iterkeys(reverse=True):
|
| 316 |
+
try:
|
| 317 |
+
yield _cache[key]
|
| 318 |
+
except KeyError:
|
| 319 |
+
pass
|
| 320 |
+
|
| 321 |
+
def __getstate__(self):
|
| 322 |
+
return self.directory, self.maxlen
|
| 323 |
+
|
| 324 |
+
def __setstate__(self, state):
|
| 325 |
+
directory, maxlen = state
|
| 326 |
+
self.__init__(directory=directory, maxlen=maxlen)
|
| 327 |
+
|
| 328 |
+
def append(self, value):
|
| 329 |
+
"""Add `value` to back of deque.
|
| 330 |
+
|
| 331 |
+
>>> deque = Deque()
|
| 332 |
+
>>> deque.append('a')
|
| 333 |
+
>>> deque.append('b')
|
| 334 |
+
>>> deque.append('c')
|
| 335 |
+
>>> list(deque)
|
| 336 |
+
['a', 'b', 'c']
|
| 337 |
+
|
| 338 |
+
:param value: value to add to back of deque
|
| 339 |
+
|
| 340 |
+
"""
|
| 341 |
+
with self._cache.transact(retry=True):
|
| 342 |
+
self._cache.push(value, retry=True)
|
| 343 |
+
if len(self._cache) > self._maxlen:
|
| 344 |
+
self._popleft()
|
| 345 |
+
|
| 346 |
+
_append = append
|
| 347 |
+
|
| 348 |
+
def appendleft(self, value):
|
| 349 |
+
"""Add `value` to front of deque.
|
| 350 |
+
|
| 351 |
+
>>> deque = Deque()
|
| 352 |
+
>>> deque.appendleft('a')
|
| 353 |
+
>>> deque.appendleft('b')
|
| 354 |
+
>>> deque.appendleft('c')
|
| 355 |
+
>>> list(deque)
|
| 356 |
+
['c', 'b', 'a']
|
| 357 |
+
|
| 358 |
+
:param value: value to add to front of deque
|
| 359 |
+
|
| 360 |
+
"""
|
| 361 |
+
with self._cache.transact(retry=True):
|
| 362 |
+
self._cache.push(value, side='front', retry=True)
|
| 363 |
+
if len(self._cache) > self._maxlen:
|
| 364 |
+
self._pop()
|
| 365 |
+
|
| 366 |
+
_appendleft = appendleft
|
| 367 |
+
|
| 368 |
+
def clear(self):
|
| 369 |
+
"""Remove all elements from deque.
|
| 370 |
+
|
| 371 |
+
>>> deque = Deque('abc')
|
| 372 |
+
>>> len(deque)
|
| 373 |
+
3
|
| 374 |
+
>>> deque.clear()
|
| 375 |
+
>>> list(deque)
|
| 376 |
+
[]
|
| 377 |
+
|
| 378 |
+
"""
|
| 379 |
+
self._cache.clear(retry=True)
|
| 380 |
+
|
| 381 |
+
_clear = clear
|
| 382 |
+
|
| 383 |
+
def copy(self):
|
| 384 |
+
"""Copy deque with same directory and max length."""
|
| 385 |
+
TypeSelf = type(self)
|
| 386 |
+
return TypeSelf(directory=self.directory, maxlen=self.maxlen)
|
| 387 |
+
|
| 388 |
+
def count(self, value):
|
| 389 |
+
"""Return number of occurrences of `value` in deque.
|
| 390 |
+
|
| 391 |
+
>>> deque = Deque()
|
| 392 |
+
>>> deque += [num for num in range(1, 5) for _ in range(num)]
|
| 393 |
+
>>> deque.count(0)
|
| 394 |
+
0
|
| 395 |
+
>>> deque.count(1)
|
| 396 |
+
1
|
| 397 |
+
>>> deque.count(4)
|
| 398 |
+
4
|
| 399 |
+
|
| 400 |
+
:param value: value to count in deque
|
| 401 |
+
:return: count of items equal to value in deque
|
| 402 |
+
|
| 403 |
+
"""
|
| 404 |
+
return sum(1 for item in self if value == item)
|
| 405 |
+
|
| 406 |
+
def extend(self, iterable):
|
| 407 |
+
"""Extend back side of deque with values from `iterable`.
|
| 408 |
+
|
| 409 |
+
:param iterable: iterable of values
|
| 410 |
+
|
| 411 |
+
"""
|
| 412 |
+
for value in iterable:
|
| 413 |
+
self._append(value)
|
| 414 |
+
|
| 415 |
+
_extend = extend
|
| 416 |
+
|
| 417 |
+
def extendleft(self, iterable):
|
| 418 |
+
"""Extend front side of deque with value from `iterable`.
|
| 419 |
+
|
| 420 |
+
>>> deque = Deque()
|
| 421 |
+
>>> deque.extendleft('abc')
|
| 422 |
+
>>> list(deque)
|
| 423 |
+
['c', 'b', 'a']
|
| 424 |
+
|
| 425 |
+
:param iterable: iterable of values
|
| 426 |
+
|
| 427 |
+
"""
|
| 428 |
+
for value in iterable:
|
| 429 |
+
self._appendleft(value)
|
| 430 |
+
|
| 431 |
+
def peek(self):
|
| 432 |
+
"""Peek at value at back of deque.
|
| 433 |
+
|
| 434 |
+
Faster than indexing deque at -1.
|
| 435 |
+
|
| 436 |
+
If deque is empty then raise IndexError.
|
| 437 |
+
|
| 438 |
+
>>> deque = Deque()
|
| 439 |
+
>>> deque.peek()
|
| 440 |
+
Traceback (most recent call last):
|
| 441 |
+
...
|
| 442 |
+
IndexError: peek from an empty deque
|
| 443 |
+
>>> deque += 'abc'
|
| 444 |
+
>>> deque.peek()
|
| 445 |
+
'c'
|
| 446 |
+
|
| 447 |
+
:return: value at back of deque
|
| 448 |
+
:raises IndexError: if deque is empty
|
| 449 |
+
|
| 450 |
+
"""
|
| 451 |
+
default = None, ENOVAL
|
| 452 |
+
_, value = self._cache.peek(default=default, side='back', retry=True)
|
| 453 |
+
if value is ENOVAL:
|
| 454 |
+
raise IndexError('peek from an empty deque')
|
| 455 |
+
return value
|
| 456 |
+
|
| 457 |
+
def peekleft(self):
|
| 458 |
+
"""Peek at value at front of deque.
|
| 459 |
+
|
| 460 |
+
Faster than indexing deque at 0.
|
| 461 |
+
|
| 462 |
+
If deque is empty then raise IndexError.
|
| 463 |
+
|
| 464 |
+
>>> deque = Deque()
|
| 465 |
+
>>> deque.peekleft()
|
| 466 |
+
Traceback (most recent call last):
|
| 467 |
+
...
|
| 468 |
+
IndexError: peek from an empty deque
|
| 469 |
+
>>> deque += 'abc'
|
| 470 |
+
>>> deque.peekleft()
|
| 471 |
+
'a'
|
| 472 |
+
|
| 473 |
+
:return: value at front of deque
|
| 474 |
+
:raises IndexError: if deque is empty
|
| 475 |
+
|
| 476 |
+
"""
|
| 477 |
+
default = None, ENOVAL
|
| 478 |
+
_, value = self._cache.peek(default=default, side='front', retry=True)
|
| 479 |
+
if value is ENOVAL:
|
| 480 |
+
raise IndexError('peek from an empty deque')
|
| 481 |
+
return value
|
| 482 |
+
|
| 483 |
+
def pop(self):
|
| 484 |
+
"""Remove and return value at back of deque.
|
| 485 |
+
|
| 486 |
+
If deque is empty then raise IndexError.
|
| 487 |
+
|
| 488 |
+
>>> deque = Deque()
|
| 489 |
+
>>> deque += 'ab'
|
| 490 |
+
>>> deque.pop()
|
| 491 |
+
'b'
|
| 492 |
+
>>> deque.pop()
|
| 493 |
+
'a'
|
| 494 |
+
>>> deque.pop()
|
| 495 |
+
Traceback (most recent call last):
|
| 496 |
+
...
|
| 497 |
+
IndexError: pop from an empty deque
|
| 498 |
+
|
| 499 |
+
:return: value at back of deque
|
| 500 |
+
:raises IndexError: if deque is empty
|
| 501 |
+
|
| 502 |
+
"""
|
| 503 |
+
default = None, ENOVAL
|
| 504 |
+
_, value = self._cache.pull(default=default, side='back', retry=True)
|
| 505 |
+
if value is ENOVAL:
|
| 506 |
+
raise IndexError('pop from an empty deque')
|
| 507 |
+
return value
|
| 508 |
+
|
| 509 |
+
_pop = pop
|
| 510 |
+
|
| 511 |
+
def popleft(self):
|
| 512 |
+
"""Remove and return value at front of deque.
|
| 513 |
+
|
| 514 |
+
>>> deque = Deque()
|
| 515 |
+
>>> deque += 'ab'
|
| 516 |
+
>>> deque.popleft()
|
| 517 |
+
'a'
|
| 518 |
+
>>> deque.popleft()
|
| 519 |
+
'b'
|
| 520 |
+
>>> deque.popleft()
|
| 521 |
+
Traceback (most recent call last):
|
| 522 |
+
...
|
| 523 |
+
IndexError: pop from an empty deque
|
| 524 |
+
|
| 525 |
+
:return: value at front of deque
|
| 526 |
+
:raises IndexError: if deque is empty
|
| 527 |
+
|
| 528 |
+
"""
|
| 529 |
+
default = None, ENOVAL
|
| 530 |
+
_, value = self._cache.pull(default=default, retry=True)
|
| 531 |
+
if value is ENOVAL:
|
| 532 |
+
raise IndexError('pop from an empty deque')
|
| 533 |
+
return value
|
| 534 |
+
|
| 535 |
+
_popleft = popleft
|
| 536 |
+
|
| 537 |
+
def remove(self, value):
|
| 538 |
+
"""Remove first occurrence of `value` in deque.
|
| 539 |
+
|
| 540 |
+
>>> deque = Deque()
|
| 541 |
+
>>> deque += 'aab'
|
| 542 |
+
>>> deque.remove('a')
|
| 543 |
+
>>> list(deque)
|
| 544 |
+
['a', 'b']
|
| 545 |
+
>>> deque.remove('b')
|
| 546 |
+
>>> list(deque)
|
| 547 |
+
['a']
|
| 548 |
+
>>> deque.remove('c')
|
| 549 |
+
Traceback (most recent call last):
|
| 550 |
+
...
|
| 551 |
+
ValueError: deque.remove(value): value not in deque
|
| 552 |
+
|
| 553 |
+
:param value: value to remove
|
| 554 |
+
:raises ValueError: if value not in deque
|
| 555 |
+
|
| 556 |
+
"""
|
| 557 |
+
_cache = self._cache
|
| 558 |
+
|
| 559 |
+
for key in _cache.iterkeys():
|
| 560 |
+
try:
|
| 561 |
+
item = _cache[key]
|
| 562 |
+
except KeyError:
|
| 563 |
+
continue
|
| 564 |
+
else:
|
| 565 |
+
if value == item:
|
| 566 |
+
try:
|
| 567 |
+
del _cache[key]
|
| 568 |
+
except KeyError:
|
| 569 |
+
continue
|
| 570 |
+
return
|
| 571 |
+
|
| 572 |
+
raise ValueError('deque.remove(value): value not in deque')
|
| 573 |
+
|
| 574 |
+
def reverse(self):
|
| 575 |
+
"""Reverse deque in place.
|
| 576 |
+
|
| 577 |
+
>>> deque = Deque()
|
| 578 |
+
>>> deque += 'abc'
|
| 579 |
+
>>> deque.reverse()
|
| 580 |
+
>>> list(deque)
|
| 581 |
+
['c', 'b', 'a']
|
| 582 |
+
|
| 583 |
+
"""
|
| 584 |
+
# pylint: disable=protected-access
|
| 585 |
+
# GrantJ 2019-03-22 Consider using an algorithm that swaps the values
|
| 586 |
+
# at two keys. Like self._cache.swap(key1, key2, retry=True) The swap
|
| 587 |
+
# method would exchange the values at two given keys. Then, using a
|
| 588 |
+
# forward iterator and a reverse iterator, the reverse method could
|
| 589 |
+
# avoid making copies of the values.
|
| 590 |
+
temp = Deque(iterable=reversed(self))
|
| 591 |
+
self._clear()
|
| 592 |
+
self._extend(temp)
|
| 593 |
+
directory = temp.directory
|
| 594 |
+
temp._cache.close()
|
| 595 |
+
del temp
|
| 596 |
+
rmtree(directory)
|
| 597 |
+
|
| 598 |
+
def rotate(self, steps=1):
|
| 599 |
+
"""Rotate deque right by `steps`.
|
| 600 |
+
|
| 601 |
+
If steps is negative then rotate left.
|
| 602 |
+
|
| 603 |
+
>>> deque = Deque()
|
| 604 |
+
>>> deque += range(5)
|
| 605 |
+
>>> deque.rotate(2)
|
| 606 |
+
>>> list(deque)
|
| 607 |
+
[3, 4, 0, 1, 2]
|
| 608 |
+
>>> deque.rotate(-1)
|
| 609 |
+
>>> list(deque)
|
| 610 |
+
[4, 0, 1, 2, 3]
|
| 611 |
+
|
| 612 |
+
:param int steps: number of steps to rotate (default 1)
|
| 613 |
+
|
| 614 |
+
"""
|
| 615 |
+
if not isinstance(steps, int):
|
| 616 |
+
type_name = type(steps).__name__
|
| 617 |
+
raise TypeError('integer argument expected, got %s' % type_name)
|
| 618 |
+
|
| 619 |
+
len_self = len(self)
|
| 620 |
+
|
| 621 |
+
if not len_self:
|
| 622 |
+
return
|
| 623 |
+
|
| 624 |
+
if steps >= 0:
|
| 625 |
+
steps %= len_self
|
| 626 |
+
|
| 627 |
+
for _ in range(steps):
|
| 628 |
+
try:
|
| 629 |
+
value = self._pop()
|
| 630 |
+
except IndexError:
|
| 631 |
+
return
|
| 632 |
+
else:
|
| 633 |
+
self._appendleft(value)
|
| 634 |
+
else:
|
| 635 |
+
steps *= -1
|
| 636 |
+
steps %= len_self
|
| 637 |
+
|
| 638 |
+
for _ in range(steps):
|
| 639 |
+
try:
|
| 640 |
+
value = self._popleft()
|
| 641 |
+
except IndexError:
|
| 642 |
+
return
|
| 643 |
+
else:
|
| 644 |
+
self._append(value)
|
| 645 |
+
|
| 646 |
+
__hash__ = None # type: ignore
|
| 647 |
+
|
| 648 |
+
@contextmanager
|
| 649 |
+
def transact(self):
|
| 650 |
+
"""Context manager to perform a transaction by locking the deque.
|
| 651 |
+
|
| 652 |
+
While the deque is locked, no other write operation is permitted.
|
| 653 |
+
Transactions should therefore be as short as possible. Read and write
|
| 654 |
+
operations performed in a transaction are atomic. Read operations may
|
| 655 |
+
occur concurrent to a transaction.
|
| 656 |
+
|
| 657 |
+
Transactions may be nested and may not be shared between threads.
|
| 658 |
+
|
| 659 |
+
>>> from diskcache import Deque
|
| 660 |
+
>>> deque = Deque()
|
| 661 |
+
>>> deque += range(5)
|
| 662 |
+
>>> with deque.transact(): # Atomically rotate elements.
|
| 663 |
+
... value = deque.pop()
|
| 664 |
+
... deque.appendleft(value)
|
| 665 |
+
>>> list(deque)
|
| 666 |
+
[4, 0, 1, 2, 3]
|
| 667 |
+
|
| 668 |
+
:return: context manager for use in `with` statement
|
| 669 |
+
|
| 670 |
+
"""
|
| 671 |
+
with self._cache.transact(retry=True):
|
| 672 |
+
yield
|
| 673 |
+
|
| 674 |
+
|
| 675 |
+
class Index(MutableMapping):
|
| 676 |
+
"""Persistent mutable mapping with insertion order iteration.
|
| 677 |
+
|
| 678 |
+
Items are serialized to disk. Index may be initialized from directory path
|
| 679 |
+
where items are stored.
|
| 680 |
+
|
| 681 |
+
Hashing protocol is not used. Keys are looked up by their serialized
|
| 682 |
+
format. See ``diskcache.Disk`` for details.
|
| 683 |
+
|
| 684 |
+
>>> index = Index()
|
| 685 |
+
>>> index.update([('a', 1), ('b', 2), ('c', 3)])
|
| 686 |
+
>>> index['a']
|
| 687 |
+
1
|
| 688 |
+
>>> list(index)
|
| 689 |
+
['a', 'b', 'c']
|
| 690 |
+
>>> len(index)
|
| 691 |
+
3
|
| 692 |
+
>>> del index['b']
|
| 693 |
+
>>> index.popitem()
|
| 694 |
+
('c', 3)
|
| 695 |
+
|
| 696 |
+
"""
|
| 697 |
+
|
| 698 |
+
def __init__(self, *args, **kwargs):
|
| 699 |
+
"""Initialize index in directory and update items.
|
| 700 |
+
|
| 701 |
+
Optional first argument may be string specifying directory where items
|
| 702 |
+
are stored. When None or not given, temporary directory is created.
|
| 703 |
+
|
| 704 |
+
>>> index = Index({'a': 1, 'b': 2, 'c': 3})
|
| 705 |
+
>>> len(index)
|
| 706 |
+
3
|
| 707 |
+
>>> directory = index.directory
|
| 708 |
+
>>> inventory = Index(directory, d=4)
|
| 709 |
+
>>> inventory['b']
|
| 710 |
+
2
|
| 711 |
+
>>> len(inventory)
|
| 712 |
+
4
|
| 713 |
+
|
| 714 |
+
"""
|
| 715 |
+
if args and isinstance(args[0], (bytes, str)):
|
| 716 |
+
directory = args[0]
|
| 717 |
+
args = args[1:]
|
| 718 |
+
else:
|
| 719 |
+
if args and args[0] is None:
|
| 720 |
+
args = args[1:]
|
| 721 |
+
directory = None
|
| 722 |
+
self._cache = Cache(directory, eviction_policy='none')
|
| 723 |
+
self._update(*args, **kwargs)
|
| 724 |
+
|
| 725 |
+
_update = MutableMapping.update
|
| 726 |
+
|
| 727 |
+
@classmethod
|
| 728 |
+
def fromcache(cls, cache, *args, **kwargs):
|
| 729 |
+
"""Initialize index using `cache` and update items.
|
| 730 |
+
|
| 731 |
+
>>> cache = Cache()
|
| 732 |
+
>>> index = Index.fromcache(cache, {'a': 1, 'b': 2, 'c': 3})
|
| 733 |
+
>>> index.cache is cache
|
| 734 |
+
True
|
| 735 |
+
>>> len(index)
|
| 736 |
+
3
|
| 737 |
+
>>> 'b' in index
|
| 738 |
+
True
|
| 739 |
+
>>> index['c']
|
| 740 |
+
3
|
| 741 |
+
|
| 742 |
+
:param Cache cache: cache to use
|
| 743 |
+
:param args: mapping or sequence of items
|
| 744 |
+
:param kwargs: mapping of items
|
| 745 |
+
:return: initialized Index
|
| 746 |
+
|
| 747 |
+
"""
|
| 748 |
+
# pylint: disable=no-member,protected-access
|
| 749 |
+
self = cls.__new__(cls)
|
| 750 |
+
self._cache = cache
|
| 751 |
+
self._update(*args, **kwargs)
|
| 752 |
+
return self
|
| 753 |
+
|
| 754 |
+
@property
|
| 755 |
+
def cache(self):
|
| 756 |
+
"""Cache used by index."""
|
| 757 |
+
return self._cache
|
| 758 |
+
|
| 759 |
+
@property
|
| 760 |
+
def directory(self):
|
| 761 |
+
"""Directory path where items are stored."""
|
| 762 |
+
return self._cache.directory
|
| 763 |
+
|
| 764 |
+
def __getitem__(self, key):
|
| 765 |
+
"""index.__getitem__(key) <==> index[key]
|
| 766 |
+
|
| 767 |
+
Return corresponding value for `key` in index.
|
| 768 |
+
|
| 769 |
+
>>> index = Index()
|
| 770 |
+
>>> index.update({'a': 1, 'b': 2})
|
| 771 |
+
>>> index['a']
|
| 772 |
+
1
|
| 773 |
+
>>> index['b']
|
| 774 |
+
2
|
| 775 |
+
>>> index['c']
|
| 776 |
+
Traceback (most recent call last):
|
| 777 |
+
...
|
| 778 |
+
KeyError: 'c'
|
| 779 |
+
|
| 780 |
+
:param key: key for item
|
| 781 |
+
:return: value for item in index with given key
|
| 782 |
+
:raises KeyError: if key is not found
|
| 783 |
+
|
| 784 |
+
"""
|
| 785 |
+
return self._cache[key]
|
| 786 |
+
|
| 787 |
+
def __setitem__(self, key, value):
|
| 788 |
+
"""index.__setitem__(key, value) <==> index[key] = value
|
| 789 |
+
|
| 790 |
+
Set `key` and `value` item in index.
|
| 791 |
+
|
| 792 |
+
>>> index = Index()
|
| 793 |
+
>>> index['a'] = 1
|
| 794 |
+
>>> index[0] = None
|
| 795 |
+
>>> len(index)
|
| 796 |
+
2
|
| 797 |
+
|
| 798 |
+
:param key: key for item
|
| 799 |
+
:param value: value for item
|
| 800 |
+
|
| 801 |
+
"""
|
| 802 |
+
self._cache[key] = value
|
| 803 |
+
|
| 804 |
+
def __delitem__(self, key):
|
| 805 |
+
"""index.__delitem__(key) <==> del index[key]
|
| 806 |
+
|
| 807 |
+
Delete corresponding item for `key` from index.
|
| 808 |
+
|
| 809 |
+
>>> index = Index()
|
| 810 |
+
>>> index.update({'a': 1, 'b': 2})
|
| 811 |
+
>>> del index['a']
|
| 812 |
+
>>> del index['b']
|
| 813 |
+
>>> len(index)
|
| 814 |
+
0
|
| 815 |
+
>>> del index['c']
|
| 816 |
+
Traceback (most recent call last):
|
| 817 |
+
...
|
| 818 |
+
KeyError: 'c'
|
| 819 |
+
|
| 820 |
+
:param key: key for item
|
| 821 |
+
:raises KeyError: if key is not found
|
| 822 |
+
|
| 823 |
+
"""
|
| 824 |
+
del self._cache[key]
|
| 825 |
+
|
| 826 |
+
def setdefault(self, key, default=None):
|
| 827 |
+
"""Set and get value for `key` in index using `default`.
|
| 828 |
+
|
| 829 |
+
If `key` is not in index then set corresponding value to `default`. If
|
| 830 |
+
`key` is in index then ignore `default` and return existing value.
|
| 831 |
+
|
| 832 |
+
>>> index = Index()
|
| 833 |
+
>>> index.setdefault('a', 0)
|
| 834 |
+
0
|
| 835 |
+
>>> index.setdefault('a', 1)
|
| 836 |
+
0
|
| 837 |
+
|
| 838 |
+
:param key: key for item
|
| 839 |
+
:param default: value if key is missing (default None)
|
| 840 |
+
:return: value for item in index with given key
|
| 841 |
+
|
| 842 |
+
"""
|
| 843 |
+
_cache = self._cache
|
| 844 |
+
while True:
|
| 845 |
+
try:
|
| 846 |
+
return _cache[key]
|
| 847 |
+
except KeyError:
|
| 848 |
+
_cache.add(key, default, retry=True)
|
| 849 |
+
|
| 850 |
+
def peekitem(self, last=True):
|
| 851 |
+
"""Peek at key and value item pair in index based on iteration order.
|
| 852 |
+
|
| 853 |
+
>>> index = Index()
|
| 854 |
+
>>> for num, letter in enumerate('xyz'):
|
| 855 |
+
... index[letter] = num
|
| 856 |
+
>>> index.peekitem()
|
| 857 |
+
('z', 2)
|
| 858 |
+
>>> index.peekitem(last=False)
|
| 859 |
+
('x', 0)
|
| 860 |
+
|
| 861 |
+
:param bool last: last item in iteration order (default True)
|
| 862 |
+
:return: key and value item pair
|
| 863 |
+
:raises KeyError: if cache is empty
|
| 864 |
+
|
| 865 |
+
"""
|
| 866 |
+
return self._cache.peekitem(last, retry=True)
|
| 867 |
+
|
| 868 |
+
def pop(self, key, default=ENOVAL):
|
| 869 |
+
"""Remove corresponding item for `key` from index and return value.
|
| 870 |
+
|
| 871 |
+
If `key` is missing then return `default`. If `default` is `ENOVAL`
|
| 872 |
+
then raise KeyError.
|
| 873 |
+
|
| 874 |
+
>>> index = Index({'a': 1, 'b': 2})
|
| 875 |
+
>>> index.pop('a')
|
| 876 |
+
1
|
| 877 |
+
>>> index.pop('b')
|
| 878 |
+
2
|
| 879 |
+
>>> index.pop('c', default=3)
|
| 880 |
+
3
|
| 881 |
+
>>> index.pop('d')
|
| 882 |
+
Traceback (most recent call last):
|
| 883 |
+
...
|
| 884 |
+
KeyError: 'd'
|
| 885 |
+
|
| 886 |
+
:param key: key for item
|
| 887 |
+
:param default: return value if key is missing (default ENOVAL)
|
| 888 |
+
:return: value for item if key is found else default
|
| 889 |
+
:raises KeyError: if key is not found and default is ENOVAL
|
| 890 |
+
|
| 891 |
+
"""
|
| 892 |
+
_cache = self._cache
|
| 893 |
+
value = _cache.pop(key, default=default, retry=True)
|
| 894 |
+
if value is ENOVAL:
|
| 895 |
+
raise KeyError(key)
|
| 896 |
+
return value
|
| 897 |
+
|
| 898 |
+
def popitem(self, last=True):
|
| 899 |
+
"""Remove and return item pair.
|
| 900 |
+
|
| 901 |
+
Item pairs are returned in last-in-first-out (LIFO) order if last is
|
| 902 |
+
True else first-in-first-out (FIFO) order. LIFO order imitates a stack
|
| 903 |
+
and FIFO order imitates a queue.
|
| 904 |
+
|
| 905 |
+
>>> index = Index()
|
| 906 |
+
>>> index.update([('a', 1), ('b', 2), ('c', 3)])
|
| 907 |
+
>>> index.popitem()
|
| 908 |
+
('c', 3)
|
| 909 |
+
>>> index.popitem(last=False)
|
| 910 |
+
('a', 1)
|
| 911 |
+
>>> index.popitem()
|
| 912 |
+
('b', 2)
|
| 913 |
+
>>> index.popitem()
|
| 914 |
+
Traceback (most recent call last):
|
| 915 |
+
...
|
| 916 |
+
KeyError: 'dictionary is empty'
|
| 917 |
+
|
| 918 |
+
:param bool last: pop last item pair (default True)
|
| 919 |
+
:return: key and value item pair
|
| 920 |
+
:raises KeyError: if index is empty
|
| 921 |
+
|
| 922 |
+
"""
|
| 923 |
+
# pylint: disable=arguments-differ,unbalanced-tuple-unpacking
|
| 924 |
+
_cache = self._cache
|
| 925 |
+
|
| 926 |
+
with _cache.transact(retry=True):
|
| 927 |
+
key, value = _cache.peekitem(last=last)
|
| 928 |
+
del _cache[key]
|
| 929 |
+
|
| 930 |
+
return key, value
|
| 931 |
+
|
| 932 |
+
def push(self, value, prefix=None, side='back'):
|
| 933 |
+
"""Push `value` onto `side` of queue in index identified by `prefix`.
|
| 934 |
+
|
| 935 |
+
When prefix is None, integer keys are used. Otherwise, string keys are
|
| 936 |
+
used in the format "prefix-integer". Integer starts at 500 trillion.
|
| 937 |
+
|
| 938 |
+
Defaults to pushing value on back of queue. Set side to 'front' to push
|
| 939 |
+
value on front of queue. Side must be one of 'back' or 'front'.
|
| 940 |
+
|
| 941 |
+
See also `Index.pull`.
|
| 942 |
+
|
| 943 |
+
>>> index = Index()
|
| 944 |
+
>>> print(index.push('apples'))
|
| 945 |
+
500000000000000
|
| 946 |
+
>>> print(index.push('beans'))
|
| 947 |
+
500000000000001
|
| 948 |
+
>>> print(index.push('cherries', side='front'))
|
| 949 |
+
499999999999999
|
| 950 |
+
>>> index[500000000000001]
|
| 951 |
+
'beans'
|
| 952 |
+
>>> index.push('dates', prefix='fruit')
|
| 953 |
+
'fruit-500000000000000'
|
| 954 |
+
|
| 955 |
+
:param value: value for item
|
| 956 |
+
:param str prefix: key prefix (default None, key is integer)
|
| 957 |
+
:param str side: either 'back' or 'front' (default 'back')
|
| 958 |
+
:return: key for item in cache
|
| 959 |
+
|
| 960 |
+
"""
|
| 961 |
+
return self._cache.push(value, prefix, side, retry=True)
|
| 962 |
+
|
| 963 |
+
def pull(self, prefix=None, default=(None, None), side='front'):
|
| 964 |
+
"""Pull key and value item pair from `side` of queue in index.
|
| 965 |
+
|
| 966 |
+
When prefix is None, integer keys are used. Otherwise, string keys are
|
| 967 |
+
used in the format "prefix-integer". Integer starts at 500 trillion.
|
| 968 |
+
|
| 969 |
+
If queue is empty, return default.
|
| 970 |
+
|
| 971 |
+
Defaults to pulling key and value item pairs from front of queue. Set
|
| 972 |
+
side to 'back' to pull from back of queue. Side must be one of 'front'
|
| 973 |
+
or 'back'.
|
| 974 |
+
|
| 975 |
+
See also `Index.push`.
|
| 976 |
+
|
| 977 |
+
>>> index = Index()
|
| 978 |
+
>>> for letter in 'abc':
|
| 979 |
+
... print(index.push(letter))
|
| 980 |
+
500000000000000
|
| 981 |
+
500000000000001
|
| 982 |
+
500000000000002
|
| 983 |
+
>>> key, value = index.pull()
|
| 984 |
+
>>> print(key)
|
| 985 |
+
500000000000000
|
| 986 |
+
>>> value
|
| 987 |
+
'a'
|
| 988 |
+
>>> _, value = index.pull(side='back')
|
| 989 |
+
>>> value
|
| 990 |
+
'c'
|
| 991 |
+
>>> index.pull(prefix='fruit')
|
| 992 |
+
(None, None)
|
| 993 |
+
|
| 994 |
+
:param str prefix: key prefix (default None, key is integer)
|
| 995 |
+
:param default: value to return if key is missing
|
| 996 |
+
(default (None, None))
|
| 997 |
+
:param str side: either 'front' or 'back' (default 'front')
|
| 998 |
+
:return: key and value item pair or default if queue is empty
|
| 999 |
+
|
| 1000 |
+
"""
|
| 1001 |
+
return self._cache.pull(prefix, default, side, retry=True)
|
| 1002 |
+
|
| 1003 |
+
def clear(self):
|
| 1004 |
+
"""Remove all items from index.
|
| 1005 |
+
|
| 1006 |
+
>>> index = Index({'a': 0, 'b': 1, 'c': 2})
|
| 1007 |
+
>>> len(index)
|
| 1008 |
+
3
|
| 1009 |
+
>>> index.clear()
|
| 1010 |
+
>>> dict(index)
|
| 1011 |
+
{}
|
| 1012 |
+
|
| 1013 |
+
"""
|
| 1014 |
+
self._cache.clear(retry=True)
|
| 1015 |
+
|
| 1016 |
+
def __iter__(self):
|
| 1017 |
+
"""index.__iter__() <==> iter(index)
|
| 1018 |
+
|
| 1019 |
+
Return iterator of index keys in insertion order.
|
| 1020 |
+
|
| 1021 |
+
"""
|
| 1022 |
+
return iter(self._cache)
|
| 1023 |
+
|
| 1024 |
+
def __reversed__(self):
|
| 1025 |
+
"""index.__reversed__() <==> reversed(index)
|
| 1026 |
+
|
| 1027 |
+
Return iterator of index keys in reversed insertion order.
|
| 1028 |
+
|
| 1029 |
+
>>> index = Index()
|
| 1030 |
+
>>> index.update([('a', 1), ('b', 2), ('c', 3)])
|
| 1031 |
+
>>> iterator = reversed(index)
|
| 1032 |
+
>>> next(iterator)
|
| 1033 |
+
'c'
|
| 1034 |
+
>>> list(iterator)
|
| 1035 |
+
['b', 'a']
|
| 1036 |
+
|
| 1037 |
+
"""
|
| 1038 |
+
return reversed(self._cache)
|
| 1039 |
+
|
| 1040 |
+
def __len__(self):
|
| 1041 |
+
"""index.__len__() <==> len(index)
|
| 1042 |
+
|
| 1043 |
+
Return length of index.
|
| 1044 |
+
|
| 1045 |
+
"""
|
| 1046 |
+
return len(self._cache)
|
| 1047 |
+
|
| 1048 |
+
def keys(self):
|
| 1049 |
+
"""Set-like object providing a view of index keys.
|
| 1050 |
+
|
| 1051 |
+
>>> index = Index()
|
| 1052 |
+
>>> index.update({'a': 1, 'b': 2, 'c': 3})
|
| 1053 |
+
>>> keys_view = index.keys()
|
| 1054 |
+
>>> 'b' in keys_view
|
| 1055 |
+
True
|
| 1056 |
+
|
| 1057 |
+
:return: keys view
|
| 1058 |
+
|
| 1059 |
+
"""
|
| 1060 |
+
return KeysView(self)
|
| 1061 |
+
|
| 1062 |
+
def values(self):
|
| 1063 |
+
"""Set-like object providing a view of index values.
|
| 1064 |
+
|
| 1065 |
+
>>> index = Index()
|
| 1066 |
+
>>> index.update({'a': 1, 'b': 2, 'c': 3})
|
| 1067 |
+
>>> values_view = index.values()
|
| 1068 |
+
>>> 2 in values_view
|
| 1069 |
+
True
|
| 1070 |
+
|
| 1071 |
+
:return: values view
|
| 1072 |
+
|
| 1073 |
+
"""
|
| 1074 |
+
return ValuesView(self)
|
| 1075 |
+
|
| 1076 |
+
def items(self):
|
| 1077 |
+
"""Set-like object providing a view of index items.
|
| 1078 |
+
|
| 1079 |
+
>>> index = Index()
|
| 1080 |
+
>>> index.update({'a': 1, 'b': 2, 'c': 3})
|
| 1081 |
+
>>> items_view = index.items()
|
| 1082 |
+
>>> ('b', 2) in items_view
|
| 1083 |
+
True
|
| 1084 |
+
|
| 1085 |
+
:return: items view
|
| 1086 |
+
|
| 1087 |
+
"""
|
| 1088 |
+
return ItemsView(self)
|
| 1089 |
+
|
| 1090 |
+
__hash__ = None # type: ignore
|
| 1091 |
+
|
| 1092 |
+
def __getstate__(self):
|
| 1093 |
+
return self.directory
|
| 1094 |
+
|
| 1095 |
+
def __setstate__(self, state):
|
| 1096 |
+
self.__init__(state)
|
| 1097 |
+
|
| 1098 |
+
def __eq__(self, other):
|
| 1099 |
+
"""index.__eq__(other) <==> index == other
|
| 1100 |
+
|
| 1101 |
+
Compare equality for index and `other`.
|
| 1102 |
+
|
| 1103 |
+
Comparison to another index or ordered dictionary is
|
| 1104 |
+
order-sensitive. Comparison to all other mappings is order-insensitive.
|
| 1105 |
+
|
| 1106 |
+
>>> index = Index()
|
| 1107 |
+
>>> pairs = [('a', 1), ('b', 2), ('c', 3)]
|
| 1108 |
+
>>> index.update(pairs)
|
| 1109 |
+
>>> from collections import OrderedDict
|
| 1110 |
+
>>> od = OrderedDict(pairs)
|
| 1111 |
+
>>> index == od
|
| 1112 |
+
True
|
| 1113 |
+
>>> index == {'c': 3, 'b': 2, 'a': 1}
|
| 1114 |
+
True
|
| 1115 |
+
|
| 1116 |
+
:param other: other mapping in equality comparison
|
| 1117 |
+
:return: True if index equals other
|
| 1118 |
+
|
| 1119 |
+
"""
|
| 1120 |
+
if len(self) != len(other):
|
| 1121 |
+
return False
|
| 1122 |
+
|
| 1123 |
+
if isinstance(other, (Index, OrderedDict)):
|
| 1124 |
+
alpha = ((key, self[key]) for key in self)
|
| 1125 |
+
beta = ((key, other[key]) for key in other)
|
| 1126 |
+
pairs = zip(alpha, beta)
|
| 1127 |
+
return not any(a != x or b != y for (a, b), (x, y) in pairs)
|
| 1128 |
+
else:
|
| 1129 |
+
return all(self[key] == other.get(key, ENOVAL) for key in self)
|
| 1130 |
+
|
| 1131 |
+
def __ne__(self, other):
|
| 1132 |
+
"""index.__ne__(other) <==> index != other
|
| 1133 |
+
|
| 1134 |
+
Compare inequality for index and `other`.
|
| 1135 |
+
|
| 1136 |
+
Comparison to another index or ordered dictionary is
|
| 1137 |
+
order-sensitive. Comparison to all other mappings is order-insensitive.
|
| 1138 |
+
|
| 1139 |
+
>>> index = Index()
|
| 1140 |
+
>>> index.update([('a', 1), ('b', 2), ('c', 3)])
|
| 1141 |
+
>>> from collections import OrderedDict
|
| 1142 |
+
>>> od = OrderedDict([('c', 3), ('b', 2), ('a', 1)])
|
| 1143 |
+
>>> index != od
|
| 1144 |
+
True
|
| 1145 |
+
>>> index != {'a': 1, 'b': 2}
|
| 1146 |
+
True
|
| 1147 |
+
|
| 1148 |
+
:param other: other mapping in inequality comparison
|
| 1149 |
+
:return: True if index does not equal other
|
| 1150 |
+
|
| 1151 |
+
"""
|
| 1152 |
+
return not self == other
|
| 1153 |
+
|
| 1154 |
+
def memoize(self, name=None, typed=False, ignore=()):
|
| 1155 |
+
"""Memoizing cache decorator.
|
| 1156 |
+
|
| 1157 |
+
Decorator to wrap callable with memoizing function using cache.
|
| 1158 |
+
Repeated calls with the same arguments will lookup result in cache and
|
| 1159 |
+
avoid function evaluation.
|
| 1160 |
+
|
| 1161 |
+
If name is set to None (default), the callable name will be determined
|
| 1162 |
+
automatically.
|
| 1163 |
+
|
| 1164 |
+
If typed is set to True, function arguments of different types will be
|
| 1165 |
+
cached separately. For example, f(3) and f(3.0) will be treated as
|
| 1166 |
+
distinct calls with distinct results.
|
| 1167 |
+
|
| 1168 |
+
The original underlying function is accessible through the __wrapped__
|
| 1169 |
+
attribute. This is useful for introspection, for bypassing the cache,
|
| 1170 |
+
or for rewrapping the function with a different cache.
|
| 1171 |
+
|
| 1172 |
+
>>> from diskcache import Index
|
| 1173 |
+
>>> mapping = Index()
|
| 1174 |
+
>>> @mapping.memoize()
|
| 1175 |
+
... def fibonacci(number):
|
| 1176 |
+
... if number == 0:
|
| 1177 |
+
... return 0
|
| 1178 |
+
... elif number == 1:
|
| 1179 |
+
... return 1
|
| 1180 |
+
... else:
|
| 1181 |
+
... return fibonacci(number - 1) + fibonacci(number - 2)
|
| 1182 |
+
>>> print(fibonacci(100))
|
| 1183 |
+
354224848179261915075
|
| 1184 |
+
|
| 1185 |
+
An additional `__cache_key__` attribute can be used to generate the
|
| 1186 |
+
cache key used for the given arguments.
|
| 1187 |
+
|
| 1188 |
+
>>> key = fibonacci.__cache_key__(100)
|
| 1189 |
+
>>> print(mapping[key])
|
| 1190 |
+
354224848179261915075
|
| 1191 |
+
|
| 1192 |
+
Remember to call memoize when decorating a callable. If you forget,
|
| 1193 |
+
then a TypeError will occur. Note the lack of parenthenses after
|
| 1194 |
+
memoize below:
|
| 1195 |
+
|
| 1196 |
+
>>> @mapping.memoize
|
| 1197 |
+
... def test():
|
| 1198 |
+
... pass
|
| 1199 |
+
Traceback (most recent call last):
|
| 1200 |
+
...
|
| 1201 |
+
TypeError: name cannot be callable
|
| 1202 |
+
|
| 1203 |
+
:param str name: name given for callable (default None, automatic)
|
| 1204 |
+
:param bool typed: cache different types separately (default False)
|
| 1205 |
+
:param set ignore: positional or keyword args to ignore (default ())
|
| 1206 |
+
:return: callable decorator
|
| 1207 |
+
|
| 1208 |
+
"""
|
| 1209 |
+
return self._cache.memoize(name, typed, ignore=ignore)
|
| 1210 |
+
|
| 1211 |
+
@contextmanager
|
| 1212 |
+
def transact(self):
|
| 1213 |
+
"""Context manager to perform a transaction by locking the index.
|
| 1214 |
+
|
| 1215 |
+
While the index is locked, no other write operation is permitted.
|
| 1216 |
+
Transactions should therefore be as short as possible. Read and write
|
| 1217 |
+
operations performed in a transaction are atomic. Read operations may
|
| 1218 |
+
occur concurrent to a transaction.
|
| 1219 |
+
|
| 1220 |
+
Transactions may be nested and may not be shared between threads.
|
| 1221 |
+
|
| 1222 |
+
>>> from diskcache import Index
|
| 1223 |
+
>>> mapping = Index()
|
| 1224 |
+
>>> with mapping.transact(): # Atomically increment two keys.
|
| 1225 |
+
... mapping['total'] = mapping.get('total', 0) + 123.4
|
| 1226 |
+
... mapping['count'] = mapping.get('count', 0) + 1
|
| 1227 |
+
>>> with mapping.transact(): # Atomically calculate average.
|
| 1228 |
+
... average = mapping['total'] / mapping['count']
|
| 1229 |
+
>>> average
|
| 1230 |
+
123.4
|
| 1231 |
+
|
| 1232 |
+
:return: context manager for use in `with` statement
|
| 1233 |
+
|
| 1234 |
+
"""
|
| 1235 |
+
with self._cache.transact(retry=True):
|
| 1236 |
+
yield
|
| 1237 |
+
|
| 1238 |
+
def __repr__(self):
|
| 1239 |
+
"""index.__repr__() <==> repr(index)
|
| 1240 |
+
|
| 1241 |
+
Return string with printable representation of index.
|
| 1242 |
+
|
| 1243 |
+
"""
|
| 1244 |
+
name = type(self).__name__
|
| 1245 |
+
return '{0}({1!r})'.format(name, self.directory)
|
deepseek/lib/python3.10/site-packages/diskcache/recipes.py
ADDED
|
@@ -0,0 +1,488 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Disk Cache Recipes
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import functools
|
| 5 |
+
import math
|
| 6 |
+
import os
|
| 7 |
+
import random
|
| 8 |
+
import threading
|
| 9 |
+
import time
|
| 10 |
+
|
| 11 |
+
from .core import ENOVAL, args_to_key, full_name
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class Averager:
    """Recipe for calculating a running average.

    Sometimes known as "online statistics," the running average maintains the
    total and count. The average can then be calculated at any time.

    Assumes the key will not be evicted. Set the eviction policy to 'none' on
    the cache to guarantee the key is not evicted.

    >>> import diskcache
    >>> cache = diskcache.FanoutCache()
    >>> ave = Averager(cache, 'latency')
    >>> ave.add(0.080)
    >>> ave.add(0.120)
    >>> ave.get()
    0.1
    >>> ave.add(0.160)
    >>> ave.pop()
    0.12
    >>> print(ave.get())
    None

    """

    def __init__(self, cache, key, expire=None, tag=None):
        self._cache = cache
        self._key = key
        self._expire = expire
        self._tag = tag

    def add(self, value):
        """Add `value` to average."""
        # Update the (total, count) pair atomically inside a transaction.
        with self._cache.transact(retry=True):
            running_total, sample_count = self._cache.get(
                self._key, default=(0.0, 0)
            )
            self._cache.set(
                self._key,
                (running_total + value, sample_count + 1),
                expire=self._expire,
                tag=self._tag,
            )

    def get(self):
        """Get current average or return `None` if count equals zero."""
        running_total, sample_count = self._cache.get(
            self._key, default=(0.0, 0), retry=True
        )
        if sample_count == 0:
            return None
        return running_total / sample_count

    def pop(self):
        """Return current average and delete key."""
        running_total, sample_count = self._cache.pop(
            self._key, default=(0.0, 0), retry=True
        )
        if sample_count == 0:
            return None
        return running_total / sample_count
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class Lock:
    """Recipe for cross-process and cross-thread lock.

    Assumes the key will not be evicted. Set the eviction policy to 'none' on
    the cache to guarantee the key is not evicted.

    >>> import diskcache
    >>> cache = diskcache.Cache()
    >>> lock = Lock(cache, 'report-123')
    >>> lock.acquire()
    >>> lock.release()
    >>> with lock:
    ...     pass

    """

    def __init__(self, cache, key, expire=None, tag=None):
        self._cache = cache
        self._key = key
        self._expire = expire
        self._tag = tag

    def acquire(self):
        """Acquire lock using spin-lock algorithm."""
        while True:
            # `add` only succeeds when the key is absent, so the first
            # caller to insert the key owns the lock.
            added = self._cache.add(
                self._key,
                None,
                expire=self._expire,
                tag=self._tag,
                retry=True,
            )
            if added:
                break
            time.sleep(0.001)

    def release(self):
        """Release lock by deleting key."""
        self._cache.delete(self._key, retry=True)

    def locked(self):
        """Return true if the lock is acquired."""
        return self._key in self._cache

    def __enter__(self):
        self.acquire()
        # Return the lock so `with Lock(...) as lock:` binds it (previously
        # the context manager yielded None).
        return self

    def __exit__(self, *exc_info):
        self.release()
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
class RLock:
    """Recipe for cross-process and cross-thread re-entrant lock.

    Assumes the key will not be evicted. Set the eviction policy to 'none' on
    the cache to guarantee the key is not evicted.

    >>> import diskcache
    >>> cache = diskcache.Cache()
    >>> rlock = RLock(cache, 'user-123')
    >>> rlock.acquire()
    >>> rlock.acquire()
    >>> rlock.release()
    >>> with rlock:
    ...     pass
    >>> rlock.release()
    >>> rlock.release()
    Traceback (most recent call last):
      ...
    AssertionError: cannot release un-acquired lock

    """

    def __init__(self, cache, key, expire=None, tag=None):
        self._cache = cache
        self._key = key
        self._expire = expire
        self._tag = tag

    def acquire(self):
        """Acquire lock by incrementing count using spin-lock algorithm."""
        pid = os.getpid()
        tid = threading.get_ident()
        pid_tid = '{}-{}'.format(pid, tid)

        while True:
            with self._cache.transact(retry=True):
                value, count = self._cache.get(self._key, default=(None, 0))
                # Re-entrant: the owning process/thread may re-acquire; an
                # unheld lock (count == 0) may be taken by anyone.
                if pid_tid == value or count == 0:
                    self._cache.set(
                        self._key,
                        (pid_tid, count + 1),
                        expire=self._expire,
                        tag=self._tag,
                    )
                    return
            time.sleep(0.001)

    def release(self):
        """Release lock by decrementing count."""
        pid = os.getpid()
        tid = threading.get_ident()
        pid_tid = '{}-{}'.format(pid, tid)

        with self._cache.transact(retry=True):
            value, count = self._cache.get(self._key, default=(None, 0))
            is_owned = pid_tid == value and count > 0
            assert is_owned, 'cannot release un-acquired lock'
            self._cache.set(
                self._key,
                (value, count - 1),
                expire=self._expire,
                tag=self._tag,
            )

    def __enter__(self):
        self.acquire()
        # Return the lock so `with RLock(...) as rlock:` binds it
        # (previously the context manager yielded None).
        return self

    def __exit__(self, *exc_info):
        self.release()
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
class BoundedSemaphore:
    """Recipe for cross-process and cross-thread bounded semaphore.

    Assumes the key will not be evicted. Set the eviction policy to 'none' on
    the cache to guarantee the key is not evicted.

    >>> import diskcache
    >>> cache = diskcache.Cache()
    >>> semaphore = BoundedSemaphore(cache, 'max-cons', value=2)
    >>> semaphore.acquire()
    >>> semaphore.acquire()
    >>> semaphore.release()
    >>> with semaphore:
    ...     pass
    >>> semaphore.release()
    >>> semaphore.release()
    Traceback (most recent call last):
      ...
    AssertionError: cannot release un-acquired semaphore

    """

    def __init__(self, cache, key, value=1, expire=None, tag=None):
        self._cache = cache
        self._key = key
        self._value = value
        self._expire = expire
        self._tag = tag

    def acquire(self):
        """Acquire semaphore by decrementing value using spin-lock algorithm."""
        while True:
            with self._cache.transact(retry=True):
                # Missing key means all slots are free (initial state).
                value = self._cache.get(self._key, default=self._value)
                if value > 0:
                    self._cache.set(
                        self._key,
                        value - 1,
                        expire=self._expire,
                        tag=self._tag,
                    )
                    return
            time.sleep(0.001)

    def release(self):
        """Release semaphore by incrementing value."""
        with self._cache.transact(retry=True):
            value = self._cache.get(self._key, default=self._value)
            # Bounded: releasing past the initial value is a usage error.
            assert self._value > value, 'cannot release un-acquired semaphore'
            value += 1
            self._cache.set(
                self._key,
                value,
                expire=self._expire,
                tag=self._tag,
            )

    def __enter__(self):
        self.acquire()
        # Return the semaphore so `with BoundedSemaphore(...) as sem:` binds
        # it (previously the context manager yielded None).
        return self

    def __exit__(self, *exc_info):
        self.release()
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def throttle(
    cache,
    count,
    seconds,
    name=None,
    expire=None,
    tag=None,
    time_func=time.time,
    sleep_func=time.sleep,
):
    """Decorator to throttle calls to function.

    Assumes keys will not be evicted. Set the eviction policy to 'none' on the
    cache to guarantee the keys are not evicted.

    >>> import diskcache, time
    >>> cache = diskcache.Cache()
    >>> count = 0
    >>> @throttle(cache, 2, 1)  # 2 calls per 1 second
    ... def increment():
    ...     global count
    ...     count += 1
    >>> start = time.time()
    >>> while (time.time() - start) <= 2:
    ...     increment()
    >>> count in (6, 7)  # 6 or 7 calls depending on CPU load
    True

    """

    def decorator(func):
        # Token-bucket rate limiter: tokens replenish at `rate` per second
        # up to a maximum of `count`.
        rate = count / float(seconds)
        key = full_name(func) if name is None else name
        cache.set(key, (time_func(), count), expire=expire, tag=tag, retry=True)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            while True:
                with cache.transact(retry=True):
                    last_time, tokens = cache.get(key)
                    now = time_func()
                    # Replenish tokens earned since the last update.
                    tokens += (now - last_time) * rate
                    wait = 0

                    if tokens > count:
                        # Clamp to the bucket capacity and spend one token.
                        cache.set(key, (now, count - 1), expire)
                    elif tokens >= 1:
                        cache.set(key, (now, tokens - 1), expire)
                    else:
                        # Not enough tokens; compute time until one is
                        # available.
                        wait = (1 - tokens) / rate

                if wait:
                    sleep_func(wait)
                else:
                    break

            return func(*args, **kwargs)

        return wrapper

    return decorator
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def barrier(cache, lock_factory, name=None, expire=None, tag=None):
    """Barrier to calling decorated function.

    Supports different kinds of locks: Lock, RLock, BoundedSemaphore.

    Assumes keys will not be evicted. Set the eviction policy to 'none' on the
    cache to guarantee the keys are not evicted.

    >>> import diskcache, time
    >>> cache = diskcache.Cache()
    >>> @barrier(cache, Lock)
    ... def work(num):
    ...     print('worker started')
    ...     time.sleep(1)
    ...     print('worker finished')
    >>> import multiprocessing.pool
    >>> pool = multiprocessing.pool.ThreadPool(2)
    >>> _ = pool.map(work, range(2))
    worker started
    worker finished
    worker started
    worker finished
    >>> pool.terminate()

    """

    def decorator(func):
        # One shared lock per decorated callable, keyed by its full name
        # unless an explicit name is given.
        guard_key = full_name(func) if name is None else name
        guard = lock_factory(cache, guard_key, expire=expire, tag=tag)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with guard:
                return func(*args, **kwargs)

        return wrapper

    return decorator
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
def memoize_stampede(
    cache, expire, name=None, typed=False, tag=None, beta=1, ignore=()
):
    """Memoizing cache decorator with cache stampede protection.

    Cache stampedes are a type of system overload that can occur when parallel
    computing systems using memoization come under heavy load. This behaviour
    is sometimes also called dog-piling, cache miss storm, cache choking, or
    the thundering herd problem.

    The memoization decorator implements cache stampede protection through
    early recomputation. Early recomputation of function results will occur
    probabilistically before expiration in a background thread of
    execution. Early probabilistic recomputation is based on research by
    Vattani, A.; Chierichetti, F.; Lowenstein, K. (2015), Optimal Probabilistic
    Cache Stampede Prevention, VLDB, pp. 886-897, ISSN 2150-8097

    If name is set to None (default), the callable name will be determined
    automatically.

    If typed is set to True, function arguments of different types will be
    cached separately. For example, f(3) and f(3.0) will be treated as distinct
    calls with distinct results.

    The original underlying function is accessible through the `__wrapped__`
    attribute. This is useful for introspection, for bypassing the cache, or
    for rewrapping the function with a different cache.

    >>> from diskcache import Cache
    >>> cache = Cache()
    >>> @memoize_stampede(cache, expire=1)
    ... def fib(number):
    ...     if number == 0:
    ...         return 0
    ...     elif number == 1:
    ...         return 1
    ...     else:
    ...         return fib(number - 1) + fib(number - 2)
    >>> print(fib(100))
    354224848179261915075

    An additional `__cache_key__` attribute can be used to generate the cache
    key used for the given arguments.

    >>> key = fib.__cache_key__(100)
    >>> del cache[key]

    Remember to call memoize when decorating a callable. If you forget, then a
    TypeError will occur.

    :param cache: cache to store callable arguments and return values
    :param float expire: seconds until arguments expire
    :param str name: name given for callable (default None, automatic)
    :param bool typed: cache different types separately (default False)
    :param str tag: text to associate with arguments (default None)
    :param float beta: scales the probability of early recomputation; values
        greater than 1 favor recomputing earlier (default 1)
    :param set ignore: positional or keyword args to ignore (default ())
    :return: callable decorator

    """
    # Caution: Nearly identical code exists in Cache.memoize
    def decorator(func):
        """Decorator created by memoize call for callable."""
        base = (full_name(func),) if name is None else (name,)

        def timer(*args, **kwargs):
            """Time execution of `func` and return result and time delta."""
            # The measured duration (`delta`) drives the early-recomputation
            # probability below: slower functions are recomputed earlier.
            start = time.time()
            result = func(*args, **kwargs)
            delta = time.time() - start
            return result, delta

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            """Wrapper for callable to cache arguments and return values."""
            key = wrapper.__cache_key__(*args, **kwargs)
            # `expire_time=True` returns the absolute expiration timestamp
            # alongside the cached (result, delta) pair.
            pair, expire_time = cache.get(
                key,
                default=ENOVAL,
                expire_time=True,
                retry=True,
            )

            if pair is not ENOVAL:
                result, delta = pair
                now = time.time()
                ttl = expire_time - now

                # Probabilistic early expiration per Vattani et al. (2015):
                # recompute early with probability that rises as the entry
                # nears expiration and as the function's runtime grows.
                if (-delta * beta * math.log(random.random())) < ttl:
                    return result  # Cache hit.

                # Check whether a thread has started for early recomputation.

                # Sentinel key (expires after ~one recomputation time) so
                # only one caller launches the background refresh.
                thread_key = key + (ENOVAL,)
                thread_added = cache.add(
                    thread_key,
                    None,
                    expire=delta,
                    retry=True,
                )

                if thread_added:
                    # Start thread for early recomputation.
                    def recompute():
                        with cache:
                            pair = timer(*args, **kwargs)
                            cache.set(
                                key,
                                pair,
                                expire=expire,
                                tag=tag,
                                retry=True,
                            )

                    thread = threading.Thread(target=recompute)
                    thread.daemon = True
                    thread.start()

                # Return the (stale but valid) cached result while the
                # background thread refreshes the entry.
                return result

            # Cache miss: compute synchronously and store the timed pair.
            pair = timer(*args, **kwargs)
            cache.set(key, pair, expire=expire, tag=tag, retry=True)
            return pair[0]

        def __cache_key__(*args, **kwargs):
            """Make key for cache given function arguments."""
            return args_to_key(base, args, kwargs, typed, ignore)

        wrapper.__cache_key__ = __cache_key__
        return wrapper

    return decorator
|
deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2012, Anaconda, Inc.
|
| 2 |
+
All rights reserved.
|
| 3 |
+
|
| 4 |
+
Redistribution and use in source and binary forms, with or without
|
| 5 |
+
modification, are permitted provided that the following conditions are
|
| 6 |
+
met:
|
| 7 |
+
|
| 8 |
+
Redistributions of source code must retain the above copyright notice,
|
| 9 |
+
this list of conditions and the following disclaimer.
|
| 10 |
+
|
| 11 |
+
Redistributions in binary form must reproduce the above copyright
|
| 12 |
+
notice, this list of conditions and the following disclaimer in the
|
| 13 |
+
documentation and/or other materials provided with the distribution.
|
| 14 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 15 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 16 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 17 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 18 |
+
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 19 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 20 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 21 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 22 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 23 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 24 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/LICENSES.third-party
ADDED
|
@@ -0,0 +1,507 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
The Numba source tree includes vendored libraries governed by the following
|
| 2 |
+
licenses.
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
appdirs
|
| 6 |
+
-------
|
| 7 |
+
|
| 8 |
+
# This is the MIT license
|
| 9 |
+
|
| 10 |
+
Copyright (c) 2010 ActiveState Software Inc.
|
| 11 |
+
|
| 12 |
+
Permission is hereby granted, free of charge, to any person obtaining a
|
| 13 |
+
copy of this software and associated documentation files (the
|
| 14 |
+
"Software"), to deal in the Software without restriction, including
|
| 15 |
+
without limitation the rights to use, copy, modify, merge, publish,
|
| 16 |
+
distribute, sublicense, and/or sell copies of the Software, and to
|
| 17 |
+
permit persons to whom the Software is furnished to do so, subject to
|
| 18 |
+
the following conditions:
|
| 19 |
+
|
| 20 |
+
The above copyright notice and this permission notice shall be included
|
| 21 |
+
in all copies or substantial portions of the Software.
|
| 22 |
+
|
| 23 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
| 24 |
+
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 25 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
| 26 |
+
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
| 27 |
+
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
| 28 |
+
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
| 29 |
+
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
NetworkX
|
| 33 |
+
--------
|
| 34 |
+
The dominance frontier algorithm is from a pull request
|
| 35 |
+
https://github.com/numba/numba/pull/4149/files which is based
|
| 36 |
+
on the implementation of NetworkX of dominance. NetworkX has the following
|
| 37 |
+
license:
|
| 38 |
+
|
| 39 |
+
NetworkX is distributed with the 3-clause BSD license.
|
| 40 |
+
|
| 41 |
+
::
|
| 42 |
+
|
| 43 |
+
Copyright (C) 2004-2019, NetworkX Developers
|
| 44 |
+
Aric Hagberg <hagberg@lanl.gov>
|
| 45 |
+
Dan Schult <dschult@colgate.edu>
|
| 46 |
+
Pieter Swart <swart@lanl.gov>
|
| 47 |
+
All rights reserved.
|
| 48 |
+
|
| 49 |
+
Redistribution and use in source and binary forms, with or without
|
| 50 |
+
modification, are permitted provided that the following conditions are
|
| 51 |
+
met:
|
| 52 |
+
|
| 53 |
+
* Redistributions of source code must retain the above copyright
|
| 54 |
+
notice, this list of conditions and the following disclaimer.
|
| 55 |
+
|
| 56 |
+
* Redistributions in binary form must reproduce the above
|
| 57 |
+
copyright notice, this list of conditions and the following
|
| 58 |
+
disclaimer in the documentation and/or other materials provided
|
| 59 |
+
with the distribution.
|
| 60 |
+
|
| 61 |
+
* Neither the name of the NetworkX Developers nor the names of its
|
| 62 |
+
contributors may be used to endorse or promote products derived
|
| 63 |
+
from this software without specific prior written permission.
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 67 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 68 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 69 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 70 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 71 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 72 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 73 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 74 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 75 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 76 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
jquery.graphviz.svg (https://github.com/mountainstorm/jquery.graphviz.svg/)
|
| 80 |
+
---------------------------------------------------------------------------
|
| 81 |
+
The DAG roadmap rendering code in docs/dagmap/ uses Javascript from this
|
| 82 |
+
package to draw graphs in HTML.
|
| 83 |
+
|
| 84 |
+
Copyright (c) 2015 Mountainstorm
|
| 85 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 86 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 87 |
+
in the Software without restriction, including without limitation the rights
|
| 88 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 89 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 90 |
+
furnished to do so, subject to the following conditions:
|
| 91 |
+
The above copyright notice and this permission notice shall be included in all
|
| 92 |
+
copies or substantial portions of the Software.
|
| 93 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 94 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 95 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 96 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 97 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 98 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 99 |
+
SOFTWARE.
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
CPython (https://github.com/python/cpython)
|
| 104 |
+
-------------------------------------------
|
| 105 |
+
Numba source code that references URLs starting with:
|
| 106 |
+
|
| 107 |
+
https://github.com/python/cpython/
|
| 108 |
+
|
| 109 |
+
relates to use/inclusion of CPython source code which has the following license:
|
| 110 |
+
|
| 111 |
+
A. HISTORY OF THE SOFTWARE
|
| 112 |
+
==========================
|
| 113 |
+
|
| 114 |
+
Python was created in the early 1990s by Guido van Rossum at Stichting
|
| 115 |
+
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
|
| 116 |
+
as a successor of a language called ABC. Guido remains Python's
|
| 117 |
+
principal author, although it includes many contributions from others.
|
| 118 |
+
|
| 119 |
+
In 1995, Guido continued his work on Python at the Corporation for
|
| 120 |
+
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
|
| 121 |
+
in Reston, Virginia where he released several versions of the
|
| 122 |
+
software.
|
| 123 |
+
|
| 124 |
+
In May 2000, Guido and the Python core development team moved to
|
| 125 |
+
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
| 126 |
+
year, the PythonLabs team moved to Digital Creations, which became
|
| 127 |
+
Zope Corporation. In 2001, the Python Software Foundation (PSF, see
|
| 128 |
+
https://www.python.org/psf/) was formed, a non-profit organization
|
| 129 |
+
created specifically to own Python-related Intellectual Property.
|
| 130 |
+
Zope Corporation was a sponsoring member of the PSF.
|
| 131 |
+
|
| 132 |
+
All Python releases are Open Source (see http://www.opensource.org for
|
| 133 |
+
the Open Source Definition). Historically, most, but not all, Python
|
| 134 |
+
releases have also been GPL-compatible; the table below summarizes
|
| 135 |
+
the various releases.
|
| 136 |
+
|
| 137 |
+
Release Derived Year Owner GPL-
|
| 138 |
+
from compatible? (1)
|
| 139 |
+
|
| 140 |
+
0.9.0 thru 1.2 1991-1995 CWI yes
|
| 141 |
+
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
|
| 142 |
+
1.6 1.5.2 2000 CNRI no
|
| 143 |
+
2.0 1.6 2000 BeOpen.com no
|
| 144 |
+
1.6.1 1.6 2001 CNRI yes (2)
|
| 145 |
+
2.1 2.0+1.6.1 2001 PSF no
|
| 146 |
+
2.0.1 2.0+1.6.1 2001 PSF yes
|
| 147 |
+
2.1.1 2.1+2.0.1 2001 PSF yes
|
| 148 |
+
2.1.2 2.1.1 2002 PSF yes
|
| 149 |
+
2.1.3 2.1.2 2002 PSF yes
|
| 150 |
+
2.2 and above 2.1.1 2001-now PSF yes
|
| 151 |
+
|
| 152 |
+
Footnotes:
|
| 153 |
+
|
| 154 |
+
(1) GPL-compatible doesn't mean that we're distributing Python under
|
| 155 |
+
the GPL. All Python licenses, unlike the GPL, let you distribute
|
| 156 |
+
a modified version without making your changes open source. The
|
| 157 |
+
GPL-compatible licenses make it possible to combine Python with
|
| 158 |
+
other software that is released under the GPL; the others don't.
|
| 159 |
+
|
| 160 |
+
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
| 161 |
+
because its license has a choice of law clause. According to
|
| 162 |
+
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
| 163 |
+
is "not incompatible" with the GPL.
|
| 164 |
+
|
| 165 |
+
Thanks to the many outside volunteers who have worked under Guido's
|
| 166 |
+
direction to make these releases possible.
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
| 170 |
+
===============================================================
|
| 171 |
+
|
| 172 |
+
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
| 173 |
+
--------------------------------------------
|
| 174 |
+
|
| 175 |
+
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
| 176 |
+
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
| 177 |
+
otherwise using this software ("Python") in source or binary form and
|
| 178 |
+
its associated documentation.
|
| 179 |
+
|
| 180 |
+
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
| 181 |
+
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
| 182 |
+
analyze, test, perform and/or display publicly, prepare derivative works,
|
| 183 |
+
distribute, and otherwise use Python alone or in any derivative version,
|
| 184 |
+
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
| 185 |
+
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
| 186 |
+
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Python Software Foundation;
|
| 187 |
+
All Rights Reserved" are retained in Python alone or in any derivative version
|
| 188 |
+
prepared by Licensee.
|
| 189 |
+
|
| 190 |
+
3. In the event Licensee prepares a derivative work that is based on
|
| 191 |
+
or incorporates Python or any part thereof, and wants to make
|
| 192 |
+
the derivative work available to others as provided herein, then
|
| 193 |
+
Licensee hereby agrees to include in any such work a brief summary of
|
| 194 |
+
the changes made to Python.
|
| 195 |
+
|
| 196 |
+
4. PSF is making Python available to Licensee on an "AS IS"
|
| 197 |
+
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
| 198 |
+
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
| 199 |
+
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
| 200 |
+
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
| 201 |
+
INFRINGE ANY THIRD PARTY RIGHTS.
|
| 202 |
+
|
| 203 |
+
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
| 204 |
+
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
| 205 |
+
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
| 206 |
+
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
| 207 |
+
|
| 208 |
+
6. This License Agreement will automatically terminate upon a material
|
| 209 |
+
breach of its terms and conditions.
|
| 210 |
+
|
| 211 |
+
7. Nothing in this License Agreement shall be deemed to create any
|
| 212 |
+
relationship of agency, partnership, or joint venture between PSF and
|
| 213 |
+
Licensee. This License Agreement does not grant permission to use PSF
|
| 214 |
+
trademarks or trade name in a trademark sense to endorse or promote
|
| 215 |
+
products or services of Licensee, or any third party.
|
| 216 |
+
|
| 217 |
+
8. By copying, installing or otherwise using Python, Licensee
|
| 218 |
+
agrees to be bound by the terms and conditions of this License
|
| 219 |
+
Agreement.
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
| 223 |
+
-------------------------------------------
|
| 224 |
+
|
| 225 |
+
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
| 226 |
+
|
| 227 |
+
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
| 228 |
+
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
| 229 |
+
Individual or Organization ("Licensee") accessing and otherwise using
|
| 230 |
+
this software in source or binary form and its associated
|
| 231 |
+
documentation ("the Software").
|
| 232 |
+
|
| 233 |
+
2. Subject to the terms and conditions of this BeOpen Python License
|
| 234 |
+
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
| 235 |
+
royalty-free, world-wide license to reproduce, analyze, test, perform
|
| 236 |
+
and/or display publicly, prepare derivative works, distribute, and
|
| 237 |
+
otherwise use the Software alone or in any derivative version,
|
| 238 |
+
provided, however, that the BeOpen Python License is retained in the
|
| 239 |
+
Software, alone or in any derivative version prepared by Licensee.
|
| 240 |
+
|
| 241 |
+
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
| 242 |
+
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
| 243 |
+
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
| 244 |
+
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
| 245 |
+
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
| 246 |
+
INFRINGE ANY THIRD PARTY RIGHTS.
|
| 247 |
+
|
| 248 |
+
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
| 249 |
+
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
| 250 |
+
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
| 251 |
+
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
| 252 |
+
|
| 253 |
+
5. This License Agreement will automatically terminate upon a material
|
| 254 |
+
breach of its terms and conditions.
|
| 255 |
+
|
| 256 |
+
6. This License Agreement shall be governed by and interpreted in all
|
| 257 |
+
respects by the law of the State of California, excluding conflict of
|
| 258 |
+
law provisions. Nothing in this License Agreement shall be deemed to
|
| 259 |
+
create any relationship of agency, partnership, or joint venture
|
| 260 |
+
between BeOpen and Licensee. This License Agreement does not grant
|
| 261 |
+
permission to use BeOpen trademarks or trade names in a trademark
|
| 262 |
+
sense to endorse or promote products or services of Licensee, or any
|
| 263 |
+
third party. As an exception, the "BeOpen Python" logos available at
|
| 264 |
+
http://www.pythonlabs.com/logos.html may be used according to the
|
| 265 |
+
permissions granted on that web page.
|
| 266 |
+
|
| 267 |
+
7. By copying, installing or otherwise using the software, Licensee
|
| 268 |
+
agrees to be bound by the terms and conditions of this License
|
| 269 |
+
Agreement.
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
| 273 |
+
---------------------------------------
|
| 274 |
+
|
| 275 |
+
1. This LICENSE AGREEMENT is between the Corporation for National
|
| 276 |
+
Research Initiatives, having an office at 1895 Preston White Drive,
|
| 277 |
+
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
| 278 |
+
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
| 279 |
+
source or binary form and its associated documentation.
|
| 280 |
+
|
| 281 |
+
2. Subject to the terms and conditions of this License Agreement, CNRI
|
| 282 |
+
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
| 283 |
+
license to reproduce, analyze, test, perform and/or display publicly,
|
| 284 |
+
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
| 285 |
+
alone or in any derivative version, provided, however, that CNRI's
|
| 286 |
+
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
| 287 |
+
1995-2001 Corporation for National Research Initiatives; All Rights
|
| 288 |
+
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
| 289 |
+
version prepared by Licensee. Alternately, in lieu of CNRI's License
|
| 290 |
+
Agreement, Licensee may substitute the following text (omitting the
|
| 291 |
+
quotes): "Python 1.6.1 is made available subject to the terms and
|
| 292 |
+
conditions in CNRI's License Agreement. This Agreement together with
|
| 293 |
+
Python 1.6.1 may be located on the Internet using the following
|
| 294 |
+
unique, persistent identifier (known as a handle): 1895.22/1013. This
|
| 295 |
+
Agreement may also be obtained from a proxy server on the Internet
|
| 296 |
+
using the following URL: http://hdl.handle.net/1895.22/1013".
|
| 297 |
+
|
| 298 |
+
3. In the event Licensee prepares a derivative work that is based on
|
| 299 |
+
or incorporates Python 1.6.1 or any part thereof, and wants to make
|
| 300 |
+
the derivative work available to others as provided herein, then
|
| 301 |
+
Licensee hereby agrees to include in any such work a brief summary of
|
| 302 |
+
the changes made to Python 1.6.1.
|
| 303 |
+
|
| 304 |
+
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
|
| 305 |
+
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
| 306 |
+
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
|
| 307 |
+
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
| 308 |
+
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
|
| 309 |
+
INFRINGE ANY THIRD PARTY RIGHTS.
|
| 310 |
+
|
| 311 |
+
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
| 312 |
+
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
| 313 |
+
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
|
| 314 |
+
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
| 315 |
+
|
| 316 |
+
6. This License Agreement will automatically terminate upon a material
|
| 317 |
+
breach of its terms and conditions.
|
| 318 |
+
|
| 319 |
+
7. This License Agreement shall be governed by the federal
|
| 320 |
+
intellectual property law of the United States, including without
|
| 321 |
+
limitation the federal copyright law, and, to the extent such
|
| 322 |
+
U.S. federal law does not apply, by the law of the Commonwealth of
|
| 323 |
+
Virginia, excluding Virginia's conflict of law provisions.
|
| 324 |
+
Notwithstanding the foregoing, with regard to derivative works based
|
| 325 |
+
on Python 1.6.1 that incorporate non-separable material that was
|
| 326 |
+
previously distributed under the GNU General Public License (GPL), the
|
| 327 |
+
law of the Commonwealth of Virginia shall govern this License
|
| 328 |
+
Agreement only as to issues arising under or with respect to
|
| 329 |
+
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
|
| 330 |
+
License Agreement shall be deemed to create any relationship of
|
| 331 |
+
agency, partnership, or joint venture between CNRI and Licensee. This
|
| 332 |
+
License Agreement does not grant permission to use CNRI trademarks or
|
| 333 |
+
trade name in a trademark sense to endorse or promote products or
|
| 334 |
+
services of Licensee, or any third party.
|
| 335 |
+
|
| 336 |
+
8. By clicking on the "ACCEPT" button where indicated, or by copying,
|
| 337 |
+
installing or otherwise using Python 1.6.1, Licensee agrees to be
|
| 338 |
+
bound by the terms and conditions of this License Agreement.
|
| 339 |
+
|
| 340 |
+
ACCEPT
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
|
| 344 |
+
--------------------------------------------------
|
| 345 |
+
|
| 346 |
+
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
|
| 347 |
+
The Netherlands. All rights reserved.
|
| 348 |
+
|
| 349 |
+
Permission to use, copy, modify, and distribute this software and its
|
| 350 |
+
documentation for any purpose and without fee is hereby granted,
|
| 351 |
+
provided that the above copyright notice appear in all copies and that
|
| 352 |
+
both that copyright notice and this permission notice appear in
|
| 353 |
+
supporting documentation, and that the name of Stichting Mathematisch
|
| 354 |
+
Centrum or CWI not be used in advertising or publicity pertaining to
|
| 355 |
+
distribution of the software without specific, written prior
|
| 356 |
+
permission.
|
| 357 |
+
|
| 358 |
+
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
| 359 |
+
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
| 360 |
+
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
|
| 361 |
+
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
| 362 |
+
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
| 363 |
+
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
| 364 |
+
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
CPython unicode (https://github.com/python/cpython)
|
| 368 |
+
---------------------------------------------------
|
| 369 |
+
Numba's unicode support includes source code/algorithms from CPython's unicode
|
| 370 |
+
implementation, Numba source code that has a reference starting with:
|
| 371 |
+
|
| 372 |
+
https://github.com/python/cpython/
|
| 373 |
+
|
| 374 |
+
and contains in the path "Objects/unicodeobject.c" relates to use/inclusion of
|
| 375 |
+
CPython source code which has the following license along with the standard
|
| 376 |
+
CPython license:
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
Unicode implementation based on original code by Fredrik Lundh,
|
| 380 |
+
modified by Marc-Andre Lemburg <mal@lemburg.com>.
|
| 381 |
+
|
| 382 |
+
Major speed upgrades to the method implementations at the Reykjavik
|
| 383 |
+
NeedForSpeed sprint, by Fredrik Lundh and Andrew Dalke.
|
| 384 |
+
|
| 385 |
+
Copyright (c) Corporation for National Research Initiatives.
|
| 386 |
+
|
| 387 |
+
--------------------------------------------------------------------
|
| 388 |
+
The original string type implementation is:
|
| 389 |
+
|
| 390 |
+
Copyright (c) 1999 by Secret Labs AB
|
| 391 |
+
Copyright (c) 1999 by Fredrik Lundh
|
| 392 |
+
|
| 393 |
+
By obtaining, using, and/or copying this software and/or its
|
| 394 |
+
associated documentation, you agree that you have read, understood,
|
| 395 |
+
and will comply with the following terms and conditions:
|
| 396 |
+
|
| 397 |
+
Permission to use, copy, modify, and distribute this software and its
|
| 398 |
+
associated documentation for any purpose and without fee is hereby
|
| 399 |
+
granted, provided that the above copyright notice appears in all
|
| 400 |
+
copies, and that both that copyright notice and this permission notice
|
| 401 |
+
appear in supporting documentation, and that the name of Secret Labs
|
| 402 |
+
AB or the author not be used in advertising or publicity pertaining to
|
| 403 |
+
distribution of the software without specific, written prior
|
| 404 |
+
permission.
|
| 405 |
+
|
| 406 |
+
SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
| 407 |
+
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
| 408 |
+
FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR
|
| 409 |
+
ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
| 410 |
+
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
| 411 |
+
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
| 412 |
+
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
| 413 |
+
--------------------------------------------------------------------
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
cloudpickle
|
| 417 |
+
-----------
|
| 418 |
+
|
| 419 |
+
This module was extracted from the `cloud` package, developed by
|
| 420 |
+
PiCloud, Inc.
|
| 421 |
+
|
| 422 |
+
Copyright (c) 2015, Cloudpickle contributors.
|
| 423 |
+
Copyright (c) 2012, Regents of the University of California.
|
| 424 |
+
Copyright (c) 2009 PiCloud, Inc. http://www.picloud.com.
|
| 425 |
+
All rights reserved.
|
| 426 |
+
|
| 427 |
+
Redistribution and use in source and binary forms, with or without
|
| 428 |
+
modification, are permitted provided that the following conditions
|
| 429 |
+
are met:
|
| 430 |
+
* Redistributions of source code must retain the above copyright
|
| 431 |
+
notice, this list of conditions and the following disclaimer.
|
| 432 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 433 |
+
notice, this list of conditions and the following disclaimer in the
|
| 434 |
+
documentation and/or other materials provided with the distribution.
|
| 435 |
+
* Neither the name of the University of California, Berkeley nor the
|
| 436 |
+
names of its contributors may be used to endorse or promote
|
| 437 |
+
products derived from this software without specific prior written
|
| 438 |
+
permission.
|
| 439 |
+
|
| 440 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 441 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 442 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 443 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 444 |
+
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 445 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
| 446 |
+
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
| 447 |
+
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
| 448 |
+
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
| 449 |
+
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
| 450 |
+
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 451 |
+
© 2020 GitHub, Inc.
|
| 452 |
+
|
| 453 |
+
|
| 454 |
+
NumPy (https://github.com/numpy/numpy)
|
| 455 |
+
--------------------------------------
|
| 456 |
+
Numba source code that references URLs starting with:
|
| 457 |
+
|
| 458 |
+
https://github.com/numpy/numpy
|
| 459 |
+
|
| 460 |
+
relates to use of/inclusion of/derivate work based on NumPy source code which
|
| 461 |
+
has the following license:
|
| 462 |
+
|
| 463 |
+
|
| 464 |
+
Copyright (c) 2005-2021, NumPy Developers.
|
| 465 |
+
All rights reserved.
|
| 466 |
+
|
| 467 |
+
Redistribution and use in source and binary forms, with or without
|
| 468 |
+
modification, are permitted provided that the following conditions are
|
| 469 |
+
met:
|
| 470 |
+
|
| 471 |
+
* Redistributions of source code must retain the above copyright
|
| 472 |
+
notice, this list of conditions and the following disclaimer.
|
| 473 |
+
|
| 474 |
+
* Redistributions in binary form must reproduce the above
|
| 475 |
+
copyright notice, this list of conditions and the following
|
| 476 |
+
disclaimer in the documentation and/or other materials provided
|
| 477 |
+
with the distribution.
|
| 478 |
+
|
| 479 |
+
* Neither the name of the NumPy Developers nor the names of any
|
| 480 |
+
contributors may be used to endorse or promote products derived
|
| 481 |
+
from this software without specific prior written permission.
|
| 482 |
+
|
| 483 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 484 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 485 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 486 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 487 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 488 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 489 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 490 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 491 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 492 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 493 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 494 |
+
|
| 495 |
+
|
| 496 |
+
CUDA Half Precision Headers
|
| 497 |
+
---------------------------
|
| 498 |
+
|
| 499 |
+
The files numba/cuda/cuda_fp16.h and numba/cuda/cuda_fp16.hpp are vendored from
|
| 500 |
+
the CUDA Toolkit version 11.2.2 under the terms of the NVIDIA Software License
|
| 501 |
+
Agreement and CUDA Supplement to Software License Agreement, available at:
|
| 502 |
+
|
| 503 |
+
https://docs.nvidia.com/cuda/archive/11.2.2/eula/index.html
|
| 504 |
+
|
| 505 |
+
These files are distributable as listed in Attachment A:
|
| 506 |
+
|
| 507 |
+
https://docs.nvidia.com/cuda/archive/11.2.2/eula/index.html#attachment-a
|
deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: numba
|
| 3 |
+
Version: 0.60.0
|
| 4 |
+
Summary: compiling Python code using LLVM
|
| 5 |
+
Home-page: https://numba.pydata.org
|
| 6 |
+
License: BSD
|
| 7 |
+
Classifier: Development Status :: 4 - Beta
|
| 8 |
+
Classifier: Intended Audience :: Developers
|
| 9 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 10 |
+
Classifier: Operating System :: OS Independent
|
| 11 |
+
Classifier: Programming Language :: Python
|
| 12 |
+
Classifier: Programming Language :: Python :: 3
|
| 13 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 14 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 15 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 17 |
+
Classifier: Topic :: Software Development :: Compilers
|
| 18 |
+
Requires-Python: >=3.9
|
| 19 |
+
License-File: LICENSE
|
| 20 |
+
License-File: LICENSES.third-party
|
| 21 |
+
Requires-Dist: llvmlite <0.44,>=0.43.0dev0
|
| 22 |
+
Requires-Dist: numpy <2.1,>=1.22
|
| 23 |
+
|
| 24 |
+
*****
|
| 25 |
+
Numba
|
| 26 |
+
*****
|
| 27 |
+
|
| 28 |
+
.. image:: https://badges.gitter.im/numba/numba.svg
|
| 29 |
+
:target: https://gitter.im/numba/numba?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge
|
| 30 |
+
:alt: Gitter
|
| 31 |
+
|
| 32 |
+
.. image:: https://img.shields.io/badge/discuss-on%20discourse-blue
|
| 33 |
+
:target: https://numba.discourse.group/
|
| 34 |
+
:alt: Discourse
|
| 35 |
+
|
| 36 |
+
.. image:: https://zenodo.org/badge/3659275.svg
|
| 37 |
+
:target: https://zenodo.org/badge/latestdoi/3659275
|
| 38 |
+
:alt: Zenodo DOI
|
| 39 |
+
|
| 40 |
+
.. image:: https://img.shields.io/pypi/v/numba.svg
|
| 41 |
+
:target: https://pypi.python.org/pypi/numba/
|
| 42 |
+
:alt: PyPI
|
| 43 |
+
|
| 44 |
+
.. image:: https://dev.azure.com/numba/numba/_apis/build/status/numba.numba?branchName=main
|
| 45 |
+
:target: https://dev.azure.com/numba/numba/_build/latest?definitionId=1?branchName=main
|
| 46 |
+
:alt: Azure Pipelines
|
| 47 |
+
|
| 48 |
+
A Just-In-Time Compiler for Numerical Functions in Python
|
| 49 |
+
#########################################################
|
| 50 |
+
|
| 51 |
+
Numba is an open source, NumPy-aware optimizing compiler for Python sponsored
|
| 52 |
+
by Anaconda, Inc. It uses the LLVM compiler project to generate machine code
|
| 53 |
+
from Python syntax.
|
| 54 |
+
|
| 55 |
+
Numba can compile a large subset of numerically-focused Python, including many
|
| 56 |
+
NumPy functions. Additionally, Numba has support for automatic
|
| 57 |
+
parallelization of loops, generation of GPU-accelerated code, and creation of
|
| 58 |
+
ufuncs and C callbacks.
|
| 59 |
+
|
| 60 |
+
For more information about Numba, see the Numba homepage:
|
| 61 |
+
https://numba.pydata.org and the online documentation:
|
| 62 |
+
https://numba.readthedocs.io/en/stable/index.html
|
| 63 |
+
|
| 64 |
+
Installation
|
| 65 |
+
============
|
| 66 |
+
|
| 67 |
+
Please follow the instructions:
|
| 68 |
+
|
| 69 |
+
https://numba.readthedocs.io/en/stable/user/installing.html
|
| 70 |
+
|
| 71 |
+
Demo
|
| 72 |
+
====
|
| 73 |
+
|
| 74 |
+
Please have a look and the demo notebooks via the mybinder service:
|
| 75 |
+
|
| 76 |
+
https://mybinder.org/v2/gh/numba/numba-examples/master?filepath=notebooks
|
| 77 |
+
|
| 78 |
+
Contact
|
| 79 |
+
=======
|
| 80 |
+
|
| 81 |
+
Numba has a discourse forum for discussions:
|
| 82 |
+
|
| 83 |
+
* https://numba.discourse.group
|
| 84 |
+
|
deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/RECORD
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.43.0)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp310-cp310-manylinux_2_17_x86_64
|
| 5 |
+
Tag: cp310-cp310-manylinux2014_x86_64
|
| 6 |
+
|
deepseek/lib/python3.10/site-packages/numba-0.60.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
numba
|
deepseek/lib/python3.10/site-packages/numpy/random/tests/data/pcg64-testset-1.csv
ADDED
|
@@ -0,0 +1,1001 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
seed, 0xdeadbeaf
|
| 2 |
+
0, 0x60d24054e17a0698
|
| 3 |
+
1, 0xd5e79d89856e4f12
|
| 4 |
+
2, 0xd254972fe64bd782
|
| 5 |
+
3, 0xf1e3072a53c72571
|
| 6 |
+
4, 0xd7c1d7393d4115c9
|
| 7 |
+
5, 0x77b75928b763e1e2
|
| 8 |
+
6, 0xee6dee05190f7909
|
| 9 |
+
7, 0x15f7b1c51d7fa319
|
| 10 |
+
8, 0x27e44105f26ac2d7
|
| 11 |
+
9, 0xcc0d88b29e5b415
|
| 12 |
+
10, 0xe07b1a90c685e361
|
| 13 |
+
11, 0xd2e430240de95e38
|
| 14 |
+
12, 0x3260bca9a24ca9da
|
| 15 |
+
13, 0x9b3cf2e92385adb7
|
| 16 |
+
14, 0x30b5514548271976
|
| 17 |
+
15, 0xa3a1fa16c124faf9
|
| 18 |
+
16, 0xf53e17e918e45bb6
|
| 19 |
+
17, 0x26f19faaeb833bfc
|
| 20 |
+
18, 0x95e1d605730cce1b
|
| 21 |
+
19, 0xa7b520c5c093c1aa
|
| 22 |
+
20, 0x4b68c010c9b106a3
|
| 23 |
+
21, 0x25e19fe91df703f0
|
| 24 |
+
22, 0x898364bb0bf593cb
|
| 25 |
+
23, 0x5bd6ab7dbaa125db
|
| 26 |
+
24, 0xd1fe47f25152045c
|
| 27 |
+
25, 0x3bb11919addf2409
|
| 28 |
+
26, 0x26a8cb7b3f54af8
|
| 29 |
+
27, 0xe6a27ee11200aa24
|
| 30 |
+
28, 0x7cb585ab01e22000
|
| 31 |
+
29, 0x78e60028676d2ef3
|
| 32 |
+
30, 0x5c32535e5a899528
|
| 33 |
+
31, 0x83e8b6f8c4a46fb3
|
| 34 |
+
32, 0xe56ef7668a161246
|
| 35 |
+
33, 0x36dcbc15aeb73055
|
| 36 |
+
34, 0x5ea247f0bd188acb
|
| 37 |
+
35, 0x438b547b84601a80
|
| 38 |
+
36, 0x8acda2a1273e9e3d
|
| 39 |
+
37, 0x2b05e30a4b40c24c
|
| 40 |
+
38, 0xfd87236bd13af032
|
| 41 |
+
39, 0x471df211d8d985ef
|
| 42 |
+
40, 0x18e8a5609a793292
|
| 43 |
+
41, 0x46f0951fab6dc4e3
|
| 44 |
+
42, 0x6c199c4e700f6795
|
| 45 |
+
43, 0xf04aa16bfb7d22cb
|
| 46 |
+
44, 0xd763d269fbaffc89
|
| 47 |
+
45, 0x9991930cefbe5c2b
|
| 48 |
+
46, 0xb2a11b953f824c96
|
| 49 |
+
47, 0x63fd9f52172c44b0
|
| 50 |
+
48, 0x183bdad907b1d848
|
| 51 |
+
49, 0xe17953cddb931c52
|
| 52 |
+
50, 0x515cf16726ec205a
|
| 53 |
+
51, 0x88c327605150711a
|
| 54 |
+
52, 0xc7090dd79cbc8dc3
|
| 55 |
+
53, 0xcb487cedeb00a350
|
| 56 |
+
54, 0xc8abf254d87b657
|
| 57 |
+
55, 0xd43cc4cbfb493d1a
|
| 58 |
+
56, 0x8705452e5d9ed1e
|
| 59 |
+
57, 0xcecd11446769cf43
|
| 60 |
+
58, 0xde72156c8d65bc69
|
| 61 |
+
59, 0x796a8f0f47d52ee8
|
| 62 |
+
60, 0xb4c0da443917d6c3
|
| 63 |
+
61, 0xe07ad7568a8e3dc3
|
| 64 |
+
62, 0xc24a8da39ce6dc21
|
| 65 |
+
63, 0x92b21ea80a8556eb
|
| 66 |
+
64, 0x572f21e531edf3af
|
| 67 |
+
65, 0x9b917ed56bbed198
|
| 68 |
+
66, 0xe65fd8ddc5ab3d7d
|
| 69 |
+
67, 0xf55a80a8ec84fa18
|
| 70 |
+
68, 0x18fc22e1a5227b61
|
| 71 |
+
69, 0x72305dc7eeaa79d3
|
| 72 |
+
70, 0x47ce58a36e7592cf
|
| 73 |
+
71, 0x14c6374340c0f7cc
|
| 74 |
+
72, 0x6f98273d4eb5a2c
|
| 75 |
+
73, 0x59a8702c46fe8f8a
|
| 76 |
+
74, 0xb67cbd8113cfe57f
|
| 77 |
+
75, 0xaa03c5db5f5b7690
|
| 78 |
+
76, 0x3fb0f77ea4568013
|
| 79 |
+
77, 0x756530990398b26e
|
| 80 |
+
78, 0x4c1952b2a3a6a343
|
| 81 |
+
79, 0x1da15c5383074582
|
| 82 |
+
80, 0xb405b21c81c274f7
|
| 83 |
+
81, 0xbe664677a16788b
|
| 84 |
+
82, 0x9d2e37550bcee656
|
| 85 |
+
83, 0x8b4589f0d9defe02
|
| 86 |
+
84, 0x2935f018ee06a59
|
| 87 |
+
85, 0x3834bf88be97ed11
|
| 88 |
+
86, 0xa610d049cea79b6d
|
| 89 |
+
87, 0xd49ffc0d09a59ea9
|
| 90 |
+
88, 0x4073365b76567adf
|
| 91 |
+
89, 0x499eefb9bb7513e2
|
| 92 |
+
90, 0x74a743ee6b0138a9
|
| 93 |
+
91, 0x3bf0880f2d947594
|
| 94 |
+
92, 0x555d1c0498600a99
|
| 95 |
+
93, 0x923b32a88ef2ffa4
|
| 96 |
+
94, 0x7325411065fbedea
|
| 97 |
+
95, 0x9f4129ff8b79d300
|
| 98 |
+
96, 0xab2b0a9b8a3785dc
|
| 99 |
+
97, 0x11734bdfba3a1713
|
| 100 |
+
98, 0xc8333398841ba585
|
| 101 |
+
99, 0xee2409cc234e6742
|
| 102 |
+
100, 0xf6638e700872ecd2
|
| 103 |
+
101, 0x10875300c13cd284
|
| 104 |
+
102, 0x27a9bbed7c15b2d3
|
| 105 |
+
103, 0x3c87f8fef31ce9bd
|
| 106 |
+
104, 0x92be263cd0914a95
|
| 107 |
+
105, 0xa7b0f11bc742307e
|
| 108 |
+
106, 0x4a56f788cc1c1a3c
|
| 109 |
+
107, 0x4a130fa32257a48b
|
| 110 |
+
108, 0x5d4d9eda16e90286
|
| 111 |
+
109, 0x7cc2af564844bedc
|
| 112 |
+
110, 0x2532867bfe7cda1a
|
| 113 |
+
111, 0xb1c504676611fd17
|
| 114 |
+
112, 0xce8e86cfb4189aee
|
| 115 |
+
113, 0x99685898980d1970
|
| 116 |
+
114, 0x8c3b67db23bcf1e
|
| 117 |
+
115, 0x73e14c93905b135f
|
| 118 |
+
116, 0xf0271b64ac2bd4d3
|
| 119 |
+
117, 0xf4beba82f3ec1b2d
|
| 120 |
+
118, 0x1cdbf3ee9f210af
|
| 121 |
+
119, 0x2e938557c09c3ea6
|
| 122 |
+
120, 0x2d314ccfa6ffd81d
|
| 123 |
+
121, 0x31ad47079950ade4
|
| 124 |
+
122, 0x342b27547b900872
|
| 125 |
+
123, 0x171b0e20b9ef1a76
|
| 126 |
+
124, 0xdf10ce6318b03654
|
| 127 |
+
125, 0x1d625df4aa718897
|
| 128 |
+
126, 0x8712715a9f6e02ec
|
| 129 |
+
127, 0xb4a072da725bca3b
|
| 130 |
+
128, 0x19d346cb7734bd42
|
| 131 |
+
129, 0xfd4281d311cb2958
|
| 132 |
+
130, 0x58274c9519fc8789
|
| 133 |
+
131, 0x4cacf29d885fd544
|
| 134 |
+
132, 0x784b14d1c2523b80
|
| 135 |
+
133, 0x2d25242131bb2373
|
| 136 |
+
134, 0xcd2a5e43a7d9abf9
|
| 137 |
+
135, 0x15eda3806e650ecb
|
| 138 |
+
136, 0xdaac5e277d764d96
|
| 139 |
+
137, 0xdc5a5dd59aaa94e0
|
| 140 |
+
138, 0x40d00237a46d5999
|
| 141 |
+
139, 0x6205dd35a692743f
|
| 142 |
+
140, 0xbbd8236740361f09
|
| 143 |
+
141, 0x1625c9f4e7288bf9
|
| 144 |
+
142, 0xb74f12df1479e3ce
|
| 145 |
+
143, 0xb2d72a51b43d7131
|
| 146 |
+
144, 0xf006a324b3707c83
|
| 147 |
+
145, 0x28e8ab4abe7655b8
|
| 148 |
+
146, 0xfb480093ad7ab55
|
| 149 |
+
147, 0x3f8abd0d6ff8d272
|
| 150 |
+
148, 0xc81a94177ac26bb7
|
| 151 |
+
149, 0x3cdc178307751b14
|
| 152 |
+
150, 0x9de84cc2b10ba025
|
| 153 |
+
151, 0x3f8ab5aefcd046e2
|
| 154 |
+
152, 0x43bdb894e1ee83b2
|
| 155 |
+
153, 0xe288a40f3f06ac9d
|
| 156 |
+
154, 0xdab62a7d04b4f30f
|
| 157 |
+
155, 0x49f4e20295e1a805
|
| 158 |
+
156, 0x3643764805e0edef
|
| 159 |
+
157, 0x9449954618b6b
|
| 160 |
+
158, 0x6c87e0d4508e0ce0
|
| 161 |
+
159, 0x3a334be688a9dd7b
|
| 162 |
+
160, 0xb35c39228776e499
|
| 163 |
+
161, 0xc4118bfff938490e
|
| 164 |
+
162, 0x88cbde3dcbb034b2
|
| 165 |
+
163, 0xf91b287793c417c3
|
| 166 |
+
164, 0x42b15f731a59f5b3
|
| 167 |
+
165, 0xffa27104bbe4814d
|
| 168 |
+
166, 0x1b6789d138beccde
|
| 169 |
+
167, 0x542c2c1440d0ceb9
|
| 170 |
+
168, 0x367294504d18fa0d
|
| 171 |
+
169, 0xf918b60e804a1b58
|
| 172 |
+
170, 0xd390964e33a9d0e3
|
| 173 |
+
171, 0x23bb1be7c4030fe8
|
| 174 |
+
172, 0x9731054d039a8afb
|
| 175 |
+
173, 0x1a6205026b9d139b
|
| 176 |
+
174, 0x2fa13b318254a07e
|
| 177 |
+
175, 0x69571de7d8520626
|
| 178 |
+
176, 0x641a13d7c03332b7
|
| 179 |
+
177, 0x76a6237818f7a441
|
| 180 |
+
178, 0x4e77860d0c660d81
|
| 181 |
+
179, 0x4441448a1c1cbdb2
|
| 182 |
+
180, 0xccd7783a042046e5
|
| 183 |
+
181, 0xf620d8e0805e3200
|
| 184 |
+
182, 0x7de02971367fdd0c
|
| 185 |
+
183, 0x539c263c5914cab1
|
| 186 |
+
184, 0x9c3b9ba1a87bbf08
|
| 187 |
+
185, 0x6d95baa34cda215f
|
| 188 |
+
186, 0x2db3f83ace0bac5f
|
| 189 |
+
187, 0x7f5af1da2dc670a4
|
| 190 |
+
188, 0xfcc098d16c891bfb
|
| 191 |
+
189, 0x81a33df1d7a5ab12
|
| 192 |
+
190, 0x767b0f863c8e9882
|
| 193 |
+
191, 0x7a92983830de483d
|
| 194 |
+
192, 0xfa7598c37a79ac25
|
| 195 |
+
193, 0xb89b3ca42ce03053
|
| 196 |
+
194, 0x457a542b8efed4f7
|
| 197 |
+
195, 0x571b7737fd0eeda7
|
| 198 |
+
196, 0xa0f59e524485c0a
|
| 199 |
+
197, 0x82dca766b7901efd
|
| 200 |
+
198, 0xa68243caf6a3bd5d
|
| 201 |
+
199, 0x1bac981c6c740e5e
|
| 202 |
+
200, 0xbcd51bedf9103e44
|
| 203 |
+
201, 0x4e197efd3ae5a7bf
|
| 204 |
+
202, 0x523568efd782268b
|
| 205 |
+
203, 0x5ec4ef1191fef09
|
| 206 |
+
204, 0xed751ed5e31c9ab
|
| 207 |
+
205, 0x44eac24de03e1b29
|
| 208 |
+
206, 0x9237d57c011d3fb3
|
| 209 |
+
207, 0xa8c6da0f7692f235
|
| 210 |
+
208, 0x9f9eb6bc15d6cac7
|
| 211 |
+
209, 0x34bb8e0c93427aad
|
| 212 |
+
210, 0x115febd738eaac4a
|
| 213 |
+
211, 0xa439991ed139d27a
|
| 214 |
+
212, 0x45c7c2633d8710a2
|
| 215 |
+
213, 0x48b7475f3405a3ce
|
| 216 |
+
214, 0x80158497c77bd00b
|
| 217 |
+
215, 0x935c316a5b1657cb
|
| 218 |
+
216, 0x59c5d54440e9695e
|
| 219 |
+
217, 0x337c78c5b3d0ede2
|
| 220 |
+
218, 0x8c46bb956b93790d
|
| 221 |
+
219, 0xbf1dd03e471d71c5
|
| 222 |
+
220, 0x2d375e90a4bef583
|
| 223 |
+
221, 0xd0365428331b3790
|
| 224 |
+
222, 0xfcd3969ac827ecd4
|
| 225 |
+
223, 0x392fb6c580498410
|
| 226 |
+
224, 0x6d6db4ceab5ea6c0
|
| 227 |
+
225, 0x9bf84f1972e24786
|
| 228 |
+
226, 0x798dfd820959dcc5
|
| 229 |
+
227, 0x2e425095e65e8bfb
|
| 230 |
+
228, 0x8c1aa11536b1c9c3
|
| 231 |
+
229, 0xd28e2ef9b12f6f74
|
| 232 |
+
230, 0x86583bc98c8f78d2
|
| 233 |
+
231, 0x489877530e3f93e7
|
| 234 |
+
232, 0xb1d9430631104a15
|
| 235 |
+
233, 0x1814f6098e6263bd
|
| 236 |
+
234, 0x8e2658a4e0d4cd53
|
| 237 |
+
235, 0x5afe20e2531cdb2a
|
| 238 |
+
236, 0x30d02f7c4755c9bf
|
| 239 |
+
237, 0xe1e217cda16ed2d2
|
| 240 |
+
238, 0xccb4913a42e3b791
|
| 241 |
+
239, 0xfff21363ac183226
|
| 242 |
+
240, 0xe788690bbda147a7
|
| 243 |
+
241, 0x76905cf5917bfc6a
|
| 244 |
+
242, 0x2a8fa58f7916f52c
|
| 245 |
+
243, 0xf903c0cc0357815a
|
| 246 |
+
244, 0x15d20f243a4998d2
|
| 247 |
+
245, 0x5b7decee5a86ea44
|
| 248 |
+
246, 0x114f7fc421211185
|
| 249 |
+
247, 0x328eb21715764c50
|
| 250 |
+
248, 0xaffaa3f45c0678fd
|
| 251 |
+
249, 0x2579e6ef50378393
|
| 252 |
+
250, 0x7610ab7743c19795
|
| 253 |
+
251, 0xf9923d2bd101b197
|
| 254 |
+
252, 0x57e42e7a62ba7e53
|
| 255 |
+
253, 0x9f1dc217b4f02901
|
| 256 |
+
254, 0x88a9ebd86509b234
|
| 257 |
+
255, 0x867fc926aecc8591
|
| 258 |
+
256, 0xaf22c1bfef04c718
|
| 259 |
+
257, 0x39f701f0313f4288
|
| 260 |
+
258, 0x6171ad397e6faab2
|
| 261 |
+
259, 0x239bb5b9abdec4fc
|
| 262 |
+
260, 0xd9a591e25dd01c6e
|
| 263 |
+
261, 0x826dc4a75b628e49
|
| 264 |
+
262, 0xf112b152c408f47
|
| 265 |
+
263, 0x6843a06110f86c0
|
| 266 |
+
264, 0x965e56a7185c1332
|
| 267 |
+
265, 0x8d84492edbc71710
|
| 268 |
+
266, 0xeee8ec111cfd1319
|
| 269 |
+
267, 0xf2858e94ad98e458
|
| 270 |
+
268, 0xbc9589fdf5f3a97e
|
| 271 |
+
269, 0xaf0ceef3bc375130
|
| 272 |
+
270, 0x48f4aaf13fa75c1e
|
| 273 |
+
271, 0x111e9db47bee758f
|
| 274 |
+
272, 0xea3171df130164ba
|
| 275 |
+
273, 0x2a7bbe30bf827ab6
|
| 276 |
+
274, 0xc516c3fdbf758c35
|
| 277 |
+
275, 0xec55097754b04be5
|
| 278 |
+
276, 0x374a997d52b6d3e6
|
| 279 |
+
277, 0x487df5456085ffbc
|
| 280 |
+
278, 0x528883b84df8eafe
|
| 281 |
+
279, 0x805f77ab5ba26f86
|
| 282 |
+
280, 0x8eb81477dc04f213
|
| 283 |
+
281, 0x471ea08ec6794d72
|
| 284 |
+
282, 0x69d3667ecc4d2176
|
| 285 |
+
283, 0x98b7b6e295548a66
|
| 286 |
+
284, 0x3877713c173f8f2
|
| 287 |
+
285, 0xa00542570d0e8de3
|
| 288 |
+
286, 0xf534b1bfa4033e50
|
| 289 |
+
287, 0x7e1fedeac8bf6b26
|
| 290 |
+
288, 0x8043f37c89628af4
|
| 291 |
+
289, 0x1dd7039ec295e86d
|
| 292 |
+
290, 0xce9c05b763a40cc4
|
| 293 |
+
291, 0x246926481e61028f
|
| 294 |
+
292, 0xb7cb0f1babf5893b
|
| 295 |
+
293, 0xefe6b777f37fc63e
|
| 296 |
+
294, 0xebbcabb4cb35cdcb
|
| 297 |
+
295, 0x39fa63cd711eeea9
|
| 298 |
+
296, 0xad5d3ba7aaf30c8d
|
| 299 |
+
297, 0x8e9e78fe46021990
|
| 300 |
+
298, 0xc7eaef6e7d5a3c62
|
| 301 |
+
299, 0xefccdd5495d3f386
|
| 302 |
+
300, 0x2179557ee8cfc76a
|
| 303 |
+
301, 0x88a77f621f0885ce
|
| 304 |
+
302, 0xafda62674543d90c
|
| 305 |
+
303, 0xb8e6fbe2e13e56c0
|
| 306 |
+
304, 0x8bfbbe26a14f9b1a
|
| 307 |
+
305, 0x1404f59f5851f8c3
|
| 308 |
+
306, 0x1140c53a0489566d
|
| 309 |
+
307, 0x3edf2d138b5c3f1d
|
| 310 |
+
308, 0x75d6bb275d817dc
|
| 311 |
+
309, 0x8e660ae27107664e
|
| 312 |
+
310, 0x7a8021038ee303e1
|
| 313 |
+
311, 0x2042ef5eefa9079f
|
| 314 |
+
312, 0xe3e7b90bbf6d457a
|
| 315 |
+
313, 0xf3f819d2bb9405b
|
| 316 |
+
314, 0x522e42155cae0c10
|
| 317 |
+
315, 0xf5bfbb975b40e233
|
| 318 |
+
316, 0x2cf82b614dd95cfa
|
| 319 |
+
317, 0x183ef4a96bc40e55
|
| 320 |
+
318, 0x9f6e351c5ba4e752
|
| 321 |
+
319, 0x37c1110683c90846
|
| 322 |
+
320, 0x1d89b7a996d8a977
|
| 323 |
+
321, 0x18a444f77c7cb4d9
|
| 324 |
+
322, 0xd0a8a971b78dc893
|
| 325 |
+
323, 0x860232fb9e6543f1
|
| 326 |
+
324, 0x60b6097f51002555
|
| 327 |
+
325, 0xca1e5214123e3894
|
| 328 |
+
326, 0xe03fe695c95f99bb
|
| 329 |
+
327, 0x2c7c6779d5f03622
|
| 330 |
+
328, 0xafeeee42f63055d1
|
| 331 |
+
329, 0x670dde905515936a
|
| 332 |
+
330, 0x9a922f42b59fb094
|
| 333 |
+
331, 0xddb5ff49af5a651a
|
| 334 |
+
332, 0xe61b04c9e58ebbf8
|
| 335 |
+
333, 0x4e459dcf272e7fc4
|
| 336 |
+
334, 0xd549e92c16adceeb
|
| 337 |
+
335, 0x7a17dba1299d4a9c
|
| 338 |
+
336, 0x825d756109f2b585
|
| 339 |
+
337, 0xba142e61a9cb203e
|
| 340 |
+
338, 0xc2a19f00e9c04a30
|
| 341 |
+
339, 0x2d0f8140d23d0652
|
| 342 |
+
340, 0x8b866d4d4d6caaf4
|
| 343 |
+
341, 0x4f11d90dd91f8217
|
| 344 |
+
342, 0xf6efc37373b9e0d
|
| 345 |
+
343, 0x248493d6cd6a4736
|
| 346 |
+
344, 0xd12b6ae74a951a3e
|
| 347 |
+
345, 0x56e34722070b70a7
|
| 348 |
+
346, 0x22d3f201cc9fa0eb
|
| 349 |
+
347, 0xbfdcc320008291b7
|
| 350 |
+
348, 0x1a7a6922e9204fbd
|
| 351 |
+
349, 0x831421e0c4945ae4
|
| 352 |
+
350, 0x66316feddddf0e11
|
| 353 |
+
351, 0xa8c86a1517456554
|
| 354 |
+
352, 0x14a9049ad989e335
|
| 355 |
+
353, 0x837022259f141ecd
|
| 356 |
+
354, 0xcb71793a06c261f7
|
| 357 |
+
355, 0x4aeefc07ebe09a79
|
| 358 |
+
356, 0x8982f15aa3b6594b
|
| 359 |
+
357, 0x67bccfa7ed9b0d5b
|
| 360 |
+
358, 0xb377463b523e9dec
|
| 361 |
+
359, 0x53d3d594870fecb7
|
| 362 |
+
360, 0xa5274b1caec5a60a
|
| 363 |
+
361, 0xd6316d0cb643db39
|
| 364 |
+
362, 0xabc1a9b536de88ce
|
| 365 |
+
363, 0xed2fdb1383d2a077
|
| 366 |
+
364, 0x12319c6feb97221b
|
| 367 |
+
365, 0x7e0f6cd40ef47403
|
| 368 |
+
366, 0x86135c84fe26dbf8
|
| 369 |
+
367, 0xc96622d3fbbee19b
|
| 370 |
+
368, 0xe3989d8d8511573f
|
| 371 |
+
369, 0x42cc365554d1fdc7
|
| 372 |
+
370, 0x4c1a1eb8bbce8b4f
|
| 373 |
+
371, 0xfc4e30e7ef2034c1
|
| 374 |
+
372, 0xc490444317a91e76
|
| 375 |
+
373, 0x7ccdf469ff5dc81c
|
| 376 |
+
374, 0xf5a0da4110cc09d7
|
| 377 |
+
375, 0x505227baf34c0fb5
|
| 378 |
+
376, 0xbe58737e8a35cc88
|
| 379 |
+
377, 0xd449bee91b3e8c41
|
| 380 |
+
378, 0x3e590e23299d0e6
|
| 381 |
+
379, 0x291a7d9e0a64caf7
|
| 382 |
+
380, 0xdc6fafbdfebd2293
|
| 383 |
+
381, 0x8223f1e259fe8a65
|
| 384 |
+
382, 0x6186fbc9efd9e3df
|
| 385 |
+
383, 0xfda39b07e4007ffb
|
| 386 |
+
384, 0xfc19aea98574dc02
|
| 387 |
+
385, 0xd0e10d354fcacd8c
|
| 388 |
+
386, 0xc9619916544a55a5
|
| 389 |
+
387, 0xd454d50a8c8558cd
|
| 390 |
+
388, 0xcd94a246712d91e
|
| 391 |
+
389, 0x76a771f5d1231cce
|
| 392 |
+
390, 0xdd20cb2b7b370ee5
|
| 393 |
+
391, 0xa6f4f50feca57c49
|
| 394 |
+
392, 0x78c8fb431f17ab9c
|
| 395 |
+
393, 0x1b692b79a59b43cc
|
| 396 |
+
394, 0x4c45045d287da7e6
|
| 397 |
+
395, 0x522132e18bf43928
|
| 398 |
+
396, 0x25c458983138b41c
|
| 399 |
+
397, 0x2a1fb426ef229796
|
| 400 |
+
398, 0x74dc324c74e5dd3d
|
| 401 |
+
399, 0x6df75e3eb6eb5374
|
| 402 |
+
400, 0xb63f2f4f9ca25b61
|
| 403 |
+
401, 0xac72286112ee54d6
|
| 404 |
+
402, 0x5a966f3d0a6863c4
|
| 405 |
+
403, 0x8d7046bc64a46fc2
|
| 406 |
+
404, 0xa7b740fd6e3087eb
|
| 407 |
+
405, 0xcdbcbe0340cfcdf5
|
| 408 |
+
406, 0xcb632613bf312b65
|
| 409 |
+
407, 0xa91b3f2c2aac238b
|
| 410 |
+
408, 0xa06deb3f5ae555a3
|
| 411 |
+
409, 0x29d72e1f8db69
|
| 412 |
+
410, 0x2d004bae09728ea6
|
| 413 |
+
411, 0xc6eee5dce0736cc1
|
| 414 |
+
412, 0xa7493145500ff60f
|
| 415 |
+
413, 0xc4d68c4aa18ab93c
|
| 416 |
+
414, 0x8210c29e79d48d7f
|
| 417 |
+
415, 0xd0999d7889ecbef6
|
| 418 |
+
416, 0x6e3bd61e66e93566
|
| 419 |
+
417, 0xe6cc13d47d7d7b1f
|
| 420 |
+
418, 0x3d6f181f42e03979
|
| 421 |
+
419, 0xbed4e14fd867604a
|
| 422 |
+
420, 0xbe511c84067bd86d
|
| 423 |
+
421, 0x49a876d89e697d38
|
| 424 |
+
422, 0xc04c3dde8f889c98
|
| 425 |
+
423, 0xaf293eeab0f53e3f
|
| 426 |
+
424, 0x9f6291dd65732cd6
|
| 427 |
+
425, 0xd7811ac01de78c01
|
| 428 |
+
426, 0xe385cf0261d50ec2
|
| 429 |
+
427, 0x5a64134b3542bbf
|
| 430 |
+
428, 0xf9d1302bc6f13a68
|
| 431 |
+
429, 0x5d2aabbea37d8c31
|
| 432 |
+
430, 0xd9842e99a5192970
|
| 433 |
+
431, 0x713eadc4cd30e837
|
| 434 |
+
432, 0xb7b002fc72abb413
|
| 435 |
+
433, 0x276cfeea526af1cf
|
| 436 |
+
434, 0x8519fe79b633a0ce
|
| 437 |
+
435, 0x2f0e87363705a3e2
|
| 438 |
+
436, 0x9adbac0be3c371e7
|
| 439 |
+
437, 0xf3f44ba899a6173c
|
| 440 |
+
438, 0x782d6c29618fde2b
|
| 441 |
+
439, 0x7f61062acec408f
|
| 442 |
+
440, 0x6e79cd836359258f
|
| 443 |
+
441, 0x5c8e9b138df5785a
|
| 444 |
+
442, 0xa54359c9f39a9a84
|
| 445 |
+
443, 0xeec3f033135084b0
|
| 446 |
+
444, 0x883ee717787a535c
|
| 447 |
+
445, 0x9a2422b513a73b00
|
| 448 |
+
446, 0x2dd4beddcdd64a58
|
| 449 |
+
447, 0x90c8a13202239c7b
|
| 450 |
+
448, 0x85b352ab759646d9
|
| 451 |
+
449, 0x139f5cb2e46c53aa
|
| 452 |
+
450, 0xe1d3ba6c721c66d1
|
| 453 |
+
451, 0xaa66e0edc4b60a98
|
| 454 |
+
452, 0x3521275c75be29b6
|
| 455 |
+
453, 0x490a5190b3edfa5d
|
| 456 |
+
454, 0xd2abcdd2ccb2f14e
|
| 457 |
+
455, 0x9d9be8bef4a5857d
|
| 458 |
+
456, 0xde19676f13ef7755
|
| 459 |
+
457, 0xdac2fee2e42615f3
|
| 460 |
+
458, 0xf4239801cb02f2ab
|
| 461 |
+
459, 0xaa8bf923ed91875c
|
| 462 |
+
460, 0x61d18a1940e4c7c0
|
| 463 |
+
461, 0x1eb6aa3d5f077a6d
|
| 464 |
+
462, 0xee7374c063bf29d8
|
| 465 |
+
463, 0x2f0a59e34d76268d
|
| 466 |
+
464, 0xc92e80e17d1eb3e9
|
| 467 |
+
465, 0xafd05b3ec3d2ca72
|
| 468 |
+
466, 0x28a61ad8d6c497b8
|
| 469 |
+
467, 0xa7094d6834ad7d47
|
| 470 |
+
468, 0x57d80ea9eccbb4f
|
| 471 |
+
469, 0xb047e0fee6cdaf16
|
| 472 |
+
470, 0x44f41b5eb48c00bb
|
| 473 |
+
471, 0xd6dc8e1eb9c8c9ba
|
| 474 |
+
472, 0x47adfd2c638c7849
|
| 475 |
+
473, 0x365d63db7d526c68
|
| 476 |
+
474, 0xc21cda439016135d
|
| 477 |
+
475, 0x14d10c3f0f98863c
|
| 478 |
+
476, 0xa93e56f74e037602
|
| 479 |
+
477, 0x3b4e9c8915bdc9
|
| 480 |
+
478, 0xb46f5ae155e54aa2
|
| 481 |
+
479, 0x8e470d21ce1943e1
|
| 482 |
+
480, 0x60b96301b5ba2e8d
|
| 483 |
+
481, 0x1b473a41d381f9ff
|
| 484 |
+
482, 0xabcf5a8e3269e73f
|
| 485 |
+
483, 0xd410f6e94fb21fa1
|
| 486 |
+
484, 0x65d1a47eebf87e5e
|
| 487 |
+
485, 0x48eaa201c61cb843
|
| 488 |
+
486, 0x212c1abc2499bfc5
|
| 489 |
+
487, 0x4255ad8377d2d8d
|
| 490 |
+
488, 0x44caeef472010612
|
| 491 |
+
489, 0xffae764524f572f2
|
| 492 |
+
490, 0x78d374d20c9ee550
|
| 493 |
+
491, 0x6e003206c0511cee
|
| 494 |
+
492, 0x7998a159145bfb82
|
| 495 |
+
493, 0x921239650bda1d4d
|
| 496 |
+
494, 0xae05025509bcfdc5
|
| 497 |
+
495, 0xc6430c980be407b4
|
| 498 |
+
496, 0x78524f1744b153f1
|
| 499 |
+
497, 0x84089e6f468181fe
|
| 500 |
+
498, 0x8d0d21d7dfb6c254
|
| 501 |
+
499, 0x90bad90502a33603
|
| 502 |
+
500, 0x3072a403cbd16315
|
| 503 |
+
501, 0xdfadddf3f1c040c2
|
| 504 |
+
502, 0x22f0b0639d9ff975
|
| 505 |
+
503, 0xb49e48a4cad0765b
|
| 506 |
+
504, 0x95a0a04f8239709d
|
| 507 |
+
505, 0x56e147a24a4c481f
|
| 508 |
+
506, 0xacf16ef61dea4c7e
|
| 509 |
+
507, 0x424040afd2700de6
|
| 510 |
+
508, 0xc67e8096a3c717a9
|
| 511 |
+
509, 0x39f164181dd0a399
|
| 512 |
+
510, 0x2449cedc1d62198c
|
| 513 |
+
511, 0x7a53df11a1f1a61c
|
| 514 |
+
512, 0x5596f1d4a3badae3
|
| 515 |
+
513, 0x38ed4c822072b3d0
|
| 516 |
+
514, 0xf07ef346b3fd730a
|
| 517 |
+
515, 0xfd349c35c3ed51fd
|
| 518 |
+
516, 0x2f15c9c7890f8f32
|
| 519 |
+
517, 0x3b470df52b173c29
|
| 520 |
+
518, 0xd31bfc8981281af7
|
| 521 |
+
519, 0xbbcc9bdf561215bb
|
| 522 |
+
520, 0x5782fffea326574f
|
| 523 |
+
521, 0xb0ebdcfcc5e03290
|
| 524 |
+
522, 0x7fd89d93d2b3fbef
|
| 525 |
+
523, 0x280ea1865d9ba2
|
| 526 |
+
524, 0xe726959845b2c100
|
| 527 |
+
525, 0xd0361f032cd7dbb1
|
| 528 |
+
526, 0x3c65ec2028b81a22
|
| 529 |
+
527, 0x5221e9b2188920bf
|
| 530 |
+
528, 0xeb5ab27c4125ec20
|
| 531 |
+
529, 0x80a32dd48b54f0a4
|
| 532 |
+
530, 0x369b5ced1012bebb
|
| 533 |
+
531, 0x582d35d76530bc6f
|
| 534 |
+
532, 0x7b50dc9b48e1e37d
|
| 535 |
+
533, 0x37fdfe8bbacf8dad
|
| 536 |
+
534, 0x7a0cb7e6e93840ea
|
| 537 |
+
535, 0xa1132c870be0b2ce
|
| 538 |
+
536, 0x9d8ac2c68267cd1a
|
| 539 |
+
537, 0x470969b647fa7df4
|
| 540 |
+
538, 0xabcb7d8adf7e2d24
|
| 541 |
+
539, 0xacdebec9bdf9eb1c
|
| 542 |
+
540, 0xe30f4cbf7eb6a59
|
| 543 |
+
541, 0x746673836c4df41d
|
| 544 |
+
542, 0x75120a6b647bb326
|
| 545 |
+
543, 0x2f4eab556c3f6878
|
| 546 |
+
544, 0xd84651ab05405b7a
|
| 547 |
+
545, 0x9e695808b9622284
|
| 548 |
+
546, 0xc93b71e56aa6e1a5
|
| 549 |
+
547, 0x2be7f3be4a7b7050
|
| 550 |
+
548, 0x6497e910b6733241
|
| 551 |
+
549, 0xcf7050dfd08076fc
|
| 552 |
+
550, 0x4e3cc156eca183f7
|
| 553 |
+
551, 0xf801a33d9326c265
|
| 554 |
+
552, 0x6aa293c8a47d40e6
|
| 555 |
+
553, 0x28c429755faa6230
|
| 556 |
+
554, 0x82b818651f54e7bb
|
| 557 |
+
555, 0xa84d726d7acdbead
|
| 558 |
+
556, 0x5cfa535d5774965d
|
| 559 |
+
557, 0x4a34b7b1cb48d53
|
| 560 |
+
558, 0x86a7b5bce426de84
|
| 561 |
+
559, 0xfcd2307cecdb7318
|
| 562 |
+
560, 0x16dbaaa71181a038
|
| 563 |
+
561, 0x88e7e8cd261c2547
|
| 564 |
+
562, 0x3c09ba6d1d5ea913
|
| 565 |
+
563, 0x5dd3d643734ee5b6
|
| 566 |
+
564, 0x326d725fe8cbb33
|
| 567 |
+
565, 0x7bcca9ca2da8e784
|
| 568 |
+
566, 0x482dcf6b11d7f9a4
|
| 569 |
+
567, 0x1291b605b4cd3e04
|
| 570 |
+
568, 0x6988181b50e2f4a8
|
| 571 |
+
569, 0x649e3c37131fc292
|
| 572 |
+
570, 0x4eeb67b9e21eba54
|
| 573 |
+
571, 0xc051d39073dec45f
|
| 574 |
+
572, 0xc99c52e110270d67
|
| 575 |
+
573, 0xcb813d5d77868add
|
| 576 |
+
574, 0x423a5f13573e7ac0
|
| 577 |
+
575, 0x231ac4cc4fe73616
|
| 578 |
+
576, 0x4c22b888a6e600ea
|
| 579 |
+
577, 0x8059a6dc7c9e25c6
|
| 580 |
+
578, 0x49f498a5b8ad22de
|
| 581 |
+
579, 0xf1e812cc6d1826c8
|
| 582 |
+
580, 0xbbaf60abe8b11e00
|
| 583 |
+
581, 0x1d31d7f4d8be9a6a
|
| 584 |
+
582, 0xfeadce70a9a10c14
|
| 585 |
+
583, 0xb47c635bc136996a
|
| 586 |
+
584, 0xd88e694c8da030cb
|
| 587 |
+
585, 0xc41bbe132aff1364
|
| 588 |
+
586, 0x34249ab18a4b0800
|
| 589 |
+
587, 0xf14b5c825aa736cc
|
| 590 |
+
588, 0x2710be6b08df78e
|
| 591 |
+
589, 0x2ab56bcc9bf9e740
|
| 592 |
+
590, 0x9b7f6e591b5f648
|
| 593 |
+
591, 0xfb665c3772f34135
|
| 594 |
+
592, 0x628a0a5d2db5d8d5
|
| 595 |
+
593, 0xb3e3f251e61b5259
|
| 596 |
+
594, 0x82310ae33faf1b23
|
| 597 |
+
595, 0x24af8723a65cbd0b
|
| 598 |
+
596, 0x671c93282fc4ad97
|
| 599 |
+
597, 0x6cabeaac77270cad
|
| 600 |
+
598, 0xef4643fe38b02b7f
|
| 601 |
+
599, 0x7b011549d1ac6653
|
| 602 |
+
600, 0xe2af87b9fccfe89
|
| 603 |
+
601, 0x36b71ad67197ac8a
|
| 604 |
+
602, 0xdbba55d06f2fd93b
|
| 605 |
+
603, 0xf571dbd764b7f7e5
|
| 606 |
+
604, 0x38ea402501cdbd45
|
| 607 |
+
605, 0xb8ab5b5b1bab2913
|
| 608 |
+
606, 0xfab973c4d45f32bd
|
| 609 |
+
607, 0x9364f1717c2636b9
|
| 610 |
+
608, 0xfad00f4d983e00fe
|
| 611 |
+
609, 0xc90c532a11aef75a
|
| 612 |
+
610, 0x64a6eda96e44783c
|
| 613 |
+
611, 0x35891f2eb84520be
|
| 614 |
+
612, 0x28d216080caed43
|
| 615 |
+
613, 0x129629cc5bd206f6
|
| 616 |
+
614, 0x22c3d39822cbb4b3
|
| 617 |
+
615, 0xf1efbf4cce1eaa2b
|
| 618 |
+
616, 0x7070cba12524ed08
|
| 619 |
+
617, 0xa7ed0be9deabf20d
|
| 620 |
+
618, 0x8ddb4cd6b454f76b
|
| 621 |
+
619, 0xb82814b1db37b63
|
| 622 |
+
620, 0x418e83b36de01876
|
| 623 |
+
621, 0x9a538c7f39c6413
|
| 624 |
+
622, 0xee0cd7abf8a2ecb9
|
| 625 |
+
623, 0xa9222b07e95590f3
|
| 626 |
+
624, 0x6296a415d68341e6
|
| 627 |
+
625, 0x981e0a5a8f811929
|
| 628 |
+
626, 0x4bb372d3b0de283d
|
| 629 |
+
627, 0xa9805b5971866e16
|
| 630 |
+
628, 0xaf3b5f5183497657
|
| 631 |
+
629, 0x2152b0fd23c3d9f
|
| 632 |
+
630, 0xb730c325b7173180
|
| 633 |
+
631, 0x1e3439d231608c19
|
| 634 |
+
632, 0x1c5ba6031379823c
|
| 635 |
+
633, 0x87f5d12d6d365cbc
|
| 636 |
+
634, 0xd3bc7f29614bc594
|
| 637 |
+
635, 0x63102214bb391268
|
| 638 |
+
636, 0x482bbd5bba648a44
|
| 639 |
+
637, 0x6a23604690759dc4
|
| 640 |
+
638, 0x4091d41408d3a39e
|
| 641 |
+
639, 0x7cd017f922101b15
|
| 642 |
+
640, 0x7ce9004ac5f9231
|
| 643 |
+
641, 0x978bc3d8ec7f7fdf
|
| 644 |
+
642, 0x5bd0c4d780580c11
|
| 645 |
+
643, 0x4313c068bb040153
|
| 646 |
+
644, 0x3ab7dab7bc38bf80
|
| 647 |
+
645, 0x3aaf9c187728deea
|
| 648 |
+
646, 0x6633a4ce8efb88d9
|
| 649 |
+
647, 0x7263b089878f00fc
|
| 650 |
+
648, 0xd0d767e96fe00eb8
|
| 651 |
+
649, 0x184a7c0c01908028
|
| 652 |
+
650, 0x1ebdf41e6f76e186
|
| 653 |
+
651, 0xeb740ee1d0402083
|
| 654 |
+
652, 0xfccf4974edb1c339
|
| 655 |
+
653, 0x16e2707aa28306d
|
| 656 |
+
654, 0x1684f0bdb018c3a5
|
| 657 |
+
655, 0x887b6b67b88aa862
|
| 658 |
+
656, 0x923d7810a2bea33a
|
| 659 |
+
657, 0x56b3560babef5d6b
|
| 660 |
+
658, 0xb39a14614c54b8c6
|
| 661 |
+
659, 0x33e4dc545a509fc8
|
| 662 |
+
660, 0x26e21f84142da9b
|
| 663 |
+
661, 0xdd07598125756855
|
| 664 |
+
662, 0x572d49a071d7ae0a
|
| 665 |
+
663, 0xba3c7e3baea28760
|
| 666 |
+
664, 0x7ecdb2d714db4b61
|
| 667 |
+
665, 0x1c62b4920e1b2fe2
|
| 668 |
+
666, 0x71bfafb70092834a
|
| 669 |
+
667, 0xd710a4228f60d56a
|
| 670 |
+
668, 0xeb16277d4ce4e95b
|
| 671 |
+
669, 0x968168c90b16d3a1
|
| 672 |
+
670, 0xac3439dfe8ad0062
|
| 673 |
+
671, 0x5a8226f9dd5876ad
|
| 674 |
+
672, 0xb843affe917291b0
|
| 675 |
+
673, 0xd76d1e67051f8259
|
| 676 |
+
674, 0xb73a6638cce8ccde
|
| 677 |
+
675, 0xa0e6afd3c7295f9
|
| 678 |
+
676, 0xff8857b4bbb5f4c6
|
| 679 |
+
677, 0x99becf78938f0426
|
| 680 |
+
678, 0xfcd17edc1e70f004
|
| 681 |
+
679, 0x6223b8b23f2f50
|
| 682 |
+
680, 0xca875f3e84587b4c
|
| 683 |
+
681, 0x7d1e81e589f87fb9
|
| 684 |
+
682, 0x9eb621586aa826fc
|
| 685 |
+
683, 0xf46fb9ef5b9c2086
|
| 686 |
+
684, 0x2882c9b7092725f3
|
| 687 |
+
685, 0x5493f099bbedcd02
|
| 688 |
+
686, 0x90c1ec979ffa811d
|
| 689 |
+
687, 0x963f765025bcc53
|
| 690 |
+
688, 0x56194e3ec3d9d4e9
|
| 691 |
+
689, 0x7ec4720954cac1f0
|
| 692 |
+
690, 0xfab3145171af7f90
|
| 693 |
+
691, 0x52a0b4e41a13b593
|
| 694 |
+
692, 0x740e2d4d5909d126
|
| 695 |
+
693, 0x98f5339c09c94a28
|
| 696 |
+
694, 0x1700e462fe8dec76
|
| 697 |
+
695, 0x3dbffc2aa4695ac3
|
| 698 |
+
696, 0x5763edacabdfe2a1
|
| 699 |
+
697, 0x7b5b623ce49ef21d
|
| 700 |
+
698, 0x30addc66f49860df
|
| 701 |
+
699, 0xcc7511a6c31bceda
|
| 702 |
+
700, 0x1b25b61ca75db43b
|
| 703 |
+
701, 0x416bc4c298e59046
|
| 704 |
+
702, 0x4cd11fe2d74e4649
|
| 705 |
+
703, 0xb54458a9229fc978
|
| 706 |
+
704, 0x8c21a27882b6ca35
|
| 707 |
+
705, 0x57887c8b5e01639b
|
| 708 |
+
706, 0xf4e893da996680bb
|
| 709 |
+
707, 0x8d601297702c9c0d
|
| 710 |
+
708, 0x2a27904a30aa53af
|
| 711 |
+
709, 0x497800f6917ea8d0
|
| 712 |
+
710, 0xe96db3340ada9c00
|
| 713 |
+
711, 0xcc23166f14c010ee
|
| 714 |
+
712, 0x782690d78fa65ec9
|
| 715 |
+
713, 0xf3e00d74a0878eda
|
| 716 |
+
714, 0xa7cbb683decca0a3
|
| 717 |
+
715, 0xdd2e038e683a94aa
|
| 718 |
+
716, 0xe2096ff8da896ca5
|
| 719 |
+
717, 0xf7c83400afdabe11
|
| 720 |
+
718, 0x395b8c6f6a4086a4
|
| 721 |
+
719, 0x4a164ec05bee71d4
|
| 722 |
+
720, 0xe87aa5d1ca0462fe
|
| 723 |
+
721, 0x8dbc5aed6dff9ceb
|
| 724 |
+
722, 0x12120d1e9552707b
|
| 725 |
+
723, 0x877dca6889b3e6cd
|
| 726 |
+
724, 0xbd65605c01e900fb
|
| 727 |
+
725, 0xbd6b82c4157c3115
|
| 728 |
+
726, 0x8b60282732caf78a
|
| 729 |
+
727, 0x279fcf5e5de9e57f
|
| 730 |
+
728, 0x34b34ebfb6a37eae
|
| 731 |
+
729, 0xd258cc1a14e03b7b
|
| 732 |
+
730, 0x9a528ba3db4a13fb
|
| 733 |
+
731, 0xffa0aea59d057746
|
| 734 |
+
732, 0x27fa7f456cd37c4e
|
| 735 |
+
733, 0xe1117a57a6fdce63
|
| 736 |
+
734, 0xdc8fc903970a1551
|
| 737 |
+
735, 0x492dd104f30faf29
|
| 738 |
+
736, 0x110def0959e5652b
|
| 739 |
+
737, 0x7f8d1997636fdd15
|
| 740 |
+
738, 0xfb77b05e538a9b59
|
| 741 |
+
739, 0x2e41fa35b4b01fc6
|
| 742 |
+
740, 0xbc35ae69a3374085
|
| 743 |
+
741, 0x192c2a681c2d9b4b
|
| 744 |
+
742, 0x12566b8866c189d6
|
| 745 |
+
743, 0x9d88ea785c5185c8
|
| 746 |
+
744, 0x30a621ad5f983c4
|
| 747 |
+
745, 0x8b875efe1206f587
|
| 748 |
+
746, 0x224d25c3af6e3423
|
| 749 |
+
747, 0x7503e976a1ac7bcc
|
| 750 |
+
748, 0x3c98aa869e823859
|
| 751 |
+
749, 0x3d8835304b646892
|
| 752 |
+
750, 0xf6353330ff970bc2
|
| 753 |
+
751, 0x8a673f5e2edb8acb
|
| 754 |
+
752, 0xf2fdcc53493838b9
|
| 755 |
+
753, 0x85ddcd526236af16
|
| 756 |
+
754, 0x60afb99814c676c5
|
| 757 |
+
755, 0x32a1c2749e281ca8
|
| 758 |
+
756, 0x2367a92ae3bee9ca
|
| 759 |
+
757, 0x219fe082703743cc
|
| 760 |
+
758, 0x34d8b74dc85182a9
|
| 761 |
+
759, 0xdd04164c72db23f
|
| 762 |
+
760, 0xe293ac28fe2671a9
|
| 763 |
+
761, 0x9ca7d169cbda6f45
|
| 764 |
+
762, 0x705c47972b4240ed
|
| 765 |
+
763, 0xc10eda9eeb536209
|
| 766 |
+
764, 0xc36ddacd0c94e85d
|
| 767 |
+
765, 0x8eb592c27e8cd0d2
|
| 768 |
+
766, 0x3e815991c76e7cc4
|
| 769 |
+
767, 0xac9cfce31acf7580
|
| 770 |
+
768, 0xbf7a4cb31c7aee94
|
| 771 |
+
769, 0x663077444aceecf6
|
| 772 |
+
770, 0xe7f614ff386eb568
|
| 773 |
+
771, 0x79d7a229c66912c0
|
| 774 |
+
772, 0x161ed4311f63e1f3
|
| 775 |
+
773, 0x308a5faeb9982ede
|
| 776 |
+
774, 0x7b38ddb9b7efd10
|
| 777 |
+
775, 0x1e103a2589b27ecf
|
| 778 |
+
776, 0x67b02baf4259f27e
|
| 779 |
+
777, 0x868921c115ea2eee
|
| 780 |
+
778, 0x959791912200f71e
|
| 781 |
+
779, 0x4dd55f36dec10557
|
| 782 |
+
780, 0xe3464d90080cb99d
|
| 783 |
+
781, 0xfb2d4f6accce652f
|
| 784 |
+
782, 0x109900a9257d77ba
|
| 785 |
+
783, 0x3c4bda8e2c83684c
|
| 786 |
+
784, 0xc9ae040fb7f868c6
|
| 787 |
+
785, 0x78098ffe994f4905
|
| 788 |
+
786, 0x7a94c33eca77f0b4
|
| 789 |
+
787, 0xbe6a2a95e9b5c0e8
|
| 790 |
+
788, 0x797d39cf963f4837
|
| 791 |
+
789, 0x8d2e249e4425d06d
|
| 792 |
+
790, 0x6ae2c30cd5da06f4
|
| 793 |
+
791, 0x904489de762b179f
|
| 794 |
+
792, 0x84713e2dfb591e3b
|
| 795 |
+
793, 0x6405a40da3f6f51b
|
| 796 |
+
794, 0x976b560d663a2df1
|
| 797 |
+
795, 0xed1c544784ba1e22
|
| 798 |
+
796, 0xca658e995ed9344c
|
| 799 |
+
797, 0x2b1c6b8e4db49025
|
| 800 |
+
798, 0x52b1513da528bad
|
| 801 |
+
799, 0x3c63406d256d9968
|
| 802 |
+
800, 0x63a31ca3d423f85e
|
| 803 |
+
801, 0xb05a81f55789a720
|
| 804 |
+
802, 0xd04412992c476c8e
|
| 805 |
+
803, 0x828ec2f77a150a3d
|
| 806 |
+
804, 0xee50926671bb60c6
|
| 807 |
+
805, 0x5aa70f93e2df61b4
|
| 808 |
+
806, 0x94d60fa2e8655858
|
| 809 |
+
807, 0x3f5e5b770703cc7d
|
| 810 |
+
808, 0xc62dfb2688ca7784
|
| 811 |
+
809, 0xaaf02e1e8ba89fe4
|
| 812 |
+
810, 0x4ab74e0d8c047405
|
| 813 |
+
811, 0x31ee04fbac6fcead
|
| 814 |
+
812, 0x1203b78b8228f5af
|
| 815 |
+
813, 0x412a70836f9aa71a
|
| 816 |
+
814, 0xab51cf98c03f1819
|
| 817 |
+
815, 0x783a3ce9ce137f65
|
| 818 |
+
816, 0x8897085b0a072cf2
|
| 819 |
+
817, 0x685dd9bde8798cb
|
| 820 |
+
818, 0x9a1fac7b1705e2c1
|
| 821 |
+
819, 0xf3e9ff98de48e9cb
|
| 822 |
+
820, 0x5c2d3eb1a1fbe917
|
| 823 |
+
821, 0x3bda718b6b54d82e
|
| 824 |
+
822, 0x29f2dd18f22f0821
|
| 825 |
+
823, 0xb992da1572ac3597
|
| 826 |
+
824, 0xacb69e7aa14b34f7
|
| 827 |
+
825, 0xcd36e3ad14f088d1
|
| 828 |
+
826, 0x6aaacc96a1ec55e8
|
| 829 |
+
827, 0xf8ac593f154fe68f
|
| 830 |
+
828, 0x18fc9cbff012339f
|
| 831 |
+
829, 0x2f3368ccbbb99899
|
| 832 |
+
830, 0x7cec7d17f37031f7
|
| 833 |
+
831, 0x96e86bfaadcb8fc2
|
| 834 |
+
832, 0x74f9e7ee3d42a752
|
| 835 |
+
833, 0xbd52f6c7d9b0733
|
| 836 |
+
834, 0xa48e6d96bb6ce1c9
|
| 837 |
+
835, 0xaefa058254b82133
|
| 838 |
+
836, 0xb7a19edfd0929107
|
| 839 |
+
837, 0x6160ce9125b26e26
|
| 840 |
+
838, 0x6537dbbde1d2aed
|
| 841 |
+
839, 0xc567f9a6bec52dde
|
| 842 |
+
840, 0xca29fd3f22443342
|
| 843 |
+
841, 0x7732aa6db6a1c476
|
| 844 |
+
842, 0x8f5a4d7df6b11b3
|
| 845 |
+
843, 0x76649262aa7e31e1
|
| 846 |
+
844, 0x60a13eb125fbc829
|
| 847 |
+
845, 0xc81e4d123dd21ac1
|
| 848 |
+
846, 0x643cbb09bb72f86b
|
| 849 |
+
847, 0xf971a98fb25555a6
|
| 850 |
+
848, 0xffa2774c66692d56
|
| 851 |
+
849, 0xcb33c16c50b13ea9
|
| 852 |
+
850, 0xfabf388dffda0e9b
|
| 853 |
+
851, 0x55d41ec12ca24b9f
|
| 854 |
+
852, 0x91cf693a3467e807
|
| 855 |
+
853, 0x6be2c00b2c31d6dd
|
| 856 |
+
854, 0xc5cf513b5251ae28
|
| 857 |
+
855, 0xffc4384212403dec
|
| 858 |
+
856, 0x45d4e1865255a69d
|
| 859 |
+
857, 0xfb1dcf956972086a
|
| 860 |
+
858, 0xcae946a55c4c55b8
|
| 861 |
+
859, 0x7351ac7720e385c1
|
| 862 |
+
860, 0x19aa8ffd86240254
|
| 863 |
+
861, 0x8f515ae78f4040da
|
| 864 |
+
862, 0x1e1ed2058de50fce
|
| 865 |
+
863, 0x22d006dcdb374243
|
| 866 |
+
864, 0x6e0f0ede7c95b441
|
| 867 |
+
865, 0x70e8aa81b53b4d25
|
| 868 |
+
866, 0x998f309ea41e3814
|
| 869 |
+
867, 0x89ed6598fb66f390
|
| 870 |
+
868, 0xb5997dc3278060df
|
| 871 |
+
869, 0xb2a021eac4f7e046
|
| 872 |
+
870, 0x3705b60aa2fd0768
|
| 873 |
+
871, 0xfc415079ab9200e
|
| 874 |
+
872, 0xf2871ac4cf45ecc9
|
| 875 |
+
873, 0x24bf758d2246175f
|
| 876 |
+
874, 0xac503dd6f8141b3
|
| 877 |
+
875, 0x4e879d12d9f03b3
|
| 878 |
+
876, 0x82034af8cf93b644
|
| 879 |
+
877, 0x59899dd7e478a6c7
|
| 880 |
+
878, 0xae90addb6eb11507
|
| 881 |
+
879, 0x1524ddf76730cdef
|
| 882 |
+
880, 0x6fd4afd5456b1c9d
|
| 883 |
+
881, 0xcddb9221ea001cbc
|
| 884 |
+
882, 0x64ff400bbf2e8604
|
| 885 |
+
883, 0x6dda10549b06ed9b
|
| 886 |
+
884, 0xed2c85104c261527
|
| 887 |
+
885, 0xc7e09217d29929a8
|
| 888 |
+
886, 0x56284df611a428b1
|
| 889 |
+
887, 0x1a7608289c0a61
|
| 890 |
+
888, 0x7cb63db15166ff66
|
| 891 |
+
889, 0xc6013c76fcdcdc72
|
| 892 |
+
890, 0x8e5dd566c7a5a676
|
| 893 |
+
891, 0x5a8e8565f40d133b
|
| 894 |
+
892, 0xe465973455848c44
|
| 895 |
+
893, 0xf92eecbfe0f3c2c0
|
| 896 |
+
894, 0x7d64155d4dcc5cac
|
| 897 |
+
895, 0xf17595706f988dad
|
| 898 |
+
896, 0xd590a001a6a19c5c
|
| 899 |
+
897, 0x82a164475758db3d
|
| 900 |
+
898, 0x6b144993ea1bbe32
|
| 901 |
+
899, 0x22a81a7a6e453779
|
| 902 |
+
900, 0x8e8c298df1a68a73
|
| 903 |
+
901, 0x78056afd6d936b4c
|
| 904 |
+
902, 0xaaceef0325faaf62
|
| 905 |
+
903, 0xe78bb7699f82266f
|
| 906 |
+
904, 0x523a2d283c5a5166
|
| 907 |
+
905, 0x7076d87088f6c6db
|
| 908 |
+
906, 0x6087dd54cff5aeb2
|
| 909 |
+
907, 0x7ef82e62cb851680
|
| 910 |
+
908, 0x4e8bcc8ed84d03d8
|
| 911 |
+
909, 0xd12fa0361df3cfd3
|
| 912 |
+
910, 0xefb89c79f8127297
|
| 913 |
+
911, 0xa9af4e2fbce0b1f8
|
| 914 |
+
912, 0x462136685b70331e
|
| 915 |
+
913, 0xe9e74c93da699b77
|
| 916 |
+
914, 0x9ec69215fb11d0c3
|
| 917 |
+
915, 0xc10f229939e3e111
|
| 918 |
+
916, 0x3f67fa79e41d2374
|
| 919 |
+
917, 0xd5e7c1a9a7185162
|
| 920 |
+
918, 0xa1dcce9ec91492fe
|
| 921 |
+
919, 0xd4e61f0727b5d21b
|
| 922 |
+
920, 0xdf6cdce46551800a
|
| 923 |
+
921, 0xa3f256ce906982d3
|
| 924 |
+
922, 0x209742a6b9ffc27
|
| 925 |
+
923, 0x4006c96958526a57
|
| 926 |
+
924, 0x9606aebc75a1967e
|
| 927 |
+
925, 0x91b9f42fb64189df
|
| 928 |
+
926, 0xb27119defcb938bc
|
| 929 |
+
927, 0x128cc7a84ba05597
|
| 930 |
+
928, 0x6c3df613c62d0d30
|
| 931 |
+
929, 0x3adf69d48b629ec7
|
| 932 |
+
930, 0xda42ee493837b128
|
| 933 |
+
931, 0xb8e770480e760bb5
|
| 934 |
+
932, 0x9feb55d57c99c626
|
| 935 |
+
933, 0x29812d80afdae3ed
|
| 936 |
+
934, 0xae4222a64276a8c7
|
| 937 |
+
935, 0xe3897212a5b4ed53
|
| 938 |
+
936, 0x98bedfd13886e669
|
| 939 |
+
937, 0xca858675d7fc0d0e
|
| 940 |
+
938, 0x28a359f665354234
|
| 941 |
+
939, 0xfac2ccabe4128b35
|
| 942 |
+
940, 0x61373cc5d11ca180
|
| 943 |
+
941, 0x7007605a4512a87a
|
| 944 |
+
942, 0xe71f8eade7b30b3d
|
| 945 |
+
943, 0x3a9e77f9b99bd04d
|
| 946 |
+
944, 0x70d3e42488098866
|
| 947 |
+
945, 0xd30fc159c7cd4d99
|
| 948 |
+
946, 0xe4d3f6600d2e2d6f
|
| 949 |
+
947, 0x1088324dfa955c25
|
| 950 |
+
948, 0x516437acd4764623
|
| 951 |
+
949, 0x38a31abe50d0aa03
|
| 952 |
+
950, 0x72e1054e9dc02ba
|
| 953 |
+
951, 0xe6971dd664d1a2e2
|
| 954 |
+
952, 0xf6698cb095d3b702
|
| 955 |
+
953, 0xad995a5a8c19bd92
|
| 956 |
+
954, 0x34e53c6936f656e6
|
| 957 |
+
955, 0x10de240bc07c757a
|
| 958 |
+
956, 0x3e3b9a6861c2bd1c
|
| 959 |
+
957, 0x9c0b0b97d3712ec9
|
| 960 |
+
958, 0xabf1505a75043aed
|
| 961 |
+
959, 0xbdf93d3de3274179
|
| 962 |
+
960, 0x28fa5904d3f62c28
|
| 963 |
+
961, 0xc3b97b39ef6c5133
|
| 964 |
+
962, 0xf2b2219225b8679d
|
| 965 |
+
963, 0x8be4ec0f930c0aaa
|
| 966 |
+
964, 0x47de5a56aa590643
|
| 967 |
+
965, 0xb6f871b304129856
|
| 968 |
+
966, 0x80a61c06233ab0f9
|
| 969 |
+
967, 0x3ce6c3af8101b055
|
| 970 |
+
968, 0x85b911708274e7d1
|
| 971 |
+
969, 0x4cab65d093a488b7
|
| 972 |
+
970, 0xaabc4b10661fe28e
|
| 973 |
+
971, 0x35b16dea64474a68
|
| 974 |
+
972, 0x1d6eb5b093361223
|
| 975 |
+
973, 0xc39107b92f0fe1fb
|
| 976 |
+
974, 0x1d09e048073c4841
|
| 977 |
+
975, 0xc6a02f43aca8cb2f
|
| 978 |
+
976, 0xaf6613dbc7da909c
|
| 979 |
+
977, 0x5ac2a40c230aa756
|
| 980 |
+
978, 0x33afb5e7c01c39a5
|
| 981 |
+
979, 0xc7b0b20ea8b7d0ef
|
| 982 |
+
980, 0xdf7306c8ccb1bbea
|
| 983 |
+
981, 0x9710efc0c188b2a0
|
| 984 |
+
982, 0xd6303eadb72c873e
|
| 985 |
+
983, 0xa38ca609b118f35a
|
| 986 |
+
984, 0x8390613065c6e535
|
| 987 |
+
985, 0xdf9a0106757e431f
|
| 988 |
+
986, 0x8bcf77039788e143
|
| 989 |
+
987, 0x6026806a986b378e
|
| 990 |
+
988, 0x482ff3b1394cb1dc
|
| 991 |
+
989, 0x2a27d0ccac9ede9c
|
| 992 |
+
990, 0x53c77f26e271b3ab
|
| 993 |
+
991, 0x1ba004cf276cf3f
|
| 994 |
+
992, 0xc135b0517dc81f7c
|
| 995 |
+
993, 0x5d137838db75e442
|
| 996 |
+
994, 0x3fe505f93d1dbdd7
|
| 997 |
+
995, 0x351654ae7d598294
|
| 998 |
+
996, 0x173f8d182af9d84d
|
| 999 |
+
997, 0xf97dfcd164fe11c5
|
| 1000 |
+
998, 0xcda423e5ad43b290
|
| 1001 |
+
999, 0xa5cb380b8de10d10
|
deepseek/lib/python3.10/site-packages/pytz/__init__.py
ADDED
|
@@ -0,0 +1,1554 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''
|
| 2 |
+
datetime.tzinfo timezone definitions generated from the
|
| 3 |
+
Olson timezone database:
|
| 4 |
+
|
| 5 |
+
ftp://elsie.nci.nih.gov/pub/tz*.tar.gz
|
| 6 |
+
|
| 7 |
+
See the datetime section of the Python Library Reference for information
|
| 8 |
+
on how to use these modules.
|
| 9 |
+
'''
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
import datetime
|
| 13 |
+
import os.path
|
| 14 |
+
|
| 15 |
+
from pytz.exceptions import AmbiguousTimeError
|
| 16 |
+
from pytz.exceptions import InvalidTimeError
|
| 17 |
+
from pytz.exceptions import NonExistentTimeError
|
| 18 |
+
from pytz.exceptions import UnknownTimeZoneError
|
| 19 |
+
from pytz.lazy import LazyDict, LazyList, LazySet # noqa
|
| 20 |
+
from pytz.tzinfo import unpickler, BaseTzInfo
|
| 21 |
+
from pytz.tzfile import build_tzinfo
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
# The IANA (nee Olson) database is updated several times a year.
|
| 25 |
+
OLSON_VERSION = '2024b'
|
| 26 |
+
VERSION = '2024.2' # pip compatible version number.
|
| 27 |
+
__version__ = VERSION
|
| 28 |
+
|
| 29 |
+
OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling
|
| 30 |
+
|
| 31 |
+
__all__ = [
|
| 32 |
+
'timezone', 'utc', 'country_timezones', 'country_names',
|
| 33 |
+
'AmbiguousTimeError', 'InvalidTimeError',
|
| 34 |
+
'NonExistentTimeError', 'UnknownTimeZoneError',
|
| 35 |
+
'all_timezones', 'all_timezones_set',
|
| 36 |
+
'common_timezones', 'common_timezones_set',
|
| 37 |
+
'BaseTzInfo', 'FixedOffset',
|
| 38 |
+
]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
if sys.version_info[0] > 2: # Python 3.x
|
| 42 |
+
|
| 43 |
+
# Python 3.x doesn't have unicode(), making writing code
|
| 44 |
+
# for Python 2.3 and Python 3.x a pain.
|
| 45 |
+
unicode = str
|
| 46 |
+
|
| 47 |
+
def ascii(s):
|
| 48 |
+
r"""
|
| 49 |
+
>>> ascii('Hello')
|
| 50 |
+
'Hello'
|
| 51 |
+
>>> ascii('\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL
|
| 52 |
+
Traceback (most recent call last):
|
| 53 |
+
...
|
| 54 |
+
UnicodeEncodeError: ...
|
| 55 |
+
"""
|
| 56 |
+
if type(s) == bytes:
|
| 57 |
+
s = s.decode('ASCII')
|
| 58 |
+
else:
|
| 59 |
+
s.encode('ASCII') # Raise an exception if not ASCII
|
| 60 |
+
return s # But the string - not a byte string.
|
| 61 |
+
|
| 62 |
+
else: # Python 2.x
|
| 63 |
+
|
| 64 |
+
def ascii(s):
|
| 65 |
+
r"""
|
| 66 |
+
>>> ascii('Hello')
|
| 67 |
+
'Hello'
|
| 68 |
+
>>> ascii(u'Hello')
|
| 69 |
+
'Hello'
|
| 70 |
+
>>> ascii(u'\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL
|
| 71 |
+
Traceback (most recent call last):
|
| 72 |
+
...
|
| 73 |
+
UnicodeEncodeError: ...
|
| 74 |
+
"""
|
| 75 |
+
return s.encode('ASCII')
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def open_resource(name):
|
| 79 |
+
"""Open a resource from the zoneinfo subdir for reading.
|
| 80 |
+
|
| 81 |
+
Uses the pkg_resources module if available and no standard file
|
| 82 |
+
found at the calculated location.
|
| 83 |
+
|
| 84 |
+
It is possible to specify different location for zoneinfo
|
| 85 |
+
subdir by using the PYTZ_TZDATADIR environment variable.
|
| 86 |
+
"""
|
| 87 |
+
name_parts = name.lstrip('/').split('/')
|
| 88 |
+
for part in name_parts:
|
| 89 |
+
if part == os.path.pardir or os.sep in part:
|
| 90 |
+
raise ValueError('Bad path segment: %r' % part)
|
| 91 |
+
zoneinfo_dir = os.environ.get('PYTZ_TZDATADIR', None)
|
| 92 |
+
if zoneinfo_dir is not None:
|
| 93 |
+
filename = os.path.join(zoneinfo_dir, *name_parts)
|
| 94 |
+
else:
|
| 95 |
+
filename = os.path.join(os.path.dirname(__file__),
|
| 96 |
+
'zoneinfo', *name_parts)
|
| 97 |
+
if not os.path.exists(filename):
|
| 98 |
+
# http://bugs.launchpad.net/bugs/383171 - we avoid using this
|
| 99 |
+
# unless absolutely necessary to help when a broken version of
|
| 100 |
+
# pkg_resources is installed.
|
| 101 |
+
try:
|
| 102 |
+
from pkg_resources import resource_stream
|
| 103 |
+
except ImportError:
|
| 104 |
+
resource_stream = None
|
| 105 |
+
|
| 106 |
+
if resource_stream is not None:
|
| 107 |
+
return resource_stream(__name__, 'zoneinfo/' + name)
|
| 108 |
+
return open(filename, 'rb')
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def resource_exists(name):
|
| 112 |
+
"""Return true if the given resource exists"""
|
| 113 |
+
try:
|
| 114 |
+
if os.environ.get('PYTZ_SKIPEXISTSCHECK', ''):
|
| 115 |
+
# In "standard" distributions, we can assume that
|
| 116 |
+
# all the listed timezones are present. As an
|
| 117 |
+
# import-speed optimization, you can set the
|
| 118 |
+
# PYTZ_SKIPEXISTSCHECK flag to skip checking
|
| 119 |
+
# for the presence of the resource file on disk.
|
| 120 |
+
return True
|
| 121 |
+
open_resource(name).close()
|
| 122 |
+
return True
|
| 123 |
+
except IOError:
|
| 124 |
+
return False
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
_tzinfo_cache = {}
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def timezone(zone):
|
| 131 |
+
r''' Return a datetime.tzinfo implementation for the given timezone
|
| 132 |
+
|
| 133 |
+
>>> from datetime import datetime, timedelta
|
| 134 |
+
>>> utc = timezone('UTC')
|
| 135 |
+
>>> eastern = timezone('US/Eastern')
|
| 136 |
+
>>> eastern.zone
|
| 137 |
+
'US/Eastern'
|
| 138 |
+
>>> timezone(unicode('US/Eastern')) is eastern
|
| 139 |
+
True
|
| 140 |
+
>>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
|
| 141 |
+
>>> loc_dt = utc_dt.astimezone(eastern)
|
| 142 |
+
>>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
|
| 143 |
+
>>> loc_dt.strftime(fmt)
|
| 144 |
+
'2002-10-27 01:00:00 EST (-0500)'
|
| 145 |
+
>>> (loc_dt - timedelta(minutes=10)).strftime(fmt)
|
| 146 |
+
'2002-10-27 00:50:00 EST (-0500)'
|
| 147 |
+
>>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt)
|
| 148 |
+
'2002-10-27 01:50:00 EDT (-0400)'
|
| 149 |
+
>>> (loc_dt + timedelta(minutes=10)).strftime(fmt)
|
| 150 |
+
'2002-10-27 01:10:00 EST (-0500)'
|
| 151 |
+
|
| 152 |
+
Raises UnknownTimeZoneError if passed an unknown zone.
|
| 153 |
+
|
| 154 |
+
>>> try:
|
| 155 |
+
... timezone('Asia/Shangri-La')
|
| 156 |
+
... except UnknownTimeZoneError:
|
| 157 |
+
... print('Unknown')
|
| 158 |
+
Unknown
|
| 159 |
+
|
| 160 |
+
>>> try:
|
| 161 |
+
... timezone(unicode('\N{TRADE MARK SIGN}'))
|
| 162 |
+
... except UnknownTimeZoneError:
|
| 163 |
+
... print('Unknown')
|
| 164 |
+
Unknown
|
| 165 |
+
|
| 166 |
+
'''
|
| 167 |
+
if zone is None:
|
| 168 |
+
raise UnknownTimeZoneError(None)
|
| 169 |
+
|
| 170 |
+
if zone.upper() == 'UTC':
|
| 171 |
+
return utc
|
| 172 |
+
|
| 173 |
+
try:
|
| 174 |
+
zone = ascii(zone)
|
| 175 |
+
except UnicodeEncodeError:
|
| 176 |
+
# All valid timezones are ASCII
|
| 177 |
+
raise UnknownTimeZoneError(zone)
|
| 178 |
+
|
| 179 |
+
zone = _case_insensitive_zone_lookup(_unmunge_zone(zone))
|
| 180 |
+
if zone not in _tzinfo_cache:
|
| 181 |
+
if zone in all_timezones_set: # noqa
|
| 182 |
+
fp = open_resource(zone)
|
| 183 |
+
try:
|
| 184 |
+
_tzinfo_cache[zone] = build_tzinfo(zone, fp)
|
| 185 |
+
finally:
|
| 186 |
+
fp.close()
|
| 187 |
+
else:
|
| 188 |
+
raise UnknownTimeZoneError(zone)
|
| 189 |
+
|
| 190 |
+
return _tzinfo_cache[zone]
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
def _unmunge_zone(zone):
|
| 194 |
+
"""Undo the time zone name munging done by older versions of pytz."""
|
| 195 |
+
return zone.replace('_plus_', '+').replace('_minus_', '-')
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
_all_timezones_lower_to_standard = None
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def _case_insensitive_zone_lookup(zone):
|
| 202 |
+
"""case-insensitively matching timezone, else return zone unchanged"""
|
| 203 |
+
global _all_timezones_lower_to_standard
|
| 204 |
+
if _all_timezones_lower_to_standard is None:
|
| 205 |
+
_all_timezones_lower_to_standard = dict((tz.lower(), tz) for tz in _all_timezones_unchecked) # noqa
|
| 206 |
+
return _all_timezones_lower_to_standard.get(zone.lower()) or zone # noqa
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
ZERO = datetime.timedelta(0)
|
| 210 |
+
HOUR = datetime.timedelta(hours=1)
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
class UTC(BaseTzInfo):
|
| 214 |
+
"""UTC
|
| 215 |
+
|
| 216 |
+
Optimized UTC implementation. It unpickles using the single module global
|
| 217 |
+
instance defined beneath this class declaration.
|
| 218 |
+
"""
|
| 219 |
+
zone = "UTC"
|
| 220 |
+
|
| 221 |
+
_utcoffset = ZERO
|
| 222 |
+
_dst = ZERO
|
| 223 |
+
_tzname = zone
|
| 224 |
+
|
| 225 |
+
def fromutc(self, dt):
|
| 226 |
+
if dt.tzinfo is None:
|
| 227 |
+
return self.localize(dt)
|
| 228 |
+
return super(utc.__class__, self).fromutc(dt)
|
| 229 |
+
|
| 230 |
+
def utcoffset(self, dt):
|
| 231 |
+
return ZERO
|
| 232 |
+
|
| 233 |
+
def tzname(self, dt):
|
| 234 |
+
return "UTC"
|
| 235 |
+
|
| 236 |
+
def dst(self, dt):
|
| 237 |
+
return ZERO
|
| 238 |
+
|
| 239 |
+
def __reduce__(self):
|
| 240 |
+
return _UTC, ()
|
| 241 |
+
|
| 242 |
+
def localize(self, dt, is_dst=False):
|
| 243 |
+
'''Convert naive time to local time'''
|
| 244 |
+
if dt.tzinfo is not None:
|
| 245 |
+
raise ValueError('Not naive datetime (tzinfo is already set)')
|
| 246 |
+
return dt.replace(tzinfo=self)
|
| 247 |
+
|
| 248 |
+
def normalize(self, dt, is_dst=False):
|
| 249 |
+
'''Correct the timezone information on the given datetime'''
|
| 250 |
+
if dt.tzinfo is self:
|
| 251 |
+
return dt
|
| 252 |
+
if dt.tzinfo is None:
|
| 253 |
+
raise ValueError('Naive time - no tzinfo set')
|
| 254 |
+
return dt.astimezone(self)
|
| 255 |
+
|
| 256 |
+
def __repr__(self):
|
| 257 |
+
return "<UTC>"
|
| 258 |
+
|
| 259 |
+
def __str__(self):
|
| 260 |
+
return "UTC"
|
| 261 |
+
|
| 262 |
+
|
| 263 |
+
UTC = utc = UTC() # UTC is a singleton
|
| 264 |
+
|
| 265 |
+
|
| 266 |
+
def _UTC():
|
| 267 |
+
"""Factory function for utc unpickling.
|
| 268 |
+
|
| 269 |
+
Makes sure that unpickling a utc instance always returns the same
|
| 270 |
+
module global.
|
| 271 |
+
|
| 272 |
+
These examples belong in the UTC class above, but it is obscured; or in
|
| 273 |
+
the README.rst, but we are not depending on Python 2.4 so integrating
|
| 274 |
+
the README.rst examples with the unit tests is not trivial.
|
| 275 |
+
|
| 276 |
+
>>> import datetime, pickle
|
| 277 |
+
>>> dt = datetime.datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc)
|
| 278 |
+
>>> naive = dt.replace(tzinfo=None)
|
| 279 |
+
>>> p = pickle.dumps(dt, 1)
|
| 280 |
+
>>> naive_p = pickle.dumps(naive, 1)
|
| 281 |
+
>>> len(p) - len(naive_p)
|
| 282 |
+
17
|
| 283 |
+
>>> new = pickle.loads(p)
|
| 284 |
+
>>> new == dt
|
| 285 |
+
True
|
| 286 |
+
>>> new is dt
|
| 287 |
+
False
|
| 288 |
+
>>> new.tzinfo is dt.tzinfo
|
| 289 |
+
True
|
| 290 |
+
>>> utc is UTC is timezone('UTC')
|
| 291 |
+
True
|
| 292 |
+
>>> utc is timezone('GMT')
|
| 293 |
+
False
|
| 294 |
+
"""
|
| 295 |
+
return utc
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
_UTC.__safe_for_unpickling__ = True
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
def _p(*args):
|
| 302 |
+
"""Factory function for unpickling pytz tzinfo instances.
|
| 303 |
+
|
| 304 |
+
Just a wrapper around tzinfo.unpickler to save a few bytes in each pickle
|
| 305 |
+
by shortening the path.
|
| 306 |
+
"""
|
| 307 |
+
return unpickler(*args)
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
_p.__safe_for_unpickling__ = True
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
class _CountryTimezoneDict(LazyDict):
|
| 314 |
+
"""Map ISO 3166 country code to a list of timezone names commonly used
|
| 315 |
+
in that country.
|
| 316 |
+
|
| 317 |
+
iso3166_code is the two letter code used to identify the country.
|
| 318 |
+
|
| 319 |
+
>>> def print_list(list_of_strings):
|
| 320 |
+
... 'We use a helper so doctests work under Python 2.3 -> 3.x'
|
| 321 |
+
... for s in list_of_strings:
|
| 322 |
+
... print(s)
|
| 323 |
+
|
| 324 |
+
>>> print_list(country_timezones['nz'])
|
| 325 |
+
Pacific/Auckland
|
| 326 |
+
Pacific/Chatham
|
| 327 |
+
>>> print_list(country_timezones['ch'])
|
| 328 |
+
Europe/Zurich
|
| 329 |
+
>>> print_list(country_timezones['CH'])
|
| 330 |
+
Europe/Zurich
|
| 331 |
+
>>> print_list(country_timezones[unicode('ch')])
|
| 332 |
+
Europe/Zurich
|
| 333 |
+
>>> print_list(country_timezones['XXX'])
|
| 334 |
+
Traceback (most recent call last):
|
| 335 |
+
...
|
| 336 |
+
KeyError: 'XXX'
|
| 337 |
+
|
| 338 |
+
Previously, this information was exposed as a function rather than a
|
| 339 |
+
dictionary. This is still supported::
|
| 340 |
+
|
| 341 |
+
>>> print_list(country_timezones('nz'))
|
| 342 |
+
Pacific/Auckland
|
| 343 |
+
Pacific/Chatham
|
| 344 |
+
"""
|
| 345 |
+
def __call__(self, iso3166_code):
|
| 346 |
+
"""Backwards compatibility."""
|
| 347 |
+
return self[iso3166_code]
|
| 348 |
+
|
| 349 |
+
def _fill(self):
|
| 350 |
+
data = {}
|
| 351 |
+
zone_tab = open_resource('zone.tab')
|
| 352 |
+
try:
|
| 353 |
+
for line in zone_tab:
|
| 354 |
+
line = line.decode('UTF-8')
|
| 355 |
+
if line.startswith('#'):
|
| 356 |
+
continue
|
| 357 |
+
code, coordinates, zone = line.split(None, 4)[:3]
|
| 358 |
+
if zone not in all_timezones_set: # noqa
|
| 359 |
+
continue
|
| 360 |
+
try:
|
| 361 |
+
data[code].append(zone)
|
| 362 |
+
except KeyError:
|
| 363 |
+
data[code] = [zone]
|
| 364 |
+
self.data = data
|
| 365 |
+
finally:
|
| 366 |
+
zone_tab.close()
|
| 367 |
+
|
| 368 |
+
|
| 369 |
+
country_timezones = _CountryTimezoneDict()
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
class _CountryNameDict(LazyDict):
|
| 373 |
+
'''Dictionary proving ISO3166 code -> English name.
|
| 374 |
+
|
| 375 |
+
>>> print(country_names['au'])
|
| 376 |
+
Australia
|
| 377 |
+
'''
|
| 378 |
+
def _fill(self):
|
| 379 |
+
data = {}
|
| 380 |
+
zone_tab = open_resource('iso3166.tab')
|
| 381 |
+
try:
|
| 382 |
+
for line in zone_tab.readlines():
|
| 383 |
+
line = line.decode('UTF-8')
|
| 384 |
+
if line.startswith('#'):
|
| 385 |
+
continue
|
| 386 |
+
code, name = line.split(None, 1)
|
| 387 |
+
data[code] = name.strip()
|
| 388 |
+
self.data = data
|
| 389 |
+
finally:
|
| 390 |
+
zone_tab.close()
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
country_names = _CountryNameDict()
|
| 394 |
+
|
| 395 |
+
|
| 396 |
+
# Time-zone info based solely on fixed offsets
|
| 397 |
+
|
| 398 |
+
class _FixedOffset(datetime.tzinfo):
|
| 399 |
+
|
| 400 |
+
zone = None # to match the standard pytz API
|
| 401 |
+
|
| 402 |
+
def __init__(self, minutes):
|
| 403 |
+
if abs(minutes) >= 1440:
|
| 404 |
+
raise ValueError("absolute offset is too large", minutes)
|
| 405 |
+
self._minutes = minutes
|
| 406 |
+
self._offset = datetime.timedelta(minutes=minutes)
|
| 407 |
+
|
| 408 |
+
def utcoffset(self, dt):
|
| 409 |
+
return self._offset
|
| 410 |
+
|
| 411 |
+
def __reduce__(self):
|
| 412 |
+
return FixedOffset, (self._minutes, )
|
| 413 |
+
|
| 414 |
+
def dst(self, dt):
|
| 415 |
+
return ZERO
|
| 416 |
+
|
| 417 |
+
def tzname(self, dt):
|
| 418 |
+
return None
|
| 419 |
+
|
| 420 |
+
def __repr__(self):
|
| 421 |
+
return 'pytz.FixedOffset(%d)' % self._minutes
|
| 422 |
+
|
| 423 |
+
def localize(self, dt, is_dst=False):
|
| 424 |
+
'''Convert naive time to local time'''
|
| 425 |
+
if dt.tzinfo is not None:
|
| 426 |
+
raise ValueError('Not naive datetime (tzinfo is already set)')
|
| 427 |
+
return dt.replace(tzinfo=self)
|
| 428 |
+
|
| 429 |
+
def normalize(self, dt, is_dst=False):
|
| 430 |
+
'''Correct the timezone information on the given datetime'''
|
| 431 |
+
if dt.tzinfo is self:
|
| 432 |
+
return dt
|
| 433 |
+
if dt.tzinfo is None:
|
| 434 |
+
raise ValueError('Naive time - no tzinfo set')
|
| 435 |
+
return dt.astimezone(self)
|
| 436 |
+
|
| 437 |
+
|
| 438 |
+
def FixedOffset(offset, _tzinfos={}):
|
| 439 |
+
"""return a fixed-offset timezone based off a number of minutes.
|
| 440 |
+
|
| 441 |
+
>>> one = FixedOffset(-330)
|
| 442 |
+
>>> one
|
| 443 |
+
pytz.FixedOffset(-330)
|
| 444 |
+
>>> str(one.utcoffset(datetime.datetime.now()))
|
| 445 |
+
'-1 day, 18:30:00'
|
| 446 |
+
>>> str(one.dst(datetime.datetime.now()))
|
| 447 |
+
'0:00:00'
|
| 448 |
+
|
| 449 |
+
>>> two = FixedOffset(1380)
|
| 450 |
+
>>> two
|
| 451 |
+
pytz.FixedOffset(1380)
|
| 452 |
+
>>> str(two.utcoffset(datetime.datetime.now()))
|
| 453 |
+
'23:00:00'
|
| 454 |
+
>>> str(two.dst(datetime.datetime.now()))
|
| 455 |
+
'0:00:00'
|
| 456 |
+
|
| 457 |
+
The datetime.timedelta must be between the range of -1 and 1 day,
|
| 458 |
+
non-inclusive.
|
| 459 |
+
|
| 460 |
+
>>> FixedOffset(1440)
|
| 461 |
+
Traceback (most recent call last):
|
| 462 |
+
...
|
| 463 |
+
ValueError: ('absolute offset is too large', 1440)
|
| 464 |
+
|
| 465 |
+
>>> FixedOffset(-1440)
|
| 466 |
+
Traceback (most recent call last):
|
| 467 |
+
...
|
| 468 |
+
ValueError: ('absolute offset is too large', -1440)
|
| 469 |
+
|
| 470 |
+
An offset of 0 is special-cased to return UTC.
|
| 471 |
+
|
| 472 |
+
>>> FixedOffset(0) is UTC
|
| 473 |
+
True
|
| 474 |
+
|
| 475 |
+
There should always be only one instance of a FixedOffset per timedelta.
|
| 476 |
+
This should be true for multiple creation calls.
|
| 477 |
+
|
| 478 |
+
>>> FixedOffset(-330) is one
|
| 479 |
+
True
|
| 480 |
+
>>> FixedOffset(1380) is two
|
| 481 |
+
True
|
| 482 |
+
|
| 483 |
+
It should also be true for pickling.
|
| 484 |
+
|
| 485 |
+
>>> import pickle
|
| 486 |
+
>>> pickle.loads(pickle.dumps(one)) is one
|
| 487 |
+
True
|
| 488 |
+
>>> pickle.loads(pickle.dumps(two)) is two
|
| 489 |
+
True
|
| 490 |
+
"""
|
| 491 |
+
if offset == 0:
|
| 492 |
+
return UTC
|
| 493 |
+
|
| 494 |
+
info = _tzinfos.get(offset)
|
| 495 |
+
if info is None:
|
| 496 |
+
# We haven't seen this one before. we need to save it.
|
| 497 |
+
|
| 498 |
+
# Use setdefault to avoid a race condition and make sure we have
|
| 499 |
+
# only one
|
| 500 |
+
info = _tzinfos.setdefault(offset, _FixedOffset(offset))
|
| 501 |
+
|
| 502 |
+
return info
|
| 503 |
+
|
| 504 |
+
|
| 505 |
+
FixedOffset.__safe_for_unpickling__ = True
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
def _test():
|
| 509 |
+
import doctest
|
| 510 |
+
sys.path.insert(0, os.pardir)
|
| 511 |
+
import pytz
|
| 512 |
+
return doctest.testmod(pytz)
|
| 513 |
+
|
| 514 |
+
|
| 515 |
+
if __name__ == '__main__':
|
| 516 |
+
_test()
|
| 517 |
+
_all_timezones_unchecked = \
|
| 518 |
+
['Africa/Abidjan',
|
| 519 |
+
'Africa/Accra',
|
| 520 |
+
'Africa/Addis_Ababa',
|
| 521 |
+
'Africa/Algiers',
|
| 522 |
+
'Africa/Asmara',
|
| 523 |
+
'Africa/Asmera',
|
| 524 |
+
'Africa/Bamako',
|
| 525 |
+
'Africa/Bangui',
|
| 526 |
+
'Africa/Banjul',
|
| 527 |
+
'Africa/Bissau',
|
| 528 |
+
'Africa/Blantyre',
|
| 529 |
+
'Africa/Brazzaville',
|
| 530 |
+
'Africa/Bujumbura',
|
| 531 |
+
'Africa/Cairo',
|
| 532 |
+
'Africa/Casablanca',
|
| 533 |
+
'Africa/Ceuta',
|
| 534 |
+
'Africa/Conakry',
|
| 535 |
+
'Africa/Dakar',
|
| 536 |
+
'Africa/Dar_es_Salaam',
|
| 537 |
+
'Africa/Djibouti',
|
| 538 |
+
'Africa/Douala',
|
| 539 |
+
'Africa/El_Aaiun',
|
| 540 |
+
'Africa/Freetown',
|
| 541 |
+
'Africa/Gaborone',
|
| 542 |
+
'Africa/Harare',
|
| 543 |
+
'Africa/Johannesburg',
|
| 544 |
+
'Africa/Juba',
|
| 545 |
+
'Africa/Kampala',
|
| 546 |
+
'Africa/Khartoum',
|
| 547 |
+
'Africa/Kigali',
|
| 548 |
+
'Africa/Kinshasa',
|
| 549 |
+
'Africa/Lagos',
|
| 550 |
+
'Africa/Libreville',
|
| 551 |
+
'Africa/Lome',
|
| 552 |
+
'Africa/Luanda',
|
| 553 |
+
'Africa/Lubumbashi',
|
| 554 |
+
'Africa/Lusaka',
|
| 555 |
+
'Africa/Malabo',
|
| 556 |
+
'Africa/Maputo',
|
| 557 |
+
'Africa/Maseru',
|
| 558 |
+
'Africa/Mbabane',
|
| 559 |
+
'Africa/Mogadishu',
|
| 560 |
+
'Africa/Monrovia',
|
| 561 |
+
'Africa/Nairobi',
|
| 562 |
+
'Africa/Ndjamena',
|
| 563 |
+
'Africa/Niamey',
|
| 564 |
+
'Africa/Nouakchott',
|
| 565 |
+
'Africa/Ouagadougou',
|
| 566 |
+
'Africa/Porto-Novo',
|
| 567 |
+
'Africa/Sao_Tome',
|
| 568 |
+
'Africa/Timbuktu',
|
| 569 |
+
'Africa/Tripoli',
|
| 570 |
+
'Africa/Tunis',
|
| 571 |
+
'Africa/Windhoek',
|
| 572 |
+
'America/Adak',
|
| 573 |
+
'America/Anchorage',
|
| 574 |
+
'America/Anguilla',
|
| 575 |
+
'America/Antigua',
|
| 576 |
+
'America/Araguaina',
|
| 577 |
+
'America/Argentina/Buenos_Aires',
|
| 578 |
+
'America/Argentina/Catamarca',
|
| 579 |
+
'America/Argentina/ComodRivadavia',
|
| 580 |
+
'America/Argentina/Cordoba',
|
| 581 |
+
'America/Argentina/Jujuy',
|
| 582 |
+
'America/Argentina/La_Rioja',
|
| 583 |
+
'America/Argentina/Mendoza',
|
| 584 |
+
'America/Argentina/Rio_Gallegos',
|
| 585 |
+
'America/Argentina/Salta',
|
| 586 |
+
'America/Argentina/San_Juan',
|
| 587 |
+
'America/Argentina/San_Luis',
|
| 588 |
+
'America/Argentina/Tucuman',
|
| 589 |
+
'America/Argentina/Ushuaia',
|
| 590 |
+
'America/Aruba',
|
| 591 |
+
'America/Asuncion',
|
| 592 |
+
'America/Atikokan',
|
| 593 |
+
'America/Atka',
|
| 594 |
+
'America/Bahia',
|
| 595 |
+
'America/Bahia_Banderas',
|
| 596 |
+
'America/Barbados',
|
| 597 |
+
'America/Belem',
|
| 598 |
+
'America/Belize',
|
| 599 |
+
'America/Blanc-Sablon',
|
| 600 |
+
'America/Boa_Vista',
|
| 601 |
+
'America/Bogota',
|
| 602 |
+
'America/Boise',
|
| 603 |
+
'America/Buenos_Aires',
|
| 604 |
+
'America/Cambridge_Bay',
|
| 605 |
+
'America/Campo_Grande',
|
| 606 |
+
'America/Cancun',
|
| 607 |
+
'America/Caracas',
|
| 608 |
+
'America/Catamarca',
|
| 609 |
+
'America/Cayenne',
|
| 610 |
+
'America/Cayman',
|
| 611 |
+
'America/Chicago',
|
| 612 |
+
'America/Chihuahua',
|
| 613 |
+
'America/Ciudad_Juarez',
|
| 614 |
+
'America/Coral_Harbour',
|
| 615 |
+
'America/Cordoba',
|
| 616 |
+
'America/Costa_Rica',
|
| 617 |
+
'America/Creston',
|
| 618 |
+
'America/Cuiaba',
|
| 619 |
+
'America/Curacao',
|
| 620 |
+
'America/Danmarkshavn',
|
| 621 |
+
'America/Dawson',
|
| 622 |
+
'America/Dawson_Creek',
|
| 623 |
+
'America/Denver',
|
| 624 |
+
'America/Detroit',
|
| 625 |
+
'America/Dominica',
|
| 626 |
+
'America/Edmonton',
|
| 627 |
+
'America/Eirunepe',
|
| 628 |
+
'America/El_Salvador',
|
| 629 |
+
'America/Ensenada',
|
| 630 |
+
'America/Fort_Nelson',
|
| 631 |
+
'America/Fort_Wayne',
|
| 632 |
+
'America/Fortaleza',
|
| 633 |
+
'America/Glace_Bay',
|
| 634 |
+
'America/Godthab',
|
| 635 |
+
'America/Goose_Bay',
|
| 636 |
+
'America/Grand_Turk',
|
| 637 |
+
'America/Grenada',
|
| 638 |
+
'America/Guadeloupe',
|
| 639 |
+
'America/Guatemala',
|
| 640 |
+
'America/Guayaquil',
|
| 641 |
+
'America/Guyana',
|
| 642 |
+
'America/Halifax',
|
| 643 |
+
'America/Havana',
|
| 644 |
+
'America/Hermosillo',
|
| 645 |
+
'America/Indiana/Indianapolis',
|
| 646 |
+
'America/Indiana/Knox',
|
| 647 |
+
'America/Indiana/Marengo',
|
| 648 |
+
'America/Indiana/Petersburg',
|
| 649 |
+
'America/Indiana/Tell_City',
|
| 650 |
+
'America/Indiana/Vevay',
|
| 651 |
+
'America/Indiana/Vincennes',
|
| 652 |
+
'America/Indiana/Winamac',
|
| 653 |
+
'America/Indianapolis',
|
| 654 |
+
'America/Inuvik',
|
| 655 |
+
'America/Iqaluit',
|
| 656 |
+
'America/Jamaica',
|
| 657 |
+
'America/Jujuy',
|
| 658 |
+
'America/Juneau',
|
| 659 |
+
'America/Kentucky/Louisville',
|
| 660 |
+
'America/Kentucky/Monticello',
|
| 661 |
+
'America/Knox_IN',
|
| 662 |
+
'America/Kralendijk',
|
| 663 |
+
'America/La_Paz',
|
| 664 |
+
'America/Lima',
|
| 665 |
+
'America/Los_Angeles',
|
| 666 |
+
'America/Louisville',
|
| 667 |
+
'America/Lower_Princes',
|
| 668 |
+
'America/Maceio',
|
| 669 |
+
'America/Managua',
|
| 670 |
+
'America/Manaus',
|
| 671 |
+
'America/Marigot',
|
| 672 |
+
'America/Martinique',
|
| 673 |
+
'America/Matamoros',
|
| 674 |
+
'America/Mazatlan',
|
| 675 |
+
'America/Mendoza',
|
| 676 |
+
'America/Menominee',
|
| 677 |
+
'America/Merida',
|
| 678 |
+
'America/Metlakatla',
|
| 679 |
+
'America/Mexico_City',
|
| 680 |
+
'America/Miquelon',
|
| 681 |
+
'America/Moncton',
|
| 682 |
+
'America/Monterrey',
|
| 683 |
+
'America/Montevideo',
|
| 684 |
+
'America/Montreal',
|
| 685 |
+
'America/Montserrat',
|
| 686 |
+
'America/Nassau',
|
| 687 |
+
'America/New_York',
|
| 688 |
+
'America/Nipigon',
|
| 689 |
+
'America/Nome',
|
| 690 |
+
'America/Noronha',
|
| 691 |
+
'America/North_Dakota/Beulah',
|
| 692 |
+
'America/North_Dakota/Center',
|
| 693 |
+
'America/North_Dakota/New_Salem',
|
| 694 |
+
'America/Nuuk',
|
| 695 |
+
'America/Ojinaga',
|
| 696 |
+
'America/Panama',
|
| 697 |
+
'America/Pangnirtung',
|
| 698 |
+
'America/Paramaribo',
|
| 699 |
+
'America/Phoenix',
|
| 700 |
+
'America/Port-au-Prince',
|
| 701 |
+
'America/Port_of_Spain',
|
| 702 |
+
'America/Porto_Acre',
|
| 703 |
+
'America/Porto_Velho',
|
| 704 |
+
'America/Puerto_Rico',
|
| 705 |
+
'America/Punta_Arenas',
|
| 706 |
+
'America/Rainy_River',
|
| 707 |
+
'America/Rankin_Inlet',
|
| 708 |
+
'America/Recife',
|
| 709 |
+
'America/Regina',
|
| 710 |
+
'America/Resolute',
|
| 711 |
+
'America/Rio_Branco',
|
| 712 |
+
'America/Rosario',
|
| 713 |
+
'America/Santa_Isabel',
|
| 714 |
+
'America/Santarem',
|
| 715 |
+
'America/Santiago',
|
| 716 |
+
'America/Santo_Domingo',
|
| 717 |
+
'America/Sao_Paulo',
|
| 718 |
+
'America/Scoresbysund',
|
| 719 |
+
'America/Shiprock',
|
| 720 |
+
'America/Sitka',
|
| 721 |
+
'America/St_Barthelemy',
|
| 722 |
+
'America/St_Johns',
|
| 723 |
+
'America/St_Kitts',
|
| 724 |
+
'America/St_Lucia',
|
| 725 |
+
'America/St_Thomas',
|
| 726 |
+
'America/St_Vincent',
|
| 727 |
+
'America/Swift_Current',
|
| 728 |
+
'America/Tegucigalpa',
|
| 729 |
+
'America/Thule',
|
| 730 |
+
'America/Thunder_Bay',
|
| 731 |
+
'America/Tijuana',
|
| 732 |
+
'America/Toronto',
|
| 733 |
+
'America/Tortola',
|
| 734 |
+
'America/Vancouver',
|
| 735 |
+
'America/Virgin',
|
| 736 |
+
'America/Whitehorse',
|
| 737 |
+
'America/Winnipeg',
|
| 738 |
+
'America/Yakutat',
|
| 739 |
+
'America/Yellowknife',
|
| 740 |
+
'Antarctica/Casey',
|
| 741 |
+
'Antarctica/Davis',
|
| 742 |
+
'Antarctica/DumontDUrville',
|
| 743 |
+
'Antarctica/Macquarie',
|
| 744 |
+
'Antarctica/Mawson',
|
| 745 |
+
'Antarctica/McMurdo',
|
| 746 |
+
'Antarctica/Palmer',
|
| 747 |
+
'Antarctica/Rothera',
|
| 748 |
+
'Antarctica/South_Pole',
|
| 749 |
+
'Antarctica/Syowa',
|
| 750 |
+
'Antarctica/Troll',
|
| 751 |
+
'Antarctica/Vostok',
|
| 752 |
+
'Arctic/Longyearbyen',
|
| 753 |
+
'Asia/Aden',
|
| 754 |
+
'Asia/Almaty',
|
| 755 |
+
'Asia/Amman',
|
| 756 |
+
'Asia/Anadyr',
|
| 757 |
+
'Asia/Aqtau',
|
| 758 |
+
'Asia/Aqtobe',
|
| 759 |
+
'Asia/Ashgabat',
|
| 760 |
+
'Asia/Ashkhabad',
|
| 761 |
+
'Asia/Atyrau',
|
| 762 |
+
'Asia/Baghdad',
|
| 763 |
+
'Asia/Bahrain',
|
| 764 |
+
'Asia/Baku',
|
| 765 |
+
'Asia/Bangkok',
|
| 766 |
+
'Asia/Barnaul',
|
| 767 |
+
'Asia/Beirut',
|
| 768 |
+
'Asia/Bishkek',
|
| 769 |
+
'Asia/Brunei',
|
| 770 |
+
'Asia/Calcutta',
|
| 771 |
+
'Asia/Chita',
|
| 772 |
+
'Asia/Choibalsan',
|
| 773 |
+
'Asia/Chongqing',
|
| 774 |
+
'Asia/Chungking',
|
| 775 |
+
'Asia/Colombo',
|
| 776 |
+
'Asia/Dacca',
|
| 777 |
+
'Asia/Damascus',
|
| 778 |
+
'Asia/Dhaka',
|
| 779 |
+
'Asia/Dili',
|
| 780 |
+
'Asia/Dubai',
|
| 781 |
+
'Asia/Dushanbe',
|
| 782 |
+
'Asia/Famagusta',
|
| 783 |
+
'Asia/Gaza',
|
| 784 |
+
'Asia/Harbin',
|
| 785 |
+
'Asia/Hebron',
|
| 786 |
+
'Asia/Ho_Chi_Minh',
|
| 787 |
+
'Asia/Hong_Kong',
|
| 788 |
+
'Asia/Hovd',
|
| 789 |
+
'Asia/Irkutsk',
|
| 790 |
+
'Asia/Istanbul',
|
| 791 |
+
'Asia/Jakarta',
|
| 792 |
+
'Asia/Jayapura',
|
| 793 |
+
'Asia/Jerusalem',
|
| 794 |
+
'Asia/Kabul',
|
| 795 |
+
'Asia/Kamchatka',
|
| 796 |
+
'Asia/Karachi',
|
| 797 |
+
'Asia/Kashgar',
|
| 798 |
+
'Asia/Kathmandu',
|
| 799 |
+
'Asia/Katmandu',
|
| 800 |
+
'Asia/Khandyga',
|
| 801 |
+
'Asia/Kolkata',
|
| 802 |
+
'Asia/Krasnoyarsk',
|
| 803 |
+
'Asia/Kuala_Lumpur',
|
| 804 |
+
'Asia/Kuching',
|
| 805 |
+
'Asia/Kuwait',
|
| 806 |
+
'Asia/Macao',
|
| 807 |
+
'Asia/Macau',
|
| 808 |
+
'Asia/Magadan',
|
| 809 |
+
'Asia/Makassar',
|
| 810 |
+
'Asia/Manila',
|
| 811 |
+
'Asia/Muscat',
|
| 812 |
+
'Asia/Nicosia',
|
| 813 |
+
'Asia/Novokuznetsk',
|
| 814 |
+
'Asia/Novosibirsk',
|
| 815 |
+
'Asia/Omsk',
|
| 816 |
+
'Asia/Oral',
|
| 817 |
+
'Asia/Phnom_Penh',
|
| 818 |
+
'Asia/Pontianak',
|
| 819 |
+
'Asia/Pyongyang',
|
| 820 |
+
'Asia/Qatar',
|
| 821 |
+
'Asia/Qostanay',
|
| 822 |
+
'Asia/Qyzylorda',
|
| 823 |
+
'Asia/Rangoon',
|
| 824 |
+
'Asia/Riyadh',
|
| 825 |
+
'Asia/Saigon',
|
| 826 |
+
'Asia/Sakhalin',
|
| 827 |
+
'Asia/Samarkand',
|
| 828 |
+
'Asia/Seoul',
|
| 829 |
+
'Asia/Shanghai',
|
| 830 |
+
'Asia/Singapore',
|
| 831 |
+
'Asia/Srednekolymsk',
|
| 832 |
+
'Asia/Taipei',
|
| 833 |
+
'Asia/Tashkent',
|
| 834 |
+
'Asia/Tbilisi',
|
| 835 |
+
'Asia/Tehran',
|
| 836 |
+
'Asia/Tel_Aviv',
|
| 837 |
+
'Asia/Thimbu',
|
| 838 |
+
'Asia/Thimphu',
|
| 839 |
+
'Asia/Tokyo',
|
| 840 |
+
'Asia/Tomsk',
|
| 841 |
+
'Asia/Ujung_Pandang',
|
| 842 |
+
'Asia/Ulaanbaatar',
|
| 843 |
+
'Asia/Ulan_Bator',
|
| 844 |
+
'Asia/Urumqi',
|
| 845 |
+
'Asia/Ust-Nera',
|
| 846 |
+
'Asia/Vientiane',
|
| 847 |
+
'Asia/Vladivostok',
|
| 848 |
+
'Asia/Yakutsk',
|
| 849 |
+
'Asia/Yangon',
|
| 850 |
+
'Asia/Yekaterinburg',
|
| 851 |
+
'Asia/Yerevan',
|
| 852 |
+
'Atlantic/Azores',
|
| 853 |
+
'Atlantic/Bermuda',
|
| 854 |
+
'Atlantic/Canary',
|
| 855 |
+
'Atlantic/Cape_Verde',
|
| 856 |
+
'Atlantic/Faeroe',
|
| 857 |
+
'Atlantic/Faroe',
|
| 858 |
+
'Atlantic/Jan_Mayen',
|
| 859 |
+
'Atlantic/Madeira',
|
| 860 |
+
'Atlantic/Reykjavik',
|
| 861 |
+
'Atlantic/South_Georgia',
|
| 862 |
+
'Atlantic/St_Helena',
|
| 863 |
+
'Atlantic/Stanley',
|
| 864 |
+
'Australia/ACT',
|
| 865 |
+
'Australia/Adelaide',
|
| 866 |
+
'Australia/Brisbane',
|
| 867 |
+
'Australia/Broken_Hill',
|
| 868 |
+
'Australia/Canberra',
|
| 869 |
+
'Australia/Currie',
|
| 870 |
+
'Australia/Darwin',
|
| 871 |
+
'Australia/Eucla',
|
| 872 |
+
'Australia/Hobart',
|
| 873 |
+
'Australia/LHI',
|
| 874 |
+
'Australia/Lindeman',
|
| 875 |
+
'Australia/Lord_Howe',
|
| 876 |
+
'Australia/Melbourne',
|
| 877 |
+
'Australia/NSW',
|
| 878 |
+
'Australia/North',
|
| 879 |
+
'Australia/Perth',
|
| 880 |
+
'Australia/Queensland',
|
| 881 |
+
'Australia/South',
|
| 882 |
+
'Australia/Sydney',
|
| 883 |
+
'Australia/Tasmania',
|
| 884 |
+
'Australia/Victoria',
|
| 885 |
+
'Australia/West',
|
| 886 |
+
'Australia/Yancowinna',
|
| 887 |
+
'Brazil/Acre',
|
| 888 |
+
'Brazil/DeNoronha',
|
| 889 |
+
'Brazil/East',
|
| 890 |
+
'Brazil/West',
|
| 891 |
+
'CET',
|
| 892 |
+
'CST6CDT',
|
| 893 |
+
'Canada/Atlantic',
|
| 894 |
+
'Canada/Central',
|
| 895 |
+
'Canada/Eastern',
|
| 896 |
+
'Canada/Mountain',
|
| 897 |
+
'Canada/Newfoundland',
|
| 898 |
+
'Canada/Pacific',
|
| 899 |
+
'Canada/Saskatchewan',
|
| 900 |
+
'Canada/Yukon',
|
| 901 |
+
'Chile/Continental',
|
| 902 |
+
'Chile/EasterIsland',
|
| 903 |
+
'Cuba',
|
| 904 |
+
'EET',
|
| 905 |
+
'EST',
|
| 906 |
+
'EST5EDT',
|
| 907 |
+
'Egypt',
|
| 908 |
+
'Eire',
|
| 909 |
+
'Etc/GMT',
|
| 910 |
+
'Etc/GMT+0',
|
| 911 |
+
'Etc/GMT+1',
|
| 912 |
+
'Etc/GMT+10',
|
| 913 |
+
'Etc/GMT+11',
|
| 914 |
+
'Etc/GMT+12',
|
| 915 |
+
'Etc/GMT+2',
|
| 916 |
+
'Etc/GMT+3',
|
| 917 |
+
'Etc/GMT+4',
|
| 918 |
+
'Etc/GMT+5',
|
| 919 |
+
'Etc/GMT+6',
|
| 920 |
+
'Etc/GMT+7',
|
| 921 |
+
'Etc/GMT+8',
|
| 922 |
+
'Etc/GMT+9',
|
| 923 |
+
'Etc/GMT-0',
|
| 924 |
+
'Etc/GMT-1',
|
| 925 |
+
'Etc/GMT-10',
|
| 926 |
+
'Etc/GMT-11',
|
| 927 |
+
'Etc/GMT-12',
|
| 928 |
+
'Etc/GMT-13',
|
| 929 |
+
'Etc/GMT-14',
|
| 930 |
+
'Etc/GMT-2',
|
| 931 |
+
'Etc/GMT-3',
|
| 932 |
+
'Etc/GMT-4',
|
| 933 |
+
'Etc/GMT-5',
|
| 934 |
+
'Etc/GMT-6',
|
| 935 |
+
'Etc/GMT-7',
|
| 936 |
+
'Etc/GMT-8',
|
| 937 |
+
'Etc/GMT-9',
|
| 938 |
+
'Etc/GMT0',
|
| 939 |
+
'Etc/Greenwich',
|
| 940 |
+
'Etc/UCT',
|
| 941 |
+
'Etc/UTC',
|
| 942 |
+
'Etc/Universal',
|
| 943 |
+
'Etc/Zulu',
|
| 944 |
+
'Europe/Amsterdam',
|
| 945 |
+
'Europe/Andorra',
|
| 946 |
+
'Europe/Astrakhan',
|
| 947 |
+
'Europe/Athens',
|
| 948 |
+
'Europe/Belfast',
|
| 949 |
+
'Europe/Belgrade',
|
| 950 |
+
'Europe/Berlin',
|
| 951 |
+
'Europe/Bratislava',
|
| 952 |
+
'Europe/Brussels',
|
| 953 |
+
'Europe/Bucharest',
|
| 954 |
+
'Europe/Budapest',
|
| 955 |
+
'Europe/Busingen',
|
| 956 |
+
'Europe/Chisinau',
|
| 957 |
+
'Europe/Copenhagen',
|
| 958 |
+
'Europe/Dublin',
|
| 959 |
+
'Europe/Gibraltar',
|
| 960 |
+
'Europe/Guernsey',
|
| 961 |
+
'Europe/Helsinki',
|
| 962 |
+
'Europe/Isle_of_Man',
|
| 963 |
+
'Europe/Istanbul',
|
| 964 |
+
'Europe/Jersey',
|
| 965 |
+
'Europe/Kaliningrad',
|
| 966 |
+
'Europe/Kiev',
|
| 967 |
+
'Europe/Kirov',
|
| 968 |
+
'Europe/Kyiv',
|
| 969 |
+
'Europe/Lisbon',
|
| 970 |
+
'Europe/Ljubljana',
|
| 971 |
+
'Europe/London',
|
| 972 |
+
'Europe/Luxembourg',
|
| 973 |
+
'Europe/Madrid',
|
| 974 |
+
'Europe/Malta',
|
| 975 |
+
'Europe/Mariehamn',
|
| 976 |
+
'Europe/Minsk',
|
| 977 |
+
'Europe/Monaco',
|
| 978 |
+
'Europe/Moscow',
|
| 979 |
+
'Europe/Nicosia',
|
| 980 |
+
'Europe/Oslo',
|
| 981 |
+
'Europe/Paris',
|
| 982 |
+
'Europe/Podgorica',
|
| 983 |
+
'Europe/Prague',
|
| 984 |
+
'Europe/Riga',
|
| 985 |
+
'Europe/Rome',
|
| 986 |
+
'Europe/Samara',
|
| 987 |
+
'Europe/San_Marino',
|
| 988 |
+
'Europe/Sarajevo',
|
| 989 |
+
'Europe/Saratov',
|
| 990 |
+
'Europe/Simferopol',
|
| 991 |
+
'Europe/Skopje',
|
| 992 |
+
'Europe/Sofia',
|
| 993 |
+
'Europe/Stockholm',
|
| 994 |
+
'Europe/Tallinn',
|
| 995 |
+
'Europe/Tirane',
|
| 996 |
+
'Europe/Tiraspol',
|
| 997 |
+
'Europe/Ulyanovsk',
|
| 998 |
+
'Europe/Uzhgorod',
|
| 999 |
+
'Europe/Vaduz',
|
| 1000 |
+
'Europe/Vatican',
|
| 1001 |
+
'Europe/Vienna',
|
| 1002 |
+
'Europe/Vilnius',
|
| 1003 |
+
'Europe/Volgograd',
|
| 1004 |
+
'Europe/Warsaw',
|
| 1005 |
+
'Europe/Zagreb',
|
| 1006 |
+
'Europe/Zaporozhye',
|
| 1007 |
+
'Europe/Zurich',
|
| 1008 |
+
'GB',
|
| 1009 |
+
'GB-Eire',
|
| 1010 |
+
'GMT',
|
| 1011 |
+
'GMT+0',
|
| 1012 |
+
'GMT-0',
|
| 1013 |
+
'GMT0',
|
| 1014 |
+
'Greenwich',
|
| 1015 |
+
'HST',
|
| 1016 |
+
'Hongkong',
|
| 1017 |
+
'Iceland',
|
| 1018 |
+
'Indian/Antananarivo',
|
| 1019 |
+
'Indian/Chagos',
|
| 1020 |
+
'Indian/Christmas',
|
| 1021 |
+
'Indian/Cocos',
|
| 1022 |
+
'Indian/Comoro',
|
| 1023 |
+
'Indian/Kerguelen',
|
| 1024 |
+
'Indian/Mahe',
|
| 1025 |
+
'Indian/Maldives',
|
| 1026 |
+
'Indian/Mauritius',
|
| 1027 |
+
'Indian/Mayotte',
|
| 1028 |
+
'Indian/Reunion',
|
| 1029 |
+
'Iran',
|
| 1030 |
+
'Israel',
|
| 1031 |
+
'Jamaica',
|
| 1032 |
+
'Japan',
|
| 1033 |
+
'Kwajalein',
|
| 1034 |
+
'Libya',
|
| 1035 |
+
'MET',
|
| 1036 |
+
'MST',
|
| 1037 |
+
'MST7MDT',
|
| 1038 |
+
'Mexico/BajaNorte',
|
| 1039 |
+
'Mexico/BajaSur',
|
| 1040 |
+
'Mexico/General',
|
| 1041 |
+
'NZ',
|
| 1042 |
+
'NZ-CHAT',
|
| 1043 |
+
'Navajo',
|
| 1044 |
+
'PRC',
|
| 1045 |
+
'PST8PDT',
|
| 1046 |
+
'Pacific/Apia',
|
| 1047 |
+
'Pacific/Auckland',
|
| 1048 |
+
'Pacific/Bougainville',
|
| 1049 |
+
'Pacific/Chatham',
|
| 1050 |
+
'Pacific/Chuuk',
|
| 1051 |
+
'Pacific/Easter',
|
| 1052 |
+
'Pacific/Efate',
|
| 1053 |
+
'Pacific/Enderbury',
|
| 1054 |
+
'Pacific/Fakaofo',
|
| 1055 |
+
'Pacific/Fiji',
|
| 1056 |
+
'Pacific/Funafuti',
|
| 1057 |
+
'Pacific/Galapagos',
|
| 1058 |
+
'Pacific/Gambier',
|
| 1059 |
+
'Pacific/Guadalcanal',
|
| 1060 |
+
'Pacific/Guam',
|
| 1061 |
+
'Pacific/Honolulu',
|
| 1062 |
+
'Pacific/Johnston',
|
| 1063 |
+
'Pacific/Kanton',
|
| 1064 |
+
'Pacific/Kiritimati',
|
| 1065 |
+
'Pacific/Kosrae',
|
| 1066 |
+
'Pacific/Kwajalein',
|
| 1067 |
+
'Pacific/Majuro',
|
| 1068 |
+
'Pacific/Marquesas',
|
| 1069 |
+
'Pacific/Midway',
|
| 1070 |
+
'Pacific/Nauru',
|
| 1071 |
+
'Pacific/Niue',
|
| 1072 |
+
'Pacific/Norfolk',
|
| 1073 |
+
'Pacific/Noumea',
|
| 1074 |
+
'Pacific/Pago_Pago',
|
| 1075 |
+
'Pacific/Palau',
|
| 1076 |
+
'Pacific/Pitcairn',
|
| 1077 |
+
'Pacific/Pohnpei',
|
| 1078 |
+
'Pacific/Ponape',
|
| 1079 |
+
'Pacific/Port_Moresby',
|
| 1080 |
+
'Pacific/Rarotonga',
|
| 1081 |
+
'Pacific/Saipan',
|
| 1082 |
+
'Pacific/Samoa',
|
| 1083 |
+
'Pacific/Tahiti',
|
| 1084 |
+
'Pacific/Tarawa',
|
| 1085 |
+
'Pacific/Tongatapu',
|
| 1086 |
+
'Pacific/Truk',
|
| 1087 |
+
'Pacific/Wake',
|
| 1088 |
+
'Pacific/Wallis',
|
| 1089 |
+
'Pacific/Yap',
|
| 1090 |
+
'Poland',
|
| 1091 |
+
'Portugal',
|
| 1092 |
+
'ROC',
|
| 1093 |
+
'ROK',
|
| 1094 |
+
'Singapore',
|
| 1095 |
+
'Turkey',
|
| 1096 |
+
'UCT',
|
| 1097 |
+
'US/Alaska',
|
| 1098 |
+
'US/Aleutian',
|
| 1099 |
+
'US/Arizona',
|
| 1100 |
+
'US/Central',
|
| 1101 |
+
'US/East-Indiana',
|
| 1102 |
+
'US/Eastern',
|
| 1103 |
+
'US/Hawaii',
|
| 1104 |
+
'US/Indiana-Starke',
|
| 1105 |
+
'US/Michigan',
|
| 1106 |
+
'US/Mountain',
|
| 1107 |
+
'US/Pacific',
|
| 1108 |
+
'US/Samoa',
|
| 1109 |
+
'UTC',
|
| 1110 |
+
'Universal',
|
| 1111 |
+
'W-SU',
|
| 1112 |
+
'WET',
|
| 1113 |
+
'Zulu']
|
| 1114 |
+
all_timezones = LazyList(
|
| 1115 |
+
tz for tz in _all_timezones_unchecked if resource_exists(tz))
|
| 1116 |
+
|
| 1117 |
+
all_timezones_set = LazySet(all_timezones)
|
| 1118 |
+
common_timezones = \
|
| 1119 |
+
['Africa/Abidjan',
|
| 1120 |
+
'Africa/Accra',
|
| 1121 |
+
'Africa/Addis_Ababa',
|
| 1122 |
+
'Africa/Algiers',
|
| 1123 |
+
'Africa/Asmara',
|
| 1124 |
+
'Africa/Bamako',
|
| 1125 |
+
'Africa/Bangui',
|
| 1126 |
+
'Africa/Banjul',
|
| 1127 |
+
'Africa/Bissau',
|
| 1128 |
+
'Africa/Blantyre',
|
| 1129 |
+
'Africa/Brazzaville',
|
| 1130 |
+
'Africa/Bujumbura',
|
| 1131 |
+
'Africa/Cairo',
|
| 1132 |
+
'Africa/Casablanca',
|
| 1133 |
+
'Africa/Ceuta',
|
| 1134 |
+
'Africa/Conakry',
|
| 1135 |
+
'Africa/Dakar',
|
| 1136 |
+
'Africa/Dar_es_Salaam',
|
| 1137 |
+
'Africa/Djibouti',
|
| 1138 |
+
'Africa/Douala',
|
| 1139 |
+
'Africa/El_Aaiun',
|
| 1140 |
+
'Africa/Freetown',
|
| 1141 |
+
'Africa/Gaborone',
|
| 1142 |
+
'Africa/Harare',
|
| 1143 |
+
'Africa/Johannesburg',
|
| 1144 |
+
'Africa/Juba',
|
| 1145 |
+
'Africa/Kampala',
|
| 1146 |
+
'Africa/Khartoum',
|
| 1147 |
+
'Africa/Kigali',
|
| 1148 |
+
'Africa/Kinshasa',
|
| 1149 |
+
'Africa/Lagos',
|
| 1150 |
+
'Africa/Libreville',
|
| 1151 |
+
'Africa/Lome',
|
| 1152 |
+
'Africa/Luanda',
|
| 1153 |
+
'Africa/Lubumbashi',
|
| 1154 |
+
'Africa/Lusaka',
|
| 1155 |
+
'Africa/Malabo',
|
| 1156 |
+
'Africa/Maputo',
|
| 1157 |
+
'Africa/Maseru',
|
| 1158 |
+
'Africa/Mbabane',
|
| 1159 |
+
'Africa/Mogadishu',
|
| 1160 |
+
'Africa/Monrovia',
|
| 1161 |
+
'Africa/Nairobi',
|
| 1162 |
+
'Africa/Ndjamena',
|
| 1163 |
+
'Africa/Niamey',
|
| 1164 |
+
'Africa/Nouakchott',
|
| 1165 |
+
'Africa/Ouagadougou',
|
| 1166 |
+
'Africa/Porto-Novo',
|
| 1167 |
+
'Africa/Sao_Tome',
|
| 1168 |
+
'Africa/Tripoli',
|
| 1169 |
+
'Africa/Tunis',
|
| 1170 |
+
'Africa/Windhoek',
|
| 1171 |
+
'America/Adak',
|
| 1172 |
+
'America/Anchorage',
|
| 1173 |
+
'America/Anguilla',
|
| 1174 |
+
'America/Antigua',
|
| 1175 |
+
'America/Araguaina',
|
| 1176 |
+
'America/Argentina/Buenos_Aires',
|
| 1177 |
+
'America/Argentina/Catamarca',
|
| 1178 |
+
'America/Argentina/Cordoba',
|
| 1179 |
+
'America/Argentina/Jujuy',
|
| 1180 |
+
'America/Argentina/La_Rioja',
|
| 1181 |
+
'America/Argentina/Mendoza',
|
| 1182 |
+
'America/Argentina/Rio_Gallegos',
|
| 1183 |
+
'America/Argentina/Salta',
|
| 1184 |
+
'America/Argentina/San_Juan',
|
| 1185 |
+
'America/Argentina/San_Luis',
|
| 1186 |
+
'America/Argentina/Tucuman',
|
| 1187 |
+
'America/Argentina/Ushuaia',
|
| 1188 |
+
'America/Aruba',
|
| 1189 |
+
'America/Asuncion',
|
| 1190 |
+
'America/Atikokan',
|
| 1191 |
+
'America/Bahia',
|
| 1192 |
+
'America/Bahia_Banderas',
|
| 1193 |
+
'America/Barbados',
|
| 1194 |
+
'America/Belem',
|
| 1195 |
+
'America/Belize',
|
| 1196 |
+
'America/Blanc-Sablon',
|
| 1197 |
+
'America/Boa_Vista',
|
| 1198 |
+
'America/Bogota',
|
| 1199 |
+
'America/Boise',
|
| 1200 |
+
'America/Cambridge_Bay',
|
| 1201 |
+
'America/Campo_Grande',
|
| 1202 |
+
'America/Cancun',
|
| 1203 |
+
'America/Caracas',
|
| 1204 |
+
'America/Cayenne',
|
| 1205 |
+
'America/Cayman',
|
| 1206 |
+
'America/Chicago',
|
| 1207 |
+
'America/Chihuahua',
|
| 1208 |
+
'America/Ciudad_Juarez',
|
| 1209 |
+
'America/Costa_Rica',
|
| 1210 |
+
'America/Creston',
|
| 1211 |
+
'America/Cuiaba',
|
| 1212 |
+
'America/Curacao',
|
| 1213 |
+
'America/Danmarkshavn',
|
| 1214 |
+
'America/Dawson',
|
| 1215 |
+
'America/Dawson_Creek',
|
| 1216 |
+
'America/Denver',
|
| 1217 |
+
'America/Detroit',
|
| 1218 |
+
'America/Dominica',
|
| 1219 |
+
'America/Edmonton',
|
| 1220 |
+
'America/Eirunepe',
|
| 1221 |
+
'America/El_Salvador',
|
| 1222 |
+
'America/Fort_Nelson',
|
| 1223 |
+
'America/Fortaleza',
|
| 1224 |
+
'America/Glace_Bay',
|
| 1225 |
+
'America/Goose_Bay',
|
| 1226 |
+
'America/Grand_Turk',
|
| 1227 |
+
'America/Grenada',
|
| 1228 |
+
'America/Guadeloupe',
|
| 1229 |
+
'America/Guatemala',
|
| 1230 |
+
'America/Guayaquil',
|
| 1231 |
+
'America/Guyana',
|
| 1232 |
+
'America/Halifax',
|
| 1233 |
+
'America/Havana',
|
| 1234 |
+
'America/Hermosillo',
|
| 1235 |
+
'America/Indiana/Indianapolis',
|
| 1236 |
+
'America/Indiana/Knox',
|
| 1237 |
+
'America/Indiana/Marengo',
|
| 1238 |
+
'America/Indiana/Petersburg',
|
| 1239 |
+
'America/Indiana/Tell_City',
|
| 1240 |
+
'America/Indiana/Vevay',
|
| 1241 |
+
'America/Indiana/Vincennes',
|
| 1242 |
+
'America/Indiana/Winamac',
|
| 1243 |
+
'America/Inuvik',
|
| 1244 |
+
'America/Iqaluit',
|
| 1245 |
+
'America/Jamaica',
|
| 1246 |
+
'America/Juneau',
|
| 1247 |
+
'America/Kentucky/Louisville',
|
| 1248 |
+
'America/Kentucky/Monticello',
|
| 1249 |
+
'America/Kralendijk',
|
| 1250 |
+
'America/La_Paz',
|
| 1251 |
+
'America/Lima',
|
| 1252 |
+
'America/Los_Angeles',
|
| 1253 |
+
'America/Lower_Princes',
|
| 1254 |
+
'America/Maceio',
|
| 1255 |
+
'America/Managua',
|
| 1256 |
+
'America/Manaus',
|
| 1257 |
+
'America/Marigot',
|
| 1258 |
+
'America/Martinique',
|
| 1259 |
+
'America/Matamoros',
|
| 1260 |
+
'America/Mazatlan',
|
| 1261 |
+
'America/Menominee',
|
| 1262 |
+
'America/Merida',
|
| 1263 |
+
'America/Metlakatla',
|
| 1264 |
+
'America/Mexico_City',
|
| 1265 |
+
'America/Miquelon',
|
| 1266 |
+
'America/Moncton',
|
| 1267 |
+
'America/Monterrey',
|
| 1268 |
+
'America/Montevideo',
|
| 1269 |
+
'America/Montserrat',
|
| 1270 |
+
'America/Nassau',
|
| 1271 |
+
'America/New_York',
|
| 1272 |
+
'America/Nome',
|
| 1273 |
+
'America/Noronha',
|
| 1274 |
+
'America/North_Dakota/Beulah',
|
| 1275 |
+
'America/North_Dakota/Center',
|
| 1276 |
+
'America/North_Dakota/New_Salem',
|
| 1277 |
+
'America/Nuuk',
|
| 1278 |
+
'America/Ojinaga',
|
| 1279 |
+
'America/Panama',
|
| 1280 |
+
'America/Paramaribo',
|
| 1281 |
+
'America/Phoenix',
|
| 1282 |
+
'America/Port-au-Prince',
|
| 1283 |
+
'America/Port_of_Spain',
|
| 1284 |
+
'America/Porto_Velho',
|
| 1285 |
+
'America/Puerto_Rico',
|
| 1286 |
+
'America/Punta_Arenas',
|
| 1287 |
+
'America/Rankin_Inlet',
|
| 1288 |
+
'America/Recife',
|
| 1289 |
+
'America/Regina',
|
| 1290 |
+
'America/Resolute',
|
| 1291 |
+
'America/Rio_Branco',
|
| 1292 |
+
'America/Santarem',
|
| 1293 |
+
'America/Santiago',
|
| 1294 |
+
'America/Santo_Domingo',
|
| 1295 |
+
'America/Sao_Paulo',
|
| 1296 |
+
'America/Scoresbysund',
|
| 1297 |
+
'America/Sitka',
|
| 1298 |
+
'America/St_Barthelemy',
|
| 1299 |
+
'America/St_Johns',
|
| 1300 |
+
'America/St_Kitts',
|
| 1301 |
+
'America/St_Lucia',
|
| 1302 |
+
'America/St_Thomas',
|
| 1303 |
+
'America/St_Vincent',
|
| 1304 |
+
'America/Swift_Current',
|
| 1305 |
+
'America/Tegucigalpa',
|
| 1306 |
+
'America/Thule',
|
| 1307 |
+
'America/Tijuana',
|
| 1308 |
+
'America/Toronto',
|
| 1309 |
+
'America/Tortola',
|
| 1310 |
+
'America/Vancouver',
|
| 1311 |
+
'America/Whitehorse',
|
| 1312 |
+
'America/Winnipeg',
|
| 1313 |
+
'America/Yakutat',
|
| 1314 |
+
'Antarctica/Casey',
|
| 1315 |
+
'Antarctica/Davis',
|
| 1316 |
+
'Antarctica/DumontDUrville',
|
| 1317 |
+
'Antarctica/Macquarie',
|
| 1318 |
+
'Antarctica/Mawson',
|
| 1319 |
+
'Antarctica/McMurdo',
|
| 1320 |
+
'Antarctica/Palmer',
|
| 1321 |
+
'Antarctica/Rothera',
|
| 1322 |
+
'Antarctica/Syowa',
|
| 1323 |
+
'Antarctica/Troll',
|
| 1324 |
+
'Antarctica/Vostok',
|
| 1325 |
+
'Arctic/Longyearbyen',
|
| 1326 |
+
'Asia/Aden',
|
| 1327 |
+
'Asia/Almaty',
|
| 1328 |
+
'Asia/Amman',
|
| 1329 |
+
'Asia/Anadyr',
|
| 1330 |
+
'Asia/Aqtau',
|
| 1331 |
+
'Asia/Aqtobe',
|
| 1332 |
+
'Asia/Ashgabat',
|
| 1333 |
+
'Asia/Atyrau',
|
| 1334 |
+
'Asia/Baghdad',
|
| 1335 |
+
'Asia/Bahrain',
|
| 1336 |
+
'Asia/Baku',
|
| 1337 |
+
'Asia/Bangkok',
|
| 1338 |
+
'Asia/Barnaul',
|
| 1339 |
+
'Asia/Beirut',
|
| 1340 |
+
'Asia/Bishkek',
|
| 1341 |
+
'Asia/Brunei',
|
| 1342 |
+
'Asia/Chita',
|
| 1343 |
+
'Asia/Colombo',
|
| 1344 |
+
'Asia/Damascus',
|
| 1345 |
+
'Asia/Dhaka',
|
| 1346 |
+
'Asia/Dili',
|
| 1347 |
+
'Asia/Dubai',
|
| 1348 |
+
'Asia/Dushanbe',
|
| 1349 |
+
'Asia/Famagusta',
|
| 1350 |
+
'Asia/Gaza',
|
| 1351 |
+
'Asia/Hebron',
|
| 1352 |
+
'Asia/Ho_Chi_Minh',
|
| 1353 |
+
'Asia/Hong_Kong',
|
| 1354 |
+
'Asia/Hovd',
|
| 1355 |
+
'Asia/Irkutsk',
|
| 1356 |
+
'Asia/Jakarta',
|
| 1357 |
+
'Asia/Jayapura',
|
| 1358 |
+
'Asia/Jerusalem',
|
| 1359 |
+
'Asia/Kabul',
|
| 1360 |
+
'Asia/Kamchatka',
|
| 1361 |
+
'Asia/Karachi',
|
| 1362 |
+
'Asia/Kathmandu',
|
| 1363 |
+
'Asia/Khandyga',
|
| 1364 |
+
'Asia/Kolkata',
|
| 1365 |
+
'Asia/Krasnoyarsk',
|
| 1366 |
+
'Asia/Kuala_Lumpur',
|
| 1367 |
+
'Asia/Kuching',
|
| 1368 |
+
'Asia/Kuwait',
|
| 1369 |
+
'Asia/Macau',
|
| 1370 |
+
'Asia/Magadan',
|
| 1371 |
+
'Asia/Makassar',
|
| 1372 |
+
'Asia/Manila',
|
| 1373 |
+
'Asia/Muscat',
|
| 1374 |
+
'Asia/Nicosia',
|
| 1375 |
+
'Asia/Novokuznetsk',
|
| 1376 |
+
'Asia/Novosibirsk',
|
| 1377 |
+
'Asia/Omsk',
|
| 1378 |
+
'Asia/Oral',
|
| 1379 |
+
'Asia/Phnom_Penh',
|
| 1380 |
+
'Asia/Pontianak',
|
| 1381 |
+
'Asia/Pyongyang',
|
| 1382 |
+
'Asia/Qatar',
|
| 1383 |
+
'Asia/Qostanay',
|
| 1384 |
+
'Asia/Qyzylorda',
|
| 1385 |
+
'Asia/Riyadh',
|
| 1386 |
+
'Asia/Sakhalin',
|
| 1387 |
+
'Asia/Samarkand',
|
| 1388 |
+
'Asia/Seoul',
|
| 1389 |
+
'Asia/Shanghai',
|
| 1390 |
+
'Asia/Singapore',
|
| 1391 |
+
'Asia/Srednekolymsk',
|
| 1392 |
+
'Asia/Taipei',
|
| 1393 |
+
'Asia/Tashkent',
|
| 1394 |
+
'Asia/Tbilisi',
|
| 1395 |
+
'Asia/Tehran',
|
| 1396 |
+
'Asia/Thimphu',
|
| 1397 |
+
'Asia/Tokyo',
|
| 1398 |
+
'Asia/Tomsk',
|
| 1399 |
+
'Asia/Ulaanbaatar',
|
| 1400 |
+
'Asia/Urumqi',
|
| 1401 |
+
'Asia/Ust-Nera',
|
| 1402 |
+
'Asia/Vientiane',
|
| 1403 |
+
'Asia/Vladivostok',
|
| 1404 |
+
'Asia/Yakutsk',
|
| 1405 |
+
'Asia/Yangon',
|
| 1406 |
+
'Asia/Yekaterinburg',
|
| 1407 |
+
'Asia/Yerevan',
|
| 1408 |
+
'Atlantic/Azores',
|
| 1409 |
+
'Atlantic/Bermuda',
|
| 1410 |
+
'Atlantic/Canary',
|
| 1411 |
+
'Atlantic/Cape_Verde',
|
| 1412 |
+
'Atlantic/Faroe',
|
| 1413 |
+
'Atlantic/Madeira',
|
| 1414 |
+
'Atlantic/Reykjavik',
|
| 1415 |
+
'Atlantic/South_Georgia',
|
| 1416 |
+
'Atlantic/St_Helena',
|
| 1417 |
+
'Atlantic/Stanley',
|
| 1418 |
+
'Australia/Adelaide',
|
| 1419 |
+
'Australia/Brisbane',
|
| 1420 |
+
'Australia/Broken_Hill',
|
| 1421 |
+
'Australia/Darwin',
|
| 1422 |
+
'Australia/Eucla',
|
| 1423 |
+
'Australia/Hobart',
|
| 1424 |
+
'Australia/Lindeman',
|
| 1425 |
+
'Australia/Lord_Howe',
|
| 1426 |
+
'Australia/Melbourne',
|
| 1427 |
+
'Australia/Perth',
|
| 1428 |
+
'Australia/Sydney',
|
| 1429 |
+
'Canada/Atlantic',
|
| 1430 |
+
'Canada/Central',
|
| 1431 |
+
'Canada/Eastern',
|
| 1432 |
+
'Canada/Mountain',
|
| 1433 |
+
'Canada/Newfoundland',
|
| 1434 |
+
'Canada/Pacific',
|
| 1435 |
+
'Europe/Amsterdam',
|
| 1436 |
+
'Europe/Andorra',
|
| 1437 |
+
'Europe/Astrakhan',
|
| 1438 |
+
'Europe/Athens',
|
| 1439 |
+
'Europe/Belgrade',
|
| 1440 |
+
'Europe/Berlin',
|
| 1441 |
+
'Europe/Bratislava',
|
| 1442 |
+
'Europe/Brussels',
|
| 1443 |
+
'Europe/Bucharest',
|
| 1444 |
+
'Europe/Budapest',
|
| 1445 |
+
'Europe/Busingen',
|
| 1446 |
+
'Europe/Chisinau',
|
| 1447 |
+
'Europe/Copenhagen',
|
| 1448 |
+
'Europe/Dublin',
|
| 1449 |
+
'Europe/Gibraltar',
|
| 1450 |
+
'Europe/Guernsey',
|
| 1451 |
+
'Europe/Helsinki',
|
| 1452 |
+
'Europe/Isle_of_Man',
|
| 1453 |
+
'Europe/Istanbul',
|
| 1454 |
+
'Europe/Jersey',
|
| 1455 |
+
'Europe/Kaliningrad',
|
| 1456 |
+
'Europe/Kirov',
|
| 1457 |
+
'Europe/Kyiv',
|
| 1458 |
+
'Europe/Lisbon',
|
| 1459 |
+
'Europe/Ljubljana',
|
| 1460 |
+
'Europe/London',
|
| 1461 |
+
'Europe/Luxembourg',
|
| 1462 |
+
'Europe/Madrid',
|
| 1463 |
+
'Europe/Malta',
|
| 1464 |
+
'Europe/Mariehamn',
|
| 1465 |
+
'Europe/Minsk',
|
| 1466 |
+
'Europe/Monaco',
|
| 1467 |
+
'Europe/Moscow',
|
| 1468 |
+
'Europe/Oslo',
|
| 1469 |
+
'Europe/Paris',
|
| 1470 |
+
'Europe/Podgorica',
|
| 1471 |
+
'Europe/Prague',
|
| 1472 |
+
'Europe/Riga',
|
| 1473 |
+
'Europe/Rome',
|
| 1474 |
+
'Europe/Samara',
|
| 1475 |
+
'Europe/San_Marino',
|
| 1476 |
+
'Europe/Sarajevo',
|
| 1477 |
+
'Europe/Saratov',
|
| 1478 |
+
'Europe/Simferopol',
|
| 1479 |
+
'Europe/Skopje',
|
| 1480 |
+
'Europe/Sofia',
|
| 1481 |
+
'Europe/Stockholm',
|
| 1482 |
+
'Europe/Tallinn',
|
| 1483 |
+
'Europe/Tirane',
|
| 1484 |
+
'Europe/Ulyanovsk',
|
| 1485 |
+
'Europe/Vaduz',
|
| 1486 |
+
'Europe/Vatican',
|
| 1487 |
+
'Europe/Vienna',
|
| 1488 |
+
'Europe/Vilnius',
|
| 1489 |
+
'Europe/Volgograd',
|
| 1490 |
+
'Europe/Warsaw',
|
| 1491 |
+
'Europe/Zagreb',
|
| 1492 |
+
'Europe/Zurich',
|
| 1493 |
+
'GMT',
|
| 1494 |
+
'Indian/Antananarivo',
|
| 1495 |
+
'Indian/Chagos',
|
| 1496 |
+
'Indian/Christmas',
|
| 1497 |
+
'Indian/Cocos',
|
| 1498 |
+
'Indian/Comoro',
|
| 1499 |
+
'Indian/Kerguelen',
|
| 1500 |
+
'Indian/Mahe',
|
| 1501 |
+
'Indian/Maldives',
|
| 1502 |
+
'Indian/Mauritius',
|
| 1503 |
+
'Indian/Mayotte',
|
| 1504 |
+
'Indian/Reunion',
|
| 1505 |
+
'Pacific/Apia',
|
| 1506 |
+
'Pacific/Auckland',
|
| 1507 |
+
'Pacific/Bougainville',
|
| 1508 |
+
'Pacific/Chatham',
|
| 1509 |
+
'Pacific/Chuuk',
|
| 1510 |
+
'Pacific/Easter',
|
| 1511 |
+
'Pacific/Efate',
|
| 1512 |
+
'Pacific/Fakaofo',
|
| 1513 |
+
'Pacific/Fiji',
|
| 1514 |
+
'Pacific/Funafuti',
|
| 1515 |
+
'Pacific/Galapagos',
|
| 1516 |
+
'Pacific/Gambier',
|
| 1517 |
+
'Pacific/Guadalcanal',
|
| 1518 |
+
'Pacific/Guam',
|
| 1519 |
+
'Pacific/Honolulu',
|
| 1520 |
+
'Pacific/Kanton',
|
| 1521 |
+
'Pacific/Kiritimati',
|
| 1522 |
+
'Pacific/Kosrae',
|
| 1523 |
+
'Pacific/Kwajalein',
|
| 1524 |
+
'Pacific/Majuro',
|
| 1525 |
+
'Pacific/Marquesas',
|
| 1526 |
+
'Pacific/Midway',
|
| 1527 |
+
'Pacific/Nauru',
|
| 1528 |
+
'Pacific/Niue',
|
| 1529 |
+
'Pacific/Norfolk',
|
| 1530 |
+
'Pacific/Noumea',
|
| 1531 |
+
'Pacific/Pago_Pago',
|
| 1532 |
+
'Pacific/Palau',
|
| 1533 |
+
'Pacific/Pitcairn',
|
| 1534 |
+
'Pacific/Pohnpei',
|
| 1535 |
+
'Pacific/Port_Moresby',
|
| 1536 |
+
'Pacific/Rarotonga',
|
| 1537 |
+
'Pacific/Saipan',
|
| 1538 |
+
'Pacific/Tahiti',
|
| 1539 |
+
'Pacific/Tarawa',
|
| 1540 |
+
'Pacific/Tongatapu',
|
| 1541 |
+
'Pacific/Wake',
|
| 1542 |
+
'Pacific/Wallis',
|
| 1543 |
+
'US/Alaska',
|
| 1544 |
+
'US/Arizona',
|
| 1545 |
+
'US/Central',
|
| 1546 |
+
'US/Eastern',
|
| 1547 |
+
'US/Hawaii',
|
| 1548 |
+
'US/Mountain',
|
| 1549 |
+
'US/Pacific',
|
| 1550 |
+
'UTC']
|
| 1551 |
+
common_timezones = LazyList(
|
| 1552 |
+
tz for tz in common_timezones if tz in all_timezones)
|
| 1553 |
+
|
| 1554 |
+
common_timezones_set = LazySet(common_timezones)
|
deepseek/lib/python3.10/site-packages/pytz/exceptions.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''
|
| 2 |
+
Custom exceptions raised by pytz.
|
| 3 |
+
'''
|
| 4 |
+
|
| 5 |
+
__all__ = [
|
| 6 |
+
'UnknownTimeZoneError', 'InvalidTimeError', 'AmbiguousTimeError',
|
| 7 |
+
'NonExistentTimeError',
|
| 8 |
+
]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class Error(Exception):
|
| 12 |
+
'''Base class for all exceptions raised by the pytz library'''
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class UnknownTimeZoneError(KeyError, Error):
|
| 16 |
+
'''Exception raised when pytz is passed an unknown timezone.
|
| 17 |
+
|
| 18 |
+
>>> isinstance(UnknownTimeZoneError(), LookupError)
|
| 19 |
+
True
|
| 20 |
+
|
| 21 |
+
This class is actually a subclass of KeyError to provide backwards
|
| 22 |
+
compatibility with code relying on the undocumented behavior of earlier
|
| 23 |
+
pytz releases.
|
| 24 |
+
|
| 25 |
+
>>> isinstance(UnknownTimeZoneError(), KeyError)
|
| 26 |
+
True
|
| 27 |
+
|
| 28 |
+
And also a subclass of pytz.exceptions.Error, as are other pytz
|
| 29 |
+
exceptions.
|
| 30 |
+
|
| 31 |
+
>>> isinstance(UnknownTimeZoneError(), Error)
|
| 32 |
+
True
|
| 33 |
+
|
| 34 |
+
'''
|
| 35 |
+
pass
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class InvalidTimeError(Error):
|
| 39 |
+
'''Base class for invalid time exceptions.'''
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class AmbiguousTimeError(InvalidTimeError):
|
| 43 |
+
'''Exception raised when attempting to create an ambiguous wallclock time.
|
| 44 |
+
|
| 45 |
+
At the end of a DST transition period, a particular wallclock time will
|
| 46 |
+
occur twice (once before the clocks are set back, once after). Both
|
| 47 |
+
possibilities may be correct, unless further information is supplied.
|
| 48 |
+
|
| 49 |
+
See DstTzInfo.normalize() for more info
|
| 50 |
+
'''
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class NonExistentTimeError(InvalidTimeError):
|
| 54 |
+
'''Exception raised when attempting to create a wallclock time that
|
| 55 |
+
cannot exist.
|
| 56 |
+
|
| 57 |
+
At the start of a DST transition period, the wallclock time jumps forward.
|
| 58 |
+
The instants jumped over never occur.
|
| 59 |
+
'''
|
deepseek/lib/python3.10/site-packages/pytz/lazy.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from threading import RLock
|
| 2 |
+
try:
|
| 3 |
+
from collections.abc import Mapping as DictMixin
|
| 4 |
+
except ImportError: # Python < 3.3
|
| 5 |
+
try:
|
| 6 |
+
from UserDict import DictMixin # Python 2
|
| 7 |
+
except ImportError: # Python 3.0-3.3
|
| 8 |
+
from collections import Mapping as DictMixin
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
# With lazy loading, we might end up with multiple threads triggering
|
| 12 |
+
# it at the same time. We need a lock.
|
| 13 |
+
_fill_lock = RLock()
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class LazyDict(DictMixin):
|
| 17 |
+
"""Dictionary populated on first use."""
|
| 18 |
+
data = None
|
| 19 |
+
|
| 20 |
+
def __getitem__(self, key):
|
| 21 |
+
if self.data is None:
|
| 22 |
+
_fill_lock.acquire()
|
| 23 |
+
try:
|
| 24 |
+
if self.data is None:
|
| 25 |
+
self._fill()
|
| 26 |
+
finally:
|
| 27 |
+
_fill_lock.release()
|
| 28 |
+
return self.data[key.upper()]
|
| 29 |
+
|
| 30 |
+
def __contains__(self, key):
|
| 31 |
+
if self.data is None:
|
| 32 |
+
_fill_lock.acquire()
|
| 33 |
+
try:
|
| 34 |
+
if self.data is None:
|
| 35 |
+
self._fill()
|
| 36 |
+
finally:
|
| 37 |
+
_fill_lock.release()
|
| 38 |
+
return key in self.data
|
| 39 |
+
|
| 40 |
+
def __iter__(self):
|
| 41 |
+
if self.data is None:
|
| 42 |
+
_fill_lock.acquire()
|
| 43 |
+
try:
|
| 44 |
+
if self.data is None:
|
| 45 |
+
self._fill()
|
| 46 |
+
finally:
|
| 47 |
+
_fill_lock.release()
|
| 48 |
+
return iter(self.data)
|
| 49 |
+
|
| 50 |
+
def __len__(self):
|
| 51 |
+
if self.data is None:
|
| 52 |
+
_fill_lock.acquire()
|
| 53 |
+
try:
|
| 54 |
+
if self.data is None:
|
| 55 |
+
self._fill()
|
| 56 |
+
finally:
|
| 57 |
+
_fill_lock.release()
|
| 58 |
+
return len(self.data)
|
| 59 |
+
|
| 60 |
+
def keys(self):
|
| 61 |
+
if self.data is None:
|
| 62 |
+
_fill_lock.acquire()
|
| 63 |
+
try:
|
| 64 |
+
if self.data is None:
|
| 65 |
+
self._fill()
|
| 66 |
+
finally:
|
| 67 |
+
_fill_lock.release()
|
| 68 |
+
return self.data.keys()
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class LazyList(list):
|
| 72 |
+
"""List populated on first use."""
|
| 73 |
+
|
| 74 |
+
_props = [
|
| 75 |
+
'__str__', '__repr__', '__unicode__',
|
| 76 |
+
'__hash__', '__sizeof__', '__cmp__',
|
| 77 |
+
'__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
|
| 78 |
+
'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove',
|
| 79 |
+
'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__',
|
| 80 |
+
'__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__',
|
| 81 |
+
'__getitem__', '__setitem__', '__delitem__', '__iter__',
|
| 82 |
+
'__reversed__', '__getslice__', '__setslice__', '__delslice__']
|
| 83 |
+
|
| 84 |
+
def __new__(cls, fill_iter=None):
|
| 85 |
+
|
| 86 |
+
if fill_iter is None:
|
| 87 |
+
return list()
|
| 88 |
+
|
| 89 |
+
# We need a new class as we will be dynamically messing with its
|
| 90 |
+
# methods.
|
| 91 |
+
class LazyList(list):
|
| 92 |
+
pass
|
| 93 |
+
|
| 94 |
+
fill_iter = [fill_iter]
|
| 95 |
+
|
| 96 |
+
def lazy(name):
|
| 97 |
+
def _lazy(self, *args, **kw):
|
| 98 |
+
_fill_lock.acquire()
|
| 99 |
+
try:
|
| 100 |
+
if len(fill_iter) > 0:
|
| 101 |
+
list.extend(self, fill_iter.pop())
|
| 102 |
+
for method_name in cls._props:
|
| 103 |
+
delattr(LazyList, method_name)
|
| 104 |
+
finally:
|
| 105 |
+
_fill_lock.release()
|
| 106 |
+
return getattr(list, name)(self, *args, **kw)
|
| 107 |
+
return _lazy
|
| 108 |
+
|
| 109 |
+
for name in cls._props:
|
| 110 |
+
setattr(LazyList, name, lazy(name))
|
| 111 |
+
|
| 112 |
+
new_list = LazyList()
|
| 113 |
+
return new_list
|
| 114 |
+
|
| 115 |
+
# Not all versions of Python declare the same magic methods.
|
| 116 |
+
# Filter out properties that don't exist in this version of Python
|
| 117 |
+
# from the list.
|
| 118 |
+
LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)]
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
class LazySet(set):
|
| 122 |
+
"""Set populated on first use."""
|
| 123 |
+
|
| 124 |
+
_props = (
|
| 125 |
+
'__str__', '__repr__', '__unicode__',
|
| 126 |
+
'__hash__', '__sizeof__', '__cmp__',
|
| 127 |
+
'__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
|
| 128 |
+
'__contains__', '__len__', '__nonzero__',
|
| 129 |
+
'__getitem__', '__setitem__', '__delitem__', '__iter__',
|
| 130 |
+
'__sub__', '__and__', '__xor__', '__or__',
|
| 131 |
+
'__rsub__', '__rand__', '__rxor__', '__ror__',
|
| 132 |
+
'__isub__', '__iand__', '__ixor__', '__ior__',
|
| 133 |
+
'add', 'clear', 'copy', 'difference', 'difference_update',
|
| 134 |
+
'discard', 'intersection', 'intersection_update', 'isdisjoint',
|
| 135 |
+
'issubset', 'issuperset', 'pop', 'remove',
|
| 136 |
+
'symmetric_difference', 'symmetric_difference_update',
|
| 137 |
+
'union', 'update')
|
| 138 |
+
|
| 139 |
+
def __new__(cls, fill_iter=None):
|
| 140 |
+
|
| 141 |
+
if fill_iter is None:
|
| 142 |
+
return set()
|
| 143 |
+
|
| 144 |
+
class LazySet(set):
|
| 145 |
+
pass
|
| 146 |
+
|
| 147 |
+
fill_iter = [fill_iter]
|
| 148 |
+
|
| 149 |
+
def lazy(name):
|
| 150 |
+
def _lazy(self, *args, **kw):
|
| 151 |
+
_fill_lock.acquire()
|
| 152 |
+
try:
|
| 153 |
+
if len(fill_iter) > 0:
|
| 154 |
+
for i in fill_iter.pop():
|
| 155 |
+
set.add(self, i)
|
| 156 |
+
for method_name in cls._props:
|
| 157 |
+
delattr(LazySet, method_name)
|
| 158 |
+
finally:
|
| 159 |
+
_fill_lock.release()
|
| 160 |
+
return getattr(set, name)(self, *args, **kw)
|
| 161 |
+
return _lazy
|
| 162 |
+
|
| 163 |
+
for name in cls._props:
|
| 164 |
+
setattr(LazySet, name, lazy(name))
|
| 165 |
+
|
| 166 |
+
new_set = LazySet()
|
| 167 |
+
return new_set
|
| 168 |
+
|
| 169 |
+
# Not all versions of Python declare the same magic methods.
|
| 170 |
+
# Filter out properties that don't exist in this version of Python
|
| 171 |
+
# from the list.
|
| 172 |
+
LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)]
|
deepseek/lib/python3.10/site-packages/pytz/reference.py
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''
|
| 2 |
+
Reference tzinfo implementations from the Python docs.
|
| 3 |
+
Used for testing against as they are only correct for the years
|
| 4 |
+
1987 to 2006. Do not use these for real code.
|
| 5 |
+
'''
|
| 6 |
+
|
| 7 |
+
from datetime import tzinfo, timedelta, datetime
|
| 8 |
+
from pytz import HOUR, ZERO, UTC
|
| 9 |
+
|
| 10 |
+
__all__ = [
|
| 11 |
+
'FixedOffset',
|
| 12 |
+
'LocalTimezone',
|
| 13 |
+
'USTimeZone',
|
| 14 |
+
'Eastern',
|
| 15 |
+
'Central',
|
| 16 |
+
'Mountain',
|
| 17 |
+
'Pacific',
|
| 18 |
+
'UTC'
|
| 19 |
+
]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# A class building tzinfo objects for fixed-offset time zones.
|
| 23 |
+
# Note that FixedOffset(0, "UTC") is a different way to build a
|
| 24 |
+
# UTC tzinfo object.
|
| 25 |
+
class FixedOffset(tzinfo):
|
| 26 |
+
"""Fixed offset in minutes east from UTC."""
|
| 27 |
+
|
| 28 |
+
def __init__(self, offset, name):
|
| 29 |
+
self.__offset = timedelta(minutes=offset)
|
| 30 |
+
self.__name = name
|
| 31 |
+
|
| 32 |
+
def utcoffset(self, dt):
|
| 33 |
+
return self.__offset
|
| 34 |
+
|
| 35 |
+
def tzname(self, dt):
|
| 36 |
+
return self.__name
|
| 37 |
+
|
| 38 |
+
def dst(self, dt):
|
| 39 |
+
return ZERO
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
import time as _time
|
| 43 |
+
|
| 44 |
+
STDOFFSET = timedelta(seconds=-_time.timezone)
|
| 45 |
+
if _time.daylight:
|
| 46 |
+
DSTOFFSET = timedelta(seconds=-_time.altzone)
|
| 47 |
+
else:
|
| 48 |
+
DSTOFFSET = STDOFFSET
|
| 49 |
+
|
| 50 |
+
DSTDIFF = DSTOFFSET - STDOFFSET
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
# A class capturing the platform's idea of local time.
|
| 54 |
+
class LocalTimezone(tzinfo):
|
| 55 |
+
|
| 56 |
+
def utcoffset(self, dt):
|
| 57 |
+
if self._isdst(dt):
|
| 58 |
+
return DSTOFFSET
|
| 59 |
+
else:
|
| 60 |
+
return STDOFFSET
|
| 61 |
+
|
| 62 |
+
def dst(self, dt):
|
| 63 |
+
if self._isdst(dt):
|
| 64 |
+
return DSTDIFF
|
| 65 |
+
else:
|
| 66 |
+
return ZERO
|
| 67 |
+
|
| 68 |
+
def tzname(self, dt):
|
| 69 |
+
return _time.tzname[self._isdst(dt)]
|
| 70 |
+
|
| 71 |
+
def _isdst(self, dt):
|
| 72 |
+
tt = (dt.year, dt.month, dt.day,
|
| 73 |
+
dt.hour, dt.minute, dt.second,
|
| 74 |
+
dt.weekday(), 0, -1)
|
| 75 |
+
stamp = _time.mktime(tt)
|
| 76 |
+
tt = _time.localtime(stamp)
|
| 77 |
+
return tt.tm_isdst > 0
|
| 78 |
+
|
| 79 |
+
Local = LocalTimezone()
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def first_sunday_on_or_after(dt):
|
| 83 |
+
days_to_go = 6 - dt.weekday()
|
| 84 |
+
if days_to_go:
|
| 85 |
+
dt += timedelta(days_to_go)
|
| 86 |
+
return dt
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
# In the US, DST starts at 2am (standard time) on the first Sunday in April.
|
| 90 |
+
DSTSTART = datetime(1, 4, 1, 2)
|
| 91 |
+
# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct.
|
| 92 |
+
# which is the first Sunday on or after Oct 25.
|
| 93 |
+
DSTEND = datetime(1, 10, 25, 1)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# A complete implementation of current DST rules for major US time zones.
|
| 97 |
+
class USTimeZone(tzinfo):
|
| 98 |
+
|
| 99 |
+
def __init__(self, hours, reprname, stdname, dstname):
|
| 100 |
+
self.stdoffset = timedelta(hours=hours)
|
| 101 |
+
self.reprname = reprname
|
| 102 |
+
self.stdname = stdname
|
| 103 |
+
self.dstname = dstname
|
| 104 |
+
|
| 105 |
+
def __repr__(self):
|
| 106 |
+
return self.reprname
|
| 107 |
+
|
| 108 |
+
def tzname(self, dt):
|
| 109 |
+
if self.dst(dt):
|
| 110 |
+
return self.dstname
|
| 111 |
+
else:
|
| 112 |
+
return self.stdname
|
| 113 |
+
|
| 114 |
+
def utcoffset(self, dt):
|
| 115 |
+
return self.stdoffset + self.dst(dt)
|
| 116 |
+
|
| 117 |
+
def dst(self, dt):
|
| 118 |
+
if dt is None or dt.tzinfo is None:
|
| 119 |
+
# An exception may be sensible here, in one or both cases.
|
| 120 |
+
# It depends on how you want to treat them. The default
|
| 121 |
+
# fromutc() implementation (called by the default astimezone()
|
| 122 |
+
# implementation) passes a datetime with dt.tzinfo is self.
|
| 123 |
+
return ZERO
|
| 124 |
+
assert dt.tzinfo is self
|
| 125 |
+
|
| 126 |
+
# Find first Sunday in April & the last in October.
|
| 127 |
+
start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year))
|
| 128 |
+
end = first_sunday_on_or_after(DSTEND.replace(year=dt.year))
|
| 129 |
+
|
| 130 |
+
# Can't compare naive to aware objects, so strip the timezone from
|
| 131 |
+
# dt first.
|
| 132 |
+
if start <= dt.replace(tzinfo=None) < end:
|
| 133 |
+
return HOUR
|
| 134 |
+
else:
|
| 135 |
+
return ZERO
|
| 136 |
+
|
| 137 |
+
Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
|
| 138 |
+
Central = USTimeZone(-6, "Central", "CST", "CDT")
|
| 139 |
+
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
|
| 140 |
+
Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
|
deepseek/lib/python3.10/site-packages/pytz/tzfile.py
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''
|
| 2 |
+
$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $
|
| 3 |
+
'''
|
| 4 |
+
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
from struct import unpack, calcsize
|
| 7 |
+
|
| 8 |
+
from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo
|
| 9 |
+
from pytz.tzinfo import memorized_datetime, memorized_timedelta
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def _byte_string(s):
|
| 13 |
+
"""Cast a string or byte string to an ASCII byte string."""
|
| 14 |
+
return s.encode('ASCII')
|
| 15 |
+
|
| 16 |
+
_NULL = _byte_string('\0')
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def _std_string(s):
|
| 20 |
+
"""Cast a string or byte string to an ASCII string."""
|
| 21 |
+
return str(s.decode('ASCII'))
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def build_tzinfo(zone, fp):
|
| 25 |
+
head_fmt = '>4s c 15x 6l'
|
| 26 |
+
head_size = calcsize(head_fmt)
|
| 27 |
+
(magic, format, ttisgmtcnt, ttisstdcnt, leapcnt, timecnt,
|
| 28 |
+
typecnt, charcnt) = unpack(head_fmt, fp.read(head_size))
|
| 29 |
+
|
| 30 |
+
# Make sure it is a tzfile(5) file
|
| 31 |
+
assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic)
|
| 32 |
+
|
| 33 |
+
# Read out the transition times, localtime indices and ttinfo structures.
|
| 34 |
+
data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict(
|
| 35 |
+
timecnt=timecnt, ttinfo='lBB' * typecnt, charcnt=charcnt)
|
| 36 |
+
data_size = calcsize(data_fmt)
|
| 37 |
+
data = unpack(data_fmt, fp.read(data_size))
|
| 38 |
+
|
| 39 |
+
# make sure we unpacked the right number of values
|
| 40 |
+
assert len(data) == 2 * timecnt + 3 * typecnt + 1
|
| 41 |
+
transitions = [memorized_datetime(trans)
|
| 42 |
+
for trans in data[:timecnt]]
|
| 43 |
+
lindexes = list(data[timecnt:2 * timecnt])
|
| 44 |
+
ttinfo_raw = data[2 * timecnt:-1]
|
| 45 |
+
tznames_raw = data[-1]
|
| 46 |
+
del data
|
| 47 |
+
|
| 48 |
+
# Process ttinfo into separate structs
|
| 49 |
+
ttinfo = []
|
| 50 |
+
tznames = {}
|
| 51 |
+
i = 0
|
| 52 |
+
while i < len(ttinfo_raw):
|
| 53 |
+
# have we looked up this timezone name yet?
|
| 54 |
+
tzname_offset = ttinfo_raw[i + 2]
|
| 55 |
+
if tzname_offset not in tznames:
|
| 56 |
+
nul = tznames_raw.find(_NULL, tzname_offset)
|
| 57 |
+
if nul < 0:
|
| 58 |
+
nul = len(tznames_raw)
|
| 59 |
+
tznames[tzname_offset] = _std_string(
|
| 60 |
+
tznames_raw[tzname_offset:nul])
|
| 61 |
+
ttinfo.append((ttinfo_raw[i],
|
| 62 |
+
bool(ttinfo_raw[i + 1]),
|
| 63 |
+
tznames[tzname_offset]))
|
| 64 |
+
i += 3
|
| 65 |
+
|
| 66 |
+
# Now build the timezone object
|
| 67 |
+
if len(ttinfo) == 1 or len(transitions) == 0:
|
| 68 |
+
ttinfo[0][0], ttinfo[0][2]
|
| 69 |
+
cls = type(zone, (StaticTzInfo,), dict(
|
| 70 |
+
zone=zone,
|
| 71 |
+
_utcoffset=memorized_timedelta(ttinfo[0][0]),
|
| 72 |
+
_tzname=ttinfo[0][2]))
|
| 73 |
+
else:
|
| 74 |
+
# Early dates use the first standard time ttinfo
|
| 75 |
+
i = 0
|
| 76 |
+
while ttinfo[i][1]:
|
| 77 |
+
i += 1
|
| 78 |
+
if ttinfo[i] == ttinfo[lindexes[0]]:
|
| 79 |
+
transitions[0] = datetime.min
|
| 80 |
+
else:
|
| 81 |
+
transitions.insert(0, datetime.min)
|
| 82 |
+
lindexes.insert(0, i)
|
| 83 |
+
|
| 84 |
+
# calculate transition info
|
| 85 |
+
transition_info = []
|
| 86 |
+
for i in range(len(transitions)):
|
| 87 |
+
inf = ttinfo[lindexes[i]]
|
| 88 |
+
utcoffset = inf[0]
|
| 89 |
+
if not inf[1]:
|
| 90 |
+
dst = 0
|
| 91 |
+
else:
|
| 92 |
+
for j in range(i - 1, -1, -1):
|
| 93 |
+
prev_inf = ttinfo[lindexes[j]]
|
| 94 |
+
if not prev_inf[1]:
|
| 95 |
+
break
|
| 96 |
+
dst = inf[0] - prev_inf[0] # dst offset
|
| 97 |
+
|
| 98 |
+
# Bad dst? Look further. DST > 24 hours happens when
|
| 99 |
+
# a timzone has moved across the international dateline.
|
| 100 |
+
if dst <= 0 or dst > 3600 * 3:
|
| 101 |
+
for j in range(i + 1, len(transitions)):
|
| 102 |
+
stdinf = ttinfo[lindexes[j]]
|
| 103 |
+
if not stdinf[1]:
|
| 104 |
+
dst = inf[0] - stdinf[0]
|
| 105 |
+
if dst > 0:
|
| 106 |
+
break # Found a useful std time.
|
| 107 |
+
|
| 108 |
+
tzname = inf[2]
|
| 109 |
+
|
| 110 |
+
# Round utcoffset and dst to the nearest minute or the
|
| 111 |
+
# datetime library will complain. Conversions to these timezones
|
| 112 |
+
# might be up to plus or minus 30 seconds out, but it is
|
| 113 |
+
# the best we can do.
|
| 114 |
+
utcoffset = int((utcoffset + 30) // 60) * 60
|
| 115 |
+
dst = int((dst + 30) // 60) * 60
|
| 116 |
+
transition_info.append(memorized_ttinfo(utcoffset, dst, tzname))
|
| 117 |
+
|
| 118 |
+
cls = type(zone, (DstTzInfo,), dict(
|
| 119 |
+
zone=zone,
|
| 120 |
+
_utc_transition_times=transitions,
|
| 121 |
+
_transition_info=transition_info))
|
| 122 |
+
|
| 123 |
+
return cls()
|
| 124 |
+
|
| 125 |
+
if __name__ == '__main__':
|
| 126 |
+
import os.path
|
| 127 |
+
from pprint import pprint
|
| 128 |
+
base = os.path.join(os.path.dirname(__file__), 'zoneinfo')
|
| 129 |
+
tz = build_tzinfo('Australia/Melbourne',
|
| 130 |
+
open(os.path.join(base, 'Australia', 'Melbourne'), 'rb'))
|
| 131 |
+
tz = build_tzinfo('US/Eastern',
|
| 132 |
+
open(os.path.join(base, 'US', 'Eastern'), 'rb'))
|
| 133 |
+
pprint(tz._utc_transition_times)
|
deepseek/lib/python3.10/site-packages/pytz/tzinfo.py
ADDED
|
@@ -0,0 +1,580 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''Base classes and helpers for building zone specific tzinfo classes'''
|
| 2 |
+
|
| 3 |
+
from datetime import datetime, timedelta, tzinfo
|
| 4 |
+
from bisect import bisect_right
|
| 5 |
+
try:
|
| 6 |
+
set
|
| 7 |
+
except NameError:
|
| 8 |
+
from sets import Set as set
|
| 9 |
+
|
| 10 |
+
import pytz
|
| 11 |
+
from pytz.exceptions import AmbiguousTimeError, NonExistentTimeError
|
| 12 |
+
|
| 13 |
+
__all__ = []
|
| 14 |
+
|
| 15 |
+
_timedelta_cache = {}
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def memorized_timedelta(seconds):
|
| 19 |
+
'''Create only one instance of each distinct timedelta'''
|
| 20 |
+
try:
|
| 21 |
+
return _timedelta_cache[seconds]
|
| 22 |
+
except KeyError:
|
| 23 |
+
delta = timedelta(seconds=seconds)
|
| 24 |
+
_timedelta_cache[seconds] = delta
|
| 25 |
+
return delta
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
_epoch = datetime(1970, 1, 1, 0, 0)  # naive datetime at the Unix epoch
_datetime_cache = {0: _epoch}


def memorized_datetime(seconds):
    '''Return a shared, memoized naive datetime for a POSIX timestamp.

    Repeated calls with the same ``seconds`` value hand back the very
    same ``datetime`` object.
    '''
    dt = _datetime_cache.get(seconds)
    if dt is None:
        # Computed with timedelta arithmetic rather than
        # datetime.fromtimestamp(..., tz=...).replace(tzinfo=None),
        # which fails for negative values under Windows (Bug #90096).
        dt = _epoch + timedelta(seconds=seconds)
        _datetime_cache[seconds] = dt
    return dt
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
_ttinfo_cache = {}


def memorized_ttinfo(*args):
    '''Return a shared, memoized transition-info tuple for *args*.

    *args* is ``(utcoffset_seconds, dstoffset_seconds, tzname)``; the
    result is ``(utcoffset, dstoffset, tzname)`` with both offsets as
    memoized timedeltas, and one instance per distinct input tuple.
    '''
    cached = _ttinfo_cache.get(args)
    if cached is None:
        cached = (
            memorized_timedelta(args[0]),
            memorized_timedelta(args[1]),
            args[2],
        )
        _ttinfo_cache[args] = cached
    return cached
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
_notime = memorized_timedelta(0)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def _to_seconds(td):
|
| 65 |
+
'''Convert a timedelta to seconds'''
|
| 66 |
+
return td.seconds + td.days * 24 * 60 * 60
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class BaseTzInfo(tzinfo):
    '''Shared base class for pytz tzinfo implementations.

    Subclasses (StaticTzInfo, DstTzInfo) override the class attributes
    below with zone-specific values.
    '''

    # Overridden in subclass
    _utcoffset = None  # timedelta offset from UTC
    _tzname = None     # timezone abbreviation, e.g. 'EST'
    zone = None        # zone name from the Olson/IANA database

    def __str__(self):
        # The zone name is the canonical string form of a pytz timezone.
        return self.zone
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class StaticTzInfo(BaseTzInfo):
    '''A timezone whose offset from UTC never changes.

    Only a handful of zones qualify; most real locations have altered
    their UTC offset at some point in their history.
    '''

    def fromutc(self, dt):
        '''See datetime.tzinfo.fromutc'''
        attached = dt.tzinfo
        if attached is not None and attached is not self:
            raise ValueError('fromutc: dt.tzinfo is not self')
        return (dt + self._utcoffset).replace(tzinfo=self)

    def utcoffset(self, dt, is_dst=None):
        '''See datetime.tzinfo.utcoffset

        The is_dst argument is accepted purely for interface
        compatibility with DstTzInfo; it has no effect here.
        '''
        return self._utcoffset

    def dst(self, dt, is_dst=None):
        '''See datetime.tzinfo.dst

        The is_dst argument is accepted purely for interface
        compatibility with DstTzInfo; it has no effect here.
        '''
        # A fixed-offset zone never observes daylight saving time.
        return _notime

    def tzname(self, dt, is_dst=None):
        '''See datetime.tzinfo.tzname

        The is_dst argument is accepted purely for interface
        compatibility with DstTzInfo; it has no effect here.
        '''
        return self._tzname

    def localize(self, dt, is_dst=False):
        '''Convert naive time to local time'''
        if dt.tzinfo is None:
            return dt.replace(tzinfo=self)
        raise ValueError('Not naive datetime (tzinfo is already set)')

    def normalize(self, dt, is_dst=False):
        '''Correct the timezone information on the given datetime.

        Normally a no-op, since a StaticTzInfo zone has no ambiguous or
        non-existent wallclock times to repair. When *dt* carries a
        different timezone it is converted into this one, so normalize()
        currently doubles as an inter-zone conversion helper — though
        datetime.astimezone() remains the supported way to convert.
        '''
        if dt.tzinfo is None:
            raise ValueError('Naive time - no tzinfo set')
        if dt.tzinfo is self:
            return dt
        return dt.astimezone(self)

    def __repr__(self):
        return '<StaticTzInfo %r>' % (self.zone,)

    def __reduce__(self):
        # Pickle via the shared factory so the zone stays a singleton
        # and unpickling copes with timezone database changes.
        return pytz._p, (self.zone,)
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
class DstTzInfo(BaseTzInfo):
    '''A timezone that has a variable offset from UTC

    The offset might change if daylight saving time comes into effect,
    or at a point in history when the region decides to change their
    timezone definition.
    '''
    # Overridden in subclass

    # Sorted list of DST transition times, UTC
    _utc_transition_times = None

    # [(utcoffset, dstoffset, tzname)] corresponding to
    # _utc_transition_times entries
    _transition_info = None

    zone = None

    # Set in __init__

    _tzinfos = None
    _dst = None  # DST offset

    def __init__(self, _inf=None, _tzinfos=None):
        # Two construction modes:
        #  - with _inf: build one "localized" instance for a single
        #    (utcoffset, dstoffset, tzname) state, sharing _tzinfos.
        #  - without _inf: build the zone's root instance and populate
        #    _tzinfos with one localized instance per distinct state.
        if _inf:
            self._tzinfos = _tzinfos
            self._utcoffset, self._dst, self._tzname = _inf
        else:
            _tzinfos = {}
            self._tzinfos = _tzinfos
            self._utcoffset, self._dst, self._tzname = (
                self._transition_info[0])
            _tzinfos[self._transition_info[0]] = self
            for inf in self._transition_info[1:]:
                if inf not in _tzinfos:
                    _tzinfos[inf] = self.__class__(inf, _tzinfos)

    def fromutc(self, dt):
        '''See datetime.tzinfo.fromutc'''
        # Accept any tzinfo belonging to this zone's family (sharing
        # _tzinfos), not just self — dt may carry a sibling instance.
        if (dt.tzinfo is not None and
                getattr(dt.tzinfo, '_tzinfos', None) is not self._tzinfos):
            raise ValueError('fromutc: dt.tzinfo is not self')
        dt = dt.replace(tzinfo=None)
        # Locate the transition in effect at the UTC instant dt; the
        # max(0, ...) guards timestamps before the first transition.
        idx = max(0, bisect_right(self._utc_transition_times, dt) - 1)
        inf = self._transition_info[idx]
        return (dt + inf[0]).replace(tzinfo=self._tzinfos[inf])

    def normalize(self, dt):
        '''Correct the timezone information on the given datetime

        If date arithmetic crosses DST boundaries, the tzinfo
        is not magically adjusted. This method normalizes the
        tzinfo to the correct one.

        To test, first we need to do some setup

        >>> from pytz import timezone
        >>> utc = timezone('UTC')
        >>> eastern = timezone('US/Eastern')
        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'

        We next create a datetime right on an end-of-DST transition point,
        the instant when the wallclocks are wound back one hour.

        >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
        >>> loc_dt = utc_dt.astimezone(eastern)
        >>> loc_dt.strftime(fmt)
        '2002-10-27 01:00:00 EST (-0500)'

        Now, if we subtract a few minutes from it, note that the timezone
        information has not changed.

        >>> before = loc_dt - timedelta(minutes=10)
        >>> before.strftime(fmt)
        '2002-10-27 00:50:00 EST (-0500)'

        But we can fix that by calling the normalize method

        >>> before = eastern.normalize(before)
        >>> before.strftime(fmt)
        '2002-10-27 01:50:00 EDT (-0400)'

        The supported method of converting between timezones is to use
        datetime.astimezone(). Currently, normalize() also works:

        >>> th = timezone('Asia/Bangkok')
        >>> am = timezone('Europe/Amsterdam')
        >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3))
        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
        >>> am.normalize(dt).strftime(fmt)
        '2011-05-06 20:02:03 CEST (+0200)'
        '''
        if dt.tzinfo is None:
            raise ValueError('Naive time - no tzinfo set')

        # Convert dt in localtime to UTC
        offset = dt.tzinfo._utcoffset
        dt = dt.replace(tzinfo=None)
        dt = dt - offset
        # convert it back, and return it
        return self.fromutc(dt)

    def localize(self, dt, is_dst=False):
        '''Convert naive time to local time.

        This method should be used to construct localtimes, rather
        than passing a tzinfo argument to a datetime constructor.

        is_dst is used to determine the correct timezone in the ambiguous
        period at the end of daylight saving time.

        >>> from pytz import timezone
        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
        >>> amdam = timezone('Europe/Amsterdam')
        >>> dt = datetime(2004, 10, 31, 2, 0, 0)
        >>> loc_dt1 = amdam.localize(dt, is_dst=True)
        >>> loc_dt2 = amdam.localize(dt, is_dst=False)
        >>> loc_dt1.strftime(fmt)
        '2004-10-31 02:00:00 CEST (+0200)'
        >>> loc_dt2.strftime(fmt)
        '2004-10-31 02:00:00 CET (+0100)'
        >>> str(loc_dt2 - loc_dt1)
        '1:00:00'

        Use is_dst=None to raise an AmbiguousTimeError for ambiguous
        times at the end of daylight saving time

        >>> try:
        ...     loc_dt1 = amdam.localize(dt, is_dst=None)
        ... except AmbiguousTimeError:
        ...     print('Ambiguous')
        Ambiguous

        is_dst defaults to False

        >>> amdam.localize(dt) == amdam.localize(dt, False)
        True

        is_dst is also used to determine the correct timezone in the
        wallclock times jumped over at the start of daylight saving time.

        >>> pacific = timezone('US/Pacific')
        >>> dt = datetime(2008, 3, 9, 2, 0, 0)
        >>> ploc_dt1 = pacific.localize(dt, is_dst=True)
        >>> ploc_dt2 = pacific.localize(dt, is_dst=False)
        >>> ploc_dt1.strftime(fmt)
        '2008-03-09 02:00:00 PDT (-0700)'
        >>> ploc_dt2.strftime(fmt)
        '2008-03-09 02:00:00 PST (-0800)'
        >>> str(ploc_dt2 - ploc_dt1)
        '1:00:00'

        Use is_dst=None to raise a NonExistentTimeError for these skipped
        times.

        >>> try:
        ...     loc_dt1 = pacific.localize(dt, is_dst=None)
        ... except NonExistentTimeError:
        ...     print('Non-existent')
        Non-existent
        '''
        if dt.tzinfo is not None:
            raise ValueError('Not naive datetime (tzinfo is already set)')

        # Find the two best possibilities.
        # Probing a day either side of dt lands the bisect on the
        # transition entries in force on each side of any nearby
        # transition; each candidate that round-trips back to the
        # naive wallclock dt is a valid interpretation.
        possible_loc_dt = set()
        for delta in [timedelta(days=-1), timedelta(days=1)]:
            loc_dt = dt + delta
            idx = max(0, bisect_right(
                self._utc_transition_times, loc_dt) - 1)
            inf = self._transition_info[idx]
            tzinfo = self._tzinfos[inf]
            loc_dt = tzinfo.normalize(dt.replace(tzinfo=tzinfo))
            if loc_dt.replace(tzinfo=None) == dt:
                possible_loc_dt.add(loc_dt)

        if len(possible_loc_dt) == 1:
            return possible_loc_dt.pop()

        # If there are no possibly correct timezones, we are attempting
        # to convert a time that never happened - the time period jumped
        # during the start-of-DST transition period.
        if len(possible_loc_dt) == 0:
            # If we refuse to guess, raise an exception.
            if is_dst is None:
                raise NonExistentTimeError(dt)

            # If we are forcing the pre-DST side of the DST transition, we
            # obtain the correct timezone by winding the clock forward a few
            # hours.
            elif is_dst:
                return self.localize(
                    dt + timedelta(hours=6), is_dst=True) - timedelta(hours=6)

            # If we are forcing the post-DST side of the DST transition, we
            # obtain the correct timezone by winding the clock back.
            else:
                return self.localize(
                    dt - timedelta(hours=6),
                    is_dst=False) + timedelta(hours=6)

        # If we get this far, we have multiple possible timezones - this
        # is an ambiguous case occurring during the end-of-DST transition.

        # If told to be strict, raise an exception since we have an
        # ambiguous case
        if is_dst is None:
            raise AmbiguousTimeError(dt)

        # Filter out the possibilities that don't match the requested
        # is_dst
        filtered_possible_loc_dt = [
            p for p in possible_loc_dt if bool(p.tzinfo._dst) == is_dst
        ]

        # Hopefully we only have one possibility left. Return it.
        if len(filtered_possible_loc_dt) == 1:
            return filtered_possible_loc_dt[0]

        if len(filtered_possible_loc_dt) == 0:
            filtered_possible_loc_dt = list(possible_loc_dt)

        # If we get this far, we are in a weird timezone transition
        # where the clocks have been wound back but is_dst is the same
        # in both (eg. Europe/Warsaw 1915 when they switched to CET).
        # At this point, we just have to guess unless we allow more
        # hints to be passed in (such as the UTC offset or abbreviation),
        # but that is just getting silly.
        #
        # Choose the earliest (by UTC) applicable timezone if is_dst=True
        # Choose the latest (by UTC) applicable timezone if is_dst=False
        # i.e., behave like end-of-DST transition
        dates = {}  # utc -> local
        for local_dt in filtered_possible_loc_dt:
            utc_time = (
                local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset)
            assert utc_time not in dates
            dates[utc_time] = local_dt
        return dates[[min, max][not is_dst](dates)]

    def utcoffset(self, dt, is_dst=None):
        '''See datetime.tzinfo.utcoffset

        The is_dst parameter may be used to remove ambiguity during DST
        transitions.

        >>> from pytz import timezone
        >>> tz = timezone('America/St_Johns')
        >>> ambiguous = datetime(2009, 10, 31, 23, 30)

        >>> str(tz.utcoffset(ambiguous, is_dst=False))
        '-1 day, 20:30:00'

        >>> str(tz.utcoffset(ambiguous, is_dst=True))
        '-1 day, 21:30:00'

        >>> try:
        ...     tz.utcoffset(ambiguous)
        ... except AmbiguousTimeError:
        ...     print('Ambiguous')
        Ambiguous

        '''
        if dt is None:
            return None
        elif dt.tzinfo is not self:
            # dt carries a sibling (or foreign) tzinfo; localize the
            # naive wallclock to resolve which state applies.
            dt = self.localize(dt, is_dst)
            return dt.tzinfo._utcoffset
        else:
            return self._utcoffset

    def dst(self, dt, is_dst=None):
        '''See datetime.tzinfo.dst

        The is_dst parameter may be used to remove ambiguity during DST
        transitions.

        >>> from pytz import timezone
        >>> tz = timezone('America/St_Johns')

        >>> normal = datetime(2009, 9, 1)

        >>> str(tz.dst(normal))
        '1:00:00'
        >>> str(tz.dst(normal, is_dst=False))
        '1:00:00'
        >>> str(tz.dst(normal, is_dst=True))
        '1:00:00'

        >>> ambiguous = datetime(2009, 10, 31, 23, 30)

        >>> str(tz.dst(ambiguous, is_dst=False))
        '0:00:00'
        >>> str(tz.dst(ambiguous, is_dst=True))
        '1:00:00'
        >>> try:
        ...     tz.dst(ambiguous)
        ... except AmbiguousTimeError:
        ...     print('Ambiguous')
        Ambiguous

        '''
        if dt is None:
            return None
        elif dt.tzinfo is not self:
            dt = self.localize(dt, is_dst)
            return dt.tzinfo._dst
        else:
            return self._dst

    def tzname(self, dt, is_dst=None):
        '''See datetime.tzinfo.tzname

        The is_dst parameter may be used to remove ambiguity during DST
        transitions.

        >>> from pytz import timezone
        >>> tz = timezone('America/St_Johns')

        >>> normal = datetime(2009, 9, 1)

        >>> tz.tzname(normal)
        'NDT'
        >>> tz.tzname(normal, is_dst=False)
        'NDT'
        >>> tz.tzname(normal, is_dst=True)
        'NDT'

        >>> ambiguous = datetime(2009, 10, 31, 23, 30)

        >>> tz.tzname(ambiguous, is_dst=False)
        'NST'
        >>> tz.tzname(ambiguous, is_dst=True)
        'NDT'
        >>> try:
        ...     tz.tzname(ambiguous)
        ... except AmbiguousTimeError:
        ...     print('Ambiguous')
        Ambiguous
        '''
        if dt is None:
            return self.zone
        elif dt.tzinfo is not self:
            dt = self.localize(dt, is_dst)
            return dt.tzinfo._tzname
        else:
            return self._tzname

    def __repr__(self):
        if self._dst:
            dst = 'DST'
        else:
            dst = 'STD'
        # Positive offsets need an explicit '+' sign; negative timedeltas
        # already render with a leading '-'.
        if self._utcoffset > _notime:
            return '<DstTzInfo %r %s+%s %s>' % (
                self.zone, self._tzname, self._utcoffset, dst
            )
        else:
            return '<DstTzInfo %r %s%s %s>' % (
                self.zone, self._tzname, self._utcoffset, dst
            )

    def __reduce__(self):
        # Special pickle to zone remains a singleton and to cope with
        # database changes.
        return pytz._p, (
            self.zone,
            _to_seconds(self._utcoffset),
            _to_seconds(self._dst),
            self._tzname
        )
|
| 530 |
+
|
| 531 |
+
|
| 532 |
+
def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None):
    """Factory function for unpickling pytz tzinfo instances.

    Shared by both StaticTzInfo and DstTzInfo pickles, because database
    changes may move a zone's implementation between those two base
    classes and existing pickles must keep working across pytz upgrades.
    """
    # A KeyError here would mean the zone vanished from the database,
    # which should never happen and would be a bug.
    tz = pytz.timezone(zone)

    # StaticTzInfo pickles carry no extra state - the singleton is it.
    if utcoffset is None:
        return tz

    # DstTzInfo pickle. Select which localized tzinfo instance of this
    # zone restores the state of the pickled datetime correctly.
    utcoffset = memorized_timedelta(utcoffset)
    dstoffset = memorized_timedelta(dstoffset)
    key = (utcoffset, dstoffset, tzname)
    localized = tz._tzinfos.get(key)
    if localized is not None:
        return localized

    # The exact pickled state no longer exists in this zone - either a
    # corrupt pickle, a violent database correction, or a changed
    # abbreviation. Look for an entry differing only by tzname, since
    # database maintainers do revise initial-guess abbreviations once
    # the real ones are discovered.
    for candidate in tz._tzinfos.values():
        if (candidate._utcoffset == utcoffset and
                candidate._dst == dstoffset):
            return candidate

    # This (utcoffset, dstoffset) pair was removed from the zone
    # outright, presumably via a database correction. Recreate it so
    # datetimes pickled with the old (incorrect) information keep
    # behaving exactly as they did before pickling. Purely an overly
    # paranoid safety net - unlikely to ever be needed in real life.
    tz._tzinfos[key] = tz.__class__(key, tz._tzinfos)
    return tz._tzinfos[key]
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Cuba
ADDED
|
Binary file (2.42 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/EST
ADDED
|
Binary file (182 Bytes). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/EST5EDT
ADDED
|
Binary file (3.55 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Egypt
ADDED
|
Binary file (2.4 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/GMT-0
ADDED
|
Binary file (114 Bytes). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Jamaica
ADDED
|
Binary file (482 Bytes). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Japan
ADDED
|
Binary file (309 Bytes). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Kwajalein
ADDED
|
Binary file (302 Bytes). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/MST7MDT
ADDED
|
Binary file (2.46 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/NZ
ADDED
|
Binary file (2.44 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/Portugal
ADDED
|
Binary file (3.53 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/ROC
ADDED
|
Binary file (761 Bytes). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/UCT
ADDED
|
Binary file (114 Bytes). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/W-SU
ADDED
|
Binary file (1.54 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/WET
ADDED
|
Binary file (3.53 kB). View file
|
|
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/leapseconds
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Allowance for leap seconds added to each time zone file.
|
| 2 |
+
|
| 3 |
+
# This file is in the public domain.
|
| 4 |
+
|
| 5 |
+
# This file is generated automatically from the data in the public-domain
|
| 6 |
+
# NIST/IERS format leap-seconds.list file, which can be copied from
|
| 7 |
+
# <https://hpiers.obspm.fr/iers/bul/bulc/ntp/leap-seconds.list>
|
| 8 |
+
# or, in a variant with different comments, from
|
| 9 |
+
# <ftp://ftp.boulder.nist.gov/pub/time/leap-seconds.list>.
|
| 10 |
+
# For more about leap-seconds.list, please see
|
| 11 |
+
# The NTP Timescale and Leap Seconds
|
| 12 |
+
# <https://www.eecis.udel.edu/~mills/leap.html>.
|
| 13 |
+
|
| 14 |
+
# The rules for leap seconds are specified in Annex 1 (Time scales) of:
|
| 15 |
+
# Standard-frequency and time-signal emissions.
|
| 16 |
+
# International Telecommunication Union - Radiocommunication Sector
|
| 17 |
+
# (ITU-R) Recommendation TF.460-6 (02/2002)
|
| 18 |
+
# <https://www.itu.int/rec/R-REC-TF.460-6-200202-I/>.
|
| 19 |
+
# The International Earth Rotation and Reference Systems Service (IERS)
|
| 20 |
+
# periodically uses leap seconds to keep UTC to within 0.9 s of UT1
|
| 21 |
+
# (a proxy for Earth's angle in space as measured by astronomers)
|
| 22 |
+
# and publishes leap second data in a copyrighted file
|
| 23 |
+
# <https://hpiers.obspm.fr/iers/bul/bulc/Leap_Second.dat>.
|
| 24 |
+
# See: Levine J. Coordinated Universal Time and the leap second.
|
| 25 |
+
# URSI Radio Sci Bull. 2016;89(4):30-6. doi:10.23919/URSIRSB.2016.7909995
|
| 26 |
+
# <https://ieeexplore.ieee.org/document/7909995>.
|
| 27 |
+
|
| 28 |
+
# There were no leap seconds before 1972, as no official mechanism
|
| 29 |
+
# accounted for the discrepancy between atomic time (TAI) and the earth's
|
| 30 |
+
# rotation. The first ("1 Jan 1972") data line in leap-seconds.list
|
| 31 |
+
# does not denote a leap second; it denotes the start of the current definition
|
| 32 |
+
# of UTC.
|
| 33 |
+
|
| 34 |
+
# All leap-seconds are Stationary (S) at the given UTC time.
|
| 35 |
+
# The correction (+ or -) is made at the given time, so in the unlikely
|
| 36 |
+
# event of a negative leap second, a line would look like this:
|
| 37 |
+
# Leap YEAR MON DAY 23:59:59 - S
|
| 38 |
+
# Typical lines look like this:
|
| 39 |
+
# Leap YEAR MON DAY 23:59:60 + S
|
| 40 |
+
Leap 1972 Jun 30 23:59:60 + S
|
| 41 |
+
Leap 1972 Dec 31 23:59:60 + S
|
| 42 |
+
Leap 1973 Dec 31 23:59:60 + S
|
| 43 |
+
Leap 1974 Dec 31 23:59:60 + S
|
| 44 |
+
Leap 1975 Dec 31 23:59:60 + S
|
| 45 |
+
Leap 1976 Dec 31 23:59:60 + S
|
| 46 |
+
Leap 1977 Dec 31 23:59:60 + S
|
| 47 |
+
Leap 1978 Dec 31 23:59:60 + S
|
| 48 |
+
Leap 1979 Dec 31 23:59:60 + S
|
| 49 |
+
Leap 1981 Jun 30 23:59:60 + S
|
| 50 |
+
Leap 1982 Jun 30 23:59:60 + S
|
| 51 |
+
Leap 1983 Jun 30 23:59:60 + S
|
| 52 |
+
Leap 1985 Jun 30 23:59:60 + S
|
| 53 |
+
Leap 1987 Dec 31 23:59:60 + S
|
| 54 |
+
Leap 1989 Dec 31 23:59:60 + S
|
| 55 |
+
Leap 1990 Dec 31 23:59:60 + S
|
| 56 |
+
Leap 1992 Jun 30 23:59:60 + S
|
| 57 |
+
Leap 1993 Jun 30 23:59:60 + S
|
| 58 |
+
Leap 1994 Jun 30 23:59:60 + S
|
| 59 |
+
Leap 1995 Dec 31 23:59:60 + S
|
| 60 |
+
Leap 1997 Jun 30 23:59:60 + S
|
| 61 |
+
Leap 1998 Dec 31 23:59:60 + S
|
| 62 |
+
Leap 2005 Dec 31 23:59:60 + S
|
| 63 |
+
Leap 2008 Dec 31 23:59:60 + S
|
| 64 |
+
Leap 2012 Jun 30 23:59:60 + S
|
| 65 |
+
Leap 2015 Jun 30 23:59:60 + S
|
| 66 |
+
Leap 2016 Dec 31 23:59:60 + S
|
| 67 |
+
|
| 68 |
+
# UTC timestamp when this leap second list expires.
|
| 69 |
+
# Any additional leap seconds will come after this.
|
| 70 |
+
# This Expires line is commented out for now,
|
| 71 |
+
# so that pre-2020a zic implementations do not reject this file.
|
| 72 |
+
#Expires 2025 Jun 28 00:00:00
|
| 73 |
+
|
| 74 |
+
# POSIX timestamps for the data in this file:
|
| 75 |
+
#updated 1720104763 (2024-07-04 14:52:43 UTC)
|
| 76 |
+
#expires 1751068800 (2025-06-28 00:00:00 UTC)
|
| 77 |
+
|
| 78 |
+
# Updated through IERS Bulletin C (https://hpiers.obspm.fr/iers/bul/bulc/bulletinc.dat)
|
| 79 |
+
# File expires on 28 June 2025
|
deepseek/lib/python3.10/site-packages/pytz/zoneinfo/zonenow.tab
ADDED
|
@@ -0,0 +1,299 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# tzdb timezone descriptions, for users who do not care about old timestamps
|
| 2 |
+
#
|
| 3 |
+
# This file is in the public domain.
|
| 4 |
+
#
|
| 5 |
+
# From Paul Eggert (2023-12-18):
|
| 6 |
+
# This file contains a table where each row stands for a timezone
|
| 7 |
+
# where civil timestamps are predicted to agree from now on.
|
| 8 |
+
# This file is like zone1970.tab (see zone1970.tab's comments),
|
| 9 |
+
# but with the following changes:
|
| 10 |
+
#
|
| 11 |
+
# 1. Each timezone corresponds to a set of clocks that are planned
|
| 12 |
+
# to agree from now on. This is a larger set of clocks than in
|
| 13 |
+
# zone1970.tab, where each timezone's clocks must agree from 1970 on.
|
| 14 |
+
# 2. The first column is irrelevant and ignored.
|
| 15 |
+
# 3. The table is sorted in a different way:
|
| 16 |
+
# first by standard time UTC offset;
|
| 17 |
+
# then, if DST is used, by daylight saving UTC offset;
|
| 18 |
+
# then by time zone abbreviation.
|
| 19 |
+
# 4. Every timezone has a nonempty comments column, with wording
|
| 20 |
+
# distinguishing the timezone only from other timezones with the
|
| 21 |
+
# same UTC offset at some point during the year.
|
| 22 |
+
#
|
| 23 |
+
# The format of this table is experimental, and may change in future versions.
|
| 24 |
+
#
|
| 25 |
+
# This table is intended as an aid for users, to help them select timezones
|
| 26 |
+
# appropriate for their practical needs. It is not intended to take or
|
| 27 |
+
# endorse any position on legal or territorial claims.
|
| 28 |
+
#
|
| 29 |
+
#XX coordinates TZ comments
|
| 30 |
+
#
|
| 31 |
+
# -11 - SST
|
| 32 |
+
XX -1416-17042 Pacific/Pago_Pago Midway; Samoa ("SST")
|
| 33 |
+
#
|
| 34 |
+
# -11
|
| 35 |
+
XX -1901-16955 Pacific/Niue Niue
|
| 36 |
+
#
|
| 37 |
+
# -10 - HST
|
| 38 |
+
XX +211825-1575130 Pacific/Honolulu Hawaii ("HST")
|
| 39 |
+
#
|
| 40 |
+
# -10
|
| 41 |
+
XX -1732-14934 Pacific/Tahiti Tahiti; Cook Islands
|
| 42 |
+
#
|
| 43 |
+
# -10/-09 - HST / HDT (North America DST)
|
| 44 |
+
XX +515248-1763929 America/Adak western Aleutians in Alaska ("HST/HDT")
|
| 45 |
+
#
|
| 46 |
+
# -09:30
|
| 47 |
+
XX -0900-13930 Pacific/Marquesas Marquesas
|
| 48 |
+
#
|
| 49 |
+
# -09
|
| 50 |
+
XX -2308-13457 Pacific/Gambier Gambier
|
| 51 |
+
#
|
| 52 |
+
# -09/-08 - AKST/AKDT (North America DST)
|
| 53 |
+
XX +611305-1495401 America/Anchorage most of Alaska ("AKST/AKDT")
|
| 54 |
+
#
|
| 55 |
+
# -08
|
| 56 |
+
XX -2504-13005 Pacific/Pitcairn Pitcairn
|
| 57 |
+
#
|
| 58 |
+
# -08/-07 - PST/PDT (North America DST)
|
| 59 |
+
XX +340308-1181434 America/Los_Angeles Pacific ("PST/PDT") - US & Canada; Mexico near US border
|
| 60 |
+
#
|
| 61 |
+
# -07 - MST
|
| 62 |
+
XX +332654-1120424 America/Phoenix Mountain Standard ("MST") - Arizona; western Mexico; Yukon
|
| 63 |
+
#
|
| 64 |
+
# -07/-06 - MST/MDT (North America DST)
|
| 65 |
+
XX +394421-1045903 America/Denver Mountain ("MST/MDT") - US & Canada; Mexico near US border
|
| 66 |
+
#
|
| 67 |
+
# -06
|
| 68 |
+
XX -0054-08936 Pacific/Galapagos Galápagos
|
| 69 |
+
#
|
| 70 |
+
# -06 - CST
|
| 71 |
+
XX +1924-09909 America/Mexico_City Central Standard ("CST") - Saskatchewan; central Mexico; Central America
|
| 72 |
+
#
|
| 73 |
+
# -06/-05 (Chile DST)
|
| 74 |
+
XX -2709-10926 Pacific/Easter Easter Island
|
| 75 |
+
#
|
| 76 |
+
# -06/-05 - CST/CDT (North America DST)
|
| 77 |
+
XX +415100-0873900 America/Chicago Central ("CST/CDT") - US & Canada; Mexico near US border
|
| 78 |
+
#
|
| 79 |
+
# -05
|
| 80 |
+
XX -1203-07703 America/Lima eastern South America
|
| 81 |
+
#
|
| 82 |
+
# -05 - EST
|
| 83 |
+
XX +175805-0764736 America/Jamaica Eastern Standard ("EST") - Caymans; Jamaica; eastern Mexico; Panama
|
| 84 |
+
#
|
| 85 |
+
# -05/-04 - CST/CDT (Cuba DST)
|
| 86 |
+
XX +2308-08222 America/Havana Cuba
|
| 87 |
+
#
|
| 88 |
+
# -05/-04 - EST/EDT (North America DST)
|
| 89 |
+
XX +404251-0740023 America/New_York Eastern ("EST/EDT") - US & Canada
|
| 90 |
+
#
|
| 91 |
+
# -04
|
| 92 |
+
XX +1030-06656 America/Caracas western South America
|
| 93 |
+
#
|
| 94 |
+
# -04 - AST
|
| 95 |
+
XX +1828-06954 America/Santo_Domingo Atlantic Standard ("AST") - eastern Caribbean
|
| 96 |
+
#
|
| 97 |
+
# -04/-03 (Chile DST)
|
| 98 |
+
XX -3327-07040 America/Santiago most of Chile
|
| 99 |
+
#
|
| 100 |
+
# -04/-03 (Paraguay DST)
|
| 101 |
+
XX -2516-05740 America/Asuncion Paraguay
|
| 102 |
+
#
|
| 103 |
+
# -04/-03 - AST/ADT (North America DST)
|
| 104 |
+
XX +4439-06336 America/Halifax Atlantic ("AST/ADT") - Canada; Bermuda
|
| 105 |
+
#
|
| 106 |
+
# -03:30/-02:30 - NST/NDT (North America DST)
|
| 107 |
+
XX +4734-05243 America/St_Johns Newfoundland ("NST/NDT")
|
| 108 |
+
#
|
| 109 |
+
# -03
|
| 110 |
+
XX -2332-04637 America/Sao_Paulo eastern South America
|
| 111 |
+
#
|
| 112 |
+
# -03/-02 (North America DST)
|
| 113 |
+
XX +4703-05620 America/Miquelon St Pierre & Miquelon
|
| 114 |
+
#
|
| 115 |
+
# -02
|
| 116 |
+
XX -0351-03225 America/Noronha Fernando de Noronha; South Georgia
|
| 117 |
+
#
|
| 118 |
+
# -02/-01 (EU DST)
|
| 119 |
+
XX +6411-05144 America/Nuuk most of Greenland
|
| 120 |
+
#
|
| 121 |
+
# -01
|
| 122 |
+
XX +1455-02331 Atlantic/Cape_Verde Cape Verde
|
| 123 |
+
#
|
| 124 |
+
# -01/+00 (EU DST)
|
| 125 |
+
XX +3744-02540 Atlantic/Azores Azores
|
| 126 |
+
#
|
| 127 |
+
# +00 - GMT
|
| 128 |
+
XX +0519-00402 Africa/Abidjan far western Africa; Iceland ("GMT")
|
| 129 |
+
#
|
| 130 |
+
# +00/+01 - GMT/BST (EU DST)
|
| 131 |
+
XX +513030-0000731 Europe/London United Kingdom ("GMT/BST")
|
| 132 |
+
#
|
| 133 |
+
# +00/+01 - WET/WEST (EU DST)
|
| 134 |
+
XX +3843-00908 Europe/Lisbon western Europe ("WET/WEST")
|
| 135 |
+
#
|
| 136 |
+
# +00/+02 - Troll DST
|
| 137 |
+
XX -720041+0023206 Antarctica/Troll Troll Station in Antarctica
|
| 138 |
+
#
|
| 139 |
+
# +01 - CET
|
| 140 |
+
XX +3647+00303 Africa/Algiers Algeria, Tunisia ("CET")
|
| 141 |
+
#
|
| 142 |
+
# +01 - WAT
|
| 143 |
+
XX +0627+00324 Africa/Lagos western Africa ("WAT")
|
| 144 |
+
#
|
| 145 |
+
# +01/+00 - IST/GMT (EU DST in reverse)
|
| 146 |
+
XX +5320-00615 Europe/Dublin Ireland ("IST/GMT")
|
| 147 |
+
#
|
| 148 |
+
# +01/+00 - (Morocco DST)
|
| 149 |
+
XX +3339-00735 Africa/Casablanca Morocco
|
| 150 |
+
#
|
| 151 |
+
# +01/+02 - CET/CEST (EU DST)
|
| 152 |
+
XX +4852+00220 Europe/Paris central Europe ("CET/CEST")
|
| 153 |
+
#
|
| 154 |
+
# +02 - CAT
|
| 155 |
+
XX -2558+03235 Africa/Maputo central Africa ("CAT")
|
| 156 |
+
#
|
| 157 |
+
# +02 - EET
|
| 158 |
+
XX +3254+01311 Africa/Tripoli Libya; Kaliningrad ("EET")
|
| 159 |
+
#
|
| 160 |
+
# +02 - SAST
|
| 161 |
+
XX -2615+02800 Africa/Johannesburg southern Africa ("SAST")
|
| 162 |
+
#
|
| 163 |
+
# +02/+03 - EET/EEST (EU DST)
|
| 164 |
+
XX +3758+02343 Europe/Athens eastern Europe ("EET/EEST")
|
| 165 |
+
#
|
| 166 |
+
# +02/+03 - EET/EEST (Egypt DST)
|
| 167 |
+
XX +3003+03115 Africa/Cairo Egypt
|
| 168 |
+
#
|
| 169 |
+
# +02/+03 - EET/EEST (Lebanon DST)
|
| 170 |
+
XX +3353+03530 Asia/Beirut Lebanon
|
| 171 |
+
#
|
| 172 |
+
# +02/+03 - EET/EEST (Moldova DST)
|
| 173 |
+
XX +4700+02850 Europe/Chisinau Moldova
|
| 174 |
+
#
|
| 175 |
+
# +02/+03 - EET/EEST (Palestine DST)
|
| 176 |
+
XX +3130+03428 Asia/Gaza Palestine
|
| 177 |
+
#
|
| 178 |
+
# +02/+03 - IST/IDT (Israel DST)
|
| 179 |
+
XX +314650+0351326 Asia/Jerusalem Israel
|
| 180 |
+
#
|
| 181 |
+
# +03
|
| 182 |
+
XX +4101+02858 Europe/Istanbul Near East; Belarus
|
| 183 |
+
#
|
| 184 |
+
# +03 - EAT
|
| 185 |
+
XX -0117+03649 Africa/Nairobi eastern Africa ("EAT")
|
| 186 |
+
#
|
| 187 |
+
# +03 - MSK
|
| 188 |
+
XX +554521+0373704 Europe/Moscow Moscow ("MSK")
|
| 189 |
+
#
|
| 190 |
+
# +03:30
|
| 191 |
+
XX +3540+05126 Asia/Tehran Iran
|
| 192 |
+
#
|
| 193 |
+
# +04
|
| 194 |
+
XX +2518+05518 Asia/Dubai Russia; Caucasus; Persian Gulf; Seychelles; Réunion
|
| 195 |
+
#
|
| 196 |
+
# +04:30
|
| 197 |
+
XX +3431+06912 Asia/Kabul Afghanistan
|
| 198 |
+
#
|
| 199 |
+
# +05
|
| 200 |
+
XX +4120+06918 Asia/Tashkent Russia; Kazakhstan; Tajikistan; Turkmenistan; Uzbekistan; Maldives
|
| 201 |
+
#
|
| 202 |
+
# +05 - PKT
|
| 203 |
+
XX +2452+06703 Asia/Karachi Pakistan ("PKT")
|
| 204 |
+
#
|
| 205 |
+
# +05:30
|
| 206 |
+
XX +0656+07951 Asia/Colombo Sri Lanka
|
| 207 |
+
#
|
| 208 |
+
# +05:30 - IST
|
| 209 |
+
XX +2232+08822 Asia/Kolkata India ("IST")
|
| 210 |
+
#
|
| 211 |
+
# +05:45
|
| 212 |
+
XX +2743+08519 Asia/Kathmandu Nepal
|
| 213 |
+
#
|
| 214 |
+
# +06
|
| 215 |
+
XX +2343+09025 Asia/Dhaka Russia; Kyrgyzstan; Bhutan; Bangladesh; Chagos
|
| 216 |
+
#
|
| 217 |
+
# +06:30
|
| 218 |
+
XX +1647+09610 Asia/Yangon Myanmar; Cocos
|
| 219 |
+
#
|
| 220 |
+
# +07
|
| 221 |
+
XX +1345+10031 Asia/Bangkok Russia; Indochina; Christmas Island
|
| 222 |
+
#
|
| 223 |
+
# +07 - WIB
|
| 224 |
+
XX -0610+10648 Asia/Jakarta Indonesia ("WIB")
|
| 225 |
+
#
|
| 226 |
+
# +08
|
| 227 |
+
XX +0117+10351 Asia/Singapore Russia; Brunei; Malaysia; Singapore
|
| 228 |
+
#
|
| 229 |
+
# +08 - AWST
|
| 230 |
+
XX -3157+11551 Australia/Perth Western Australia ("AWST")
|
| 231 |
+
#
|
| 232 |
+
# +08 - CST
|
| 233 |
+
XX +3114+12128 Asia/Shanghai China ("CST")
|
| 234 |
+
#
|
| 235 |
+
# +08 - HKT
|
| 236 |
+
XX +2217+11409 Asia/Hong_Kong Hong Kong ("HKT")
|
| 237 |
+
#
|
| 238 |
+
# +08 - PHT
|
| 239 |
+
XX +1435+12100 Asia/Manila Philippines ("PHT")
|
| 240 |
+
#
|
| 241 |
+
# +08 - WITA
|
| 242 |
+
XX -0507+11924 Asia/Makassar Indonesia ("WITA")
|
| 243 |
+
#
|
| 244 |
+
# +08:45
|
| 245 |
+
XX -3143+12852 Australia/Eucla Eucla
|
| 246 |
+
#
|
| 247 |
+
# +09
|
| 248 |
+
XX +5203+11328 Asia/Chita Russia; Palau; East Timor
|
| 249 |
+
#
|
| 250 |
+
# +09 - JST
|
| 251 |
+
XX +353916+1394441 Asia/Tokyo Japan ("JST")
|
| 252 |
+
#
|
| 253 |
+
# +09 - KST
|
| 254 |
+
XX +3733+12658 Asia/Seoul Korea ("KST")
|
| 255 |
+
#
|
| 256 |
+
# +09 - WIT
|
| 257 |
+
XX -0232+14042 Asia/Jayapura Indonesia ("WIT")
|
| 258 |
+
#
|
| 259 |
+
# +09:30 - ACST
|
| 260 |
+
XX -1228+13050 Australia/Darwin Northern Territory ("ACST")
|
| 261 |
+
#
|
| 262 |
+
# +09:30/+10:30 - ACST/ACDT (Australia DST)
|
| 263 |
+
XX -3455+13835 Australia/Adelaide South Australia ("ACST/ACDT")
|
| 264 |
+
#
|
| 265 |
+
# +10
|
| 266 |
+
XX +4310+13156 Asia/Vladivostok Russia; Yap; Chuuk; Papua New Guinea; Dumont d'Urville
|
| 267 |
+
#
|
| 268 |
+
# +10 - AEST
|
| 269 |
+
XX -2728+15302 Australia/Brisbane Queensland ("AEST")
|
| 270 |
+
#
|
| 271 |
+
# +10 - ChST
|
| 272 |
+
XX +1328+14445 Pacific/Guam Mariana Islands ("ChST")
|
| 273 |
+
#
|
| 274 |
+
# +10/+11 - AEST/AEDT (Australia DST)
|
| 275 |
+
XX -3352+15113 Australia/Sydney southeast Australia ("AEST/AEDT")
|
| 276 |
+
#
|
| 277 |
+
# +10:30/+11
|
| 278 |
+
XX -3133+15905 Australia/Lord_Howe Lord Howe Island
|
| 279 |
+
#
|
| 280 |
+
# +11
|
| 281 |
+
XX -0613+15534 Pacific/Bougainville Russia; Kosrae; Bougainville; Solomons
|
| 282 |
+
#
|
| 283 |
+
# +11/+12 (Australia DST)
|
| 284 |
+
XX -2903+16758 Pacific/Norfolk Norfolk Island
|
| 285 |
+
#
|
| 286 |
+
# +12
|
| 287 |
+
XX +5301+15839 Asia/Kamchatka Russia; Tuvalu; Fiji; etc.
|
| 288 |
+
#
|
| 289 |
+
# +12/+13 (New Zealand DST)
|
| 290 |
+
XX -3652+17446 Pacific/Auckland New Zealand ("NZST/NZDT")
|
| 291 |
+
#
|
| 292 |
+
# +12:45/+13:45 (Chatham DST)
|
| 293 |
+
XX -4357-17633 Pacific/Chatham Chatham Islands
|
| 294 |
+
#
|
| 295 |
+
# +13
|
| 296 |
+
XX -210800-1751200 Pacific/Tongatapu Kanton; Tokelau; Samoa (western); Tonga
|
| 297 |
+
#
|
| 298 |
+
# +14
|
| 299 |
+
XX +0152-15720 Pacific/Kiritimati Kiritimati
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/__init__.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# We are exposing all subpackages to the end-user.
|
| 2 |
+
# Because of possible inter-dependency, we want to avoid
|
| 3 |
+
# the cyclic imports, thus implementing lazy version
|
| 4 |
+
# as per https://peps.python.org/pep-0562/
|
| 5 |
+
|
| 6 |
+
import importlib
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"intrinsic",
|
| 10 |
+
"qat",
|
| 11 |
+
"quantizable",
|
| 12 |
+
"quantized",
|
| 13 |
+
"sparse",
|
| 14 |
+
]
|
| 15 |
+
|
| 16 |
+
def __getattr__(name):
    """Lazily import the requested subpackage on first attribute access.

    Implements the module-level ``__getattr__`` hook from PEP 562 so that
    subpackages listed in ``__all__`` are only imported when actually used,
    avoiding cyclic imports between them.
    """
    if name not in __all__:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
    return importlib.import_module(f".{name}", __name__)
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/__init__.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from . import functional
|
| 2 |
+
from .modules import * # noqa: F403
|
| 3 |
+
from .modules import MaxPool2d
|
| 4 |
+
|
| 5 |
+
__all__ = [
|
| 6 |
+
'BatchNorm2d',
|
| 7 |
+
'BatchNorm3d',
|
| 8 |
+
'Conv1d',
|
| 9 |
+
'Conv2d',
|
| 10 |
+
'Conv3d',
|
| 11 |
+
'ConvTranspose1d',
|
| 12 |
+
'ConvTranspose2d',
|
| 13 |
+
'ConvTranspose3d',
|
| 14 |
+
'DeQuantize',
|
| 15 |
+
'ELU',
|
| 16 |
+
'Embedding',
|
| 17 |
+
'EmbeddingBag',
|
| 18 |
+
'GroupNorm',
|
| 19 |
+
'Hardswish',
|
| 20 |
+
'InstanceNorm1d',
|
| 21 |
+
'InstanceNorm2d',
|
| 22 |
+
'InstanceNorm3d',
|
| 23 |
+
'LayerNorm',
|
| 24 |
+
'LeakyReLU',
|
| 25 |
+
'Linear',
|
| 26 |
+
'LSTM',
|
| 27 |
+
'MultiheadAttention',
|
| 28 |
+
'Quantize',
|
| 29 |
+
'ReLU6',
|
| 30 |
+
'Sigmoid',
|
| 31 |
+
'Softmax',
|
| 32 |
+
'Dropout',
|
| 33 |
+
'PReLU',
|
| 34 |
+
# Wrapper modules
|
| 35 |
+
'FloatFunctional',
|
| 36 |
+
'FXFloatFunctional',
|
| 37 |
+
'QFunctional',
|
| 38 |
+
]
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/functional.py
ADDED
|
@@ -0,0 +1,644 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r""" Functional interface (quantized)."""
|
| 2 |
+
from typing import List, Optional
|
| 3 |
+
import warnings
|
| 4 |
+
|
| 5 |
+
import torch
|
| 6 |
+
from torch import Tensor
|
| 7 |
+
from torch.nn.modules.utils import _pair, _triple
|
| 8 |
+
from torch.jit.annotations import BroadcastingList2
|
| 9 |
+
|
| 10 |
+
from .modules.utils import _pair_from_first
|
| 11 |
+
|
| 12 |
+
# Although some of the functions and docstrings are mirrored from the torch.nn,
|
| 13 |
+
# we want to have them here for future changes.
|
| 14 |
+
|
| 15 |
+
__all__ = [
|
| 16 |
+
"avg_pool2d",
|
| 17 |
+
"avg_pool3d",
|
| 18 |
+
"adaptive_avg_pool2d",
|
| 19 |
+
"adaptive_avg_pool3d",
|
| 20 |
+
"conv1d",
|
| 21 |
+
"conv2d",
|
| 22 |
+
"conv3d",
|
| 23 |
+
"interpolate",
|
| 24 |
+
"linear",
|
| 25 |
+
"max_pool1d",
|
| 26 |
+
"max_pool2d",
|
| 27 |
+
"celu",
|
| 28 |
+
"leaky_relu",
|
| 29 |
+
"hardtanh",
|
| 30 |
+
"hardswish",
|
| 31 |
+
"threshold",
|
| 32 |
+
"elu",
|
| 33 |
+
"hardsigmoid",
|
| 34 |
+
"clamp",
|
| 35 |
+
"upsample",
|
| 36 |
+
"upsample_bilinear",
|
| 37 |
+
"upsample_nearest",
|
| 38 |
+
]
|
| 39 |
+
|
| 40 |
+
def avg_pool2d(input, kernel_size, stride=None, padding=0, ceil_mode=False,
               count_include_pad=True, divisor_override=None):
    r"""Apply a 2D average-pooling operation to a quantized input tensor.

    Pools :math:`kH \times kW` regions with step size :math:`sH \times sW`;
    the number of output features equals the number of input planes.

    .. note:: The input quantization parameters propagate to the output.

    See :class:`~torch.ao.nn.quantized.AvgPool2d` for details and output shape.

    Args:
        input: quantized input tensor :math:`(\text{minibatch} , \text{in\_channels} , iH , iW)`
        kernel_size: size of the pooling region, a single number or a
            tuple `(kH, kW)`
        stride: stride of the pooling operation, a single number or a
            tuple `(sH, sW)`. Default: :attr:`kernel_size`
        padding: implicit zero padding on both sides of the input, a single
            number or a tuple `(padH, padW)`. Default: 0
        ceil_mode: when True, use `ceil` instead of `floor` in the formula
            that computes the output shape. Default: ``False``
        count_include_pad: when True, include the zero padding in the
            averaging calculation. Default: ``True``
        divisor_override: if specified, used as the divisor instead of the
            pooling region size. Default: None
    """
    if not input.is_quantized:
        raise ValueError("Input to 'quantized.avg_pool2d' must be quantized!")
    # torch.nn.functional.avg_pool2d dispatches to the quantized kernel for
    # quantized tensors, so we simply delegate after the guard above.
    return torch.nn.functional.avg_pool2d(
        input, kernel_size, stride, padding,
        ceil_mode, count_include_pad, divisor_override)
|
| 71 |
+
|
| 72 |
+
def avg_pool3d(input, kernel_size, stride=None, padding=0, ceil_mode=False,
               count_include_pad=True, divisor_override=None):
    r"""
    Applies 3D average-pooling operation in :math:`kD \times kH \times kW` regions by step size
    :math:`sD \times sH \times sW` steps. The number of output features is equal to the number of
    input planes.

    .. note:: The input quantization parameters propagate to the output.

    Args:
        input: quantized input tensor :math:`(\text{minibatch} , \text{in\_channels} , iD , iH , iW)`
        kernel_size: size of the pooling region. Can be a single number or a
          tuple `(kD, kH, kW)`
        stride: stride of the pooling operation. Can be a single number or a
          tuple `(sD, sH, sW)`. Default: :attr:`kernel_size`
        padding: implicit zero paddings on both sides of the input. Can be a
          single number or a tuple `(padD, padH, padW)`. Default: 0
        ceil_mode: when True, will use `ceil` instead of `floor` in the formula
            to compute the output shape. Default: ``False``
        count_include_pad: when True, will include the zero-padding in the
            averaging calculation. Default: ``True``
        divisor_override: if specified, it will be used as divisor, otherwise
            size of the pooling region will be used. Default: None
    """
    if not input.is_quantized:
        raise ValueError("Input to 'quantized.avg_pool3d' must be quantized!")
    # The regular functional op dispatches to the quantized kernel for
    # quantized tensors.
    return torch.nn.functional.avg_pool3d(input, kernel_size, stride, padding,
                                          ceil_mode, count_include_pad,
                                          divisor_override)
|
| 101 |
+
|
| 102 |
+
def adaptive_avg_pool2d(input: Tensor, output_size: BroadcastingList2[int]) -> Tensor:
    r"""Apply a 2D adaptive average pooling over a quantized input signal
    composed of several quantized input planes.

    .. note:: The input quantization parameters propagate to the output.

    See :class:`~torch.ao.nn.quantized.AdaptiveAvgPool2d` for details and
    output shape.

    Args:
        output_size: the target output size (single integer or
                     double-integer tuple)
    """
    if input.is_quantized:
        # Quantized tensors are handled by the regular functional op.
        return torch.nn.functional.adaptive_avg_pool2d(input, output_size)
    raise ValueError("Input to 'quantized.functional.adaptive_avg_pool2d' must be quantized!")
|
| 118 |
+
|
| 119 |
+
def adaptive_avg_pool3d(input: Tensor, output_size: BroadcastingList2[int]) -> Tensor:
    r"""Apply a 3D adaptive average pooling over a quantized input signal
    composed of several quantized input planes.

    .. note:: The input quantization parameters propagate to the output.

    See :class:`~torch.ao.nn.quantized.AdaptiveAvgPool3d` for details and
    output shape.

    Args:
        output_size: the target output size (single integer or
                     double-integer tuple)
    """
    # NOTE(review): the annotation uses BroadcastingList2 even for the 3-d op;
    # this matches the original signature and is kept for TorchScript
    # compatibility — confirm upstream before changing.
    if input.is_quantized:
        return torch.nn.functional.adaptive_avg_pool3d(input, output_size)
    raise ValueError(
        "Input to 'quantized.functional.adaptive_avg_pool3d' must be quantized!")
|
| 136 |
+
|
| 137 |
+
def conv1d(input, weight, bias,
           stride=1, padding=0, dilation=1, groups=1,
           padding_mode='zeros',
           scale=1.0, zero_point=0,
           dtype=torch.quint8):
    r"""Apply a 1D convolution over a quantized 1D input composed of several
    input planes.

    See :class:`~torch.ao.nn.quantized.Conv1d` for details and output shape.

    Args:
        input: quantized input tensor of shape :math:`(\text{minibatch} , \text{in\_channels} , iW)`
        weight: quantized filters of shape :math:`(\text{out\_channels} , \frac{\text{in\_channels}}{\text{groups}} , iW)`
        bias: **non-quantized** bias tensor of shape :math:`(\text{out\_channels})`. The tensor type must be `torch.float`.
        stride: stride of the convolving kernel, a single number or a
            tuple `(sW,)`. Default: 1
        padding: implicit padding on both sides of the input, a single
            number or a tuple `(padW,)`. Default: 0
        dilation: spacing between kernel elements, a single number or a
            tuple `(dW,)`. Default: 1
        groups: split input into groups; :math:`\text{in\_channels}` must be
            divisible by the number of groups. Default: 1
        padding_mode: only "zeros" is supported for quantized convolution
            at the moment. Default: "zeros"
        scale: quantization scale for the output. Default: 1.0
        zero_point: quantization zero_point for the output. Default: 0
        dtype: quantization data type to use. Default: ``torch.quint8``

    Examples::

        >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_QENGINE)
        >>> from torch.ao.nn.quantized import functional as qF
        >>> filters = torch.randn(33, 16, 3, dtype=torch.float)
        >>> inputs = torch.randn(20, 16, 50, dtype=torch.float)
        >>> bias = torch.randn(33, dtype=torch.float)
        >>>
        >>> scale, zero_point = 1.0, 0
        >>> q_filters = torch.quantize_per_tensor(filters, scale, zero_point, torch.qint8)
        >>> q_inputs = torch.quantize_per_tensor(inputs, scale, zero_point, torch.quint8)
        >>> qF.conv1d(q_inputs, q_filters, bias, padding=1, scale=scale, zero_point=zero_point)
    """  # noqa: E501
    # Validation order (and messages) is load-bearing for callers that catch
    # these errors, so it mirrors the other quantized conv wrappers.
    if padding_mode != 'zeros':
        raise NotImplementedError("Only zero-padding is supported!")
    if input.dtype != torch.quint8:
        raise NotImplementedError("Only torch.quint8 is supported for activation tensor!")
    if weight.dtype != torch.qint8:
        raise NotImplementedError("Only torch.qint8 is supported for weight tensor!")
    if input.ndim != 3:
        raise ValueError("Input shape must be `(N, C, L)`!")

    # The 1-d op reuses 2-d plumbing, hence the pair-expansion of the
    # single spatial dimension.
    stride, padding, dilation = (
        _pair_from_first(stride),
        _pair_from_first(padding),
        _pair_from_first(dilation),
    )
    prepacked = torch.ops.quantized.conv1d_prepack(
        weight, bias, stride, padding, dilation, groups)
    return torch.ops.quantized.conv1d(input, prepacked, scale, zero_point)
|
| 196 |
+
|
| 197 |
+
def conv2d(input, weight, bias,
           stride=1, padding=0, dilation=1, groups=1,
           padding_mode='zeros',
           scale=1.0, zero_point=0,
           dtype=torch.quint8):
    r"""Apply a 2D convolution over a quantized 2D input composed of several
    input planes.

    See :class:`~torch.ao.nn.quantized.Conv2d` for details and output shape.

    Args:
        input: quantized input tensor of shape :math:`(\text{minibatch} , \text{in\_channels} , iH , iW)`
        weight: quantized filters of shape :math:`(\text{out\_channels} , \frac{\text{in\_channels}}{\text{groups}} , kH , kW)`
        bias: **non-quantized** bias tensor of shape :math:`(\text{out\_channels})`. The tensor type must be `torch.float`.
        stride: stride of the convolving kernel, a single number or a
            tuple `(sH, sW)`. Default: 1
        padding: implicit padding on both sides of the input, a single
            number or a tuple `(padH, padW)`. Default: 0
        dilation: spacing between kernel elements, a single number or a
            tuple `(dH, dW)`. Default: 1
        groups: split input into groups; :math:`\text{in\_channels}` must be
            divisible by the number of groups. Default: 1
        padding_mode: only "zeros" is supported for quantized convolution
            at the moment. Default: "zeros"
        scale: quantization scale for the output. Default: 1.0
        zero_point: quantization zero_point for the output. Default: 0
        dtype: quantization data type to use. Default: ``torch.quint8``

    Examples::

        >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_QENGINE)
        >>> from torch.ao.nn.quantized import functional as qF
        >>> filters = torch.randn(8, 4, 3, 3, dtype=torch.float)
        >>> inputs = torch.randn(1, 4, 5, 5, dtype=torch.float)
        >>> bias = torch.randn(8, dtype=torch.float)
        >>>
        >>> scale, zero_point = 1.0, 0
        >>> q_filters = torch.quantize_per_tensor(filters, scale, zero_point, torch.qint8)
        >>> q_inputs = torch.quantize_per_tensor(inputs, scale, zero_point, torch.quint8)
        >>> qF.conv2d(q_inputs, q_filters, bias, padding=1, scale=scale, zero_point=zero_point)
    """  # noqa: E501
    # Validation order (and messages) mirrors the other quantized conv
    # wrappers so callers can rely on consistent errors.
    if padding_mode != 'zeros':
        raise NotImplementedError("Only zero-padding is supported!")
    if input.dtype != torch.quint8:
        raise NotImplementedError("Only torch.quint8 is supported for activation tensor!")
    if weight.dtype != torch.qint8:
        raise NotImplementedError("Only torch.qint8 is supported for weight tensor!")
    if input.ndim != 4:
        raise ValueError("Input shape must be `(N, C, H, W)`!")

    stride, padding, dilation = _pair(stride), _pair(padding), _pair(dilation)
    prepacked = torch.ops.quantized.conv2d_prepack(
        weight, bias, stride, padding, dilation, groups)
    return torch.ops.quantized.conv2d(input, prepacked, scale, zero_point)
|
| 256 |
+
|
| 257 |
+
def conv3d(input, weight, bias, stride=1, padding=0, dilation=1, groups=1,
           padding_mode='zeros', scale=1.0, zero_point=0, dtype=torch.quint8):
    r"""Apply a 3D convolution over a quantized 3D input composed of several
    input planes.

    See :class:`~torch.ao.nn.quantized.Conv3d` for details and output shape.

    Args:
        input: quantized input tensor of shape
            :math:`(\text{minibatch} , \text{in\_channels} , iD , iH , iW)`
        weight: quantized filters of shape
            :math:`(\text{out\_channels} , \frac{\text{in\_channels}}{\text{groups}} , kD , kH , kW)`
        bias: **non-quantized** bias tensor of shape
            :math:`(\text{out\_channels})`. The tensor type must be `torch.float`.
        stride: stride of the convolving kernel, a single number or a
            tuple `(sD, sH, sW)`. Default: 1
        padding: implicit padding on both sides of the input, a single
            number or a tuple `(padD, padH, padW)`. Default: 0
        dilation: spacing between kernel elements, a single number or a
            tuple `(dD, dH, dW)`. Default: 1
        groups: split input into groups; :math:`\text{in\_channels}` must be
            divisible by the number of groups. Default: 1
        padding_mode: only "zeros" is supported for quantized convolution
            at the moment. Default: "zeros"
        scale: quantization scale for the output. Default: 1.0
        zero_point: quantization zero_point for the output. Default: 0
        dtype: quantization data type to use. Default: ``torch.quint8``

    Examples::

        >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_QENGINE)
        >>> from torch.ao.nn.quantized import functional as qF
        >>> filters = torch.randn(8, 4, 3, 3, 3, dtype=torch.float)
        >>> inputs = torch.randn(1, 4, 5, 5, 5, dtype=torch.float)
        >>> bias = torch.randn(8, dtype=torch.float)
        >>>
        >>> scale, zero_point = 1.0, 0
        >>> q_filters = torch.quantize_per_tensor(filters, scale, zero_point, torch.qint8)
        >>> q_inputs = torch.quantize_per_tensor(inputs, scale, zero_point, torch.quint8)
        >>> qF.conv3d(q_inputs, q_filters, bias, padding=1, scale=scale, zero_point=zero_point)
    """  # noqa: E501
    # Validation order (and messages) mirrors the other quantized conv
    # wrappers so callers can rely on consistent errors.
    if padding_mode != 'zeros':
        raise NotImplementedError("Only zero-padding is supported!")
    if input.dtype != torch.quint8:
        raise NotImplementedError("Only torch.quint8 is supported for activation tensor!")
    if weight.dtype != torch.qint8:
        raise NotImplementedError("Only torch.qint8 is supported for weight tensor!")
    if input.ndim != 5:
        raise ValueError("Input shape must be `(N, C, D, H, W)`!")

    stride, padding, dilation = (
        _triple(stride), _triple(padding), _triple(dilation))
    prepacked = torch.ops.quantized.conv3d_prepack(
        weight, bias, stride, padding, dilation, groups)
    return torch.ops.quantized.conv3d(input, prepacked, scale, zero_point)
|
| 317 |
+
|
| 318 |
+
def interpolate(input, size=None, scale_factor=None, mode='nearest', align_corners=None):
    r"""Down/up samples a quantized input to the given :attr:`size` or by the
    given :attr:`scale_factor`.

    Thin wrapper around :func:`torch.nn.functional.interpolate` that first
    verifies the input is quantized.

    The input dimensions are interpreted in the form:
    `mini-batch x channels x [optional depth] x [optional height] x width`.

    .. note:: The input quantization parameters propagate to the output.

    .. note:: Only 2D/3D input is supported for quantized inputs

    .. note:: Only the following modes are supported for the quantized inputs:

        - `bilinear`
        - `nearest`

    Args:
        input (Tensor): the input tensor
        size (int or Tuple[int] or Tuple[int, int] or Tuple[int, int, int]):
            output spatial size.
        scale_factor (float or Tuple[float]): multiplier for spatial size. Has to match input size if it is a tuple.
        mode (str): algorithm used for upsampling:
            ``'nearest'`` | ``'bilinear'``
        align_corners (bool, optional): if ``True``, the input and output
            tensors are aligned by the center points of their corner pixels;
            if ``False``, by the corner points of their corner pixels. Only
            has an effect when :attr:`mode` is ``'bilinear'``.
            Default: ``False``

    Raises:
        ValueError: if :attr:`input` is not a quantized tensor.
    """
    if input.is_quantized:
        return torch.nn.functional.interpolate(
            input, size, scale_factor, mode, align_corners)
    raise ValueError("Input to 'quantized.interpolate' must be quantized!")
|
| 358 |
+
|
| 359 |
+
def linear(
    input: Tensor, weight: Tensor, bias: Optional[Tensor] = None,
    scale: Optional[float] = None, zero_point: Optional[int] = None
) -> Tensor:
    r"""
    Applies a linear transformation to the incoming quantized data:
    :math:`y = xA^T + b`.
    See :class:`~torch.ao.nn.quantized.Linear`

    .. note::

        Current implementation packs weights on every call, which has penalty on performance.
        If you want to avoid the overhead, use :class:`~torch.ao.nn.quantized.Linear`.

    Args:
        input (Tensor): Quantized input of type `torch.quint8`
        weight (Tensor): Quantized weight of type `torch.qint8`
        bias (Tensor): None or fp32 bias of type `torch.float`
        scale (double): output scale. If None, derived from the input scale
        zero_point (long): output zero point. If None, derived from the input zero_point

    Shape:
        - Input: :math:`(N, *, in\_features)` where `*` means any number of
          additional dimensions
        - Weight: :math:`(out\_features, in\_features)`
        - Bias: :math:`(out\_features)`
        - Output: :math:`(N, *, out\_features)`
    """
    # Default the output qparams to the input's qparams when not given.
    out_scale = input.q_scale() if scale is None else scale
    out_zero_point = input.q_zero_point() if zero_point is None else zero_point
    # Pack on every call (see the performance note in the docstring).
    packed = torch.ops.quantized.linear_prepack(weight, bias)
    return torch.ops.quantized.linear(input, packed, out_scale, out_zero_point)
|
| 393 |
+
|
| 394 |
+
def max_pool1d(input, kernel_size, stride=None, padding=0, dilation=1,
               ceil_mode=False, return_indices=False):
    r"""Applies a 1D max pooling over a quantized input signal composed of
    several quantized input planes.

    .. note:: The input quantization parameters are propagated to the output.

    See :class:`~torch.ao.nn.quantized.MaxPool1d` for details.

    Raises:
        NotImplementedError: if ``return_indices`` is ``True``.
    """
    if return_indices:
        raise NotImplementedError("return_indices is not yet implemented!")
    # An empty (JIT-annotated) stride list makes max_pool1d default the
    # stride to kernel_size, matching torch.nn.functional.max_pool1d.
    actual_stride = torch.jit.annotate(List[int], []) if stride is None else stride
    return torch.nn.functional.max_pool1d(
        input, kernel_size, actual_stride, padding, dilation,
        ceil_mode=ceil_mode, return_indices=return_indices)
|
| 409 |
+
|
| 410 |
+
def max_pool2d(input, kernel_size, stride=None, padding=0, dilation=1,
               ceil_mode=False, return_indices=False):
    r"""Applies a 2D max pooling over a quantized input signal composed of
    several quantized input planes.

    .. note:: The input quantization parameters are propagated to the output.

    See :class:`~torch.ao.nn.quantized.MaxPool2d` for details.

    Raises:
        NotImplementedError: if ``return_indices`` is ``True``.
    """
    if return_indices:
        raise NotImplementedError("return_indices is not yet implemented!")
    # An empty (JIT-annotated) stride list makes max_pool2d default the
    # stride to kernel_size, matching torch.nn.functional.max_pool2d.
    actual_stride = torch.jit.annotate(List[int], []) if stride is None else stride
    return torch.nn.functional.max_pool2d(
        input, kernel_size, actual_stride, padding, dilation,
        ceil_mode=ceil_mode, return_indices=return_indices)
|
| 425 |
+
|
| 426 |
+
def celu(input: Tensor, scale: float, zero_point: int, alpha: float = 1.) -> Tensor:
    r"""celu(input, scale, zero_point, alpha=1.) -> Tensor

    Applies the quantized CELU function element-wise.

    .. math::
        \text{CELU}(x) = \max(0,x) + \min(0, \alpha * (\exp(x / \alpha) - 1))

    Args:
        input: quantized input
        scale: quantization scale of the output tensor
        zero_point: quantization zero point of the output tensor
        alpha: the :math:`\alpha` value for the CELU formulation. Default: 1.0

    Raises:
        ValueError: if :attr:`input` is not quantized.
    """
    if input.is_quantized:
        return torch.ops.quantized.celu(input, scale, zero_point, alpha)
    raise ValueError("Input to 'quantized.celu' must be quantized!")
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
def leaky_relu(input: Tensor, negative_slope: float = 0.01, inplace: bool = False,
               scale: Optional[float] = None, zero_point: Optional[int] = None):
    r"""
    Quantized version of
    leaky_relu(input, negative_slope=0.01, inplace=False, scale, zero_point) -> Tensor

    Applies element-wise,
    :math:`\text{LeakyReLU}(x) = \max(0, x) + \text{negative\_slope} * \min(0, x)`

    Args:
        input: Quantized input
        negative_slope: The slope of the negative input
        inplace: Inplace modification of the input tensor
        scale, zero_point: Scale and zero point of the output tensor.
            When both are given, the result is written into a fresh
            quantized tensor with those qparams (incompatible with
            ``inplace``); otherwise the input's qparams propagate.

    See :class:`~torch.nn.LeakyReLU` for more details.
    """
    rescale = scale is not None and zero_point is not None
    if rescale:
        assert not inplace, "Cannot rescale with `inplace`"
        out = torch._empty_affine_quantized(
            input.shape, scale=scale, zero_point=int(zero_point), dtype=input.dtype)
        torch._C._nn.leaky_relu(input, negative_slope, out=out)
        return out
    op = torch._C._nn.leaky_relu_ if inplace else torch._C._nn.leaky_relu
    return op(input, negative_slope)
|
| 471 |
+
|
| 472 |
+
def hardtanh(input: Tensor, min_val: float = -1., max_val: float = 1., inplace: bool = False) -> Tensor:
    r"""This is the quantized version of :func:`~torch.nn.functional.hardtanh`.

    Raises:
        ValueError: if :attr:`input` is not quantized.
    """
    if not input.is_quantized:
        raise ValueError("Input to 'quantized.hardtanh' must be quantized!")
    op = torch._C._nn.hardtanh_ if inplace else torch._C._nn.hardtanh
    return op(input, min_val, max_val)
|
| 480 |
+
|
| 481 |
+
def hardswish(input: Tensor, scale: float, zero_point: int) -> Tensor:
    r"""This is the quantized version of :func:`~torch.nn.functional.hardswish`.

    Args:
        input: quantized input
        scale: quantization scale of the output tensor
        zero_point: quantization zero point of the output tensor

    Raises:
        ValueError: if :attr:`input` is not quantized.
    """
    if input.is_quantized:
        return torch._ops.ops.quantized.hardswish(input, scale, zero_point)
    raise ValueError("Input to 'quantized.hardswish' must be quantized!")
|
| 492 |
+
|
| 493 |
+
def threshold(input: Tensor, threshold: float, value: float) -> Tensor:
    r"""Applies the quantized version of the threshold function element-wise:

    .. math::
        x = \begin{cases}
                x & \text{if~} x > \text{threshold} \\
                \text{value} & \text{otherwise}
            \end{cases}

    See :class:`~torch.nn.Threshold` for more details.

    Args:
        input: quantized input tensor
        threshold: the value to threshold at
        value: the replacement for elements below the threshold

    Raises:
        ValueError: if the input is not quantized, or if ``threshold`` or
            ``value`` is ``None``.
    """
    if not input.is_quantized:
        raise ValueError("Input to 'quantized.threshold' must be quantized!")
    # Both scalar arguments are mandatory; reject None explicitly.
    for arg, name in ((threshold, 'threshold'), (value, 'value')):
        if arg is None:
            raise ValueError("Input to '%s' must be specified!" % name)
    return torch._ops.ops.quantized.threshold(input, threshold, value)
|
| 511 |
+
|
| 512 |
+
def elu(input: Tensor, scale: float, zero_point: int, alpha: float = 1.) -> Tensor:
    r"""This is the quantized version of :func:`~torch.nn.functional.elu`.

    Args:
        input: quantized input
        scale: quantization scale of the output tensor
        zero_point: quantization zero point of the output tensor
        alpha: the alpha constant

    Raises:
        ValueError: if :attr:`input` is not quantized.
    """
    if input.is_quantized:
        return torch.ops.quantized.elu(input, scale, zero_point, alpha)
    raise ValueError("Input to 'quantized.elu' must be quantized!")
|
| 524 |
+
|
| 525 |
+
def hardsigmoid(input: Tensor, inplace: bool = False) -> Tensor:
    r"""This is the quantized version of :func:`~torch.nn.functional.hardsigmoid`.

    Raises:
        ValueError: if :attr:`input` is not quantized.
    """
    if not input.is_quantized:
        raise ValueError("Input to 'quantized.hardsigmoid' must be quantized!")
    fn = torch._C._nn.hardsigmoid_ if inplace else torch._C._nn.hardsigmoid  # type: ignore[attr-defined]
    return fn(input)
|
| 533 |
+
|
| 534 |
+
def clamp(input: Tensor, min_: float, max_: float) -> Tensor:
    r"""float(input, min\_, max\_) -> Tensor

    Applies the clamp function element-wise.
    See :class:`~torch.ao.nn.quantized.clamp` for more details.

    Args:
        input: quantized input
        min_: minimum value for clamping
        max_: maximum value for clamping

    Raises:
        ValueError: if :attr:`input` is not quantized.
    """
    if input.is_quantized:
        # Tensor.clamp is equivalent to torch.clamp(input, ...).
        return input.clamp(min_, max_)
    raise ValueError("Input to 'quantized.clamp' must be quantized!")
|
| 548 |
+
|
| 549 |
+
def upsample(input, size=None, scale_factor=None, mode='nearest', align_corners=None):
    r"""Upsamples the input to either the given :attr:`size` or the given
    :attr:`scale_factor`.

    .. warning::
        This function is deprecated in favor of
        :func:`torch.ao.nn.quantized.functional.interpolate`.
        This is equivalent with ``nn.quantized.functional.interpolate(...)``.

    See :func:`torch.nn.functional.interpolate` for implementation details.

    The input dimensions are interpreted in the form:
    `mini-batch x channels x [optional depth] x [optional height] x width`.

    .. note:: The input quantization parameters propagate to the output.

    .. note:: Only 2D input is supported for quantized inputs

    .. note:: Only the following modes are supported for the quantized inputs:

        - `bilinear`
        - `nearest`

    Args:
        input (Tensor): quantized input tensor
        size (int or Tuple[int] or Tuple[int, int] or Tuple[int, int, int]):
            output spatial size.
        scale_factor (float or Tuple[float]): multiplier for spatial size. Has to be an integer.
        mode (str): algorithm used for upsampling:
            ``'nearest'`` | ``'bilinear'``
        align_corners (bool, optional): corner-alignment behavior; see
            :func:`torch.nn.functional.interpolate`. Only has an effect when
            :attr:`mode` is ``'bilinear'``. Default: ``False``

    .. warning::
        With ``align_corners = True``, the linearly interpolating modes
        (`bilinear`) don't proportionally align the output and input pixels,
        and thus the output values can depend on the input size. This was the
        default behavior for these modes up to version 0.3.1. Since then, the
        default behavior is ``align_corners = False``. See
        :class:`~torch.nn.Upsample` for concrete examples on how this affects
        the outputs.
    """
    # warnings.warn defaults to UserWarning, which (unlike DeprecationWarning)
    # is shown by default.
    warnings.warn("nn.quantized.functional.upsample is deprecated. Use nn.quantized.functional.interpolate instead.")
    return interpolate(input, size, scale_factor, mode, align_corners)
|
| 601 |
+
|
| 602 |
+
def upsample_bilinear(input, size=None, scale_factor=None):
    r"""Upsamples the input, using bilinear upsampling.

    .. warning::
        This function is deprecated in favor of
        :func:`torch.ao.nn.quantized.functional.interpolate`.
        This is equivalent with
        ``nn.quantized.functional.interpolate(..., mode='bilinear', align_corners=True)``.

    .. note:: The input quantization parameters propagate to the output.

    .. note:: Only 2D inputs are supported

    Args:
        input (Tensor): quantized input
        size (int or Tuple[int, int]): output spatial size.
        scale_factor (int or Tuple[int, int]): multiplier for spatial size
    """
    # Emit a UserWarning: DeprecationWarning is ignored by default.
    warnings.warn("nn.quantized.functional.upsample_bilinear is deprecated. Use nn.quantized.functional.interpolate instead.")
    return interpolate(input, size, scale_factor, mode='bilinear', align_corners=True)
|
| 623 |
+
|
| 624 |
+
def upsample_nearest(input, size=None, scale_factor=None):
    r"""Upsamples the input, using nearest neighbours' pixel values.

    .. warning::
        This function is deprecated in favor of
        :func:`torch.ao.nn.quantized.functional.interpolate`.
        This is equivalent with ``nn.quantized.functional.interpolate(..., mode='nearest')``.

    .. note:: The input quantization parameters propagate to the output.

    .. note:: Only 2D inputs are supported

    Args:
        input (Tensor): quantized input
        size (int or Tuple[int, int] or Tuple[int, int, int]): output spatial
            size.
        scale_factor (int): multiplier for spatial size. Has to be an integer.
    """
    # Emit a UserWarning: DeprecationWarning is ignored by default.
    warnings.warn("nn.quantized.functional.upsample_nearest is deprecated. Use nn.quantized.functional.interpolate instead.")
    return interpolate(input, size, scale_factor, mode='nearest')
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__init__.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
|
| 3 |
+
# The quantized modules use `torch.nn` and `torch.ao.nn.quantizable`
|
| 4 |
+
# packages. However, the `quantizable` package uses "lazy imports"
|
| 5 |
+
# to avoid circular dependency.
|
| 6 |
+
# Hence we need to include it here to make sure it is resolved before
|
| 7 |
+
# they are used in the modules.
|
| 8 |
+
import torch.ao.nn.quantizable
|
| 9 |
+
|
| 10 |
+
from torch.nn.modules.pooling import MaxPool2d
|
| 11 |
+
|
| 12 |
+
from .activation import ReLU6, Hardswish, ELU, LeakyReLU, Sigmoid, Softmax, MultiheadAttention, PReLU
|
| 13 |
+
from .dropout import Dropout
|
| 14 |
+
from .batchnorm import BatchNorm2d, BatchNorm3d
|
| 15 |
+
from .normalization import LayerNorm, GroupNorm, InstanceNorm1d, \
|
| 16 |
+
InstanceNorm2d, InstanceNorm3d
|
| 17 |
+
from .conv import Conv1d, Conv2d, Conv3d
|
| 18 |
+
from .conv import ConvTranspose1d, ConvTranspose2d, ConvTranspose3d
|
| 19 |
+
from .linear import Linear
|
| 20 |
+
from .embedding_ops import Embedding, EmbeddingBag
|
| 21 |
+
from .rnn import LSTM
|
| 22 |
+
|
| 23 |
+
from .functional_modules import FloatFunctional, FXFloatFunctional, QFunctional
|
| 24 |
+
|
| 25 |
+
# Public API of this module.  'Quantize' and 'DeQuantize' are defined below;
# the remaining names are re-exported from the submodule imports above.
__all__ = [
    'BatchNorm2d',
    'BatchNorm3d',
    'Conv1d',
    'Conv2d',
    'Conv3d',
    'ConvTranspose1d',
    'ConvTranspose2d',
    'ConvTranspose3d',
    'DeQuantize',
    'ELU',
    'Embedding',
    'EmbeddingBag',
    'GroupNorm',
    'Hardswish',
    'InstanceNorm1d',
    'InstanceNorm2d',
    'InstanceNorm3d',
    'LayerNorm',
    'LeakyReLU',
    'Linear',
    'LSTM',
    'MultiheadAttention',
    'Quantize',
    'ReLU6',
    'Sigmoid',
    'Softmax',
    'Dropout',
    'PReLU',
    # Wrapper modules
    'FloatFunctional',
    'FXFloatFunctional',
    'QFunctional',
]
|
| 59 |
+
|
| 60 |
+
class Quantize(torch.nn.Module):
    r"""Quantizes an incoming tensor

    Args:
        `scale`: scale of the output Quantized Tensor
        `zero_point`: zero_point of output Quantized Tensor
        `dtype`: data type of output Quantized Tensor
        `factory_kwargs`: Dictionary of kwargs used for configuring initialization
            of internal buffers. Currently, `device` and `dtype` are supported.
            Example: `factory_kwargs={'device': 'cuda', 'dtype': torch.float64}`
            will initialize internal buffers as type `torch.float64` on the current CUDA device.
            Note that `dtype` only applies to floating-point buffers.

    Examples::
        >>> t = torch.tensor([[1., -1.], [1., -1.]])
        >>> scale, zero_point, dtype = 1.0, 2, torch.qint8
        >>> qm = Quantize(scale, zero_point, dtype)
        >>> # xdoctest: +SKIP
        >>> qt = qm(t)
        >>> print(qt)
        tensor([[ 1., -1.],
                [ 1., -1.]], size=(2, 2), dtype=torch.qint8, scale=1.0, zero_point=2)
    """

    # Quantization parameters are held as single-element buffers so they
    # travel with the module's state_dict.
    scale: torch.Tensor
    zero_point: torch.Tensor

    def __init__(self, scale, zero_point, dtype, factory_kwargs=None):
        factory_kwargs = torch.nn.factory_kwargs(factory_kwargs)
        super().__init__()
        self.register_buffer('scale', torch.tensor([scale], **factory_kwargs))
        # zero_point is integral; strip any floating-point 'dtype' entry from
        # factory_kwargs and force torch.long instead.
        zp_kwargs = {k: v for k, v in factory_kwargs.items() if k != 'dtype'}
        self.register_buffer(
            'zero_point', torch.tensor([zero_point], dtype=torch.long, **zp_kwargs))
        self.dtype = dtype

    def forward(self, X):
        # Quantize the incoming float tensor with this module's qparams.
        return torch.quantize_per_tensor(
            X, float(self.scale), int(self.zero_point), self.dtype)

    @staticmethod
    def from_float(mod):
        # Build a Quantize module from an observed float module's qparams.
        assert hasattr(mod, 'activation_post_process')
        observer = mod.activation_post_process
        scale, zero_point = observer.calculate_qparams()
        return Quantize(scale.float().item(), zero_point.long().item(), observer.dtype)

    def extra_repr(self):
        return f'scale={self.scale}, zero_point={self.zero_point}, dtype={self.dtype}'
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class DeQuantize(torch.nn.Module):
    r"""Dequantizes an incoming tensor

    Examples::
        >>> input = torch.tensor([[1., -1.], [1., -1.]])
        >>> scale, zero_point, dtype = 1.0, 2, torch.qint8
        >>> qm = Quantize(scale, zero_point, dtype)
        >>> # xdoctest: +SKIP
        >>> quantized_input = qm(input)
        >>> dqm = DeQuantize()
        >>> dequantized = dqm(quantized_input)
        >>> print(dequantized)
        tensor([[ 1., -1.],
                [ 1., -1.]], dtype=torch.float32)
    """

    def forward(self, Xq):
        # Delegate straight to the tensor's own dequantize method; this
        # module is stateless.
        return Xq.dequantize()

    @staticmethod
    def from_float(mod):
        # Stateless, so conversion ignores the source module entirely.
        return DeQuantize()
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (4.69 kB). View file
|
|
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/activation.cpython-310.pyc
ADDED
|
Binary file (11.3 kB). View file
|
|
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/batchnorm.cpython-310.pyc
ADDED
|
Binary file (3.95 kB). View file
|
|
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/conv.cpython-310.pyc
ADDED
|
Binary file (31.2 kB). View file
|
|
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/dropout.cpython-310.pyc
ADDED
|
Binary file (1.34 kB). View file
|
|
|
deepseekvl2/lib/python3.10/site-packages/torch/ao/nn/quantized/modules/__pycache__/embedding_ops.cpython-310.pyc
ADDED
|
Binary file (11.1 kB). View file
|
|
|