Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- lib/python3.13/site-packages/PIL/BufrStubImagePlugin.py +75 -0
- lib/python3.13/site-packages/PIL/EpsImagePlugin.py +479 -0
- lib/python3.13/site-packages/PIL/GifImagePlugin.py +1215 -0
- lib/python3.13/site-packages/PIL/ImageMode.py +85 -0
- lib/python3.13/site-packages/PIL/ImagePalette.py +286 -0
- lib/python3.13/site-packages/PIL/ImageSequence.py +88 -0
- lib/python3.13/site-packages/PIL/ImageShow.py +362 -0
- lib/python3.13/site-packages/PIL/JpegPresets.py +242 -0
- lib/python3.13/site-packages/PIL/MspImagePlugin.py +200 -0
- lib/python3.13/site-packages/PIL/PcfFontFile.py +258 -0
- lib/python3.13/site-packages/PIL/PpmImagePlugin.py +375 -0
- lib/python3.13/site-packages/PIL/TarIO.py +61 -0
- lib/python3.13/site-packages/PIL/TgaImagePlugin.py +264 -0
- lib/python3.13/site-packages/PIL/TiffImagePlugin.py +2338 -0
- lib/python3.13/site-packages/PIL/WmfImagePlugin.py +186 -0
- lib/python3.13/site-packages/PIL/_imagingft.pyi +70 -0
- lib/python3.13/site-packages/PIL/_imagingmorph.pyi +3 -0
- lib/python3.13/site-packages/PIL/_imagingtk.cpython-313-x86_64-linux-gnu.so +0 -0
- lib/python3.13/site-packages/PIL/_tkinter_finder.py +20 -0
- lib/python3.13/site-packages/PIL/_typing.py +45 -0
- lib/python3.13/site-packages/PIL/py.typed +0 -0
- lib/python3.13/site-packages/blake3/__init__.py +5 -0
- lib/python3.13/site-packages/blake3/__init__.pyi +31 -0
- lib/python3.13/site-packages/blake3/py.typed +0 -0
- lib/python3.13/site-packages/cbor2/__init__.py +83 -0
- lib/python3.13/site-packages/cbor2/_decoder.py +869 -0
- lib/python3.13/site-packages/cbor2/_encoder.py +838 -0
- lib/python3.13/site-packages/cbor2/_types.py +231 -0
- lib/python3.13/site-packages/cbor2/encoder.py +10 -0
- lib/python3.13/site-packages/cbor2/py.typed +0 -0
- lib/python3.13/site-packages/cbor2/types.py +15 -0
- lib/python3.13/site-packages/click-8.3.1.dist-info/INSTALLER +1 -0
- lib/python3.13/site-packages/click-8.3.1.dist-info/RECORD +24 -0
- lib/python3.13/site-packages/dill-0.4.0.dist-info/INSTALLER +1 -0
- lib/python3.13/site-packages/dill-0.4.0.dist-info/LICENSE +35 -0
- lib/python3.13/site-packages/dill-0.4.0.dist-info/METADATA +281 -0
- lib/python3.13/site-packages/dill-0.4.0.dist-info/RECORD +56 -0
- lib/python3.13/site-packages/dill-0.4.0.dist-info/REQUESTED +0 -0
- lib/python3.13/site-packages/dill-0.4.0.dist-info/WHEEL +5 -0
- lib/python3.13/site-packages/dill-0.4.0.dist-info/top_level.txt +1 -0
- lib/python3.13/site-packages/dns/__init__.py +72 -0
- lib/python3.13/site-packages/dns/_asyncbackend.py +100 -0
- lib/python3.13/site-packages/dns/_asyncio_backend.py +276 -0
- lib/python3.13/site-packages/dns/_ddr.py +154 -0
- lib/python3.13/site-packages/dns/_features.py +95 -0
- lib/python3.13/site-packages/dns/_immutable_ctx.py +76 -0
- lib/python3.13/site-packages/dns/_no_ssl.py +61 -0
- lib/python3.13/site-packages/dns/_tls_util.py +19 -0
- lib/python3.13/site-packages/dns/_trio_backend.py +255 -0
- lib/python3.13/site-packages/dns/asyncbackend.py +101 -0
lib/python3.13/site-packages/PIL/BufrStubImagePlugin.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# BUFR stub adapter
|
| 6 |
+
#
|
| 7 |
+
# Copyright (c) 1996-2003 by Fredrik Lundh
|
| 8 |
+
#
|
| 9 |
+
# See the README file for information on usage and redistribution.
|
| 10 |
+
#
|
| 11 |
+
from __future__ import annotations
|
| 12 |
+
|
| 13 |
+
import os
|
| 14 |
+
from typing import IO
|
| 15 |
+
|
| 16 |
+
from . import Image, ImageFile
|
| 17 |
+
|
| 18 |
+
_handler = None
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def register_handler(handler: ImageFile.StubHandler | None) -> None:
|
| 22 |
+
"""
|
| 23 |
+
Install application-specific BUFR image handler.
|
| 24 |
+
|
| 25 |
+
:param handler: Handler object.
|
| 26 |
+
"""
|
| 27 |
+
global _handler
|
| 28 |
+
_handler = handler
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# --------------------------------------------------------------------
|
| 32 |
+
# Image adapter
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def _accept(prefix: bytes) -> bool:
|
| 36 |
+
return prefix.startswith((b"BUFR", b"ZCZC"))
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class BufrStubImageFile(ImageFile.StubImageFile):
|
| 40 |
+
format = "BUFR"
|
| 41 |
+
format_description = "BUFR"
|
| 42 |
+
|
| 43 |
+
def _open(self) -> None:
|
| 44 |
+
if not _accept(self.fp.read(4)):
|
| 45 |
+
msg = "Not a BUFR file"
|
| 46 |
+
raise SyntaxError(msg)
|
| 47 |
+
|
| 48 |
+
self.fp.seek(-4, os.SEEK_CUR)
|
| 49 |
+
|
| 50 |
+
# make something up
|
| 51 |
+
self._mode = "F"
|
| 52 |
+
self._size = 1, 1
|
| 53 |
+
|
| 54 |
+
loader = self._load()
|
| 55 |
+
if loader:
|
| 56 |
+
loader.open(self)
|
| 57 |
+
|
| 58 |
+
def _load(self) -> ImageFile.StubHandler | None:
|
| 59 |
+
return _handler
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
|
| 63 |
+
if _handler is None or not hasattr(_handler, "save"):
|
| 64 |
+
msg = "BUFR save handler not installed"
|
| 65 |
+
raise OSError(msg)
|
| 66 |
+
_handler.save(im, fp, filename)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
# --------------------------------------------------------------------
|
| 70 |
+
# Registry
|
| 71 |
+
|
| 72 |
+
Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept)
|
| 73 |
+
Image.register_save(BufrStubImageFile.format, _save)
|
| 74 |
+
|
| 75 |
+
Image.register_extension(BufrStubImageFile.format, ".bufr")
|
lib/python3.13/site-packages/PIL/EpsImagePlugin.py
ADDED
|
@@ -0,0 +1,479 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# EPS file handling
|
| 6 |
+
#
|
| 7 |
+
# History:
|
| 8 |
+
# 1995-09-01 fl Created (0.1)
|
| 9 |
+
# 1996-05-18 fl Don't choke on "atend" fields, Ghostscript interface (0.2)
|
| 10 |
+
# 1996-08-22 fl Don't choke on floating point BoundingBox values
|
| 11 |
+
# 1996-08-23 fl Handle files from Macintosh (0.3)
|
| 12 |
+
# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4)
|
| 13 |
+
# 2003-09-07 fl Check gs.close status (from Federico Di Gregorio) (0.5)
|
| 14 |
+
# 2014-05-07 e Handling of EPS with binary preview and fixed resolution
|
| 15 |
+
# resizing
|
| 16 |
+
#
|
| 17 |
+
# Copyright (c) 1997-2003 by Secret Labs AB.
|
| 18 |
+
# Copyright (c) 1995-2003 by Fredrik Lundh
|
| 19 |
+
#
|
| 20 |
+
# See the README file for information on usage and redistribution.
|
| 21 |
+
#
|
| 22 |
+
from __future__ import annotations
|
| 23 |
+
|
| 24 |
+
import io
|
| 25 |
+
import os
|
| 26 |
+
import re
|
| 27 |
+
import subprocess
|
| 28 |
+
import sys
|
| 29 |
+
import tempfile
|
| 30 |
+
from typing import IO
|
| 31 |
+
|
| 32 |
+
from . import Image, ImageFile
|
| 33 |
+
from ._binary import i32le as i32
|
| 34 |
+
|
| 35 |
+
# --------------------------------------------------------------------
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$")
|
| 39 |
+
field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$")
|
| 40 |
+
|
| 41 |
+
gs_binary: str | bool | None = None
|
| 42 |
+
gs_windows_binary = None
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def has_ghostscript() -> bool:
|
| 46 |
+
global gs_binary, gs_windows_binary
|
| 47 |
+
if gs_binary is None:
|
| 48 |
+
if sys.platform.startswith("win"):
|
| 49 |
+
if gs_windows_binary is None:
|
| 50 |
+
import shutil
|
| 51 |
+
|
| 52 |
+
for binary in ("gswin32c", "gswin64c", "gs"):
|
| 53 |
+
if shutil.which(binary) is not None:
|
| 54 |
+
gs_windows_binary = binary
|
| 55 |
+
break
|
| 56 |
+
else:
|
| 57 |
+
gs_windows_binary = False
|
| 58 |
+
gs_binary = gs_windows_binary
|
| 59 |
+
else:
|
| 60 |
+
try:
|
| 61 |
+
subprocess.check_call(["gs", "--version"], stdout=subprocess.DEVNULL)
|
| 62 |
+
gs_binary = "gs"
|
| 63 |
+
except OSError:
|
| 64 |
+
gs_binary = False
|
| 65 |
+
return gs_binary is not False
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def Ghostscript(
|
| 69 |
+
tile: list[ImageFile._Tile],
|
| 70 |
+
size: tuple[int, int],
|
| 71 |
+
fp: IO[bytes],
|
| 72 |
+
scale: int = 1,
|
| 73 |
+
transparency: bool = False,
|
| 74 |
+
) -> Image.core.ImagingCore:
|
| 75 |
+
"""Render an image using Ghostscript"""
|
| 76 |
+
global gs_binary
|
| 77 |
+
if not has_ghostscript():
|
| 78 |
+
msg = "Unable to locate Ghostscript on paths"
|
| 79 |
+
raise OSError(msg)
|
| 80 |
+
assert isinstance(gs_binary, str)
|
| 81 |
+
|
| 82 |
+
# Unpack decoder tile
|
| 83 |
+
args = tile[0].args
|
| 84 |
+
assert isinstance(args, tuple)
|
| 85 |
+
length, bbox = args
|
| 86 |
+
|
| 87 |
+
# Hack to support hi-res rendering
|
| 88 |
+
scale = int(scale) or 1
|
| 89 |
+
width = size[0] * scale
|
| 90 |
+
height = size[1] * scale
|
| 91 |
+
# resolution is dependent on bbox and size
|
| 92 |
+
res_x = 72.0 * width / (bbox[2] - bbox[0])
|
| 93 |
+
res_y = 72.0 * height / (bbox[3] - bbox[1])
|
| 94 |
+
|
| 95 |
+
out_fd, outfile = tempfile.mkstemp()
|
| 96 |
+
os.close(out_fd)
|
| 97 |
+
|
| 98 |
+
infile_temp = None
|
| 99 |
+
if hasattr(fp, "name") and os.path.exists(fp.name):
|
| 100 |
+
infile = fp.name
|
| 101 |
+
else:
|
| 102 |
+
in_fd, infile_temp = tempfile.mkstemp()
|
| 103 |
+
os.close(in_fd)
|
| 104 |
+
infile = infile_temp
|
| 105 |
+
|
| 106 |
+
# Ignore length and offset!
|
| 107 |
+
# Ghostscript can read it
|
| 108 |
+
# Copy whole file to read in Ghostscript
|
| 109 |
+
with open(infile_temp, "wb") as f:
|
| 110 |
+
# fetch length of fp
|
| 111 |
+
fp.seek(0, io.SEEK_END)
|
| 112 |
+
fsize = fp.tell()
|
| 113 |
+
# ensure start position
|
| 114 |
+
# go back
|
| 115 |
+
fp.seek(0)
|
| 116 |
+
lengthfile = fsize
|
| 117 |
+
while lengthfile > 0:
|
| 118 |
+
s = fp.read(min(lengthfile, 100 * 1024))
|
| 119 |
+
if not s:
|
| 120 |
+
break
|
| 121 |
+
lengthfile -= len(s)
|
| 122 |
+
f.write(s)
|
| 123 |
+
|
| 124 |
+
if transparency:
|
| 125 |
+
# "RGBA"
|
| 126 |
+
device = "pngalpha"
|
| 127 |
+
else:
|
| 128 |
+
# "pnmraw" automatically chooses between
|
| 129 |
+
# PBM ("1"), PGM ("L"), and PPM ("RGB").
|
| 130 |
+
device = "pnmraw"
|
| 131 |
+
|
| 132 |
+
# Build Ghostscript command
|
| 133 |
+
command = [
|
| 134 |
+
gs_binary,
|
| 135 |
+
"-q", # quiet mode
|
| 136 |
+
f"-g{width:d}x{height:d}", # set output geometry (pixels)
|
| 137 |
+
f"-r{res_x:f}x{res_y:f}", # set input DPI (dots per inch)
|
| 138 |
+
"-dBATCH", # exit after processing
|
| 139 |
+
"-dNOPAUSE", # don't pause between pages
|
| 140 |
+
"-dSAFER", # safe mode
|
| 141 |
+
f"-sDEVICE={device}",
|
| 142 |
+
f"-sOutputFile={outfile}", # output file
|
| 143 |
+
# adjust for image origin
|
| 144 |
+
"-c",
|
| 145 |
+
f"{-bbox[0]} {-bbox[1]} translate",
|
| 146 |
+
"-f",
|
| 147 |
+
infile, # input file
|
| 148 |
+
# showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272)
|
| 149 |
+
"-c",
|
| 150 |
+
"showpage",
|
| 151 |
+
]
|
| 152 |
+
|
| 153 |
+
# push data through Ghostscript
|
| 154 |
+
try:
|
| 155 |
+
startupinfo = None
|
| 156 |
+
if sys.platform.startswith("win"):
|
| 157 |
+
startupinfo = subprocess.STARTUPINFO()
|
| 158 |
+
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
|
| 159 |
+
subprocess.check_call(command, startupinfo=startupinfo)
|
| 160 |
+
with Image.open(outfile) as out_im:
|
| 161 |
+
out_im.load()
|
| 162 |
+
return out_im.im.copy()
|
| 163 |
+
finally:
|
| 164 |
+
try:
|
| 165 |
+
os.unlink(outfile)
|
| 166 |
+
if infile_temp:
|
| 167 |
+
os.unlink(infile_temp)
|
| 168 |
+
except OSError:
|
| 169 |
+
pass
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def _accept(prefix: bytes) -> bool:
|
| 173 |
+
return prefix.startswith(b"%!PS") or (
|
| 174 |
+
len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5
|
| 175 |
+
)
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
##
|
| 179 |
+
# Image plugin for Encapsulated PostScript. This plugin supports only
|
| 180 |
+
# a few variants of this format.
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
class EpsImageFile(ImageFile.ImageFile):
|
| 184 |
+
"""EPS File Parser for the Python Imaging Library"""
|
| 185 |
+
|
| 186 |
+
format = "EPS"
|
| 187 |
+
format_description = "Encapsulated Postscript"
|
| 188 |
+
|
| 189 |
+
mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"}
|
| 190 |
+
|
| 191 |
+
def _open(self) -> None:
|
| 192 |
+
(length, offset) = self._find_offset(self.fp)
|
| 193 |
+
|
| 194 |
+
# go to offset - start of "%!PS"
|
| 195 |
+
self.fp.seek(offset)
|
| 196 |
+
|
| 197 |
+
self._mode = "RGB"
|
| 198 |
+
|
| 199 |
+
# When reading header comments, the first comment is used.
|
| 200 |
+
# When reading trailer comments, the last comment is used.
|
| 201 |
+
bounding_box: list[int] | None = None
|
| 202 |
+
imagedata_size: tuple[int, int] | None = None
|
| 203 |
+
|
| 204 |
+
byte_arr = bytearray(255)
|
| 205 |
+
bytes_mv = memoryview(byte_arr)
|
| 206 |
+
bytes_read = 0
|
| 207 |
+
reading_header_comments = True
|
| 208 |
+
reading_trailer_comments = False
|
| 209 |
+
trailer_reached = False
|
| 210 |
+
|
| 211 |
+
def check_required_header_comments() -> None:
|
| 212 |
+
"""
|
| 213 |
+
The EPS specification requires that some headers exist.
|
| 214 |
+
This should be checked when the header comments formally end,
|
| 215 |
+
when image data starts, or when the file ends, whichever comes first.
|
| 216 |
+
"""
|
| 217 |
+
if "PS-Adobe" not in self.info:
|
| 218 |
+
msg = 'EPS header missing "%!PS-Adobe" comment'
|
| 219 |
+
raise SyntaxError(msg)
|
| 220 |
+
if "BoundingBox" not in self.info:
|
| 221 |
+
msg = 'EPS header missing "%%BoundingBox" comment'
|
| 222 |
+
raise SyntaxError(msg)
|
| 223 |
+
|
| 224 |
+
def read_comment(s: str) -> bool:
|
| 225 |
+
nonlocal bounding_box, reading_trailer_comments
|
| 226 |
+
try:
|
| 227 |
+
m = split.match(s)
|
| 228 |
+
except re.error as e:
|
| 229 |
+
msg = "not an EPS file"
|
| 230 |
+
raise SyntaxError(msg) from e
|
| 231 |
+
|
| 232 |
+
if not m:
|
| 233 |
+
return False
|
| 234 |
+
|
| 235 |
+
k, v = m.group(1, 2)
|
| 236 |
+
self.info[k] = v
|
| 237 |
+
if k == "BoundingBox":
|
| 238 |
+
if v == "(atend)":
|
| 239 |
+
reading_trailer_comments = True
|
| 240 |
+
elif not bounding_box or (trailer_reached and reading_trailer_comments):
|
| 241 |
+
try:
|
| 242 |
+
# Note: The DSC spec says that BoundingBox
|
| 243 |
+
# fields should be integers, but some drivers
|
| 244 |
+
# put floating point values there anyway.
|
| 245 |
+
bounding_box = [int(float(i)) for i in v.split()]
|
| 246 |
+
except Exception:
|
| 247 |
+
pass
|
| 248 |
+
return True
|
| 249 |
+
|
| 250 |
+
while True:
|
| 251 |
+
byte = self.fp.read(1)
|
| 252 |
+
if byte == b"":
|
| 253 |
+
# if we didn't read a byte we must be at the end of the file
|
| 254 |
+
if bytes_read == 0:
|
| 255 |
+
if reading_header_comments:
|
| 256 |
+
check_required_header_comments()
|
| 257 |
+
break
|
| 258 |
+
elif byte in b"\r\n":
|
| 259 |
+
# if we read a line ending character, ignore it and parse what
|
| 260 |
+
# we have already read. if we haven't read any other characters,
|
| 261 |
+
# continue reading
|
| 262 |
+
if bytes_read == 0:
|
| 263 |
+
continue
|
| 264 |
+
else:
|
| 265 |
+
# ASCII/hexadecimal lines in an EPS file must not exceed
|
| 266 |
+
# 255 characters, not including line ending characters
|
| 267 |
+
if bytes_read >= 255:
|
| 268 |
+
# only enforce this for lines starting with a "%",
|
| 269 |
+
# otherwise assume it's binary data
|
| 270 |
+
if byte_arr[0] == ord("%"):
|
| 271 |
+
msg = "not an EPS file"
|
| 272 |
+
raise SyntaxError(msg)
|
| 273 |
+
else:
|
| 274 |
+
if reading_header_comments:
|
| 275 |
+
check_required_header_comments()
|
| 276 |
+
reading_header_comments = False
|
| 277 |
+
# reset bytes_read so we can keep reading
|
| 278 |
+
# data until the end of the line
|
| 279 |
+
bytes_read = 0
|
| 280 |
+
byte_arr[bytes_read] = byte[0]
|
| 281 |
+
bytes_read += 1
|
| 282 |
+
continue
|
| 283 |
+
|
| 284 |
+
if reading_header_comments:
|
| 285 |
+
# Load EPS header
|
| 286 |
+
|
| 287 |
+
# if this line doesn't start with a "%",
|
| 288 |
+
# or does start with "%%EndComments",
|
| 289 |
+
# then we've reached the end of the header/comments
|
| 290 |
+
if byte_arr[0] != ord("%") or bytes_mv[:13] == b"%%EndComments":
|
| 291 |
+
check_required_header_comments()
|
| 292 |
+
reading_header_comments = False
|
| 293 |
+
continue
|
| 294 |
+
|
| 295 |
+
s = str(bytes_mv[:bytes_read], "latin-1")
|
| 296 |
+
if not read_comment(s):
|
| 297 |
+
m = field.match(s)
|
| 298 |
+
if m:
|
| 299 |
+
k = m.group(1)
|
| 300 |
+
if k.startswith("PS-Adobe"):
|
| 301 |
+
self.info["PS-Adobe"] = k[9:]
|
| 302 |
+
else:
|
| 303 |
+
self.info[k] = ""
|
| 304 |
+
elif s[0] == "%":
|
| 305 |
+
# handle non-DSC PostScript comments that some
|
| 306 |
+
# tools mistakenly put in the Comments section
|
| 307 |
+
pass
|
| 308 |
+
else:
|
| 309 |
+
msg = "bad EPS header"
|
| 310 |
+
raise OSError(msg)
|
| 311 |
+
elif bytes_mv[:11] == b"%ImageData:":
|
| 312 |
+
# Check for an "ImageData" descriptor
|
| 313 |
+
# https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577413_pgfId-1035096
|
| 314 |
+
|
| 315 |
+
# If we've already read an "ImageData" descriptor,
|
| 316 |
+
# don't read another one.
|
| 317 |
+
if imagedata_size:
|
| 318 |
+
bytes_read = 0
|
| 319 |
+
continue
|
| 320 |
+
|
| 321 |
+
# Values:
|
| 322 |
+
# columns
|
| 323 |
+
# rows
|
| 324 |
+
# bit depth (1 or 8)
|
| 325 |
+
# mode (1: L, 2: LAB, 3: RGB, 4: CMYK)
|
| 326 |
+
# number of padding channels
|
| 327 |
+
# block size (number of bytes per row per channel)
|
| 328 |
+
# binary/ascii (1: binary, 2: ascii)
|
| 329 |
+
# data start identifier (the image data follows after a single line
|
| 330 |
+
# consisting only of this quoted value)
|
| 331 |
+
image_data_values = byte_arr[11:bytes_read].split(None, 7)
|
| 332 |
+
columns, rows, bit_depth, mode_id = (
|
| 333 |
+
int(value) for value in image_data_values[:4]
|
| 334 |
+
)
|
| 335 |
+
|
| 336 |
+
if bit_depth == 1:
|
| 337 |
+
self._mode = "1"
|
| 338 |
+
elif bit_depth == 8:
|
| 339 |
+
try:
|
| 340 |
+
self._mode = self.mode_map[mode_id]
|
| 341 |
+
except ValueError:
|
| 342 |
+
break
|
| 343 |
+
else:
|
| 344 |
+
break
|
| 345 |
+
|
| 346 |
+
# Parse the columns and rows after checking the bit depth and mode
|
| 347 |
+
# in case the bit depth and/or mode are invalid.
|
| 348 |
+
imagedata_size = columns, rows
|
| 349 |
+
elif bytes_mv[:5] == b"%%EOF":
|
| 350 |
+
break
|
| 351 |
+
elif trailer_reached and reading_trailer_comments:
|
| 352 |
+
# Load EPS trailer
|
| 353 |
+
s = str(bytes_mv[:bytes_read], "latin-1")
|
| 354 |
+
read_comment(s)
|
| 355 |
+
elif bytes_mv[:9] == b"%%Trailer":
|
| 356 |
+
trailer_reached = True
|
| 357 |
+
elif bytes_mv[:14] == b"%%BeginBinary:":
|
| 358 |
+
bytecount = int(byte_arr[14:bytes_read])
|
| 359 |
+
self.fp.seek(bytecount, os.SEEK_CUR)
|
| 360 |
+
bytes_read = 0
|
| 361 |
+
|
| 362 |
+
# A "BoundingBox" is always required,
|
| 363 |
+
# even if an "ImageData" descriptor size exists.
|
| 364 |
+
if not bounding_box:
|
| 365 |
+
msg = "cannot determine EPS bounding box"
|
| 366 |
+
raise OSError(msg)
|
| 367 |
+
|
| 368 |
+
# An "ImageData" size takes precedence over the "BoundingBox".
|
| 369 |
+
self._size = imagedata_size or (
|
| 370 |
+
bounding_box[2] - bounding_box[0],
|
| 371 |
+
bounding_box[3] - bounding_box[1],
|
| 372 |
+
)
|
| 373 |
+
|
| 374 |
+
self.tile = [
|
| 375 |
+
ImageFile._Tile("eps", (0, 0) + self.size, offset, (length, bounding_box))
|
| 376 |
+
]
|
| 377 |
+
|
| 378 |
+
def _find_offset(self, fp: IO[bytes]) -> tuple[int, int]:
|
| 379 |
+
s = fp.read(4)
|
| 380 |
+
|
| 381 |
+
if s == b"%!PS":
|
| 382 |
+
# for HEAD without binary preview
|
| 383 |
+
fp.seek(0, io.SEEK_END)
|
| 384 |
+
length = fp.tell()
|
| 385 |
+
offset = 0
|
| 386 |
+
elif i32(s) == 0xC6D3D0C5:
|
| 387 |
+
# FIX for: Some EPS file not handled correctly / issue #302
|
| 388 |
+
# EPS can contain binary data
|
| 389 |
+
# or start directly with latin coding
|
| 390 |
+
# more info see:
|
| 391 |
+
# https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf
|
| 392 |
+
s = fp.read(8)
|
| 393 |
+
offset = i32(s)
|
| 394 |
+
length = i32(s, 4)
|
| 395 |
+
else:
|
| 396 |
+
msg = "not an EPS file"
|
| 397 |
+
raise SyntaxError(msg)
|
| 398 |
+
|
| 399 |
+
return length, offset
|
| 400 |
+
|
| 401 |
+
def load(
|
| 402 |
+
self, scale: int = 1, transparency: bool = False
|
| 403 |
+
) -> Image.core.PixelAccess | None:
|
| 404 |
+
# Load EPS via Ghostscript
|
| 405 |
+
if self.tile:
|
| 406 |
+
self.im = Ghostscript(self.tile, self.size, self.fp, scale, transparency)
|
| 407 |
+
self._mode = self.im.mode
|
| 408 |
+
self._size = self.im.size
|
| 409 |
+
self.tile = []
|
| 410 |
+
return Image.Image.load(self)
|
| 411 |
+
|
| 412 |
+
def load_seek(self, pos: int) -> None:
|
| 413 |
+
# we can't incrementally load, so force ImageFile.parser to
|
| 414 |
+
# use our custom load method by defining this method.
|
| 415 |
+
pass
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
# --------------------------------------------------------------------
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes, eps: int = 1) -> None:
|
| 422 |
+
"""EPS Writer for the Python Imaging Library."""
|
| 423 |
+
|
| 424 |
+
# make sure image data is available
|
| 425 |
+
im.load()
|
| 426 |
+
|
| 427 |
+
# determine PostScript image mode
|
| 428 |
+
if im.mode == "L":
|
| 429 |
+
operator = (8, 1, b"image")
|
| 430 |
+
elif im.mode == "RGB":
|
| 431 |
+
operator = (8, 3, b"false 3 colorimage")
|
| 432 |
+
elif im.mode == "CMYK":
|
| 433 |
+
operator = (8, 4, b"false 4 colorimage")
|
| 434 |
+
else:
|
| 435 |
+
msg = "image mode is not supported"
|
| 436 |
+
raise ValueError(msg)
|
| 437 |
+
|
| 438 |
+
if eps:
|
| 439 |
+
# write EPS header
|
| 440 |
+
fp.write(b"%!PS-Adobe-3.0 EPSF-3.0\n")
|
| 441 |
+
fp.write(b"%%Creator: PIL 0.1 EpsEncode\n")
|
| 442 |
+
# fp.write("%%CreationDate: %s"...)
|
| 443 |
+
fp.write(b"%%%%BoundingBox: 0 0 %d %d\n" % im.size)
|
| 444 |
+
fp.write(b"%%Pages: 1\n")
|
| 445 |
+
fp.write(b"%%EndComments\n")
|
| 446 |
+
fp.write(b"%%Page: 1 1\n")
|
| 447 |
+
fp.write(b"%%ImageData: %d %d " % im.size)
|
| 448 |
+
fp.write(b'%d %d 0 1 1 "%s"\n' % operator)
|
| 449 |
+
|
| 450 |
+
# image header
|
| 451 |
+
fp.write(b"gsave\n")
|
| 452 |
+
fp.write(b"10 dict begin\n")
|
| 453 |
+
fp.write(b"/buf %d string def\n" % (im.size[0] * operator[1]))
|
| 454 |
+
fp.write(b"%d %d scale\n" % im.size)
|
| 455 |
+
fp.write(b"%d %d 8\n" % im.size) # <= bits
|
| 456 |
+
fp.write(b"[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
|
| 457 |
+
fp.write(b"{ currentfile buf readhexstring pop } bind\n")
|
| 458 |
+
fp.write(operator[2] + b"\n")
|
| 459 |
+
if hasattr(fp, "flush"):
|
| 460 |
+
fp.flush()
|
| 461 |
+
|
| 462 |
+
ImageFile._save(im, fp, [ImageFile._Tile("eps", (0, 0) + im.size)])
|
| 463 |
+
|
| 464 |
+
fp.write(b"\n%%%%EndBinary\n")
|
| 465 |
+
fp.write(b"grestore end\n")
|
| 466 |
+
if hasattr(fp, "flush"):
|
| 467 |
+
fp.flush()
|
| 468 |
+
|
| 469 |
+
|
| 470 |
+
# --------------------------------------------------------------------
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
Image.register_open(EpsImageFile.format, EpsImageFile, _accept)
|
| 474 |
+
|
| 475 |
+
Image.register_save(EpsImageFile.format, _save)
|
| 476 |
+
|
| 477 |
+
Image.register_extensions(EpsImageFile.format, [".ps", ".eps"])
|
| 478 |
+
|
| 479 |
+
Image.register_mime(EpsImageFile.format, "application/postscript")
|
lib/python3.13/site-packages/PIL/GifImagePlugin.py
ADDED
|
@@ -0,0 +1,1215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# GIF file handling
|
| 6 |
+
#
|
| 7 |
+
# History:
|
| 8 |
+
# 1995-09-01 fl Created
|
| 9 |
+
# 1996-12-14 fl Added interlace support
|
| 10 |
+
# 1996-12-30 fl Added animation support
|
| 11 |
+
# 1997-01-05 fl Added write support, fixed local colour map bug
|
| 12 |
+
# 1997-02-23 fl Make sure to load raster data in getdata()
|
| 13 |
+
# 1997-07-05 fl Support external decoder (0.4)
|
| 14 |
+
# 1998-07-09 fl Handle all modes when saving (0.5)
|
| 15 |
+
# 1998-07-15 fl Renamed offset attribute to avoid name clash
|
| 16 |
+
# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6)
|
| 17 |
+
# 2001-04-17 fl Added palette optimization (0.7)
|
| 18 |
+
# 2002-06-06 fl Added transparency support for save (0.8)
|
| 19 |
+
# 2004-02-24 fl Disable interlacing for small images
|
| 20 |
+
#
|
| 21 |
+
# Copyright (c) 1997-2004 by Secret Labs AB
|
| 22 |
+
# Copyright (c) 1995-2004 by Fredrik Lundh
|
| 23 |
+
#
|
| 24 |
+
# See the README file for information on usage and redistribution.
|
| 25 |
+
#
|
| 26 |
+
from __future__ import annotations
|
| 27 |
+
|
| 28 |
+
import itertools
|
| 29 |
+
import math
|
| 30 |
+
import os
|
| 31 |
+
import subprocess
|
| 32 |
+
from enum import IntEnum
|
| 33 |
+
from functools import cached_property
|
| 34 |
+
from typing import Any, NamedTuple, cast
|
| 35 |
+
|
| 36 |
+
from . import (
|
| 37 |
+
Image,
|
| 38 |
+
ImageChops,
|
| 39 |
+
ImageFile,
|
| 40 |
+
ImageMath,
|
| 41 |
+
ImageOps,
|
| 42 |
+
ImagePalette,
|
| 43 |
+
ImageSequence,
|
| 44 |
+
)
|
| 45 |
+
from ._binary import i16le as i16
|
| 46 |
+
from ._binary import o8
|
| 47 |
+
from ._binary import o16le as o16
|
| 48 |
+
from ._util import DeferredError
|
| 49 |
+
|
| 50 |
+
TYPE_CHECKING = False
|
| 51 |
+
if TYPE_CHECKING:
|
| 52 |
+
from typing import IO, Literal
|
| 53 |
+
|
| 54 |
+
from . import _imaging
|
| 55 |
+
from ._typing import Buffer
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class LoadingStrategy(IntEnum):
    """Controls how palette ("P" mode) GIF frames are converted on load.

    .. versionadded:: 9.1.0
    """

    # Convert frames after the first one to RGB/RGBA.
    RGB_AFTER_FIRST = 0
    # Convert only when a later frame carries a different local palette.
    RGB_AFTER_DIFFERENT_PALETTE_ONLY = 1
    # Convert every frame (including the first) to RGB/RGBA.
    RGB_ALWAYS = 2


#: Module-level default strategy; callers may reassign it.
#: .. versionadded:: 9.1.0
LOADING_STRATEGY = LoadingStrategy.RGB_AFTER_FIRST
|
| 68 |
+
|
| 69 |
+
# --------------------------------------------------------------------
|
| 70 |
+
# Identify/read GIF files
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def _accept(prefix: bytes) -> bool:
|
| 74 |
+
return prefix.startswith((b"GIF87a", b"GIF89a"))
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
##
|
| 78 |
+
# Image plugin for GIF images. This plugin supports both GIF87 and
|
| 79 |
+
# GIF89 images.
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class GifImageFile(ImageFile.ImageFile):
|
| 83 |
+
format = "GIF"
|
| 84 |
+
format_description = "Compuserve GIF"
|
| 85 |
+
_close_exclusive_fp_after_loading = False
|
| 86 |
+
|
| 87 |
+
global_palette = None
|
| 88 |
+
|
| 89 |
+
def data(self) -> bytes | None:
|
| 90 |
+
s = self.fp.read(1)
|
| 91 |
+
if s and s[0]:
|
| 92 |
+
return self.fp.read(s[0])
|
| 93 |
+
return None
|
| 94 |
+
|
| 95 |
+
def _is_palette_needed(self, p: bytes) -> bool:
|
| 96 |
+
for i in range(0, len(p), 3):
|
| 97 |
+
if not (i // 3 == p[i] == p[i + 1] == p[i + 2]):
|
| 98 |
+
return True
|
| 99 |
+
return False
|
| 100 |
+
|
| 101 |
+
def _open(self) -> None:
    """Parse the GIF signature and logical screen descriptor.

    Reads the optional global color table, records the rewind position,
    and seeks to frame 0.  Raises SyntaxError if the stream is not a GIF.
    """
    # Screen: 6-byte signature + 7-byte logical screen descriptor
    s = self.fp.read(13)
    if not _accept(s):
        msg = "not a GIF file"
        raise SyntaxError(msg)

    self.info["version"] = s[:6]  # b"GIF87a" or b"GIF89a"
    self._size = i16(s, 6), i16(s, 8)
    flags = s[10]
    bits = (flags & 7) + 1  # size exponent of the global color table

    if flags & 128:
        # get global palette
        self.info["background"] = s[11]
        # check if palette contains colour indices
        p = self.fp.read(3 << bits)  # 2**bits RGB triples
        if self._is_palette_needed(p):
            p = ImagePalette.raw("RGB", p)
            self.global_palette = self.palette = p

    self._fp = self.fp  # FIXME: hack
    self.__rewind = self.fp.tell()  # start of the first frame's data
    self._n_frames: int | None = None  # counted lazily by n_frames
    self._seek(0)  # get ready to read first frame
|
| 126 |
+
|
| 127 |
+
@property
def n_frames(self) -> int:
    """Total number of frames, counted on first access by scanning
    (without decoding) to the end of the file, then restoring the
    caller's frame position."""
    if self._n_frames is None:
        current = self.tell()
        try:
            # Advance headers only (update_image=False) until EOF.
            while True:
                self._seek(self.tell() + 1, False)
        except EOFError:
            self._n_frames = self.tell() + 1
        self.seek(current)  # restore previous position
    return self._n_frames
|
| 138 |
+
|
| 139 |
+
@cached_property
def is_animated(self) -> bool:
    """Whether this GIF contains more than one frame."""
    # If the frame count is already known, answer from it directly.
    if self._n_frames is not None:
        return self._n_frames != 1

    current = self.tell()
    if current:
        # Already positioned past frame 0, so there must be several.
        return True

    try:
        self._seek(1, False)
    except EOFError:
        animated = False
    else:
        animated = True

    self.seek(current)  # restore position after probing
    return animated
|
| 156 |
+
|
| 157 |
+
def seek(self, frame: int) -> None:
    """Seek to *frame*, decoding every intermediate frame.

    GIF frames are deltas, so seeking backwards restarts from frame 0.
    Raises EOFError (after restoring the previous position) when *frame*
    is past the end of the file.
    """
    if not self._seek_check(frame):
        return
    if frame < self.__frame:
        # Forward-only format: discard state and replay from the start.
        self._im = None
        self._seek(0)

    last_frame = self.__frame
    for f in range(self.__frame + 1, frame + 1):
        try:
            self._seek(f)
        except EOFError as e:
            self.seek(last_frame)  # leave the file at a valid frame
            msg = "no more images in GIF file"
            raise EOFError(msg) from e
|
| 172 |
+
|
| 173 |
+
def _seek(self, frame: int, update_image: bool = True) -> None:
    """Advance the stream to *frame* and parse its headers/extensions.

    When *update_image* is true, also prepares decoding state (tile,
    dispose buffer, palette, mode).  Only supports frame 0 (rewind) or
    the frame immediately after the current one; raises ValueError
    otherwise and EOFError past the last frame.
    """
    if isinstance(self._fp, DeferredError):
        raise self._fp.ex
    if frame == 0:
        # rewind
        self.__offset = 0
        self.dispose: _imaging.ImagingCore | None = None
        self.__frame = -1
        self._fp.seek(self.__rewind)
        self.disposal_method = 0
        if "comment" in self.info:
            del self.info["comment"]
    else:
        # ensure that the previous frame was loaded
        if self.tile and update_image:
            self.load()

        if frame != self.__frame + 1:
            msg = f"cannot seek to frame {frame}"
            raise ValueError(msg)

    self.fp = self._fp
    if self.__offset:
        # backup to last frame; skip its remaining data sub-blocks
        self.fp.seek(self.__offset)
        while self.data():
            pass
        self.__offset = 0

    s = self.fp.read(1)
    if not s or s == b";":  # b";" is the GIF trailer
        msg = "no more images in GIF file"
        raise EOFError(msg)

    palette: ImagePalette.ImagePalette | Literal[False] | None = None

    info: dict[str, Any] = {}
    frame_transparency = None
    interlace = None
    frame_dispose_extent = None
    # Walk blocks until the next image descriptor (b",") or trailer.
    while True:
        if not s:
            s = self.fp.read(1)
        if not s or s == b";":
            break

        elif s == b"!":
            #
            # extensions
            #
            s = self.fp.read(1)
            block = self.data()
            if s[0] == 249 and block is not None:
                #
                # graphic control extension
                #
                flags = block[0]
                if flags & 1:
                    frame_transparency = block[3]
                info["duration"] = i16(block, 1) * 10

                # disposal method - find the value of bits 4 - 6
                dispose_bits = 0b00011100 & flags
                dispose_bits = dispose_bits >> 2
                if dispose_bits:
                    # only set the dispose if it is not
                    # unspecified. I'm not sure if this is
                    # correct, but it seems to prevent the last
                    # frame from looking odd for some animations
                    self.disposal_method = dispose_bits
            elif s[0] == 254:
                #
                # comment extension
                #
                comment = b""

                # Read this comment block
                while block:
                    comment += block
                    block = self.data()

                if "comment" in info:
                    # If multiple comment blocks in frame, separate with \n
                    info["comment"] += b"\n" + comment
                else:
                    info["comment"] = comment
                s = None
                continue
            elif s[0] == 255 and frame == 0 and block is not None:
                #
                # application extension
                #
                info["extension"] = block, self.fp.tell()
                if block.startswith(b"NETSCAPE2.0"):
                    block = self.data()
                    if block and len(block) >= 3 and block[0] == 1:
                        self.info["loop"] = i16(block, 1)
            # skip any remaining sub-blocks of this extension
            while self.data():
                pass

        elif s == b",":
            #
            # local image
            #
            s = self.fp.read(9)

            # extent
            x0, y0 = i16(s, 0), i16(s, 2)
            x1, y1 = x0 + i16(s, 4), y0 + i16(s, 6)
            if (x1 > self.size[0] or y1 > self.size[1]) and update_image:
                # Frame extends past the declared screen: grow the canvas.
                self._size = max(x1, self.size[0]), max(y1, self.size[1])
                Image._decompression_bomb_check(self._size)
            frame_dispose_extent = x0, y0, x1, y1
            flags = s[8]

            interlace = (flags & 64) != 0

            if flags & 128:
                # local color table
                bits = (flags & 7) + 1
                p = self.fp.read(3 << bits)
                if self._is_palette_needed(p):
                    palette = ImagePalette.raw("RGB", p)
                else:
                    palette = False

            # image data
            bits = self.fp.read(1)[0]
            self.__offset = self.fp.tell()
            break
        s = None

    if interlace is None:
        # No image descriptor was found before EOF/trailer.
        msg = "image not found in GIF frame"
        raise EOFError(msg)

    self.__frame = frame
    if not update_image:
        return

    self.tile = []

    if self.dispose:
        # Apply the previous frame's disposal before decoding this one.
        self.im.paste(self.dispose, self.dispose_extent)

    self._frame_palette = palette if palette is not None else self.global_palette
    self._frame_transparency = frame_transparency
    if frame == 0:
        if self._frame_palette:
            if LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS:
                self._mode = "RGBA" if frame_transparency is not None else "RGB"
            else:
                self._mode = "P"
        else:
            self._mode = "L"

        if palette:
            self.palette = palette
        elif self.global_palette:
            from copy import copy

            self.palette = copy(self.global_palette)
        else:
            self.palette = None
    else:
        if self.mode == "P":
            if (
                LOADING_STRATEGY != LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY
                or palette
            ):
                if "transparency" in self.info:
                    self.im.putpalettealpha(self.info["transparency"], 0)
                    self.im = self.im.convert("RGBA", Image.Dither.FLOYDSTEINBERG)
                    self._mode = "RGBA"
                    del self.info["transparency"]
                else:
                    self._mode = "RGB"
                    self.im = self.im.convert("RGB", Image.Dither.FLOYDSTEINBERG)

    def _rgb(color: int) -> tuple[int, int, int]:
        # Look up a palette index as an RGB triple; out-of-range indices
        # fall back to entry 0, and with no palette the index is grey.
        if self._frame_palette:
            if color * 3 + 3 > len(self._frame_palette.palette):
                color = 0
            return cast(
                tuple[int, int, int],
                tuple(self._frame_palette.palette[color * 3 : color * 3 + 3]),
            )
        else:
            return (color, color, color)

    self.dispose = None
    self.dispose_extent: tuple[int, int, int, int] | None = frame_dispose_extent
    if self.dispose_extent and self.disposal_method >= 2:
        try:
            if self.disposal_method == 2:
                # replace with background colour

                # only dispose the extent in this frame
                x0, y0, x1, y1 = self.dispose_extent
                dispose_size = (x1 - x0, y1 - y0)

                Image._decompression_bomb_check(dispose_size)

                # by convention, attempt to use transparency first
                dispose_mode = "P"
                color = self.info.get("transparency", frame_transparency)
                if color is not None:
                    if self.mode in ("RGB", "RGBA"):
                        dispose_mode = "RGBA"
                        color = _rgb(color) + (0,)
                else:
                    color = self.info.get("background", 0)
                    if self.mode in ("RGB", "RGBA"):
                        dispose_mode = "RGB"
                        color = _rgb(color)
                self.dispose = Image.core.fill(dispose_mode, dispose_size, color)
            else:
                # replace with previous contents
                if self._im is not None:
                    # only dispose the extent in this frame
                    self.dispose = self._crop(self.im, self.dispose_extent)
                elif frame_transparency is not None:
                    x0, y0, x1, y1 = self.dispose_extent
                    dispose_size = (x1 - x0, y1 - y0)

                    Image._decompression_bomb_check(dispose_size)
                    dispose_mode = "P"
                    color = frame_transparency
                    if self.mode in ("RGB", "RGBA"):
                        dispose_mode = "RGBA"
                        color = _rgb(frame_transparency) + (0,)
                    self.dispose = Image.core.fill(
                        dispose_mode, dispose_size, color
                    )
        except AttributeError:
            pass

    if interlace is not None:
        transparency = -1
        if frame_transparency is not None:
            if frame == 0:
                if LOADING_STRATEGY != LoadingStrategy.RGB_ALWAYS:
                    self.info["transparency"] = frame_transparency
            elif self.mode not in ("RGB", "RGBA"):
                transparency = frame_transparency
        self.tile = [
            ImageFile._Tile(
                "gif",
                (x0, y0, x1, y1),
                self.__offset,
                (bits, interlace, transparency),
            )
        ]

    # Per-frame metadata replaces (or removes) the previous frame's.
    if info.get("comment"):
        self.info["comment"] = info["comment"]
    for k in ["duration", "extension"]:
        if k in info:
            self.info[k] = info[k]
        elif k in self.info:
            del self.info[k]
|
| 433 |
+
|
| 434 |
+
def load_prepare(self) -> None:
    """Set up the frame buffer before decoding the current frame.

    Frames decode in "P" (or "L") mode; for later frames already shown
    in RGB/RGBA the previous image is stashed in ``_prev_im`` so that
    :meth:`load_end` can composite the delta back onto it.
    """
    temp_mode = "P" if self._frame_palette else "L"
    self._prev_im = None
    if self.__frame == 0:
        if self._frame_transparency is not None:
            # Start from a canvas filled with the transparent index.
            self.im = Image.core.fill(
                temp_mode, self.size, self._frame_transparency
            )
    elif self.mode in ("RGB", "RGBA"):
        self._prev_im = self.im
        if self._frame_palette:
            self.im = Image.core.fill("P", self.size, self._frame_transparency or 0)
            self.im.putpalette("RGB", *self._frame_palette.getdata())
        else:
            self._im = None
    if not self._prev_im and self._im is not None and self.size != self.im.size:
        # Canvas grew (frame extended past the screen): expand in place.
        expanded_im = Image.core.fill(self.im.mode, self.size)
        if self._frame_palette:
            expanded_im.putpalette("RGB", *self._frame_palette.getdata())
        expanded_im.paste(self.im, (0, 0) + self.im.size)

        self.im = expanded_im
    self._mode = temp_mode
    self._frame_palette = None

    super().load_prepare()
|
| 460 |
+
|
| 461 |
+
def load_end(self) -> None:
    """Finish decoding: convert frame 0 if required by the loading
    strategy, or composite a later frame onto the stashed previous
    image (``_prev_im``) within its dispose extent."""
    if self.__frame == 0:
        if self.mode == "P" and LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS:
            if self._frame_transparency is not None:
                self.im.putpalettealpha(self._frame_transparency, 0)
                self._mode = "RGBA"
            else:
                self._mode = "RGB"
            self.im = self.im.convert(self.mode, Image.Dither.FLOYDSTEINBERG)
        return
    if not self._prev_im:
        return
    if self.size != self._prev_im.size:
        # Canvas grew since the previous frame: expand _prev_im first.
        if self._frame_transparency is not None:
            expanded_im = Image.core.fill("RGBA", self.size)
        else:
            expanded_im = Image.core.fill("P", self.size)
            expanded_im.putpalette("RGB", "RGB", self.im.getpalette())
            expanded_im = expanded_im.convert("RGB")
        expanded_im.paste(self._prev_im, (0, 0) + self._prev_im.size)

        self._prev_im = expanded_im
    assert self._prev_im is not None
    # Build the newly-decoded frame with an alpha channel when it has a
    # transparent index, so transparent pixels keep the old contents.
    if self._frame_transparency is not None:
        if self.mode == "L":
            frame_im = self.im.convert_transparent("LA", self._frame_transparency)
        else:
            self.im.putpalettealpha(self._frame_transparency, 0)
            frame_im = self.im.convert("RGBA")
    else:
        frame_im = self.im.convert("RGB")

    assert self.dispose_extent is not None
    frame_im = self._crop(frame_im, self.dispose_extent)

    self.im = self._prev_im
    self._mode = self.im.mode
    if frame_im.mode in ("LA", "RGBA"):
        self.im.paste(frame_im, self.dispose_extent, frame_im)
    else:
        self.im.paste(frame_im, self.dispose_extent)
|
| 502 |
+
|
| 503 |
+
def tell(self) -> int:
    """Return the index of the current frame."""
    current_frame = self.__frame
    return current_frame
|
| 505 |
+
|
| 506 |
+
|
| 507 |
+
# --------------------------------------------------------------------
|
| 508 |
+
# Write GIF files
|
| 509 |
+
|
| 510 |
+
|
| 511 |
+
RAWMODE = {"1": "L", "L": "L", "P": "P"}
|
| 512 |
+
|
| 513 |
+
|
| 514 |
+
def _normalize_mode(im: Image.Image) -> Image.Image:
    """
    Return *im* in a mode suitable for saving as GIF ("1", "L" or "P").

    RGB-based images are quantized to an adaptive palette, recording a
    fully transparent palette entry (if any) in ``info["transparency"]``;
    any other mode is converted to "L".  May return the original image.

    :param im: Image object
    :returns: Image object
    """
    if im.mode in RAWMODE:
        im.load()
        return im
    if Image.getmodebase(im.mode) != "RGB":
        return im.convert("L")

    converted = im.convert("P", palette=Image.Palette.ADAPTIVE)
    assert converted.palette is not None
    if converted.palette.mode == "RGBA":
        # Remember the first fully transparent palette entry.
        for rgba, index in converted.palette.colors.items():
            if rgba[3] == 0:
                converted.info["transparency"] = index
                break
    return converted
|
| 538 |
+
|
| 539 |
+
|
| 540 |
+
_Palette = bytes | bytearray | list[int] | ImagePalette.ImagePalette
|
| 541 |
+
|
| 542 |
+
|
| 543 |
+
def _normalize_palette(
    im: Image.Image, palette: _Palette | None, info: dict[str, Any]
) -> Image.Image:
    """
    Normalizes the palette for image.
      - Sets the palette to the incoming palette, if provided.
      - Ensures that there's a palette for L mode images
      - Optimizes the palette if necessary/desired.

    :param im: Image object
    :param palette: bytes object containing the source palette, or ....
    :param info: encoderinfo
    :returns: Image object
    """
    source_palette = None
    if palette:
        # a bytes palette
        if isinstance(palette, (bytes, bytearray, list)):
            source_palette = bytearray(palette[:768])
        if isinstance(palette, ImagePalette.ImagePalette):
            source_palette = bytearray(palette.palette)

    if im.mode == "P":
        if not source_palette:
            im_palette = im.getpalette(None)
            assert im_palette is not None
            source_palette = bytearray(im_palette)
    else:  # L-mode
        if not source_palette:
            # greyscale identity ramp: entry i is (i, i, i)
            source_palette = bytearray(i // 3 for i in range(768))
        im.palette = ImagePalette.ImagePalette("RGB", palette=source_palette)
    assert source_palette is not None

    if palette:
        # Remap the image's palette onto the caller-supplied one: find,
        # for each source entry, a matching (or free) index in im.
        used_palette_colors: list[int | None] = []
        assert im.palette is not None
        for i in range(0, len(source_palette), 3):
            source_color = tuple(source_palette[i : i + 3])
            index = im.palette.colors.get(source_color)
            if index in used_palette_colors:
                index = None
            used_palette_colors.append(index)
        for i, index in enumerate(used_palette_colors):
            if index is None:
                # assign the first index not already claimed
                for j in range(len(used_palette_colors)):
                    if j not in used_palette_colors:
                        used_palette_colors[i] = j
                        break
        dest_map: list[int] = []
        for index in used_palette_colors:
            assert index is not None
            dest_map.append(index)
        im = im.remap_palette(dest_map)
    else:
        optimized_palette_colors = _get_optimize(im, info)
        if optimized_palette_colors is not None:
            im = im.remap_palette(optimized_palette_colors, source_palette)
            if "transparency" in info:
                # the transparency index moved with the remap
                try:
                    info["transparency"] = optimized_palette_colors.index(
                        info["transparency"]
                    )
                except ValueError:
                    del info["transparency"]
            return im

    assert im.palette is not None
    im.palette.palette = source_palette
    return im
|
| 612 |
+
|
| 613 |
+
|
| 614 |
+
def _write_single_frame(
    im: Image.Image,
    fp: IO[bytes],
    palette: _Palette | None,
) -> None:
    """Write *im* to *fp* as a single-frame GIF body (no trailer).

    Normalizes mode and palette, emits the global header, the local
    image header, the compressed raster data and the block terminator.
    """
    im_out = _normalize_mode(im)
    # propagate string-keyed frame info into the encoder settings
    for k, v in im_out.info.items():
        if isinstance(k, str):
            im.encoderinfo.setdefault(k, v)
    im_out = _normalize_palette(im_out, palette, im.encoderinfo)

    for s in _get_global_header(im_out, im.encoderinfo):
        fp.write(s)

    # local image header
    flags = 0
    if get_interlace(im):
        flags = flags | 64
    _write_local_header(fp, im, (0, 0), flags)

    im_out.encoderconfig = (8, get_interlace(im))
    ImageFile._save(
        im_out, fp, [ImageFile._Tile("gif", (0, 0) + im.size, 0, RAWMODE[im_out.mode])]
    )

    fp.write(b"\0")  # end of image data
|
| 640 |
+
|
| 641 |
+
|
| 642 |
+
def _getbbox(
    base_im: Image.Image, im_frame: Image.Image
) -> tuple[Image.Image, tuple[int, int, int, int] | None]:
    """Return ``(delta, bbox)``: the modulo difference between the two
    frames and the bounding box of the changed region (None if the
    frames are identical)."""

    def _palette_of(image: Image.Image) -> bytes:
        return bytes(image.palette.palette) if image.palette else b""

    if _palette_of(base_im) != _palette_of(im_frame):
        # Different palettes make raw indices incomparable; diff in RGBA.
        im_frame = im_frame.convert("RGBA")
        base_im = base_im.convert("RGBA")
    delta = ImageChops.subtract_modulo(im_frame, base_im)
    return delta, delta.getbbox(alpha_only=False)
|
| 653 |
+
|
| 654 |
+
|
| 655 |
+
class _Frame(NamedTuple):
    # One queued output frame for _write_multiple_frames:
    # im          - the (possibly diffed) frame image
    # bbox        - changed region vs. the previous frame; None means
    #               "first frame / write full image with global header"
    # encoderinfo - per-frame encoder settings
    im: Image.Image
    bbox: tuple[int, int, int, int] | None
    encoderinfo: dict[str, Any]
|
| 659 |
+
|
| 660 |
+
|
| 661 |
+
def _write_multiple_frames(
    im: Image.Image, fp: IO[bytes], palette: _Palette | None
) -> bool:
    """Write *im* plus appended/sequence frames as an animated GIF body.

    Returns False (writing nothing but possibly folding durations into
    ``im.encoderinfo``) when only one distinct frame exists, so the
    caller falls back to the single-frame path.
    """
    duration = im.encoderinfo.get("duration")
    disposal = im.encoderinfo.get("disposal", im.info.get("disposal"))

    im_frames: list[_Frame] = []
    previous_im: Image.Image | None = None
    frame_count = 0
    background_im = None
    for imSequence in itertools.chain([im], im.encoderinfo.get("append_images", [])):
        for im_frame in ImageSequence.Iterator(imSequence):
            # a copy is required here since seek can still mutate the image
            im_frame = _normalize_mode(im_frame.copy())
            if frame_count == 0:
                # first frame's info seeds the shared encoder settings
                for k, v in im_frame.info.items():
                    if k == "transparency":
                        continue
                    if isinstance(k, str):
                        im.encoderinfo.setdefault(k, v)

            encoderinfo = im.encoderinfo.copy()
            if "transparency" in im_frame.info:
                encoderinfo.setdefault("transparency", im_frame.info["transparency"])
            im_frame = _normalize_palette(im_frame, palette, encoderinfo)
            if isinstance(duration, (list, tuple)):
                encoderinfo["duration"] = duration[frame_count]
            elif duration is None and "duration" in im_frame.info:
                encoderinfo["duration"] = im_frame.info["duration"]
            if isinstance(disposal, (list, tuple)):
                encoderinfo["disposal"] = disposal[frame_count]
            frame_count += 1

            diff_frame = None
            if im_frames and previous_im:
                # delta frame
                delta, bbox = _getbbox(previous_im, im_frame)
                if not bbox:
                    # This frame is identical to the previous frame
                    if encoderinfo.get("duration"):
                        im_frames[-1].encoderinfo["duration"] += encoderinfo["duration"]
                    continue
                if im_frames[-1].encoderinfo.get("disposal") == 2:
                    # To appear correctly in viewers using a convention,
                    # only consider transparency, and not background color
                    color = im.encoderinfo.get(
                        "transparency", im.info.get("transparency")
                    )
                    if color is not None:
                        if background_im is None:
                            background = _get_background(im_frame, color)
                            background_im = Image.new("P", im_frame.size, background)
                            first_palette = im_frames[0].im.palette
                            assert first_palette is not None
                            background_im.putpalette(first_palette, first_palette.mode)
                        bbox = _getbbox(background_im, im_frame)[1]
                    else:
                        bbox = (0, 0) + im_frame.size
                elif encoderinfo.get("optimize") and im_frame.mode != "1":
                    if "transparency" not in encoderinfo:
                        # try to reserve a spare palette index for transparency
                        assert im_frame.palette is not None
                        try:
                            encoderinfo["transparency"] = (
                                im_frame.palette._new_color_index(im_frame)
                            )
                        except ValueError:
                            pass
                    if "transparency" in encoderinfo:
                        # When the delta is zero, fill the image with transparency
                        diff_frame = im_frame.copy()
                        fill = Image.new("P", delta.size, encoderinfo["transparency"])
                        if delta.mode == "RGBA":
                            r, g, b, a = delta.split()
                            mask = ImageMath.lambda_eval(
                                lambda args: args["convert"](
                                    args["max"](
                                        args["max"](
                                            args["max"](args["r"], args["g"]), args["b"]
                                        ),
                                        args["a"],
                                    )
                                    * 255,
                                    "1",
                                ),
                                r=r,
                                g=g,
                                b=b,
                                a=a,
                            )
                        else:
                            if delta.mode == "P":
                                # Convert to L without considering palette
                                delta_l = Image.new("L", delta.size)
                                delta_l.putdata(delta.getdata())
                                delta = delta_l
                            mask = ImageMath.lambda_eval(
                                lambda args: args["convert"](args["im"] * 255, "1"),
                                im=delta,
                            )
                        diff_frame.paste(fill, mask=ImageOps.invert(mask))
            else:
                bbox = None
            previous_im = im_frame
            im_frames.append(_Frame(diff_frame or im_frame, bbox, encoderinfo))

    if len(im_frames) == 1:
        if "duration" in im.encoderinfo:
            # Since multiple frames will not be written, use the combined duration
            im.encoderinfo["duration"] = im_frames[0].encoderinfo["duration"]
        return False

    for frame_data in im_frames:
        im_frame = frame_data.im
        if not frame_data.bbox:
            # global header
            for s in _get_global_header(im_frame, frame_data.encoderinfo):
                fp.write(s)
            offset = (0, 0)
        else:
            # compress difference
            if not palette:
                frame_data.encoderinfo["include_color_table"] = True

            if frame_data.bbox != (0, 0) + im_frame.size:
                im_frame = im_frame.crop(frame_data.bbox)
            offset = frame_data.bbox[:2]
        _write_frame_data(fp, im_frame, offset, frame_data.encoderinfo)
    return True
|
| 789 |
+
|
| 790 |
+
|
| 791 |
+
def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Save *im* and any appended/sequence frames as an animated GIF."""
    _save(im, fp, filename, save_all=True)
|
| 793 |
+
|
| 794 |
+
|
| 795 |
+
def _save(
    im: Image.Image, fp: IO[bytes], filename: str | bytes, save_all: bool = False
) -> None:
    """Write *im* to *fp* as a GIF; multi-frame when *save_all* is set."""
    # header
    has_palette = "palette" in im.encoderinfo or "palette" in im.info
    if has_palette:
        palette = im.encoderinfo.get("palette", im.info.get("palette"))
    else:
        palette = None
        # without an explicit palette, optimizing the color table is safe
        im.encoderinfo.setdefault("optimize", True)

    # _write_multiple_frames returns False when only one frame exists,
    # in which case we fall back to the single-frame writer.
    wrote_multiple = save_all and _write_multiple_frames(im, fp, palette)
    if not wrote_multiple:
        _write_single_frame(im, fp, palette)

    fp.write(b";")  # end of file

    if hasattr(fp, "flush"):
        fp.flush()
|
| 812 |
+
|
| 813 |
+
|
| 814 |
+
def get_interlace(im: Image.Image) -> int:
    """Return the interlace flag for a frame: 1 (interlaced) or 0.

    Interlacing defaults to on and can be overridden through
    ``im.encoderinfo["interlace"]``, but is always forced off for images
    whose shorter side is under 16 pixels (workaround for @PIL153).
    """
    # Tiny images trip the @PIL153 issue, so never interlace them.
    if min(im.size) < 16:
        return 0
    return im.encoderinfo.get("interlace", 1)
|
| 822 |
+
|
| 823 |
+
|
| 824 |
+
def _write_local_header(
    fp: IO[bytes], im: Image.Image, offset: tuple[int, int], flags: int
) -> None:
    """Write a Graphic Control Extension (when needed) followed by the
    Image Descriptor for one frame.

    :param fp: Output byte stream.
    :param im: Frame image; per-frame options are read from ``im.encoderinfo``.
    :param offset: (x, y) position of the frame on the logical screen.
    :param flags: Initial Image Descriptor flag bits; local color table bits
        may be OR'ed in below.
    """
    try:
        transparency = im.encoderinfo["transparency"]
    except KeyError:
        transparency = None

    # encoderinfo carries milliseconds; GIF stores hundredths of a second.
    if "duration" in im.encoderinfo:
        duration = int(im.encoderinfo["duration"] / 10)
    else:
        duration = 0

    disposal = int(im.encoderinfo.get("disposal", 0))

    # Only emit a Graphic Control Extension when one of its fields is set.
    if transparency is not None or duration != 0 or disposal:
        packed_flag = 1 if transparency is not None else 0
        packed_flag |= disposal << 2

        fp.write(
            b"!"
            + o8(249)  # extension intro
            + o8(4)  # length
            + o8(packed_flag)  # packed fields
            + o16(duration)  # duration
            + o8(transparency or 0)  # transparency index
            + o8(0)
        )

    include_color_table = im.encoderinfo.get("include_color_table")
    if include_color_table:
        palette_bytes = _get_palette_bytes(im)
        color_table_size = _get_color_table_size(palette_bytes)
        if color_table_size:
            flags = flags | 128  # local color table flag
            flags = flags | color_table_size

    fp.write(
        b","
        + o16(offset[0])  # offset
        + o16(offset[1])
        + o16(im.size[0])  # size
        + o16(im.size[1])
        + o8(flags)  # flags
    )
    if include_color_table and color_table_size:
        fp.write(_get_header_palette(palette_bytes))
    fp.write(o8(8))  # bits
|
| 872 |
+
|
| 873 |
+
|
| 874 |
+
def _save_netpbm(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Save ``im`` as a GIF via the external NETPBM/PBMPLUS utilities.

    Unused by default; to enable it, uncomment the ``register_save`` call at
    the end of the file.  Note that ``fp`` is not used — output is written
    directly to ``filename``.
    """
    # If you need real GIF compression and/or RGB quantization, you
    # can use the external NETPBM/PBMPLUS utilities. See comments
    # below for information on how to enable this.
    tempfile = im._dump()

    try:
        with open(filename, "wb") as f:
            if im.mode != "RGB":
                subprocess.check_call(
                    ["ppmtogif", tempfile], stdout=f, stderr=subprocess.DEVNULL
                )
            else:
                # Pipe ppmquant output into ppmtogif
                # "ppmquant 256 %s | ppmtogif > %s" % (tempfile, filename)
                quant_cmd = ["ppmquant", "256", tempfile]
                togif_cmd = ["ppmtogif"]
                quant_proc = subprocess.Popen(
                    quant_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
                )
                togif_proc = subprocess.Popen(
                    togif_cmd,
                    stdin=quant_proc.stdout,
                    stdout=f,
                    stderr=subprocess.DEVNULL,
                )

                # Allow ppmquant to receive SIGPIPE if ppmtogif exits
                assert quant_proc.stdout is not None
                quant_proc.stdout.close()

                retcode = quant_proc.wait()
                if retcode:
                    raise subprocess.CalledProcessError(retcode, quant_cmd)

                retcode = togif_proc.wait()
                if retcode:
                    raise subprocess.CalledProcessError(retcode, togif_cmd)
    finally:
        # Always remove the temporary PPM dump, even when conversion failed.
        try:
            os.unlink(tempfile)
        except OSError:
            pass
|
| 920 |
+
|
| 921 |
+
|
| 922 |
+
# Force optimization so that we can test performance against
# cases where it took lots of memory and time previously.
# (Testing/benchmark switch only — leave False for normal use.)
_FORCE_OPTIMIZE = False
|
| 925 |
+
|
| 926 |
+
|
| 927 |
+
def _get_optimize(im: Image.Image, info: dict[str, Any]) -> list[int] | None:
    """
    Palette optimization is a potentially expensive operation.

    This function determines if the palette should be optimized using
    some heuristics, then returns the list of palette entries in use.

    :param im: Image object
    :param info: encoderinfo
    :returns: list of indexes of palette entries in use, or None
    """
    if im.mode in ("P", "L") and info and info.get("optimize"):
        # Potentially expensive operation.

        # The palette saves 3 bytes per color not used, but palette
        # lengths are restricted to 3*(2**N) bytes. Max saving would
        # be 768 -> 6 bytes if we went all the way down to 2 colors.
        # * If we're over 128 colors, we can't save any space.
        # * If there aren't any holes, it's not worth collapsing.
        # * If we have a 'large' image, the palette is in the noise.

        # create the new palette if not every color is used
        optimise = _FORCE_OPTIMIZE or im.mode == "L"
        if optimise or im.width * im.height < 512 * 512:
            # check which colors are used
            used_palette_colors = []
            for i, count in enumerate(im.histogram()):
                if count:
                    used_palette_colors.append(i)

            # A "hole" exists when the highest used index exceeds the count
            # of used entries; in that case collapsing can shrink the table.
            if optimise or max(used_palette_colors) >= len(used_palette_colors):
                return used_palette_colors

            assert im.palette is not None
            num_palette_colors = len(im.palette.palette) // Image.getmodebands(
                im.palette.mode
            )
            # GIF palette sizes are powers of two; round up to the current one.
            current_palette_size = 1 << (num_palette_colors - 1).bit_length()
            if (
                # check that the palette would become smaller when saved
                len(used_palette_colors) <= current_palette_size // 2
                # check that the palette is not already the smallest possible size
                and current_palette_size > 2
            ):
                return used_palette_colors
    return None
|
| 973 |
+
|
| 974 |
+
|
| 975 |
+
def _get_color_table_size(palette_bytes: bytes) -> int:
|
| 976 |
+
# calculate the palette size for the header
|
| 977 |
+
if not palette_bytes:
|
| 978 |
+
return 0
|
| 979 |
+
elif len(palette_bytes) < 9:
|
| 980 |
+
return 1
|
| 981 |
+
else:
|
| 982 |
+
return math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1
|
| 983 |
+
|
| 984 |
+
|
| 985 |
+
def _get_header_palette(palette_bytes: bytes) -> bytes:
    """
    Returns the palette, null padded to the next power of 2 (*3) bytes
    suitable for direct inclusion in the GIF header

    :param palette_bytes: Unpadded palette bytes, in RGBRGB form
    :returns: Null padded palette
    """
    color_table_size = _get_color_table_size(palette_bytes)

    # add the missing amount of bytes
    # the palette has to be 2<<n in size
    # (number of missing color entries, 3 bytes each)
    actual_target_size_diff = (2 << color_table_size) - len(palette_bytes) // 3
    if actual_target_size_diff > 0:
        palette_bytes += o8(0) * 3 * actual_target_size_diff
    return palette_bytes
|
| 1001 |
+
|
| 1002 |
+
|
| 1003 |
+
def _get_palette_bytes(im: Image.Image) -> bytes:
|
| 1004 |
+
"""
|
| 1005 |
+
Gets the palette for inclusion in the gif header
|
| 1006 |
+
|
| 1007 |
+
:param im: Image object
|
| 1008 |
+
:returns: Bytes, len<=768 suitable for inclusion in gif header
|
| 1009 |
+
"""
|
| 1010 |
+
if not im.palette:
|
| 1011 |
+
return b""
|
| 1012 |
+
|
| 1013 |
+
palette = bytes(im.palette.palette)
|
| 1014 |
+
if im.palette.mode == "RGBA":
|
| 1015 |
+
palette = b"".join(palette[i * 4 : i * 4 + 3] for i in range(len(palette) // 3))
|
| 1016 |
+
return palette
|
| 1017 |
+
|
| 1018 |
+
|
| 1019 |
+
def _get_background(
    im: Image.Image,
    info_background: int | tuple[int, int, int] | tuple[int, int, int, int] | None,
) -> int:
    """Resolve the "background" option to a global color table index.

    Accepts either an index (returned as-is) or an RGB/RGBA tuple, which is
    allocated in the image's palette; unallocatable tuples fall back to 0.
    """
    background = 0
    if info_background:
        if isinstance(info_background, tuple):
            # WebPImagePlugin stores an RGBA value in info["background"]
            # So it must be converted to the same format as GifImagePlugin's
            # info["background"] - a global color table index
            assert im.palette is not None
            try:
                background = im.palette.getcolor(info_background, im)
            except ValueError as e:
                # Matching on the message text is fragile but intentional here:
                # only these two specific failures are silently ignored.
                if str(e) not in (
                    # If all 256 colors are in use,
                    # then there is no need for the background color
                    "cannot allocate more than 256 colors",
                    # Ignore non-opaque WebP background
                    "cannot add non-opaque RGBA color to RGB palette",
                ):
                    raise
        else:
            background = info_background
    return background
|
| 1044 |
+
|
| 1045 |
+
|
| 1046 |
+
def _get_global_header(im: Image.Image, info: dict[str, Any]) -> list[bytes]:
    """Return a list of strings representing a GIF header"""

    # Header Block
    # https://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp

    # 89a is needed for the extension blocks written below (transparency,
    # looping, duration, comments); plain images can stay 87a.
    version = b"87a"
    if im.info.get("version") == b"89a" or (
        info
        and (
            "transparency" in info
            or info.get("loop") is not None
            or info.get("duration")
            or info.get("comment")
        )
    ):
        version = b"89a"

    background = _get_background(im, info.get("background"))

    palette_bytes = _get_palette_bytes(im)
    color_table_size = _get_color_table_size(palette_bytes)

    header = [
        b"GIF"  # signature
        + version  # version
        + o16(im.size[0])  # canvas width
        + o16(im.size[1]),  # canvas height
        # Logical Screen Descriptor
        # size of global color table + global color table flag
        o8(color_table_size + 128),  # packed fields
        # background + reserved/aspect
        o8(background) + o8(0),
        # Global Color Table
        _get_header_palette(palette_bytes),
    ]
    if info.get("loop") is not None:
        # NETSCAPE2.0 application extension carries the animation loop count.
        header.append(
            b"!"
            + o8(255)  # extension intro
            + o8(11)
            + b"NETSCAPE2.0"
            + o8(3)
            + o8(1)
            + o16(info["loop"])  # number of loops
            + o8(0)
        )
    if info.get("comment"):
        # Comment extension: payload is split into sub-blocks of <= 255 bytes.
        comment_block = b"!" + o8(254)  # extension intro

        comment = info["comment"]
        if isinstance(comment, str):
            comment = comment.encode()
        for i in range(0, len(comment), 255):
            subblock = comment[i : i + 255]
            comment_block += o8(len(subblock)) + subblock

        comment_block += o8(0)
        header.append(comment_block)
    return header
|
| 1106 |
+
|
| 1107 |
+
|
| 1108 |
+
def _write_frame_data(
    fp: IO[bytes],
    im_frame: Image.Image,
    offset: tuple[int, int],
    params: dict[str, Any],
) -> None:
    """Write one frame (local header + encoded raster data) to ``fp``.

    ``params`` is temporarily installed as ``im_frame.encoderinfo`` so the
    header and encoder helpers can read per-frame options; it is always
    removed again, even on error.
    """
    try:
        im_frame.encoderinfo = params

        # local image header
        _write_local_header(fp, im_frame, offset, 0)

        ImageFile._save(
            im_frame,
            fp,
            [ImageFile._Tile("gif", (0, 0) + im_frame.size, 0, RAWMODE[im_frame.mode])],
        )

        fp.write(b"\0")  # end of image data
    finally:
        del im_frame.encoderinfo
|
| 1129 |
+
|
| 1130 |
+
|
| 1131 |
+
# --------------------------------------------------------------------
|
| 1132 |
+
# Legacy GIF utilities
|
| 1133 |
+
|
| 1134 |
+
|
| 1135 |
+
def getheader(
    im: Image.Image, palette: _Palette | None = None, info: dict[str, Any] | None = None
) -> tuple[list[bytes], list[int] | None]:
    """
    Legacy Method to get Gif data from image.

    Warning:: May modify image data.

    :param im: Image object
    :param palette: bytes object containing the source palette, or ....
    :param info: encoderinfo
    :returns: tuple of(list of header items, optimized palette)

    """
    if info is None:
        info = {}

    used_palette_colors = _get_optimize(im, info)

    if "background" not in info and "background" in im.info:
        info["background"] = im.info["background"]

    # The normalized image's palette and raster are copied back onto ``im``
    # — this is why the docstring warns that image data may be modified.
    im_mod = _normalize_palette(im, palette, info)
    im.palette = im_mod.palette
    im.im = im_mod.im
    header = _get_global_header(im, info)

    return header, used_palette_colors
|
| 1163 |
+
|
| 1164 |
+
|
| 1165 |
+
def getdata(
    im: Image.Image, offset: tuple[int, int] = (0, 0), **params: Any
) -> list[bytes]:
    """
    Legacy Method

    Return a list of strings representing this image.
    The first string is a local image header, the rest contains
    encoded image data.

    To specify duration, add the time in milliseconds,
    e.g. ``getdata(im_frame, duration=1000)``

    :param im: Image object
    :param offset: Tuple of (x, y) pixels. Defaults to (0, 0)
    :param \\**params: e.g. duration or other encoder info parameters
    :returns: List of bytes containing GIF encoded frame data

    """
    from io import BytesIO

    class Collector(BytesIO):
        # Collects each write() call as a separate chunk.  The class (and
        # therefore this list) is re-created on every getdata() call, so the
        # list is not shared between calls.
        data = []

        def write(self, data: Buffer) -> int:
            self.data.append(data)
            return len(data)

    im.load()  # make sure raster data is available

    fp = Collector()

    _write_frame_data(fp, im, offset, params)

    return fp.data
|
| 1200 |
+
|
| 1201 |
+
|
| 1202 |
+
# --------------------------------------------------------------------
|
| 1203 |
+
# Registry
|
| 1204 |
+
|
| 1205 |
+
# Hook the GIF reader and writers into PIL's format registry.
Image.register_open(GifImageFile.format, GifImageFile, _accept)
Image.register_save(GifImageFile.format, _save)
Image.register_save_all(GifImageFile.format, _save_all)
Image.register_extension(GifImageFile.format, ".gif")
Image.register_mime(GifImageFile.format, "image/gif")

#
# Uncomment the following line if you wish to use NETPBM/PBMPLUS
# instead of the built-in "uncompressed" GIF encoder

# Image.register_save(GifImageFile.format, _save_netpbm)
|
lib/python3.13/site-packages/PIL/ImageMode.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# standard mode descriptors
|
| 6 |
+
#
|
| 7 |
+
# History:
|
| 8 |
+
# 2006-03-20 fl Added
|
| 9 |
+
#
|
| 10 |
+
# Copyright (c) 2006 by Secret Labs AB.
|
| 11 |
+
# Copyright (c) 2006 by Fredrik Lundh.
|
| 12 |
+
#
|
| 13 |
+
# See the README file for information on usage and redistribution.
|
| 14 |
+
#
|
| 15 |
+
from __future__ import annotations
|
| 16 |
+
|
| 17 |
+
import sys
|
| 18 |
+
from functools import lru_cache
|
| 19 |
+
from typing import NamedTuple
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class ModeDescriptor(NamedTuple):
    """Wrapper for mode strings."""

    # Full mode name, e.g. "RGB" or "I;16".
    mode: str
    # Names of the individual bands, e.g. ("R", "G", "B").
    bands: tuple[str, ...]
    # The base mode this mode reduces to ("L", "RGB", "P", ...).
    basemode: str
    # The base type of a single band.
    basetype: str
    # NumPy-style type string, including byte order (e.g. "<u2", "|u1").
    typestr: str

    def __str__(self) -> str:
        return self.mode
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
@lru_cache
def getmode(mode: str) -> ModeDescriptor:
    """Gets a mode descriptor for the given mode.

    Raises ``KeyError`` for unknown mode names.  Results are cached, so each
    descriptor is built at most once per process.
    """
    endian = "<" if sys.byteorder == "little" else ">"

    core_modes = {
        # core modes
        # Bits need to be extended to bytes
        "1": ("L", "L", ("1",), "|b1"),
        "L": ("L", "L", ("L",), "|u1"),
        "I": ("L", "I", ("I",), f"{endian}i4"),
        "F": ("L", "F", ("F",), f"{endian}f4"),
        "P": ("P", "L", ("P",), "|u1"),
        "RGB": ("RGB", "L", ("R", "G", "B"), "|u1"),
        "RGBX": ("RGB", "L", ("R", "G", "B", "X"), "|u1"),
        "RGBA": ("RGB", "L", ("R", "G", "B", "A"), "|u1"),
        "CMYK": ("RGB", "L", ("C", "M", "Y", "K"), "|u1"),
        "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr"), "|u1"),
        # UNDONE - unsigned |u1i1i1
        "LAB": ("RGB", "L", ("L", "A", "B"), "|u1"),
        "HSV": ("RGB", "L", ("H", "S", "V"), "|u1"),
        # extra experimental modes
        "RGBa": ("RGB", "L", ("R", "G", "B", "a"), "|u1"),
        "LA": ("L", "L", ("L", "A"), "|u1"),
        "La": ("L", "L", ("L", "a"), "|u1"),
        "PA": ("RGB", "L", ("P", "A"), "|u1"),
    }
    try:
        base_mode, base_type, band_names, type_str = core_modes[mode]
    except KeyError:
        pass
    else:
        return ModeDescriptor(mode, band_names, base_mode, base_type, type_str)

    # Raw "I;16"/"I;32" style modes: a single "I" band with an explicit
    # width, signedness and byte order encoded in the type string.
    raw_mode_typestrs = {
        # I;16 == I;16L, and I;32 == I;32L
        "I;16": "<u2",
        "I;16S": "<i2",
        "I;16L": "<u2",
        "I;16LS": "<i2",
        "I;16B": ">u2",
        "I;16BS": ">i2",
        "I;16N": f"{endian}u2",
        "I;16NS": f"{endian}i2",
        "I;32": "<u4",
        "I;32B": ">u4",
        "I;32L": "<u4",
        "I;32S": "<i4",
        "I;32BS": ">i4",
        "I;32LS": "<i4",
    }
    return ModeDescriptor(mode, ("I",), "L", "L", raw_mode_typestrs[mode])
|
lib/python3.13/site-packages/PIL/ImagePalette.py
ADDED
|
@@ -0,0 +1,286 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# image palette object
|
| 6 |
+
#
|
| 7 |
+
# History:
|
| 8 |
+
# 1996-03-11 fl Rewritten.
|
| 9 |
+
# 1997-01-03 fl Up and running.
|
| 10 |
+
# 1997-08-23 fl Added load hack
|
| 11 |
+
# 2001-04-16 fl Fixed randint shadow bug in random()
|
| 12 |
+
#
|
| 13 |
+
# Copyright (c) 1997-2001 by Secret Labs AB
|
| 14 |
+
# Copyright (c) 1996-1997 by Fredrik Lundh
|
| 15 |
+
#
|
| 16 |
+
# See the README file for information on usage and redistribution.
|
| 17 |
+
#
|
| 18 |
+
from __future__ import annotations
|
| 19 |
+
|
| 20 |
+
import array
|
| 21 |
+
from collections.abc import Sequence
|
| 22 |
+
from typing import IO
|
| 23 |
+
|
| 24 |
+
from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile
|
| 25 |
+
|
| 26 |
+
TYPE_CHECKING = False
|
| 27 |
+
if TYPE_CHECKING:
|
| 28 |
+
from . import Image
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class ImagePalette:
    """
    Color palette for palette mapped images

    :param mode: The mode to use for the palette. See:
        :ref:`concept-modes`. Defaults to "RGB"
    :param palette: An optional palette. If given, it must be a bytearray,
        an array or a list of ints between 0-255. The list must consist of
        all channels for one color followed by the next color (e.g. RGBRGBRGB).
        Defaults to an empty palette.
    """

    def __init__(
        self,
        mode: str = "RGB",
        palette: Sequence[int] | bytes | bytearray | None = None,
    ) -> None:
        self.mode = mode
        self.rawmode: str | None = None  # if set, palette contains raw data
        self.palette = palette or bytearray()
        self.dirty: int | None = None

    @property
    def palette(self) -> Sequence[int] | bytes | bytearray:
        return self._palette

    @palette.setter
    def palette(self, palette: Sequence[int] | bytes | bytearray) -> None:
        # Replacing the palette invalidates the cached color -> index map.
        self._colors: dict[tuple[int, ...], int] | None = None
        self._palette = palette

    @property
    def colors(self) -> dict[tuple[int, ...], int]:
        # Lazily build the color -> index map; on duplicate colors the first
        # (lowest) index wins.
        if self._colors is None:
            mode_len = len(self.mode)
            self._colors = {}
            for i in range(0, len(self.palette), mode_len):
                color = tuple(self.palette[i : i + mode_len])
                if color in self._colors:
                    continue
                self._colors[color] = i // mode_len
        return self._colors

    @colors.setter
    def colors(self, colors: dict[tuple[int, ...], int]) -> None:
        self._colors = colors

    def copy(self) -> ImagePalette:
        """Return an independent copy of this palette."""
        new = ImagePalette()

        new.mode = self.mode
        new.rawmode = self.rawmode
        if self.palette is not None:
            new.palette = self.palette[:]
        new.dirty = self.dirty

        return new

    def getdata(self) -> tuple[str, Sequence[int] | bytes | bytearray]:
        """
        Get palette contents in format suitable for the low-level
        ``im.putpalette`` primitive.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            return self.rawmode, self.palette
        return self.mode, self.tobytes()

    def tobytes(self) -> bytes:
        """Convert palette to bytes.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            msg = "palette contains raw palette data"
            raise ValueError(msg)
        if isinstance(self.palette, bytes):
            return self.palette
        arr = array.array("B", self.palette)
        return arr.tobytes()

    # Declare tostring as an alias for tobytes
    tostring = tobytes

    def _new_color_index(
        self, image: Image.Image | None = None, e: Exception | None = None
    ) -> int:
        """Pick an index for a new color, skipping the image's background and
        transparency indexes; raises ``ValueError`` when the palette is full.
        """
        if not isinstance(self.palette, bytearray):
            self._palette = bytearray(self.palette)
        index = len(self.palette) // 3
        special_colors: tuple[int | tuple[int, ...] | None, ...] = ()
        if image:
            special_colors = (
                image.info.get("background"),
                image.info.get("transparency"),
            )
        while index in special_colors:
            index += 1
        if index >= 256:
            if image:
                # Search for an unused index
                for i, count in reversed(list(enumerate(image.histogram()))):
                    if count == 0 and i not in special_colors:
                        index = i
                        break
            if index >= 256:
                msg = "cannot allocate more than 256 colors"
                raise ValueError(msg) from e
        return index

    def getcolor(
        self,
        color: tuple[int, ...],
        image: Image.Image | None = None,
    ) -> int:
        """Given an rgb tuple, allocate palette entry.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            msg = "palette contains raw palette data"
            raise ValueError(msg)
        if isinstance(color, tuple):
            # Reconcile tuple length with the palette mode: opaque RGBA is
            # accepted for RGB palettes, RGB is padded for RGBA palettes.
            if self.mode == "RGB":
                if len(color) == 4:
                    if color[3] != 255:
                        msg = "cannot add non-opaque RGBA color to RGB palette"
                        raise ValueError(msg)
                    color = color[:3]
            elif self.mode == "RGBA":
                if len(color) == 3:
                    color += (255,)
            try:
                return self.colors[color]
            except KeyError as e:
                # allocate new color slot
                index = self._new_color_index(image, e)
                assert isinstance(self._palette, bytearray)
                self.colors[color] = index
                if index * 3 < len(self.palette):
                    # Reusing a freed slot: splice the color in place.
                    self._palette = (
                        self._palette[: index * 3]
                        + bytes(color)
                        + self._palette[index * 3 + 3 :]
                    )
                else:
                    self._palette += bytes(color)
                self.dirty = 1
                return index
        else:
            msg = f"unknown color specifier: {repr(color)}"  # type: ignore[unreachable]
            raise ValueError(msg)

    def save(self, fp: str | IO[str]) -> None:
        """Save palette to text file.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            msg = "palette contains raw palette data"
            raise ValueError(msg)
        if isinstance(fp, str):
            fp = open(fp, "w")
        fp.write("# Palette\n")
        fp.write(f"# Mode: {self.mode}\n")
        # One line per palette entry; missing channels are written as 0.
        for i in range(256):
            fp.write(f"{i}")
            for j in range(i * len(self.mode), (i + 1) * len(self.mode)):
                try:
                    fp.write(f" {self.palette[j]}")
                except IndexError:
                    fp.write(" 0")
            fp.write("\n")
        fp.close()
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
# --------------------------------------------------------------------
|
| 209 |
+
# Internal
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
def raw(rawmode: str, data: Sequence[int] | bytes | bytearray) -> ImagePalette:
    """Build an :py:class:`ImagePalette` holding raw (unparsed) palette data."""
    result = ImagePalette()
    result.rawmode = rawmode
    result.palette = data
    result.dirty = 1
    return result
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
# --------------------------------------------------------------------
|
| 221 |
+
# Factories
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
def make_linear_lut(black: int, white: float) -> list[int]:
    """Create a 256-entry lookup table mapping 0..255 linearly onto 0..white.

    Only ``black == 0`` is supported; any other value raises
    :py:exc:`NotImplementedError`.
    """
    if black != 0:
        msg = "unavailable when black is non-zero"
        raise NotImplementedError(msg)  # FIXME
    return [int(white * level // 255) for level in range(256)]
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def make_gamma_lut(exp: float) -> list[int]:
    """Create a 256-entry gamma-correction table for the given exponent."""
    table = []
    for level in range(256):
        corrected = ((level / 255.0) ** exp) * 255.0
        table.append(int(corrected + 0.5))  # round to nearest
    return table
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def negative(mode: str = "RGB") -> ImagePalette:
    """Create a palette that inverts intensity (255 down to 0) in every band."""
    span = 256 * len(mode)
    descending = range(span - 1, -1, -1)
    return ImagePalette(mode, [value // len(mode) for value in descending])
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def random(mode: str = "RGB") -> ImagePalette:
    """Create a palette filled with uniformly random channel values."""
    from random import randint

    entry_count = 256 * len(mode)
    values = [randint(0, 255) for _ in range(entry_count)]
    return ImagePalette(mode, values)
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
def sepia(white: str = "#fff0c0") -> ImagePalette:
    """Create a sepia-toned RGB palette ramping from black up to ``white``."""
    # One linear LUT per band, scaled to the corresponding channel of "white".
    bands = [make_linear_lut(0, band) for band in ImageColor.getrgb(white)]
    # Interleave the three band LUTs into RGBRGB... order.
    return ImagePalette("RGB", [bands[i % 3][i // 3] for i in range(256 * 3)])
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def wedge(mode: str = "RGB") -> ImagePalette:
    """Create a linear wedge palette: intensity 0..255 repeated in each band."""
    band_count = len(mode)
    values = [index // band_count for index in range(256 * band_count)]
    return ImagePalette(mode, values)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def load(filename: str) -> tuple[bytes, str]:
    """Load a palette from a file, trying each known palette format in turn.

    :param filename: Path to a GIMP palette, GIMP gradient, or plain palette
        file.
    :returns: A ``(data, rawmode)`` tuple as produced by the handler.
    :raises OSError: If no handler can parse the file.
    """
    # FIXME: supports GIMP gradients only

    with open(filename, "rb") as fp:
        paletteHandlers: list[
            type[
                GimpPaletteFile.GimpPaletteFile
                | GimpGradientFile.GimpGradientFile
                | PaletteFile.PaletteFile
            ]
        ] = [
            GimpPaletteFile.GimpPaletteFile,
            GimpGradientFile.GimpGradientFile,
            PaletteFile.PaletteFile,
        ]
        # Try each handler from the start of the file; parse errors just
        # mean "wrong format", so move on to the next handler.
        for paletteHandler in paletteHandlers:
            try:
                fp.seek(0)
                lut = paletteHandler(fp).getpalette()
                if lut:
                    break
            except (SyntaxError, ValueError):
                pass
        else:
            msg = "cannot load palette"
            raise OSError(msg)

    return lut  # data, rawmode
|
lib/python3.13/site-packages/PIL/ImageSequence.py
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# sequence support classes
|
| 6 |
+
#
|
| 7 |
+
# history:
|
| 8 |
+
# 1997-02-20 fl Created
|
| 9 |
+
#
|
| 10 |
+
# Copyright (c) 1997 by Secret Labs AB.
|
| 11 |
+
# Copyright (c) 1997 by Fredrik Lundh.
|
| 12 |
+
#
|
| 13 |
+
# See the README file for information on usage and redistribution.
|
| 14 |
+
#
|
| 15 |
+
|
| 16 |
+
##
|
| 17 |
+
from __future__ import annotations
|
| 18 |
+
|
| 19 |
+
from . import Image
|
| 20 |
+
|
| 21 |
+
TYPE_CHECKING = False
|
| 22 |
+
if TYPE_CHECKING:
|
| 23 |
+
from collections.abc import Callable
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class Iterator:
|
| 27 |
+
"""
|
| 28 |
+
This class implements an iterator object that can be used to loop
|
| 29 |
+
over an image sequence.
|
| 30 |
+
|
| 31 |
+
You can use the ``[]`` operator to access elements by index. This operator
|
| 32 |
+
will raise an :py:exc:`IndexError` if you try to access a nonexistent
|
| 33 |
+
frame.
|
| 34 |
+
|
| 35 |
+
:param im: An image object.
|
| 36 |
+
"""
|
| 37 |
+
|
| 38 |
+
def __init__(self, im: Image.Image) -> None:
|
| 39 |
+
if not hasattr(im, "seek"):
|
| 40 |
+
msg = "im must have seek method"
|
| 41 |
+
raise AttributeError(msg)
|
| 42 |
+
self.im = im
|
| 43 |
+
self.position = getattr(self.im, "_min_frame", 0)
|
| 44 |
+
|
| 45 |
+
def __getitem__(self, ix: int) -> Image.Image:
|
| 46 |
+
try:
|
| 47 |
+
self.im.seek(ix)
|
| 48 |
+
return self.im
|
| 49 |
+
except EOFError as e:
|
| 50 |
+
msg = "end of sequence"
|
| 51 |
+
raise IndexError(msg) from e
|
| 52 |
+
|
| 53 |
+
def __iter__(self) -> Iterator:
|
| 54 |
+
return self
|
| 55 |
+
|
| 56 |
+
def __next__(self) -> Image.Image:
|
| 57 |
+
try:
|
| 58 |
+
self.im.seek(self.position)
|
| 59 |
+
self.position += 1
|
| 60 |
+
return self.im
|
| 61 |
+
except EOFError as e:
|
| 62 |
+
msg = "end of sequence"
|
| 63 |
+
raise StopIteration(msg) from e
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def all_frames(
|
| 67 |
+
im: Image.Image | list[Image.Image],
|
| 68 |
+
func: Callable[[Image.Image], Image.Image] | None = None,
|
| 69 |
+
) -> list[Image.Image]:
|
| 70 |
+
"""
|
| 71 |
+
Applies a given function to all frames in an image or a list of images.
|
| 72 |
+
The frames are returned as a list of separate images.
|
| 73 |
+
|
| 74 |
+
:param im: An image, or a list of images.
|
| 75 |
+
:param func: The function to apply to all of the image frames.
|
| 76 |
+
:returns: A list of images.
|
| 77 |
+
"""
|
| 78 |
+
if not isinstance(im, list):
|
| 79 |
+
im = [im]
|
| 80 |
+
|
| 81 |
+
ims = []
|
| 82 |
+
for imSequence in im:
|
| 83 |
+
current = imSequence.tell()
|
| 84 |
+
|
| 85 |
+
ims += [im_frame.copy() for im_frame in Iterator(imSequence)]
|
| 86 |
+
|
| 87 |
+
imSequence.seek(current)
|
| 88 |
+
return [func(im) for im in ims] if func else ims
|
lib/python3.13/site-packages/PIL/ImageShow.py
ADDED
|
@@ -0,0 +1,362 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# im.show() drivers
|
| 6 |
+
#
|
| 7 |
+
# History:
|
| 8 |
+
# 2008-04-06 fl Created
|
| 9 |
+
#
|
| 10 |
+
# Copyright (c) Secret Labs AB 2008.
|
| 11 |
+
#
|
| 12 |
+
# See the README file for information on usage and redistribution.
|
| 13 |
+
#
|
| 14 |
+
from __future__ import annotations
|
| 15 |
+
|
| 16 |
+
import abc
|
| 17 |
+
import os
|
| 18 |
+
import shutil
|
| 19 |
+
import subprocess
|
| 20 |
+
import sys
|
| 21 |
+
from shlex import quote
|
| 22 |
+
from typing import Any
|
| 23 |
+
|
| 24 |
+
from . import Image
|
| 25 |
+
|
| 26 |
+
_viewers = []
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def register(viewer: type[Viewer] | Viewer, order: int = 1) -> None:
|
| 30 |
+
"""
|
| 31 |
+
The :py:func:`register` function is used to register additional viewers::
|
| 32 |
+
|
| 33 |
+
from PIL import ImageShow
|
| 34 |
+
ImageShow.register(MyViewer()) # MyViewer will be used as a last resort
|
| 35 |
+
ImageShow.register(MySecondViewer(), 0) # MySecondViewer will be prioritised
|
| 36 |
+
ImageShow.register(ImageShow.XVViewer(), 0) # XVViewer will be prioritised
|
| 37 |
+
|
| 38 |
+
:param viewer: The viewer to be registered.
|
| 39 |
+
:param order:
|
| 40 |
+
Zero or a negative integer to prepend this viewer to the list,
|
| 41 |
+
a positive integer to append it.
|
| 42 |
+
"""
|
| 43 |
+
if isinstance(viewer, type) and issubclass(viewer, Viewer):
|
| 44 |
+
viewer = viewer()
|
| 45 |
+
if order > 0:
|
| 46 |
+
_viewers.append(viewer)
|
| 47 |
+
else:
|
| 48 |
+
_viewers.insert(0, viewer)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def show(image: Image.Image, title: str | None = None, **options: Any) -> bool:
|
| 52 |
+
r"""
|
| 53 |
+
Display a given image.
|
| 54 |
+
|
| 55 |
+
:param image: An image object.
|
| 56 |
+
:param title: Optional title. Not all viewers can display the title.
|
| 57 |
+
:param \**options: Additional viewer options.
|
| 58 |
+
:returns: ``True`` if a suitable viewer was found, ``False`` otherwise.
|
| 59 |
+
"""
|
| 60 |
+
for viewer in _viewers:
|
| 61 |
+
if viewer.show(image, title=title, **options):
|
| 62 |
+
return True
|
| 63 |
+
return False
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class Viewer:
|
| 67 |
+
"""Base class for viewers."""
|
| 68 |
+
|
| 69 |
+
# main api
|
| 70 |
+
|
| 71 |
+
def show(self, image: Image.Image, **options: Any) -> int:
|
| 72 |
+
"""
|
| 73 |
+
The main function for displaying an image.
|
| 74 |
+
Converts the given image to the target format and displays it.
|
| 75 |
+
"""
|
| 76 |
+
|
| 77 |
+
if not (
|
| 78 |
+
image.mode in ("1", "RGBA")
|
| 79 |
+
or (self.format == "PNG" and image.mode in ("I;16", "LA"))
|
| 80 |
+
):
|
| 81 |
+
base = Image.getmodebase(image.mode)
|
| 82 |
+
if image.mode != base:
|
| 83 |
+
image = image.convert(base)
|
| 84 |
+
|
| 85 |
+
return self.show_image(image, **options)
|
| 86 |
+
|
| 87 |
+
# hook methods
|
| 88 |
+
|
| 89 |
+
format: str | None = None
|
| 90 |
+
"""The format to convert the image into."""
|
| 91 |
+
options: dict[str, Any] = {}
|
| 92 |
+
"""Additional options used to convert the image."""
|
| 93 |
+
|
| 94 |
+
def get_format(self, image: Image.Image) -> str | None:
|
| 95 |
+
"""Return format name, or ``None`` to save as PGM/PPM."""
|
| 96 |
+
return self.format
|
| 97 |
+
|
| 98 |
+
def get_command(self, file: str, **options: Any) -> str:
|
| 99 |
+
"""
|
| 100 |
+
Returns the command used to display the file.
|
| 101 |
+
Not implemented in the base class.
|
| 102 |
+
"""
|
| 103 |
+
msg = "unavailable in base viewer"
|
| 104 |
+
raise NotImplementedError(msg)
|
| 105 |
+
|
| 106 |
+
def save_image(self, image: Image.Image) -> str:
|
| 107 |
+
"""Save to temporary file and return filename."""
|
| 108 |
+
return image._dump(format=self.get_format(image), **self.options)
|
| 109 |
+
|
| 110 |
+
def show_image(self, image: Image.Image, **options: Any) -> int:
|
| 111 |
+
"""Display the given image."""
|
| 112 |
+
return self.show_file(self.save_image(image), **options)
|
| 113 |
+
|
| 114 |
+
def show_file(self, path: str, **options: Any) -> int:
|
| 115 |
+
"""
|
| 116 |
+
Display given file.
|
| 117 |
+
"""
|
| 118 |
+
if not os.path.exists(path):
|
| 119 |
+
raise FileNotFoundError
|
| 120 |
+
os.system(self.get_command(path, **options)) # nosec
|
| 121 |
+
return 1
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
# --------------------------------------------------------------------
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class WindowsViewer(Viewer):
|
| 128 |
+
"""The default viewer on Windows is the default system application for PNG files."""
|
| 129 |
+
|
| 130 |
+
format = "PNG"
|
| 131 |
+
options = {"compress_level": 1, "save_all": True}
|
| 132 |
+
|
| 133 |
+
def get_command(self, file: str, **options: Any) -> str:
|
| 134 |
+
return (
|
| 135 |
+
f'start "Pillow" /WAIT "{file}" '
|
| 136 |
+
"&& ping -n 4 127.0.0.1 >NUL "
|
| 137 |
+
f'&& del /f "{file}"'
|
| 138 |
+
)
|
| 139 |
+
|
| 140 |
+
def show_file(self, path: str, **options: Any) -> int:
|
| 141 |
+
"""
|
| 142 |
+
Display given file.
|
| 143 |
+
"""
|
| 144 |
+
if not os.path.exists(path):
|
| 145 |
+
raise FileNotFoundError
|
| 146 |
+
subprocess.Popen(
|
| 147 |
+
self.get_command(path, **options),
|
| 148 |
+
shell=True,
|
| 149 |
+
creationflags=getattr(subprocess, "CREATE_NO_WINDOW"),
|
| 150 |
+
) # nosec
|
| 151 |
+
return 1
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
if sys.platform == "win32":
|
| 155 |
+
register(WindowsViewer)
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
class MacViewer(Viewer):
|
| 159 |
+
"""The default viewer on macOS using ``Preview.app``."""
|
| 160 |
+
|
| 161 |
+
format = "PNG"
|
| 162 |
+
options = {"compress_level": 1, "save_all": True}
|
| 163 |
+
|
| 164 |
+
def get_command(self, file: str, **options: Any) -> str:
|
| 165 |
+
# on darwin open returns immediately resulting in the temp
|
| 166 |
+
# file removal while app is opening
|
| 167 |
+
command = "open -a Preview.app"
|
| 168 |
+
command = f"({command} {quote(file)}; sleep 20; rm -f {quote(file)})&"
|
| 169 |
+
return command
|
| 170 |
+
|
| 171 |
+
def show_file(self, path: str, **options: Any) -> int:
|
| 172 |
+
"""
|
| 173 |
+
Display given file.
|
| 174 |
+
"""
|
| 175 |
+
if not os.path.exists(path):
|
| 176 |
+
raise FileNotFoundError
|
| 177 |
+
subprocess.call(["open", "-a", "Preview.app", path])
|
| 178 |
+
|
| 179 |
+
pyinstaller = getattr(sys, "frozen", False) and hasattr(sys, "_MEIPASS")
|
| 180 |
+
executable = (not pyinstaller and sys.executable) or shutil.which("python3")
|
| 181 |
+
if executable:
|
| 182 |
+
subprocess.Popen(
|
| 183 |
+
[
|
| 184 |
+
executable,
|
| 185 |
+
"-c",
|
| 186 |
+
"import os, sys, time; time.sleep(20); os.remove(sys.argv[1])",
|
| 187 |
+
path,
|
| 188 |
+
]
|
| 189 |
+
)
|
| 190 |
+
return 1
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
if sys.platform == "darwin":
|
| 194 |
+
register(MacViewer)
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
class UnixViewer(abc.ABC, Viewer):
|
| 198 |
+
format = "PNG"
|
| 199 |
+
options = {"compress_level": 1, "save_all": True}
|
| 200 |
+
|
| 201 |
+
@abc.abstractmethod
|
| 202 |
+
def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]:
|
| 203 |
+
pass
|
| 204 |
+
|
| 205 |
+
def get_command(self, file: str, **options: Any) -> str:
|
| 206 |
+
command = self.get_command_ex(file, **options)[0]
|
| 207 |
+
return f"{command} {quote(file)}"
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
class XDGViewer(UnixViewer):
|
| 211 |
+
"""
|
| 212 |
+
The freedesktop.org ``xdg-open`` command.
|
| 213 |
+
"""
|
| 214 |
+
|
| 215 |
+
def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]:
|
| 216 |
+
command = executable = "xdg-open"
|
| 217 |
+
return command, executable
|
| 218 |
+
|
| 219 |
+
def show_file(self, path: str, **options: Any) -> int:
|
| 220 |
+
"""
|
| 221 |
+
Display given file.
|
| 222 |
+
"""
|
| 223 |
+
if not os.path.exists(path):
|
| 224 |
+
raise FileNotFoundError
|
| 225 |
+
subprocess.Popen(["xdg-open", path])
|
| 226 |
+
return 1
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
class DisplayViewer(UnixViewer):
|
| 230 |
+
"""
|
| 231 |
+
The ImageMagick ``display`` command.
|
| 232 |
+
This viewer supports the ``title`` parameter.
|
| 233 |
+
"""
|
| 234 |
+
|
| 235 |
+
def get_command_ex(
|
| 236 |
+
self, file: str, title: str | None = None, **options: Any
|
| 237 |
+
) -> tuple[str, str]:
|
| 238 |
+
command = executable = "display"
|
| 239 |
+
if title:
|
| 240 |
+
command += f" -title {quote(title)}"
|
| 241 |
+
return command, executable
|
| 242 |
+
|
| 243 |
+
def show_file(self, path: str, **options: Any) -> int:
|
| 244 |
+
"""
|
| 245 |
+
Display given file.
|
| 246 |
+
"""
|
| 247 |
+
if not os.path.exists(path):
|
| 248 |
+
raise FileNotFoundError
|
| 249 |
+
args = ["display"]
|
| 250 |
+
title = options.get("title")
|
| 251 |
+
if title:
|
| 252 |
+
args += ["-title", title]
|
| 253 |
+
args.append(path)
|
| 254 |
+
|
| 255 |
+
subprocess.Popen(args)
|
| 256 |
+
return 1
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
class GmDisplayViewer(UnixViewer):
|
| 260 |
+
"""The GraphicsMagick ``gm display`` command."""
|
| 261 |
+
|
| 262 |
+
def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]:
|
| 263 |
+
executable = "gm"
|
| 264 |
+
command = "gm display"
|
| 265 |
+
return command, executable
|
| 266 |
+
|
| 267 |
+
def show_file(self, path: str, **options: Any) -> int:
|
| 268 |
+
"""
|
| 269 |
+
Display given file.
|
| 270 |
+
"""
|
| 271 |
+
if not os.path.exists(path):
|
| 272 |
+
raise FileNotFoundError
|
| 273 |
+
subprocess.Popen(["gm", "display", path])
|
| 274 |
+
return 1
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
class EogViewer(UnixViewer):
|
| 278 |
+
"""The GNOME Image Viewer ``eog`` command."""
|
| 279 |
+
|
| 280 |
+
def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]:
|
| 281 |
+
executable = "eog"
|
| 282 |
+
command = "eog -n"
|
| 283 |
+
return command, executable
|
| 284 |
+
|
| 285 |
+
def show_file(self, path: str, **options: Any) -> int:
|
| 286 |
+
"""
|
| 287 |
+
Display given file.
|
| 288 |
+
"""
|
| 289 |
+
if not os.path.exists(path):
|
| 290 |
+
raise FileNotFoundError
|
| 291 |
+
subprocess.Popen(["eog", "-n", path])
|
| 292 |
+
return 1
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
class XVViewer(UnixViewer):
|
| 296 |
+
"""
|
| 297 |
+
The X Viewer ``xv`` command.
|
| 298 |
+
This viewer supports the ``title`` parameter.
|
| 299 |
+
"""
|
| 300 |
+
|
| 301 |
+
def get_command_ex(
|
| 302 |
+
self, file: str, title: str | None = None, **options: Any
|
| 303 |
+
) -> tuple[str, str]:
|
| 304 |
+
# note: xv is pretty outdated. most modern systems have
|
| 305 |
+
# imagemagick's display command instead.
|
| 306 |
+
command = executable = "xv"
|
| 307 |
+
if title:
|
| 308 |
+
command += f" -name {quote(title)}"
|
| 309 |
+
return command, executable
|
| 310 |
+
|
| 311 |
+
def show_file(self, path: str, **options: Any) -> int:
|
| 312 |
+
"""
|
| 313 |
+
Display given file.
|
| 314 |
+
"""
|
| 315 |
+
if not os.path.exists(path):
|
| 316 |
+
raise FileNotFoundError
|
| 317 |
+
args = ["xv"]
|
| 318 |
+
title = options.get("title")
|
| 319 |
+
if title:
|
| 320 |
+
args += ["-name", title]
|
| 321 |
+
args.append(path)
|
| 322 |
+
|
| 323 |
+
subprocess.Popen(args)
|
| 324 |
+
return 1
|
| 325 |
+
|
| 326 |
+
|
| 327 |
+
if sys.platform not in ("win32", "darwin"): # unixoids
|
| 328 |
+
if shutil.which("xdg-open"):
|
| 329 |
+
register(XDGViewer)
|
| 330 |
+
if shutil.which("display"):
|
| 331 |
+
register(DisplayViewer)
|
| 332 |
+
if shutil.which("gm"):
|
| 333 |
+
register(GmDisplayViewer)
|
| 334 |
+
if shutil.which("eog"):
|
| 335 |
+
register(EogViewer)
|
| 336 |
+
if shutil.which("xv"):
|
| 337 |
+
register(XVViewer)
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
class IPythonViewer(Viewer):
|
| 341 |
+
"""The viewer for IPython frontends."""
|
| 342 |
+
|
| 343 |
+
def show_image(self, image: Image.Image, **options: Any) -> int:
|
| 344 |
+
ipython_display(image)
|
| 345 |
+
return 1
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
try:
|
| 349 |
+
from IPython.display import display as ipython_display
|
| 350 |
+
except ImportError:
|
| 351 |
+
pass
|
| 352 |
+
else:
|
| 353 |
+
register(IPythonViewer)
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
if __name__ == "__main__":
|
| 357 |
+
if len(sys.argv) < 2:
|
| 358 |
+
print("Syntax: python3 ImageShow.py imagefile [title]")
|
| 359 |
+
sys.exit()
|
| 360 |
+
|
| 361 |
+
with Image.open(sys.argv[1]) as im:
|
| 362 |
+
print(show(im, *sys.argv[2:]))
|
lib/python3.13/site-packages/PIL/JpegPresets.py
ADDED
|
@@ -0,0 +1,242 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
JPEG quality settings equivalent to the Photoshop settings.
|
| 3 |
+
Can be used when saving JPEG files.
|
| 4 |
+
|
| 5 |
+
The following presets are available by default:
|
| 6 |
+
``web_low``, ``web_medium``, ``web_high``, ``web_very_high``, ``web_maximum``,
|
| 7 |
+
``low``, ``medium``, ``high``, ``maximum``.
|
| 8 |
+
More presets can be added to the :py:data:`presets` dict if needed.
|
| 9 |
+
|
| 10 |
+
To apply the preset, specify::
|
| 11 |
+
|
| 12 |
+
quality="preset_name"
|
| 13 |
+
|
| 14 |
+
To apply only the quantization table::
|
| 15 |
+
|
| 16 |
+
qtables="preset_name"
|
| 17 |
+
|
| 18 |
+
To apply only the subsampling setting::
|
| 19 |
+
|
| 20 |
+
subsampling="preset_name"
|
| 21 |
+
|
| 22 |
+
Example::
|
| 23 |
+
|
| 24 |
+
im.save("image_name.jpg", quality="web_high")
|
| 25 |
+
|
| 26 |
+
Subsampling
|
| 27 |
+
-----------
|
| 28 |
+
|
| 29 |
+
Subsampling is the practice of encoding images by implementing less resolution
|
| 30 |
+
for chroma information than for luma information.
|
| 31 |
+
(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling)
|
| 32 |
+
|
| 33 |
+
Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and
|
| 34 |
+
4:2:0.
|
| 35 |
+
|
| 36 |
+
You can get the subsampling of a JPEG with the
|
| 37 |
+
:func:`.JpegImagePlugin.get_sampling` function.
|
| 38 |
+
|
| 39 |
+
In JPEG compressed data a JPEG marker is used instead of an EXIF tag.
|
| 40 |
+
(ref.: https://exiv2.org/tags.html)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
Quantization tables
|
| 44 |
+
-------------------
|
| 45 |
+
|
| 46 |
+
They are values use by the DCT (Discrete cosine transform) to remove
|
| 47 |
+
*unnecessary* information from the image (the lossy part of the compression).
|
| 48 |
+
(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices,
|
| 49 |
+
https://en.wikipedia.org/wiki/JPEG#Quantization)
|
| 50 |
+
|
| 51 |
+
You can get the quantization tables of a JPEG with::
|
| 52 |
+
|
| 53 |
+
im.quantization
|
| 54 |
+
|
| 55 |
+
This will return a dict with a number of lists. You can pass this dict
|
| 56 |
+
directly as the qtables argument when saving a JPEG.
|
| 57 |
+
|
| 58 |
+
The quantization table format in presets is a list with sublists. These formats
|
| 59 |
+
are interchangeable.
|
| 60 |
+
|
| 61 |
+
Libjpeg ref.:
|
| 62 |
+
https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html
|
| 63 |
+
|
| 64 |
+
"""
|
| 65 |
+
|
| 66 |
+
from __future__ import annotations
|
| 67 |
+
|
| 68 |
+
# fmt: off
|
| 69 |
+
presets = {
|
| 70 |
+
'web_low': {'subsampling': 2, # "4:2:0"
|
| 71 |
+
'quantization': [
|
| 72 |
+
[20, 16, 25, 39, 50, 46, 62, 68,
|
| 73 |
+
16, 18, 23, 38, 38, 53, 65, 68,
|
| 74 |
+
25, 23, 31, 38, 53, 65, 68, 68,
|
| 75 |
+
39, 38, 38, 53, 65, 68, 68, 68,
|
| 76 |
+
50, 38, 53, 65, 68, 68, 68, 68,
|
| 77 |
+
46, 53, 65, 68, 68, 68, 68, 68,
|
| 78 |
+
62, 65, 68, 68, 68, 68, 68, 68,
|
| 79 |
+
68, 68, 68, 68, 68, 68, 68, 68],
|
| 80 |
+
[21, 25, 32, 38, 54, 68, 68, 68,
|
| 81 |
+
25, 28, 24, 38, 54, 68, 68, 68,
|
| 82 |
+
32, 24, 32, 43, 66, 68, 68, 68,
|
| 83 |
+
38, 38, 43, 53, 68, 68, 68, 68,
|
| 84 |
+
54, 54, 66, 68, 68, 68, 68, 68,
|
| 85 |
+
68, 68, 68, 68, 68, 68, 68, 68,
|
| 86 |
+
68, 68, 68, 68, 68, 68, 68, 68,
|
| 87 |
+
68, 68, 68, 68, 68, 68, 68, 68]
|
| 88 |
+
]},
|
| 89 |
+
'web_medium': {'subsampling': 2, # "4:2:0"
|
| 90 |
+
'quantization': [
|
| 91 |
+
[16, 11, 11, 16, 23, 27, 31, 30,
|
| 92 |
+
11, 12, 12, 15, 20, 23, 23, 30,
|
| 93 |
+
11, 12, 13, 16, 23, 26, 35, 47,
|
| 94 |
+
16, 15, 16, 23, 26, 37, 47, 64,
|
| 95 |
+
23, 20, 23, 26, 39, 51, 64, 64,
|
| 96 |
+
27, 23, 26, 37, 51, 64, 64, 64,
|
| 97 |
+
31, 23, 35, 47, 64, 64, 64, 64,
|
| 98 |
+
30, 30, 47, 64, 64, 64, 64, 64],
|
| 99 |
+
[17, 15, 17, 21, 20, 26, 38, 48,
|
| 100 |
+
15, 19, 18, 17, 20, 26, 35, 43,
|
| 101 |
+
17, 18, 20, 22, 26, 30, 46, 53,
|
| 102 |
+
21, 17, 22, 28, 30, 39, 53, 64,
|
| 103 |
+
20, 20, 26, 30, 39, 48, 64, 64,
|
| 104 |
+
26, 26, 30, 39, 48, 63, 64, 64,
|
| 105 |
+
38, 35, 46, 53, 64, 64, 64, 64,
|
| 106 |
+
48, 43, 53, 64, 64, 64, 64, 64]
|
| 107 |
+
]},
|
| 108 |
+
'web_high': {'subsampling': 0, # "4:4:4"
|
| 109 |
+
'quantization': [
|
| 110 |
+
[6, 4, 4, 6, 9, 11, 12, 16,
|
| 111 |
+
4, 5, 5, 6, 8, 10, 12, 12,
|
| 112 |
+
4, 5, 5, 6, 10, 12, 14, 19,
|
| 113 |
+
6, 6, 6, 11, 12, 15, 19, 28,
|
| 114 |
+
9, 8, 10, 12, 16, 20, 27, 31,
|
| 115 |
+
11, 10, 12, 15, 20, 27, 31, 31,
|
| 116 |
+
12, 12, 14, 19, 27, 31, 31, 31,
|
| 117 |
+
16, 12, 19, 28, 31, 31, 31, 31],
|
| 118 |
+
[7, 7, 13, 24, 26, 31, 31, 31,
|
| 119 |
+
7, 12, 16, 21, 31, 31, 31, 31,
|
| 120 |
+
13, 16, 17, 31, 31, 31, 31, 31,
|
| 121 |
+
24, 21, 31, 31, 31, 31, 31, 31,
|
| 122 |
+
26, 31, 31, 31, 31, 31, 31, 31,
|
| 123 |
+
31, 31, 31, 31, 31, 31, 31, 31,
|
| 124 |
+
31, 31, 31, 31, 31, 31, 31, 31,
|
| 125 |
+
31, 31, 31, 31, 31, 31, 31, 31]
|
| 126 |
+
]},
|
| 127 |
+
'web_very_high': {'subsampling': 0, # "4:4:4"
|
| 128 |
+
'quantization': [
|
| 129 |
+
[2, 2, 2, 2, 3, 4, 5, 6,
|
| 130 |
+
2, 2, 2, 2, 3, 4, 5, 6,
|
| 131 |
+
2, 2, 2, 2, 4, 5, 7, 9,
|
| 132 |
+
2, 2, 2, 4, 5, 7, 9, 12,
|
| 133 |
+
3, 3, 4, 5, 8, 10, 12, 12,
|
| 134 |
+
4, 4, 5, 7, 10, 12, 12, 12,
|
| 135 |
+
5, 5, 7, 9, 12, 12, 12, 12,
|
| 136 |
+
6, 6, 9, 12, 12, 12, 12, 12],
|
| 137 |
+
[3, 3, 5, 9, 13, 15, 15, 15,
|
| 138 |
+
3, 4, 6, 11, 14, 12, 12, 12,
|
| 139 |
+
5, 6, 9, 14, 12, 12, 12, 12,
|
| 140 |
+
9, 11, 14, 12, 12, 12, 12, 12,
|
| 141 |
+
13, 14, 12, 12, 12, 12, 12, 12,
|
| 142 |
+
15, 12, 12, 12, 12, 12, 12, 12,
|
| 143 |
+
15, 12, 12, 12, 12, 12, 12, 12,
|
| 144 |
+
15, 12, 12, 12, 12, 12, 12, 12]
|
| 145 |
+
]},
|
| 146 |
+
'web_maximum': {'subsampling': 0, # "4:4:4"
|
| 147 |
+
'quantization': [
|
| 148 |
+
[1, 1, 1, 1, 1, 1, 1, 1,
|
| 149 |
+
1, 1, 1, 1, 1, 1, 1, 1,
|
| 150 |
+
1, 1, 1, 1, 1, 1, 1, 2,
|
| 151 |
+
1, 1, 1, 1, 1, 1, 2, 2,
|
| 152 |
+
1, 1, 1, 1, 1, 2, 2, 3,
|
| 153 |
+
1, 1, 1, 1, 2, 2, 3, 3,
|
| 154 |
+
1, 1, 1, 2, 2, 3, 3, 3,
|
| 155 |
+
1, 1, 2, 2, 3, 3, 3, 3],
|
| 156 |
+
[1, 1, 1, 2, 2, 3, 3, 3,
|
| 157 |
+
1, 1, 1, 2, 3, 3, 3, 3,
|
| 158 |
+
1, 1, 1, 3, 3, 3, 3, 3,
|
| 159 |
+
2, 2, 3, 3, 3, 3, 3, 3,
|
| 160 |
+
2, 3, 3, 3, 3, 3, 3, 3,
|
| 161 |
+
3, 3, 3, 3, 3, 3, 3, 3,
|
| 162 |
+
3, 3, 3, 3, 3, 3, 3, 3,
|
| 163 |
+
3, 3, 3, 3, 3, 3, 3, 3]
|
| 164 |
+
]},
|
| 165 |
+
'low': {'subsampling': 2, # "4:2:0"
|
| 166 |
+
'quantization': [
|
| 167 |
+
[18, 14, 14, 21, 30, 35, 34, 17,
|
| 168 |
+
14, 16, 16, 19, 26, 23, 12, 12,
|
| 169 |
+
14, 16, 17, 21, 23, 12, 12, 12,
|
| 170 |
+
21, 19, 21, 23, 12, 12, 12, 12,
|
| 171 |
+
30, 26, 23, 12, 12, 12, 12, 12,
|
| 172 |
+
35, 23, 12, 12, 12, 12, 12, 12,
|
| 173 |
+
34, 12, 12, 12, 12, 12, 12, 12,
|
| 174 |
+
17, 12, 12, 12, 12, 12, 12, 12],
|
| 175 |
+
[20, 19, 22, 27, 20, 20, 17, 17,
|
| 176 |
+
19, 25, 23, 14, 14, 12, 12, 12,
|
| 177 |
+
22, 23, 14, 14, 12, 12, 12, 12,
|
| 178 |
+
27, 14, 14, 12, 12, 12, 12, 12,
|
| 179 |
+
20, 14, 12, 12, 12, 12, 12, 12,
|
| 180 |
+
20, 12, 12, 12, 12, 12, 12, 12,
|
| 181 |
+
17, 12, 12, 12, 12, 12, 12, 12,
|
| 182 |
+
17, 12, 12, 12, 12, 12, 12, 12]
|
| 183 |
+
]},
|
| 184 |
+
'medium': {'subsampling': 2, # "4:2:0"
|
| 185 |
+
'quantization': [
|
| 186 |
+
[12, 8, 8, 12, 17, 21, 24, 17,
|
| 187 |
+
8, 9, 9, 11, 15, 19, 12, 12,
|
| 188 |
+
8, 9, 10, 12, 19, 12, 12, 12,
|
| 189 |
+
12, 11, 12, 21, 12, 12, 12, 12,
|
| 190 |
+
17, 15, 19, 12, 12, 12, 12, 12,
|
| 191 |
+
21, 19, 12, 12, 12, 12, 12, 12,
|
| 192 |
+
24, 12, 12, 12, 12, 12, 12, 12,
|
| 193 |
+
17, 12, 12, 12, 12, 12, 12, 12],
|
| 194 |
+
[13, 11, 13, 16, 20, 20, 17, 17,
|
| 195 |
+
11, 14, 14, 14, 14, 12, 12, 12,
|
| 196 |
+
13, 14, 14, 14, 12, 12, 12, 12,
|
| 197 |
+
16, 14, 14, 12, 12, 12, 12, 12,
|
| 198 |
+
20, 14, 12, 12, 12, 12, 12, 12,
|
| 199 |
+
20, 12, 12, 12, 12, 12, 12, 12,
|
| 200 |
+
17, 12, 12, 12, 12, 12, 12, 12,
|
| 201 |
+
17, 12, 12, 12, 12, 12, 12, 12]
|
| 202 |
+
]},
|
| 203 |
+
'high': {'subsampling': 0, # "4:4:4"
|
| 204 |
+
'quantization': [
|
| 205 |
+
[6, 4, 4, 6, 9, 11, 12, 16,
|
| 206 |
+
4, 5, 5, 6, 8, 10, 12, 12,
|
| 207 |
+
4, 5, 5, 6, 10, 12, 12, 12,
|
| 208 |
+
6, 6, 6, 11, 12, 12, 12, 12,
|
| 209 |
+
9, 8, 10, 12, 12, 12, 12, 12,
|
| 210 |
+
11, 10, 12, 12, 12, 12, 12, 12,
|
| 211 |
+
12, 12, 12, 12, 12, 12, 12, 12,
|
| 212 |
+
16, 12, 12, 12, 12, 12, 12, 12],
|
| 213 |
+
[7, 7, 13, 24, 20, 20, 17, 17,
|
| 214 |
+
7, 12, 16, 14, 14, 12, 12, 12,
|
| 215 |
+
13, 16, 14, 14, 12, 12, 12, 12,
|
| 216 |
+
24, 14, 14, 12, 12, 12, 12, 12,
|
| 217 |
+
20, 14, 12, 12, 12, 12, 12, 12,
|
| 218 |
+
20, 12, 12, 12, 12, 12, 12, 12,
|
| 219 |
+
17, 12, 12, 12, 12, 12, 12, 12,
|
| 220 |
+
17, 12, 12, 12, 12, 12, 12, 12]
|
| 221 |
+
]},
|
| 222 |
+
'maximum': {'subsampling': 0, # "4:4:4"
|
| 223 |
+
'quantization': [
|
| 224 |
+
[2, 2, 2, 2, 3, 4, 5, 6,
|
| 225 |
+
2, 2, 2, 2, 3, 4, 5, 6,
|
| 226 |
+
2, 2, 2, 2, 4, 5, 7, 9,
|
| 227 |
+
2, 2, 2, 4, 5, 7, 9, 12,
|
| 228 |
+
3, 3, 4, 5, 8, 10, 12, 12,
|
| 229 |
+
4, 4, 5, 7, 10, 12, 12, 12,
|
| 230 |
+
5, 5, 7, 9, 12, 12, 12, 12,
|
| 231 |
+
6, 6, 9, 12, 12, 12, 12, 12],
|
| 232 |
+
[3, 3, 5, 9, 13, 15, 15, 15,
|
| 233 |
+
3, 4, 6, 10, 14, 12, 12, 12,
|
| 234 |
+
5, 6, 9, 14, 12, 12, 12, 12,
|
| 235 |
+
9, 10, 14, 12, 12, 12, 12, 12,
|
| 236 |
+
13, 14, 12, 12, 12, 12, 12, 12,
|
| 237 |
+
15, 12, 12, 12, 12, 12, 12, 12,
|
| 238 |
+
15, 12, 12, 12, 12, 12, 12, 12,
|
| 239 |
+
15, 12, 12, 12, 12, 12, 12, 12]
|
| 240 |
+
]},
|
| 241 |
+
}
|
| 242 |
+
# fmt: on
|
lib/python3.13/site-packages/PIL/MspImagePlugin.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
#
|
| 4 |
+
# MSP file handling
|
| 5 |
+
#
|
| 6 |
+
# This is the format used by the Paint program in Windows 1 and 2.
|
| 7 |
+
#
|
| 8 |
+
# History:
|
| 9 |
+
# 95-09-05 fl Created
|
| 10 |
+
# 97-01-03 fl Read/write MSP images
|
| 11 |
+
# 17-02-21 es Fixed RLE interpretation
|
| 12 |
+
#
|
| 13 |
+
# Copyright (c) Secret Labs AB 1997.
|
| 14 |
+
# Copyright (c) Fredrik Lundh 1995-97.
|
| 15 |
+
# Copyright (c) Eric Soroos 2017.
|
| 16 |
+
#
|
| 17 |
+
# See the README file for information on usage and redistribution.
|
| 18 |
+
#
|
| 19 |
+
# More info on this format: https://archive.org/details/gg243631
|
| 20 |
+
# Page 313:
|
| 21 |
+
# Figure 205. Windows Paint Version 1: "DanM" Format
|
| 22 |
+
# Figure 206. Windows Paint Version 2: "LinS" Format. Used in Windows V2.03
|
| 23 |
+
#
|
| 24 |
+
# See also: https://www.fileformat.info/format/mspaint/egff.htm
|
| 25 |
+
from __future__ import annotations
|
| 26 |
+
|
| 27 |
+
import io
|
| 28 |
+
import struct
|
| 29 |
+
from typing import IO
|
| 30 |
+
|
| 31 |
+
from . import Image, ImageFile
|
| 32 |
+
from ._binary import i16le as i16
|
| 33 |
+
from ._binary import o16le as o16
|
| 34 |
+
|
| 35 |
+
#
|
| 36 |
+
# read MSP files
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _accept(prefix: bytes) -> bool:
|
| 40 |
+
return prefix.startswith((b"DanM", b"LinS"))
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
##
|
| 44 |
+
# Image plugin for Windows MSP images. This plugin supports both
|
| 45 |
+
# uncompressed (Windows 1.0).
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class MspImageFile(ImageFile.ImageFile):
|
| 49 |
+
format = "MSP"
|
| 50 |
+
format_description = "Windows Paint"
|
| 51 |
+
|
| 52 |
+
def _open(self) -> None:
|
| 53 |
+
# Header
|
| 54 |
+
assert self.fp is not None
|
| 55 |
+
|
| 56 |
+
s = self.fp.read(32)
|
| 57 |
+
if not _accept(s):
|
| 58 |
+
msg = "not an MSP file"
|
| 59 |
+
raise SyntaxError(msg)
|
| 60 |
+
|
| 61 |
+
# Header checksum
|
| 62 |
+
checksum = 0
|
| 63 |
+
for i in range(0, 32, 2):
|
| 64 |
+
checksum = checksum ^ i16(s, i)
|
| 65 |
+
if checksum != 0:
|
| 66 |
+
msg = "bad MSP checksum"
|
| 67 |
+
raise SyntaxError(msg)
|
| 68 |
+
|
| 69 |
+
self._mode = "1"
|
| 70 |
+
self._size = i16(s, 4), i16(s, 6)
|
| 71 |
+
|
| 72 |
+
if s.startswith(b"DanM"):
|
| 73 |
+
self.tile = [ImageFile._Tile("raw", (0, 0) + self.size, 32, "1")]
|
| 74 |
+
else:
|
| 75 |
+
self.tile = [ImageFile._Tile("MSP", (0, 0) + self.size, 32)]
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class MspDecoder(ImageFile.PyDecoder):
    """Pure-Python decoder for RLE-compressed (v2 "LinS") MSP images."""

    # The algo for the MSP decoder is from
    # https://www.fileformat.info/format/mspaint/egff.htm
    # cc-by-attribution -- That page references is taken from the
    # Encyclopedia of Graphics File Formats and is licensed by
    # O'Reilly under the Creative Common/Attribution license
    #
    # For RLE encoded files, the 32byte header is followed by a scan
    # line map, encoded as one 16bit word of encoded byte length per
    # line.
    #
    # NOTE: the encoded length of the line can be 0. This was not
    # handled in the previous version of this encoder, and there's no
    # mention of how to handle it in the documentation. From the few
    # examples I've seen, I've assumed that it is a fill of the
    # background color, in this case, white.
    #
    # Pseudocode of the decoder:
    #     Read a BYTE value as the RunType
    #         If the RunType value is zero
    #             Read next byte as the RunCount
    #             Read the next byte as the RunValue
    #             Write the RunValue byte RunCount times
    #         If the RunType value is non-zero
    #             Use this value as the RunCount
    #             Read and write the next RunCount bytes literally
    #
    # e.g.:
    # 0x00 03 ff 05 00 01 02 03 04
    # would yield the bytes:
    # 0xff ff ff 00 01 02 03 04
    #
    # which are then interpreted as a bit packed mode '1' image

    # This decoder reads directly from the file object (no buffer feed).
    _pulls_fd = True

    def decode(self, buffer: bytes | Image.SupportsArrayInterface) -> tuple[int, int]:
        """Decode all rows; returns (-1, 0) meaning "consumed everything"."""
        assert self.fd is not None

        img = io.BytesIO()
        # A zero-length row is rendered as all-white (0xFF in packed '1' mode).
        blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8))
        try:
            # Skip the 32-byte header, then read one 16-bit LE encoded-length
            # per scan line.
            self.fd.seek(32)
            rowmap = struct.unpack_from(
                f"<{self.state.ysize}H", self.fd.read(self.state.ysize * 2)
            )
        except struct.error as e:
            msg = "Truncated MSP file in row map"
            raise OSError(msg) from e

        for x, rowlen in enumerate(rowmap):
            try:
                if rowlen == 0:
                    img.write(blank_line)
                    continue
                row = self.fd.read(rowlen)
                if len(row) != rowlen:
                    msg = f"Truncated MSP file, expected {rowlen} bytes on row {x}"
                    raise OSError(msg)
                idx = 0
                while idx < rowlen:
                    runtype = row[idx]
                    idx += 1
                    if runtype == 0:
                        # RunType 0: next byte is count, byte after is value.
                        (runcount, runval) = struct.unpack_from("Bc", row, idx)
                        img.write(runval * runcount)
                        idx += 2
                    else:
                        # Non-zero RunType: that many literal bytes follow.
                        runcount = runtype
                        img.write(row[idx : idx + runcount])
                        idx += runcount

            except struct.error as e:
                msg = f"Corrupted MSP file in row {x}"
                raise OSError(msg) from e

        # Hand the unpacked scanlines to PIL as bit-packed mode '1' data.
        self.set_as_raw(img.getvalue(), "1")

        return -1, 0
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
# Hook the pure-Python RLE decoder into Image's decoder registry under "MSP".
Image.register_decoder("MSP", MspDecoder)
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
#
|
| 164 |
+
# write MSP files (uncompressed only)
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Write *im* to *fp* as an uncompressed (v1 "DanM") MSP file.

    Only mode "1" images are supported; anything else raises OSError.
    """
    if im.mode != "1":
        msg = f"cannot write mode {im.mode} as MSP"
        raise OSError(msg)

    # create MSP header: sixteen 16-bit little-endian words
    header = [0] * 16

    header[0], header[1] = i16(b"Da"), i16(b"nM")  # version 1
    header[2], header[3] = im.size
    header[4], header[5] = 1, 1  # x/y aspect (per original source)
    header[6], header[7] = 1, 1  # printer width/height (per original source)
    header[8], header[9] = im.size

    # Checksum is the XOR of all header words so that a reader's XOR over
    # the full header comes out zero.
    checksum = 0
    for h in header:
        checksum = checksum ^ h
    header[12] = checksum  # FIXME: is this the right field?

    # header
    for h in header:
        fp.write(o16(h))

    # image body: raw bit-packed mode '1' data starting at offset 32
    ImageFile._save(im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 32, "1")])
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
#
# registry

# Register the format's opener, saver and file extension with PIL's registry.
Image.register_open(MspImageFile.format, MspImageFile, _accept)
Image.register_save(MspImageFile.format, _save)

Image.register_extension(MspImageFile.format, ".msp")
|
lib/python3.13/site-packages/PIL/PcfFontFile.py
ADDED
|
@@ -0,0 +1,258 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# THIS IS WORK IN PROGRESS
|
| 3 |
+
#
|
| 4 |
+
# The Python Imaging Library
|
| 5 |
+
# $Id$
|
| 6 |
+
#
|
| 7 |
+
# portable compiled font file parser
|
| 8 |
+
#
|
| 9 |
+
# history:
|
| 10 |
+
# 1997-08-19 fl created
|
| 11 |
+
# 2003-09-13 fl fixed loading of unicode fonts
|
| 12 |
+
#
|
| 13 |
+
# Copyright (c) 1997-2003 by Secret Labs AB.
|
| 14 |
+
# Copyright (c) 1997-2003 by Fredrik Lundh.
|
| 15 |
+
#
|
| 16 |
+
# See the README file for information on usage and redistribution.
|
| 17 |
+
#
|
| 18 |
+
from __future__ import annotations
|
| 19 |
+
|
| 20 |
+
import io
|
| 21 |
+
|
| 22 |
+
from . import FontFile, Image
|
| 23 |
+
from ._binary import i8
|
| 24 |
+
from ._binary import i16be as b16
|
| 25 |
+
from ._binary import i16le as l16
|
| 26 |
+
from ._binary import i32be as b32
|
| 27 |
+
from ._binary import i32le as l32
|
| 28 |
+
|
| 29 |
+
TYPE_CHECKING = False
|
| 30 |
+
if TYPE_CHECKING:
|
| 31 |
+
from collections.abc import Callable
|
| 32 |
+
from typing import BinaryIO
|
| 33 |
+
|
| 34 |
+
# --------------------------------------------------------------------
# declarations

PCF_MAGIC = 0x70636601  # "\x01fcp"

# Table-type bit flags from the PCF file format specification.
PCF_PROPERTIES = 1 << 0
PCF_ACCELERATORS = 1 << 1
PCF_METRICS = 1 << 2
PCF_BITMAPS = 1 << 3
PCF_INK_METRICS = 1 << 4
PCF_BDF_ENCODINGS = 1 << 5
PCF_SWIDTHS = 1 << 6
PCF_GLYPH_NAMES = 1 << 7
PCF_BDF_ACCELERATORS = 1 << 8

# Row-stride calculators indexed by the bitmap table's pad index
# (format & 3): rows padded to 1, 2, 4 or 8 bytes respectively.
BYTES_PER_ROW: list[Callable[[int], int]] = [
    lambda bits: ((bits + 7) >> 3),
    lambda bits: ((bits + 15) >> 3) & ~1,
    lambda bits: ((bits + 31) >> 3) & ~3,
    lambda bits: ((bits + 63) >> 3) & ~7,
]
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def sz(s: bytes, o: int) -> bytes:
    """Return the NUL-terminated byte string starting at offset *o* in *s*."""
    # Raises ValueError if no NUL byte follows offset o, same as the
    # underlying bytes.index call.
    terminator = s.index(b"\0", o)
    return s[o:terminator]
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class PcfFontFile(FontFile.FontFile):
    """Font file plugin for the X11 PCF format."""

    name = "name"

    def __init__(self, fp: BinaryIO, charset_encoding: str = "iso8859-1"):
        # Encoding used to map byte values 0-255 to glyph code points.
        self.charset_encoding = charset_encoding

        # File starts with a little-endian magic word.
        magic = l32(fp.read(4))
        if magic != PCF_MAGIC:
            msg = "not a PCF file"
            raise SyntaxError(msg)

        super().__init__()

        # Table of contents: one (format, size, offset) triple per table,
        # keyed by the PCF_* table-type flag.
        count = l32(fp.read(4))
        self.toc = {}
        for i in range(count):
            # NOTE: "type" shadows the builtin; kept for byte-compatibility.
            type = l32(fp.read(4))
            self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4))

        self.fp = fp

        self.info = self._load_properties()

        metrics = self._load_metrics()
        bitmaps = self._load_bitmaps(metrics)
        encoding = self._load_encoding()

        #
        # create glyph structure

        # Each glyph entry is (delta, dst bbox, src bbox, bitmap image),
        # matching the FontFile.FontFile glyph convention.
        for ch, ix in enumerate(encoding):
            if ix is not None:
                (
                    xsize,
                    ysize,
                    left,
                    right,
                    width,
                    ascent,
                    descent,
                    attributes,
                ) = metrics[ix]
                self.glyph[ch] = (
                    (width, 0),
                    (left, descent - ysize, xsize + left, descent),
                    (0, 0, xsize, ysize),
                    bitmaps[ix],
                )

    def _getformat(
        self, tag: int
    ) -> tuple[BinaryIO, int, Callable[[bytes], int], Callable[[bytes], int]]:
        """Seek to table *tag* and return (fp, format, i16, i32).

        The returned i16/i32 readers honour the table's byte order
        (bit 2 of the format word selects big-endian).
        """
        format, size, offset = self.toc[tag]

        fp = self.fp
        fp.seek(offset)

        # The format word is repeated (always little-endian) at the start
        # of the table itself; re-read it from there.
        format = l32(fp.read(4))

        if format & 4:
            i16, i32 = b16, b32
        else:
            i16, i32 = l16, l32

        return fp, format, i16, i32

    def _load_properties(self) -> dict[bytes, bytes | int]:
        #
        # font properties

        properties = {}

        fp, format, i16, i32 = self._getformat(PCF_PROPERTIES)

        nprops = i32(fp.read(4))

        # read property description: (name offset, is-string flag, value)
        p = [(i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4))) for _ in range(nprops)]

        if nprops & 3:
            fp.seek(4 - (nprops & 3), io.SEEK_CUR)  # pad

        # String pool holding all NUL-terminated names and string values.
        data = fp.read(i32(fp.read(4)))

        for k, s, v in p:
            # String-valued properties point into the pool; otherwise the
            # value is the raw integer.
            property_value: bytes | int = sz(data, v) if s else v
            properties[sz(data, k)] = property_value

        return properties

    def _load_metrics(self) -> list[tuple[int, int, int, int, int, int, int, int]]:
        #
        # font metrics

        # Each entry: (xsize, ysize, left, right, width, ascent, descent,
        # attributes).
        metrics: list[tuple[int, int, int, int, int, int, int, int]] = []

        fp, format, i16, i32 = self._getformat(PCF_METRICS)

        append = metrics.append

        if (format & 0xFF00) == 0x100:
            # "compressed" metrics: unsigned bytes biased by 128, no
            # attributes field.
            for i in range(i16(fp.read(2))):
                left = i8(fp.read(1)) - 128
                right = i8(fp.read(1)) - 128
                width = i8(fp.read(1)) - 128
                ascent = i8(fp.read(1)) - 128
                descent = i8(fp.read(1)) - 128
                xsize = right - left
                ysize = ascent + descent
                append((xsize, ysize, left, right, width, ascent, descent, 0))

        else:
            # "jumbo" metrics: full 16-bit fields including attributes.
            for i in range(i32(fp.read(4))):
                left = i16(fp.read(2))
                right = i16(fp.read(2))
                width = i16(fp.read(2))
                ascent = i16(fp.read(2))
                descent = i16(fp.read(2))
                attributes = i16(fp.read(2))
                xsize = right - left
                ysize = ascent + descent
                append((xsize, ysize, left, right, width, ascent, descent, attributes))

        return metrics

    def _load_bitmaps(
        self, metrics: list[tuple[int, int, int, int, int, int, int, int]]
    ) -> list[Image.Image]:
        #
        # bitmap data

        fp, format, i16, i32 = self._getformat(PCF_BITMAPS)

        nbitmaps = i32(fp.read(4))

        if nbitmaps != len(metrics):
            msg = "Wrong number of bitmaps"
            raise OSError(msg)

        # Per-glyph start offsets into the bitmap data blob.
        offsets = [i32(fp.read(4)) for _ in range(nbitmaps)]

        # Total blob size for each of the four possible paddings.
        bitmap_sizes = [i32(fp.read(4)) for _ in range(4)]

        # byteorder = format & 4 # non-zero => MSB
        bitorder = format & 8  # non-zero => MSB
        padindex = format & 3

        bitmapsize = bitmap_sizes[padindex]
        # Sentinel end offset so offsets[i:i+2] yields (start, end) for
        # the last glyph too.
        offsets.append(bitmapsize)

        data = fp.read(bitmapsize)

        pad = BYTES_PER_ROW[padindex]
        # "1;R" = LSB-first bit order; plain "1" when bits are MSB-first.
        mode = "1;R"
        if bitorder:
            mode = "1"

        bitmaps = []
        for i in range(nbitmaps):
            xsize, ysize = metrics[i][:2]
            b, e = offsets[i : i + 2]
            bitmaps.append(
                Image.frombytes("1", (xsize, ysize), data[b:e], "raw", mode, pad(xsize))
            )

        return bitmaps

    def _load_encoding(self) -> list[int | None]:
        """Map byte values 0-255 to glyph (bitmap/metric) indexes."""
        fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS)

        first_col, last_col = i16(fp.read(2)), i16(fp.read(2))
        first_row, last_row = i16(fp.read(2)), i16(fp.read(2))

        i16(fp.read(2))  # default

        nencoding = (last_col - first_col + 1) * (last_row - first_row + 1)

        # map character code to bitmap index
        encoding: list[int | None] = [None] * min(256, nencoding)

        encoding_offsets = [i16(fp.read(2)) for _ in range(nencoding)]

        for i in range(first_col, len(encoding)):
            try:
                # Translate byte i through the charset encoding to find
                # its slot in the offsets table.
                encoding_offset = encoding_offsets[
                    ord(bytearray([i]).decode(self.charset_encoding))
                ]
                # 0xFFFF marks "no glyph" in the PCF encoding table.
                if encoding_offset != 0xFFFF:
                    encoding[i] = encoding_offset
            except UnicodeDecodeError:
                # character is not supported in selected encoding
                pass

        return encoding
|
lib/python3.13/site-packages/PIL/PpmImagePlugin.py
ADDED
|
@@ -0,0 +1,375 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# PPM support for PIL
|
| 6 |
+
#
|
| 7 |
+
# History:
|
| 8 |
+
# 96-03-24 fl Created
|
| 9 |
+
# 98-03-06 fl Write RGBA images (as RGB, that is)
|
| 10 |
+
#
|
| 11 |
+
# Copyright (c) Secret Labs AB 1997-98.
|
| 12 |
+
# Copyright (c) Fredrik Lundh 1996.
|
| 13 |
+
#
|
| 14 |
+
# See the README file for information on usage and redistribution.
|
| 15 |
+
#
|
| 16 |
+
from __future__ import annotations
|
| 17 |
+
|
| 18 |
+
import math
|
| 19 |
+
from typing import IO
|
| 20 |
+
|
| 21 |
+
from . import Image, ImageFile
|
| 22 |
+
from ._binary import i16be as i16
|
| 23 |
+
from ._binary import o8
|
| 24 |
+
from ._binary import o32le as o32
|
| 25 |
+
|
| 26 |
+
#
# --------------------------------------------------------------------

# The six whitespace bytes the PNM spec treats as token separators
# (space, TAB, LF, VT, FF, CR).
b_whitespace = b"\x20\x09\x0a\x0b\x0c\x0d"

# Magic number -> PIL mode for each supported PNM variant.
MODES = {
    # standard
    b"P1": "1",
    b"P2": "L",
    b"P3": "RGB",
    b"P4": "1",
    b"P5": "L",
    b"P6": "RGB",
    # extensions
    b"P0CMYK": "CMYK",
    b"Pf": "F",
    # PIL extensions (for test purposes only)
    b"PyP": "P",
    b"PyRGBA": "RGBA",
    b"PyCMYK": "CMYK",
}
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def _accept(prefix: bytes) -> bool:
|
| 50 |
+
return len(prefix) >= 2 and prefix.startswith(b"P") and prefix[1] in b"0123456fy"
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
##
|
| 54 |
+
# Image plugin for PBM, PGM, and PPM images.
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class PpmImageFile(ImageFile.ImageFile):
    """Image plugin for PBM, PGM, PPM (and PFM/extension) images."""

    format = "PPM"
    format_description = "Pbmplus image"

    def _read_magic(self) -> bytes:
        """Read the magic number (up to 6 bytes, stops at whitespace/EOF)."""
        assert self.fp is not None

        magic = b""
        # read until whitespace or longest available magic number
        for _ in range(6):
            c = self.fp.read(1)
            if not c or c in b_whitespace:
                break
            magic += c
        return magic

    def _read_token(self) -> bytes:
        """Read the next header token, skipping whitespace and # comments.

        Raises ValueError on EOF before any token byte, or if a token
        exceeds 10 characters.
        """
        assert self.fp is not None

        token = b""
        while len(token) <= 10:  # read until next whitespace or limit of 10 characters
            c = self.fp.read(1)
            if not c:
                break
            elif c in b_whitespace:  # token ended
                if not token:
                    # skip whitespace at start
                    continue
                break
            elif c == b"#":
                # ignores rest of the line; stops at CR, LF or EOF
                while self.fp.read(1) not in b"\r\n":
                    pass
                continue
            token += c
        if not token:
            # Token was not even 1 byte
            msg = "Reached EOF while reading header"
            raise ValueError(msg)
        elif len(token) > 10:
            msg_too_long = b"Token too long in file header: %s" % token
            raise ValueError(msg_too_long)
        return token

    def _open(self) -> None:
        """Parse the PNM header and set up mode, size and decoder tile."""
        assert self.fp is not None

        magic_number = self._read_magic()
        try:
            mode = MODES[magic_number]
        except KeyError:
            msg = "not a PPM file"
            raise SyntaxError(msg)
        self._mode = mode

        if magic_number in (b"P1", b"P4"):
            self.custom_mimetype = "image/x-portable-bitmap"
        elif magic_number in (b"P2", b"P5"):
            self.custom_mimetype = "image/x-portable-graymap"
        elif magic_number in (b"P3", b"P6"):
            self.custom_mimetype = "image/x-portable-pixmap"

        self._size = int(self._read_token()), int(self._read_token())

        # P1/P2/P3 are the ASCII ("plain") variants.
        decoder_name = "raw"
        if magic_number in (b"P1", b"P2", b"P3"):
            decoder_name = "ppm_plain"

        args: str | tuple[str | int, ...]
        if mode == "1":
            # Bitonal: raw bits with inverted polarity ("1;I").
            args = "1;I"
        elif mode == "F":
            # PFM: the third header token is the scale; its sign encodes
            # byte order (negative => little-endian).
            scale = float(self._read_token())
            if scale == 0.0 or not math.isfinite(scale):
                msg = "scale must be finite and non-zero"
                raise ValueError(msg)
            self.info["scale"] = abs(scale)

            rawmode = "F;32F" if scale < 0 else "F;32BF"
            args = (rawmode, 0, -1)
        else:
            maxval = int(self._read_token())
            if not 0 < maxval < 65536:
                msg = "maxval must be greater than 0 and less than 65536"
                raise ValueError(msg)
            if maxval > 255 and mode == "L":
                self._mode = "I"

            rawmode = mode
            if decoder_name != "ppm_plain":
                # If maxval matches a bit depth, use the raw decoder directly
                if maxval == 65535 and mode == "L":
                    rawmode = "I;16B"
                elif maxval != 255:
                    decoder_name = "ppm"

            args = rawmode if decoder_name == "raw" else (rawmode, maxval)
        self.tile = [
            ImageFile._Tile(decoder_name, (0, 0) + self.size, self.fp.tell(), args)
        ]
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
#
|
| 160 |
+
# --------------------------------------------------------------------
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
class PpmPlainDecoder(ImageFile.PyDecoder):
    """Decoder for the ASCII ("plain") P1/P2/P3 PNM variants.

    Streams the file in SAFEBLOCK-sized chunks, stripping # comments
    (which may span chunk boundaries) and stitching tokens split
    across chunks.
    """

    _pulls_fd = True
    # True while a # comment started in a previous chunk is still open.
    _comment_spans: bool

    def _read_block(self) -> bytes:
        assert self.fd is not None

        return self.fd.read(ImageFile.SAFEBLOCK)

    def _find_comment_end(self, block: bytes, start: int = 0) -> int:
        """Index of the first CR or LF at/after *start*, or -1 if none."""
        a = block.find(b"\n", start)
        b = block.find(b"\r", start)
        return min(a, b) if a * b > 0 else max(a, b)  # lowest nonnegative index (or -1)

    def _ignore_comments(self, block: bytes) -> bytes:
        """Return *block* with all comment text removed, tracking spans."""
        if self._comment_spans:
            # Finish current comment
            while block:
                comment_end = self._find_comment_end(block)
                if comment_end != -1:
                    # Comment ends in this block
                    # Delete tail of comment
                    block = block[comment_end + 1 :]
                    break
                else:
                    # Comment spans whole block
                    # So read the next block, looking for the end
                    block = self._read_block()

        # Search for any further comments
        self._comment_spans = False
        while True:
            comment_start = block.find(b"#")
            if comment_start == -1:
                # No comment found
                break
            comment_end = self._find_comment_end(block, comment_start)
            if comment_end != -1:
                # Comment ends in this block
                # Delete comment
                block = block[:comment_start] + block[comment_end + 1 :]
            else:
                # Comment continues to next block(s)
                block = block[:comment_start]
                self._comment_spans = True
                break
        return block

    def _decode_bitonal(self) -> bytearray:
        """
        This is a separate method because in the plain PBM format, all data tokens are
        exactly one byte, so the inter-token whitespace is optional.
        """
        data = bytearray()
        total_bytes = self.state.xsize * self.state.ysize

        while len(data) != total_bytes:
            block = self._read_block()  # read next block
            if not block:
                # eof
                break

            block = self._ignore_comments(block)

            # Collapse whitespace so each remaining byte is one pixel token.
            tokens = b"".join(block.split())
            for token in tokens:
                # Only ASCII "0" (48) and "1" (49) are valid PBM tokens.
                if token not in (48, 49):
                    msg = b"Invalid token for this mode: %s" % bytes([token])
                    raise ValueError(msg)
            data = (data + tokens)[:total_bytes]
        # PBM is white=0/black=1; map "0"->0xFF and "1"->0x00 for mode '1'.
        invert = bytes.maketrans(b"01", b"\xff\x00")
        return data.translate(invert)

    def _decode_blocks(self, maxval: int) -> bytearray:
        """Decode whitespace-separated numeric tokens (plain PGM/PPM)."""
        data = bytearray()
        max_len = 10
        out_byte_count = 4 if self.mode == "I" else 1
        out_max = 65535 if self.mode == "I" else 255
        bands = Image.getmodebands(self.mode)
        total_bytes = self.state.xsize * self.state.ysize * bands * out_byte_count

        # Bytes of a token cut off at the end of the previous chunk.
        half_token = b""
        while len(data) != total_bytes:
            block = self._read_block()  # read next block
            if not block:
                if half_token:
                    block = bytearray(b" ")  # flush half_token
                else:
                    # eof
                    break

            block = self._ignore_comments(block)

            if half_token:
                block = half_token + block  # stitch half_token to new block
                half_token = b""

            tokens = block.split()

            if block and not block[-1:].isspace():  # block might split token
                half_token = tokens.pop()  # save half token for later
                if len(half_token) > max_len:  # prevent buildup of half_token
                    msg = (
                        b"Token too long found in data: %s" % half_token[: max_len + 1]
                    )
                    raise ValueError(msg)

            for token in tokens:
                if len(token) > max_len:
                    msg = b"Token too long found in data: %s" % token[: max_len + 1]
                    raise ValueError(msg)
                value = int(token)
                if value < 0:
                    msg_str = f"Channel value is negative: {value}"
                    raise ValueError(msg_str)
                if value > maxval:
                    msg_str = f"Channel value too large for this mode: {value}"
                    raise ValueError(msg_str)
                # Rescale from [0, maxval] to the output range.
                value = round(value / maxval * out_max)
                data += o32(value) if self.mode == "I" else o8(value)
                if len(data) == total_bytes:  # finished!
                    break
        return data

    def decode(self, buffer: bytes | Image.SupportsArrayInterface) -> tuple[int, int]:
        """Dispatch to the bitonal or numeric decoder; returns (-1, 0)."""
        self._comment_spans = False
        if self.mode == "1":
            data = self._decode_bitonal()
            rawmode = "1;8"
        else:
            maxval = self.args[-1]
            data = self._decode_blocks(maxval)
            rawmode = "I;32" if self.mode == "I" else self.mode
        self.set_as_raw(bytes(data), rawmode)
        return -1, 0
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
class PpmDecoder(ImageFile.PyDecoder):
    """Decoder for binary PGM/PPM data whose maxval is not a full bit depth.

    Rescales each sample from [0, maxval] to 8-bit (or 16-bit for mode
    "I") output.
    """

    _pulls_fd = True

    def decode(self, buffer: bytes | Image.SupportsArrayInterface) -> tuple[int, int]:
        assert self.fd is not None

        data = bytearray()
        maxval = self.args[-1]
        # Samples are 1 byte if maxval fits, otherwise 2 bytes big-endian.
        in_byte_count = 1 if maxval < 256 else 2
        out_byte_count = 4 if self.mode == "I" else 1
        out_max = 65535 if self.mode == "I" else 255
        bands = Image.getmodebands(self.mode)
        dest_length = self.state.xsize * self.state.ysize * bands * out_byte_count
        while len(data) < dest_length:
            # Read one pixel (all bands) at a time.
            pixels = self.fd.read(in_byte_count * bands)
            if len(pixels) < in_byte_count * bands:
                # eof
                break
            for b in range(bands):
                value = (
                    pixels[b] if in_byte_count == 1 else i16(pixels, b * in_byte_count)
                )
                # Rescale and clamp to the output range.
                value = min(out_max, round(value / maxval * out_max))
                data += o32(value) if self.mode == "I" else o8(value)
        rawmode = "I;32" if self.mode == "I" else self.mode
        self.set_as_raw(bytes(data), rawmode)
        return -1, 0
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
#
|
| 330 |
+
# --------------------------------------------------------------------
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Write *im* to *fp* in the binary PNM variant matching its mode.

    Raises OSError for modes with no PNM equivalent.
    """
    if im.mode == "1":
        rawmode, head = "1;I", b"P4"
    elif im.mode == "L":
        rawmode, head = "L", b"P5"
    elif im.mode in ("I", "I;16"):
        rawmode, head = "I;16B", b"P5"
    elif im.mode in ("RGB", "RGBA"):
        # RGBA is written as RGB; the alpha channel is dropped.
        rawmode, head = "RGB", b"P6"
    elif im.mode == "F":
        rawmode, head = "F;32F", b"Pf"
    else:
        msg = f"cannot write mode {im.mode} as PPM"
        raise OSError(msg)
    fp.write(head + b"\n%d %d\n" % im.size)
    if head == b"P6":
        fp.write(b"255\n")
    elif head == b"P5":
        if rawmode == "L":
            fp.write(b"255\n")
        else:
            fp.write(b"65535\n")
    elif head == b"Pf":
        # Negative scale marks little-endian float data (PFM convention).
        fp.write(b"-1.0\n")
    # PFM stores rows bottom-to-top, hence the negative row order.
    row_order = -1 if im.mode == "F" else 1
    ImageFile._save(
        im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 0, (rawmode, 0, row_order))]
    )
|
| 361 |
+
|
| 362 |
+
|
| 363 |
+
#
# --------------------------------------------------------------------

# Register opener, saver, decoders, extensions and MIME type with PIL.
Image.register_open(PpmImageFile.format, PpmImageFile, _accept)
Image.register_save(PpmImageFile.format, _save)

Image.register_decoder("ppm", PpmDecoder)
Image.register_decoder("ppm_plain", PpmPlainDecoder)

Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm", ".pnm", ".pfm"])

Image.register_mime(PpmImageFile.format, "image/x-portable-anymap")
|
lib/python3.13/site-packages/PIL/TarIO.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# read files from within a tar file
|
| 6 |
+
#
|
| 7 |
+
# History:
|
| 8 |
+
# 95-06-18 fl Created
|
| 9 |
+
# 96-05-28 fl Open files in binary mode
|
| 10 |
+
#
|
| 11 |
+
# Copyright (c) Secret Labs AB 1997.
|
| 12 |
+
# Copyright (c) Fredrik Lundh 1995-96.
|
| 13 |
+
#
|
| 14 |
+
# See the README file for information on usage and redistribution.
|
| 15 |
+
#
|
| 16 |
+
from __future__ import annotations
|
| 17 |
+
|
| 18 |
+
import io
|
| 19 |
+
|
| 20 |
+
from . import ContainerIO
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class TarIO(ContainerIO.ContainerIO[bytes]):
    """A file object that provides read access to a given member of a TAR file."""

    def __init__(self, tarfile: str, file: str) -> None:
        """
        Create file object.

        :param tarfile: Name of TAR file.
        :param file: Name of member file.
        :raises OSError: If the archive ends before *file* is found.
        """
        self.fh = open(tarfile, "rb")

        # Walk the archive 512-byte header by header until the member
        # is found.
        while True:
            s = self.fh.read(512)
            if len(s) != 512:
                self.fh.close()

                msg = "unexpected end of tar file"
                raise OSError(msg)

            # First 100 bytes of a tar header hold the NUL-padded name.
            name = s[:100].decode("utf-8")
            i = name.find("\0")
            if i == 0:
                # Empty name: treated here as end-of-archive marker.
                self.fh.close()

                msg = "cannot find subfile"
                raise OSError(msg)
            if i > 0:
                name = name[:i]

            # Size field is octal ASCII at offset 124.
            size = int(s[124:135], 8)

            if file == name:
                break

            # Skip the member's data, rounded up to a 512-byte boundary.
            self.fh.seek((size + 511) & (~511), io.SEEK_CUR)

        # Open region
        super().__init__(self.fh, self.fh.tell(), size)
|
lib/python3.13/site-packages/PIL/TgaImagePlugin.py
ADDED
|
@@ -0,0 +1,264 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# TGA file handling
|
| 6 |
+
#
|
| 7 |
+
# History:
|
| 8 |
+
# 95-09-01 fl created (reads 24-bit files only)
|
| 9 |
+
# 97-01-04 fl support more TGA versions, including compressed images
|
| 10 |
+
# 98-07-04 fl fixed orientation and alpha layer bugs
|
| 11 |
+
# 98-09-11 fl fixed orientation for runlength decoder
|
| 12 |
+
#
|
| 13 |
+
# Copyright (c) Secret Labs AB 1997-98.
|
| 14 |
+
# Copyright (c) Fredrik Lundh 1995-97.
|
| 15 |
+
#
|
| 16 |
+
# See the README file for information on usage and redistribution.
|
| 17 |
+
#
|
| 18 |
+
from __future__ import annotations
|
| 19 |
+
|
| 20 |
+
import warnings
|
| 21 |
+
from typing import IO
|
| 22 |
+
|
| 23 |
+
from . import Image, ImageFile, ImagePalette
|
| 24 |
+
from ._binary import i16le as i16
|
| 25 |
+
from ._binary import o8
|
| 26 |
+
from ._binary import o16le as o16
|
| 27 |
+
|
| 28 |
+
#
|
| 29 |
+
# --------------------------------------------------------------------
|
| 30 |
+
# Read TGA file
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Keys are (image type & 7, bits per pixel) from the TGA header; values are
# the raw-decoder modes used when building the tile descriptor in _open().
MODES = {
    # map imagetype/depth to rawmode
    (1, 8): "P",
    (3, 1): "1",
    (3, 8): "L",
    (3, 16): "LA",
    (2, 16): "BGRA;15Z",
    (2, 24): "BGR",
    (2, 32): "BGRA",
}
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
##
|
| 46 |
+
# Image plugin for Targa files.
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class TgaImageFile(ImageFile.ImageFile):
    """Image plugin for Targa (.tga) files.

    Parses the 18-byte TGA header, the optional image-id section and
    color map, then sets up a single "raw" or "tga_rle" tile for decoding.
    """

    format = "TGA"
    format_description = "Targa"

    def _open(self) -> None:
        # process header
        assert self.fp is not None

        s = self.fp.read(18)

        # byte 0: length of the optional image-id section
        id_len = s[0]

        # byte 1: 0 = no color map, 1 = color map present
        colormaptype = s[1]
        # byte 2: base image type; values with bit 3 set (9/10/11) are the
        # run-length-encoded variants of 1/2/3
        imagetype = s[2]

        # byte 16: bits per pixel
        depth = s[16]

        # byte 17: image descriptor (origin/orientation flags)
        flags = s[17]

        # bytes 12-15: width and height, little-endian
        self._size = i16(s, 12), i16(s, 14)

        # validate header fields
        if (
            colormaptype not in (0, 1)
            or self.size[0] <= 0
            or self.size[1] <= 0
            or depth not in (1, 8, 16, 24, 32)
        ):
            msg = "not a TGA file"
            raise SyntaxError(msg)

        # image mode
        if imagetype in (3, 11):
            # grayscale
            self._mode = "L"
            if depth == 1:
                self._mode = "1"  # ???
            elif depth == 16:
                self._mode = "LA"
        elif imagetype in (1, 9):
            # color-mapped
            self._mode = "P" if colormaptype else "L"
        elif imagetype in (2, 10):
            # truecolor
            self._mode = "RGB" if depth == 24 else "RGBA"
        else:
            msg = "unknown TGA mode"
            raise SyntaxError(msg)

        # orientation
        orientation = flags & 0x30
        # bit 4 (0x10) set: rows are stored right-to-left; the decoded image
        # is mirrored afterwards in load_end()
        self._flip_horizontally = orientation in [0x10, 0x30]
        # bit 5 (0x20) set: rows run top-to-bottom (1); clear: bottom-up (-1)
        if orientation in [0x20, 0x30]:
            orientation = 1
        elif orientation in [0, 0x10]:
            orientation = -1
        else:
            msg = "unknown TGA orientation"
            raise SyntaxError(msg)

        self.info["orientation"] = orientation

        if imagetype & 8:
            self.info["compression"] = "tga_rle"

        if id_len:
            self.info["id_section"] = self.fp.read(id_len)

        if colormaptype:
            # read palette
            # bytes 3-7 of the header: index of the first map entry, number
            # of entries, and bits per entry; entries before `start` are
            # filled with zero bytes
            start, size, mapdepth = i16(s, 3), i16(s, 5), s[7]
            if mapdepth == 16:
                self.palette = ImagePalette.raw(
                    "BGRA;15Z", bytes(2 * start) + self.fp.read(2 * size)
                )
                self.palette.mode = "RGBA"
            elif mapdepth == 24:
                self.palette = ImagePalette.raw(
                    "BGR", bytes(3 * start) + self.fp.read(3 * size)
                )
            elif mapdepth == 32:
                self.palette = ImagePalette.raw(
                    "BGRA", bytes(4 * start) + self.fp.read(4 * size)
                )
            else:
                msg = "unknown TGA map depth"
                raise SyntaxError(msg)

        # setup tile descriptor
        try:
            rawmode = MODES[(imagetype & 7, depth)]
            if imagetype & 8:
                # compressed
                self.tile = [
                    ImageFile._Tile(
                        "tga_rle",
                        (0, 0) + self.size,
                        self.fp.tell(),
                        (rawmode, orientation, depth),
                    )
                ]
            else:
                self.tile = [
                    ImageFile._Tile(
                        "raw",
                        (0, 0) + self.size,
                        self.fp.tell(),
                        (rawmode, 0, orientation),
                    )
                ]
        except KeyError:
            pass  # cannot decode

    def load_end(self) -> None:
        # Right-to-left files are decoded left-to-right, then mirrored here.
        if self._flip_horizontally:
            self.im = self.im.transpose(Image.Transpose.FLIP_LEFT_RIGHT)
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
#
|
| 165 |
+
# --------------------------------------------------------------------
|
| 166 |
+
# Write TGA file
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
# Maps im.mode -> (rawmode, bits per pixel, colormaptype, TGA image type)
# as unpacked by _save().
SAVE = {
    "1": ("1", 1, 0, 3),
    "L": ("L", 8, 0, 3),
    "LA": ("LA", 16, 0, 3),
    "P": ("P", 8, 1, 1),
    "RGB": ("BGR", 24, 0, 2),
    "RGBA": ("BGRA", 32, 0, 2),
}
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Write *im* to *fp* as a Targa file, with a TGA version 2 footer.

    :raises OSError: if the image mode cannot be expressed as TGA.
    """
    try:
        rawmode, bits, colormaptype, imagetype = SAVE[im.mode]
    except KeyError as e:
        msg = f"cannot write mode {im.mode} as TGA"
        raise OSError(msg) from e

    # An explicit "rle" save option takes precedence; otherwise fall back
    # to a "compression" option or the source image's own setting.
    if "rle" in im.encoderinfo:
        rle = im.encoderinfo["rle"]
    else:
        compression = im.encoderinfo.get("compression", im.info.get("compression"))
        rle = compression == "tga_rle"
    if rle:
        imagetype += 8  # RLE variants of the base image types

    # Optional image-id section; its header length field is a single byte.
    id_section = im.encoderinfo.get("id_section", im.info.get("id_section", ""))
    id_len = len(id_section)
    if id_len > 255:
        id_len = 255
        id_section = id_section[:255]
        warnings.warn("id_section has been trimmed to 255 characters")

    # Color map is always emitted as 24-bit BGR entries for palette modes.
    if colormaptype:
        palette = im.im.getpalette("RGB", "BGR")
        colormaplength, colormapentry = len(palette) // 3, 24
    else:
        colormaplength, colormapentry = 0, 0

    # Image descriptor byte: 8 for the modes that carry an alpha channel.
    flags = 8 if im.mode in ("LA", "RGBA") else 0

    orientation = im.encoderinfo.get("orientation", im.info.get("orientation", -1))
    if orientation > 0:
        flags = flags | 0x20  # top-to-bottom row order

    # Assemble and emit the fixed 18-byte header.
    header = (
        o8(id_len)
        + o8(colormaptype)
        + o8(imagetype)
        + o16(0)  # colormapfirst
        + o16(colormaplength)
        + o8(colormapentry)
        + o16(0)  # x origin
        + o16(0)  # y origin
        + o16(im.size[0])
        + o16(im.size[1])
        + o8(bits)
        + o8(flags)
    )
    fp.write(header)

    if id_section:
        fp.write(id_section)

    if colormaptype:
        fp.write(palette)

    # Pixel data, via the matching encoder.
    if rle:
        encoder_name, encoder_args = "tga_rle", (rawmode, orientation)
    else:
        encoder_name, encoder_args = "raw", (rawmode, 0, orientation)
    ImageFile._save(
        im,
        fp,
        [ImageFile._Tile(encoder_name, (0, 0) + im.size, 0, encoder_args)],
    )

    # write targa version 2 footer
    fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." + b"\000")
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
#
|
| 255 |
+
# --------------------------------------------------------------------
|
| 256 |
+
# Registry
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
# Hook the TGA reader and writer into PIL's format registry.
Image.register_open(TgaImageFile.format, TgaImageFile)
Image.register_save(TgaImageFile.format, _save)

Image.register_extensions(TgaImageFile.format, [".tga", ".icb", ".vda", ".vst"])

Image.register_mime(TgaImageFile.format, "image/x-tga")
|
lib/python3.13/site-packages/PIL/TiffImagePlugin.py
ADDED
|
@@ -0,0 +1,2338 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library.
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# TIFF file handling
|
| 6 |
+
#
|
| 7 |
+
# TIFF is a flexible, if somewhat aged, image file format originally
|
| 8 |
+
# defined by Aldus. Although TIFF supports a wide variety of pixel
|
| 9 |
+
# layouts and compression methods, the name doesn't really stand for
|
| 10 |
+
# "thousands of incompatible file formats," it just feels that way.
|
| 11 |
+
#
|
| 12 |
+
# To read TIFF data from a stream, the stream must be seekable. For
|
| 13 |
+
# progressive decoding, make sure to use TIFF files where the tag
|
| 14 |
+
# directory is placed first in the file.
|
| 15 |
+
#
|
| 16 |
+
# History:
|
| 17 |
+
# 1995-09-01 fl Created
|
| 18 |
+
# 1996-05-04 fl Handle JPEGTABLES tag
|
| 19 |
+
# 1996-05-18 fl Fixed COLORMAP support
|
| 20 |
+
# 1997-01-05 fl Fixed PREDICTOR support
|
| 21 |
+
# 1997-08-27 fl Added support for rational tags (from Perry Stoll)
|
| 22 |
+
# 1998-01-10 fl Fixed seek/tell (from Jan Blom)
|
| 23 |
+
# 1998-07-15 fl Use private names for internal variables
|
| 24 |
+
# 1999-06-13 fl Rewritten for PIL 1.0 (1.0)
|
| 25 |
+
# 2000-10-11 fl Additional fixes for Python 2.0 (1.1)
|
| 26 |
+
# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2)
|
| 27 |
+
# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3)
|
| 28 |
+
# 2001-12-18 fl Added workaround for broken Matrox library
|
| 29 |
+
# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart)
|
| 30 |
+
# 2003-05-19 fl Check FILLORDER tag
|
| 31 |
+
# 2003-09-26 fl Added RGBa support
|
| 32 |
+
# 2004-02-24 fl Added DPI support; fixed rational write support
|
| 33 |
+
# 2005-02-07 fl Added workaround for broken Corel Draw 10 files
|
| 34 |
+
# 2006-01-09 fl Added support for float/double tags (from Russell Nelson)
|
| 35 |
+
#
|
| 36 |
+
# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved.
|
| 37 |
+
# Copyright (c) 1995-1997 by Fredrik Lundh
|
| 38 |
+
#
|
| 39 |
+
# See the README file for information on usage and redistribution.
|
| 40 |
+
#
|
| 41 |
+
from __future__ import annotations
|
| 42 |
+
|
| 43 |
+
import io
|
| 44 |
+
import itertools
|
| 45 |
+
import logging
|
| 46 |
+
import math
|
| 47 |
+
import os
|
| 48 |
+
import struct
|
| 49 |
+
import warnings
|
| 50 |
+
from collections.abc import Callable, MutableMapping
|
| 51 |
+
from fractions import Fraction
|
| 52 |
+
from numbers import Number, Rational
|
| 53 |
+
from typing import IO, Any, cast
|
| 54 |
+
|
| 55 |
+
from . import ExifTags, Image, ImageFile, ImageOps, ImagePalette, TiffTags
|
| 56 |
+
from ._binary import i16be as i16
|
| 57 |
+
from ._binary import i32be as i32
|
| 58 |
+
from ._binary import o8
|
| 59 |
+
from ._util import DeferredError, is_path
|
| 60 |
+
from .TiffTags import TYPES
|
| 61 |
+
|
| 62 |
+
TYPE_CHECKING = False
|
| 63 |
+
if TYPE_CHECKING:
|
| 64 |
+
from collections.abc import Iterator
|
| 65 |
+
from typing import NoReturn
|
| 66 |
+
|
| 67 |
+
from ._typing import Buffer, IntegralLike, StrOrBytesPath
|
| 68 |
+
|
| 69 |
+
logger = logging.getLogger(__name__)
|
| 70 |
+
|
| 71 |
+
# Set these to true to force use of libtiff for reading or writing.
|
| 72 |
+
READ_LIBTIFF = False
|
| 73 |
+
WRITE_LIBTIFF = False
|
| 74 |
+
STRIP_SIZE = 65536
|
| 75 |
+
|
| 76 |
+
II = b"II" # little-endian (Intel style)
|
| 77 |
+
MM = b"MM" # big-endian (Motorola style)
|
| 78 |
+
|
| 79 |
+
#
|
| 80 |
+
# --------------------------------------------------------------------
|
| 81 |
+
# Read TIFF files
|
| 82 |
+
|
| 83 |
+
# a few tag names, just to make the code below a bit more readable
|
| 84 |
+
OSUBFILETYPE = 255
|
| 85 |
+
IMAGEWIDTH = 256
|
| 86 |
+
IMAGELENGTH = 257
|
| 87 |
+
BITSPERSAMPLE = 258
|
| 88 |
+
COMPRESSION = 259
|
| 89 |
+
PHOTOMETRIC_INTERPRETATION = 262
|
| 90 |
+
FILLORDER = 266
|
| 91 |
+
IMAGEDESCRIPTION = 270
|
| 92 |
+
STRIPOFFSETS = 273
|
| 93 |
+
SAMPLESPERPIXEL = 277
|
| 94 |
+
ROWSPERSTRIP = 278
|
| 95 |
+
STRIPBYTECOUNTS = 279
|
| 96 |
+
X_RESOLUTION = 282
|
| 97 |
+
Y_RESOLUTION = 283
|
| 98 |
+
PLANAR_CONFIGURATION = 284
|
| 99 |
+
RESOLUTION_UNIT = 296
|
| 100 |
+
TRANSFERFUNCTION = 301
|
| 101 |
+
SOFTWARE = 305
|
| 102 |
+
DATE_TIME = 306
|
| 103 |
+
ARTIST = 315
|
| 104 |
+
PREDICTOR = 317
|
| 105 |
+
COLORMAP = 320
|
| 106 |
+
TILEWIDTH = 322
|
| 107 |
+
TILELENGTH = 323
|
| 108 |
+
TILEOFFSETS = 324
|
| 109 |
+
TILEBYTECOUNTS = 325
|
| 110 |
+
SUBIFD = 330
|
| 111 |
+
EXTRASAMPLES = 338
|
| 112 |
+
SAMPLEFORMAT = 339
|
| 113 |
+
JPEGTABLES = 347
|
| 114 |
+
YCBCRSUBSAMPLING = 530
|
| 115 |
+
REFERENCEBLACKWHITE = 532
|
| 116 |
+
COPYRIGHT = 33432
|
| 117 |
+
IPTC_NAA_CHUNK = 33723 # newsphoto properties
|
| 118 |
+
PHOTOSHOP_CHUNK = 34377 # photoshop properties
|
| 119 |
+
ICCPROFILE = 34675
|
| 120 |
+
EXIFIFD = 34665
|
| 121 |
+
XMP = 700
|
| 122 |
+
JPEGQUALITY = 65537 # pseudo-tag by libtiff
|
| 123 |
+
|
| 124 |
+
# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java
|
| 125 |
+
IMAGEJ_META_DATA_BYTE_COUNTS = 50838
|
| 126 |
+
IMAGEJ_META_DATA = 50839
|
| 127 |
+
|
| 128 |
+
COMPRESSION_INFO = {
|
| 129 |
+
# Compression => pil compression name
|
| 130 |
+
1: "raw",
|
| 131 |
+
2: "tiff_ccitt",
|
| 132 |
+
3: "group3",
|
| 133 |
+
4: "group4",
|
| 134 |
+
5: "tiff_lzw",
|
| 135 |
+
6: "tiff_jpeg", # obsolete
|
| 136 |
+
7: "jpeg",
|
| 137 |
+
8: "tiff_adobe_deflate",
|
| 138 |
+
32771: "tiff_raw_16", # 16-bit padding
|
| 139 |
+
32773: "packbits",
|
| 140 |
+
32809: "tiff_thunderscan",
|
| 141 |
+
32946: "tiff_deflate",
|
| 142 |
+
34676: "tiff_sgilog",
|
| 143 |
+
34677: "tiff_sgilog24",
|
| 144 |
+
34925: "lzma",
|
| 145 |
+
50000: "zstd",
|
| 146 |
+
50001: "webp",
|
| 147 |
+
}
|
| 148 |
+
|
| 149 |
+
COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()}
|
| 150 |
+
|
| 151 |
+
OPEN_INFO = {
|
| 152 |
+
# (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample,
|
| 153 |
+
# ExtraSamples) => mode, rawmode
|
| 154 |
+
(II, 0, (1,), 1, (1,), ()): ("1", "1;I"),
|
| 155 |
+
(MM, 0, (1,), 1, (1,), ()): ("1", "1;I"),
|
| 156 |
+
(II, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
|
| 157 |
+
(MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
|
| 158 |
+
(II, 1, (1,), 1, (1,), ()): ("1", "1"),
|
| 159 |
+
(MM, 1, (1,), 1, (1,), ()): ("1", "1"),
|
| 160 |
+
(II, 1, (1,), 2, (1,), ()): ("1", "1;R"),
|
| 161 |
+
(MM, 1, (1,), 2, (1,), ()): ("1", "1;R"),
|
| 162 |
+
(II, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
|
| 163 |
+
(MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
|
| 164 |
+
(II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
|
| 165 |
+
(MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
|
| 166 |
+
(II, 1, (1,), 1, (2,), ()): ("L", "L;2"),
|
| 167 |
+
(MM, 1, (1,), 1, (2,), ()): ("L", "L;2"),
|
| 168 |
+
(II, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
|
| 169 |
+
(MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
|
| 170 |
+
(II, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
|
| 171 |
+
(MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
|
| 172 |
+
(II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
|
| 173 |
+
(MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
|
| 174 |
+
(II, 1, (1,), 1, (4,), ()): ("L", "L;4"),
|
| 175 |
+
(MM, 1, (1,), 1, (4,), ()): ("L", "L;4"),
|
| 176 |
+
(II, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
|
| 177 |
+
(MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
|
| 178 |
+
(II, 0, (1,), 1, (8,), ()): ("L", "L;I"),
|
| 179 |
+
(MM, 0, (1,), 1, (8,), ()): ("L", "L;I"),
|
| 180 |
+
(II, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
|
| 181 |
+
(MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
|
| 182 |
+
(II, 1, (1,), 1, (8,), ()): ("L", "L"),
|
| 183 |
+
(MM, 1, (1,), 1, (8,), ()): ("L", "L"),
|
| 184 |
+
(II, 1, (2,), 1, (8,), ()): ("L", "L"),
|
| 185 |
+
(MM, 1, (2,), 1, (8,), ()): ("L", "L"),
|
| 186 |
+
(II, 1, (1,), 2, (8,), ()): ("L", "L;R"),
|
| 187 |
+
(MM, 1, (1,), 2, (8,), ()): ("L", "L;R"),
|
| 188 |
+
(II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"),
|
| 189 |
+
(II, 0, (1,), 1, (16,), ()): ("I;16", "I;16"),
|
| 190 |
+
(II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"),
|
| 191 |
+
(MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"),
|
| 192 |
+
(II, 1, (1,), 2, (16,), ()): ("I;16", "I;16R"),
|
| 193 |
+
(II, 1, (2,), 1, (16,), ()): ("I", "I;16S"),
|
| 194 |
+
(MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"),
|
| 195 |
+
(II, 0, (3,), 1, (32,), ()): ("F", "F;32F"),
|
| 196 |
+
(MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"),
|
| 197 |
+
(II, 1, (1,), 1, (32,), ()): ("I", "I;32N"),
|
| 198 |
+
(II, 1, (2,), 1, (32,), ()): ("I", "I;32S"),
|
| 199 |
+
(MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"),
|
| 200 |
+
(II, 1, (3,), 1, (32,), ()): ("F", "F;32F"),
|
| 201 |
+
(MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"),
|
| 202 |
+
(II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
|
| 203 |
+
(MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
|
| 204 |
+
(II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
|
| 205 |
+
(MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
|
| 206 |
+
(II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
|
| 207 |
+
(MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
|
| 208 |
+
(II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples
|
| 209 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples
|
| 210 |
+
(II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGB", "RGBX"),
|
| 211 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGB", "RGBX"),
|
| 212 |
+
(II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGB", "RGBXX"),
|
| 213 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGB", "RGBXX"),
|
| 214 |
+
(II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGB", "RGBXXX"),
|
| 215 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGB", "RGBXXX"),
|
| 216 |
+
(II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
|
| 217 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
|
| 218 |
+
(II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
|
| 219 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
|
| 220 |
+
(II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
|
| 221 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
|
| 222 |
+
(II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
|
| 223 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
|
| 224 |
+
(II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
|
| 225 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
|
| 226 |
+
(II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
|
| 227 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
|
| 228 |
+
(II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
|
| 229 |
+
(MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
|
| 230 |
+
(II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"),
|
| 231 |
+
(MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"),
|
| 232 |
+
(II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"),
|
| 233 |
+
(MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"),
|
| 234 |
+
(II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGB", "RGBX;16L"),
|
| 235 |
+
(MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGB", "RGBX;16B"),
|
| 236 |
+
(II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"),
|
| 237 |
+
(MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"),
|
| 238 |
+
(II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"),
|
| 239 |
+
(MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"),
|
| 240 |
+
(II, 3, (1,), 1, (1,), ()): ("P", "P;1"),
|
| 241 |
+
(MM, 3, (1,), 1, (1,), ()): ("P", "P;1"),
|
| 242 |
+
(II, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
|
| 243 |
+
(MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
|
| 244 |
+
(II, 3, (1,), 1, (2,), ()): ("P", "P;2"),
|
| 245 |
+
(MM, 3, (1,), 1, (2,), ()): ("P", "P;2"),
|
| 246 |
+
(II, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
|
| 247 |
+
(MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
|
| 248 |
+
(II, 3, (1,), 1, (4,), ()): ("P", "P;4"),
|
| 249 |
+
(MM, 3, (1,), 1, (4,), ()): ("P", "P;4"),
|
| 250 |
+
(II, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
|
| 251 |
+
(MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
|
| 252 |
+
(II, 3, (1,), 1, (8,), ()): ("P", "P"),
|
| 253 |
+
(MM, 3, (1,), 1, (8,), ()): ("P", "P"),
|
| 254 |
+
(II, 3, (1,), 1, (8, 8), (0,)): ("P", "PX"),
|
| 255 |
+
(MM, 3, (1,), 1, (8, 8), (0,)): ("P", "PX"),
|
| 256 |
+
(II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
|
| 257 |
+
(MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
|
| 258 |
+
(II, 3, (1,), 2, (8,), ()): ("P", "P;R"),
|
| 259 |
+
(MM, 3, (1,), 2, (8,), ()): ("P", "P;R"),
|
| 260 |
+
(II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
|
| 261 |
+
(MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
|
| 262 |
+
(II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
|
| 263 |
+
(MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
|
| 264 |
+
(II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
|
| 265 |
+
(MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
|
| 266 |
+
(II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"),
|
| 267 |
+
(MM, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16B"),
|
| 268 |
+
(II, 6, (1,), 1, (8,), ()): ("L", "L"),
|
| 269 |
+
(MM, 6, (1,), 1, (8,), ()): ("L", "L"),
|
| 270 |
+
# JPEG compressed images handled by LibTiff and auto-converted to RGBX
|
| 271 |
+
# Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel
|
| 272 |
+
(II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
|
| 273 |
+
(MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
|
| 274 |
+
(II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
|
| 275 |
+
(MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
|
| 276 |
+
}
|
| 277 |
+
|
| 278 |
+
MAX_SAMPLESPERPIXEL = max(len(key_tp[4]) for key_tp in OPEN_INFO)
|
| 279 |
+
|
| 280 |
+
PREFIXES = [
|
| 281 |
+
b"MM\x00\x2a", # Valid TIFF header with big-endian byte order
|
| 282 |
+
b"II\x2a\x00", # Valid TIFF header with little-endian byte order
|
| 283 |
+
b"MM\x2a\x00", # Invalid TIFF header, assume big-endian
|
| 284 |
+
b"II\x00\x2a", # Invalid TIFF header, assume little-endian
|
| 285 |
+
b"MM\x00\x2b", # BigTIFF with big-endian byte order
|
| 286 |
+
b"II\x2b\x00", # BigTIFF with little-endian byte order
|
| 287 |
+
]
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
def _accept(prefix: bytes) -> bool:
    """Return True if *prefix* begins with any accepted TIFF/BigTIFF magic header."""
    return any(prefix.startswith(magic) for magic in PREFIXES)
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
def _limit_rational(
    val: float | Fraction | IFDRational, max_val: int
) -> tuple[IntegralLike, IntegralLike]:
    """Approximate *val* as (numerator, denominator) with both parts <= *max_val*.

    Values larger than 1 are inverted before limiting so that the large part
    ends up bounded, then the pair is flipped back.
    """
    inverted = abs(val) > 1
    candidate = 1 / val if inverted else val
    n_d = IFDRational(candidate).limit_rational(max_val)
    return n_d[::-1] if inverted else n_d
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
def _limit_signed_rational(
    val: IFDRational, max_val: int, min_val: int
) -> tuple[IntegralLike, IntegralLike]:
    """Approximate *val* as a (numerator, denominator) pair within signed bounds.

    First clamps against *min_val* (the negative bound), then against
    *max_val*, re-approximating with :func:`_limit_rational` as needed.
    """
    frac = Fraction(val)
    n_d: tuple[IntegralLike, IntegralLike] = (frac.numerator, frac.denominator)

    if min(float(i) for i in n_d) < min_val:
        n_d = _limit_rational(val, abs(min_val))

    as_floats = tuple(float(i) for i in n_d)
    if max(as_floats) > max_val:
        n_d = _limit_rational(as_floats[0] / as_floats[1], max_val)

    return n_d
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
##
|
| 319 |
+
# Wrapper for TIFF IFDs.
|
| 320 |
+
|
| 321 |
+
_load_dispatch = {}
|
| 322 |
+
_write_dispatch = {}
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
def _delegate(op: str) -> Any:
|
| 326 |
+
def delegate(
|
| 327 |
+
self: IFDRational, *args: tuple[float, ...]
|
| 328 |
+
) -> bool | float | Fraction:
|
| 329 |
+
return getattr(self._val, op)(*args)
|
| 330 |
+
|
| 331 |
+
return delegate
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
class IFDRational(Rational):
    """Rational number type for TIFF/EXIF tags where 0/0 is a legal value.

    In-the-wild EXIF data uses 0/0 to mean "unset" (e.g. a DigitalZoomRatio
    of 0.00/0.00 indicates that no digital zoom was used), so a zero
    denominator must not raise.

    Internally: a zero denominator is stored as ``float('nan')``; every other
    value is stored as a :class:`fractions.Fraction`.  Arithmetic and
    comparison operators are delegated to that stored value.
    """

    __slots__ = ("_numerator", "_denominator", "_val")

    def __init__(
        self, value: float | Fraction | IFDRational, denominator: int = 1
    ) -> None:
        """
        :param value: either an integer numerator, a
        float/rational/other number, or an IFDRational
        :param denominator: Optional integer denominator
        """
        self._val: Fraction | float
        if isinstance(value, IFDRational):
            # Copy constructor: reuse the already-normalised state.
            self._numerator = value.numerator
            self._denominator = value.denominator
            self._val = value._val
            return

        if isinstance(value, Fraction):
            self._numerator = value.numerator
            self._denominator = value.denominator
        else:
            if TYPE_CHECKING:
                self._numerator = cast(IntegralLike, value)
            else:
                self._numerator = value
            self._denominator = denominator

        if denominator == 0:
            # Legal "unset" rational; comparisons behave like NaN.
            self._val = float("nan")
        elif denominator == 1:
            self._val = Fraction(value)
        elif int(value) == value:
            self._val = Fraction(int(value), denominator)
        else:
            self._val = Fraction(value / denominator)

    @property
    def numerator(self) -> IntegralLike:
        return self._numerator

    @property
    def denominator(self) -> int:
        return self._denominator

    def limit_rational(self, max_denominator: int) -> tuple[IntegralLike, int]:
        """
        :param max_denominator: Integer, the maximum denominator value
        :returns: Tuple of (numerator, denominator)
        """
        if self.denominator == 0:
            # NaN case: nothing sensible to limit; return stored parts as-is.
            return self.numerator, self.denominator

        assert isinstance(self._val, Fraction)
        limited = self._val.limit_denominator(max_denominator)
        return limited.numerator, limited.denominator

    def __repr__(self) -> str:
        return str(float(self._val))

    def __hash__(self) -> int:  # type: ignore[override]
        return self._val.__hash__()

    def __eq__(self, other: object) -> bool:
        val = self._val
        if isinstance(other, IFDRational):
            other = other._val
        if isinstance(other, float):
            # Compare as floats so Fraction-vs-float equality is symmetric.
            val = float(val)
        return val == other

    def __getstate__(self) -> list[float | Fraction | IntegralLike]:
        return [self._val, self._numerator, self._denominator]

    def __setstate__(self, state: list[float | Fraction | IntegralLike]) -> None:
        IFDRational.__init__(self, 0)
        _val, _numerator, _denominator = state
        assert isinstance(_val, (float, Fraction))
        self._val = _val
        if TYPE_CHECKING:
            self._numerator = cast(IntegralLike, _numerator)
        else:
            self._numerator = _numerator
        assert isinstance(_denominator, int)
        self._denominator = _denominator

    # The delegate assignments below were generated with:
    #   a = ['add', 'radd', 'sub', 'rsub', 'mul', 'rmul',
    #        'truediv', 'rtruediv', 'floordiv', 'rfloordiv',
    #        'mod', 'rmod', 'pow', 'rpow', 'pos', 'neg',
    #        'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool',
    #        'ceil', 'floor', 'round']
    #   print("\n".join("__%s__ = _delegate('__%s__')" % (s, s) for s in a))
    __add__ = _delegate("__add__")
    __radd__ = _delegate("__radd__")
    __sub__ = _delegate("__sub__")
    __rsub__ = _delegate("__rsub__")
    __mul__ = _delegate("__mul__")
    __rmul__ = _delegate("__rmul__")
    __truediv__ = _delegate("__truediv__")
    __rtruediv__ = _delegate("__rtruediv__")
    __floordiv__ = _delegate("__floordiv__")
    __rfloordiv__ = _delegate("__rfloordiv__")
    __mod__ = _delegate("__mod__")
    __rmod__ = _delegate("__rmod__")
    __pow__ = _delegate("__pow__")
    __rpow__ = _delegate("__rpow__")
    __pos__ = _delegate("__pos__")
    __neg__ = _delegate("__neg__")
    __abs__ = _delegate("__abs__")
    __trunc__ = _delegate("__trunc__")
    __lt__ = _delegate("__lt__")
    __gt__ = _delegate("__gt__")
    __le__ = _delegate("__le__")
    __ge__ = _delegate("__ge__")
    __bool__ = _delegate("__bool__")
    __ceil__ = _delegate("__ceil__")
    __floor__ = _delegate("__floor__")
    __round__ = _delegate("__round__")
    # Fraction only grew __int__ on Python >= 3.11; delegate when available.
    if hasattr(Fraction, "__int__"):
        __int__ = _delegate("__int__")
|
| 470 |
+
|
| 471 |
+
|
| 472 |
+
_LoaderFunc = Callable[["ImageFileDirectory_v2", bytes, bool], Any]
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
def _register_loader(idx: int, size: int) -> Callable[[_LoaderFunc], _LoaderFunc]:
    """Decorator factory: register a loader for TIFF field type *idx*.

    *size* is the per-element byte size of the field type.  If the decorated
    function follows the ``load_<name>`` convention, a human-readable type
    name is also registered in :data:`.TiffTags.TYPES`.
    """

    def decorator(func: _LoaderFunc) -> _LoaderFunc:
        from .TiffTags import TYPES

        func_name = func.__name__
        if func_name.startswith("load_"):
            TYPES[idx] = func_name.removeprefix("load_").replace("_", " ")
        _load_dispatch[idx] = size, func  # noqa: F821
        return func

    return decorator
|
| 485 |
+
|
| 486 |
+
|
| 487 |
+
def _register_writer(idx: int) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """Decorator factory: register a writer for TIFF field type *idx*."""

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        _write_dispatch[idx] = func  # noqa: F821
        return func

    return decorator
|
| 493 |
+
|
| 494 |
+
|
| 495 |
+
def _register_basic(idx_fmt_name: tuple[int, str, str]) -> None:
    """Register load and write handlers for a fixed-size struct-packed type.

    *idx_fmt_name* is (TIFF type index, struct format character, display name).
    """
    from .TiffTags import TYPES

    idx, fmt, name = idx_fmt_name
    TYPES[idx] = name
    # "=" forces standard size so the element width is platform-independent.
    size = struct.calcsize(f"={fmt}")

    def basic_handler(
        self: ImageFileDirectory_v2, data: bytes, legacy_api: bool = True
    ) -> tuple[Any, ...]:
        return self._unpack(f"{len(data) // size}{fmt}", data)

    def basic_writer(self: ImageFileDirectory_v2, *values: Any) -> bytes:
        return b"".join(self._pack(fmt, value) for value in values)

    _load_dispatch[idx] = size, basic_handler  # noqa: F821
    _write_dispatch[idx] = basic_writer  # noqa: F821
|
| 511 |
+
|
| 512 |
+
|
| 513 |
+
if TYPE_CHECKING:
|
| 514 |
+
_IFDv2Base = MutableMapping[int, Any]
|
| 515 |
+
else:
|
| 516 |
+
_IFDv2Base = MutableMapping
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
class ImageFileDirectory_v2(_IFDv2Base):
|
| 520 |
+
"""This class represents a TIFF tag directory. To speed things up, we
|
| 521 |
+
don't decode tags unless they're asked for.
|
| 522 |
+
|
| 523 |
+
Exposes a dictionary interface of the tags in the directory::
|
| 524 |
+
|
| 525 |
+
ifd = ImageFileDirectory_v2()
|
| 526 |
+
ifd[key] = 'Some Data'
|
| 527 |
+
ifd.tagtype[key] = TiffTags.ASCII
|
| 528 |
+
print(ifd[key])
|
| 529 |
+
'Some Data'
|
| 530 |
+
|
| 531 |
+
Individual values are returned as the strings or numbers, sequences are
|
| 532 |
+
returned as tuples of the values.
|
| 533 |
+
|
| 534 |
+
The tiff metadata type of each item is stored in a dictionary of
|
| 535 |
+
tag types in
|
| 536 |
+
:attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types
|
| 537 |
+
are read from a tiff file, guessed from the type added, or added
|
| 538 |
+
manually.
|
| 539 |
+
|
| 540 |
+
Data Structures:
|
| 541 |
+
|
| 542 |
+
* ``self.tagtype = {}``
|
| 543 |
+
|
| 544 |
+
* Key: numerical TIFF tag number
|
| 545 |
+
* Value: integer corresponding to the data type from
|
| 546 |
+
:py:data:`.TiffTags.TYPES`
|
| 547 |
+
|
| 548 |
+
.. versionadded:: 3.0.0
|
| 549 |
+
|
| 550 |
+
'Internal' data structures:
|
| 551 |
+
|
| 552 |
+
* ``self._tags_v2 = {}``
|
| 553 |
+
|
| 554 |
+
* Key: numerical TIFF tag number
|
| 555 |
+
* Value: decoded data, as tuple for multiple values
|
| 556 |
+
|
| 557 |
+
* ``self._tagdata = {}``
|
| 558 |
+
|
| 559 |
+
* Key: numerical TIFF tag number
|
| 560 |
+
* Value: undecoded byte string from file
|
| 561 |
+
|
| 562 |
+
* ``self._tags_v1 = {}``
|
| 563 |
+
|
| 564 |
+
* Key: numerical TIFF tag number
|
| 565 |
+
* Value: decoded data in the v1 format
|
| 566 |
+
|
| 567 |
+
Tags will be found in the private attributes ``self._tagdata``, and in
|
| 568 |
+
``self._tags_v2`` once decoded.
|
| 569 |
+
|
| 570 |
+
``self.legacy_api`` is a value for internal use, and shouldn't be changed
|
| 571 |
+
from outside code. In cooperation with
|
| 572 |
+
:py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`, if ``legacy_api``
|
| 573 |
+
is true, then decoded tags will be populated into both ``_tags_v1`` and
|
| 574 |
+
``_tags_v2``. ``_tags_v2`` will be used if this IFD is used in the TIFF
|
| 575 |
+
save routine. Tags should be read from ``_tags_v1`` if
|
| 576 |
+
``legacy_api == true``.
|
| 577 |
+
|
| 578 |
+
"""
|
| 579 |
+
|
| 580 |
+
_load_dispatch: dict[int, tuple[int, _LoaderFunc]] = {}
|
| 581 |
+
_write_dispatch: dict[int, Callable[..., Any]] = {}
|
| 582 |
+
|
| 583 |
+
def __init__(
|
| 584 |
+
self,
|
| 585 |
+
ifh: bytes = b"II\x2a\x00\x00\x00\x00\x00",
|
| 586 |
+
prefix: bytes | None = None,
|
| 587 |
+
group: int | None = None,
|
| 588 |
+
) -> None:
|
| 589 |
+
"""Initialize an ImageFileDirectory.
|
| 590 |
+
|
| 591 |
+
To construct an ImageFileDirectory from a real file, pass the 8-byte
|
| 592 |
+
magic header to the constructor. To only set the endianness, pass it
|
| 593 |
+
as the 'prefix' keyword argument.
|
| 594 |
+
|
| 595 |
+
:param ifh: One of the accepted magic headers (cf. PREFIXES); also sets
|
| 596 |
+
endianness.
|
| 597 |
+
:param prefix: Override the endianness of the file.
|
| 598 |
+
"""
|
| 599 |
+
if not _accept(ifh):
|
| 600 |
+
msg = f"not a TIFF file (header {repr(ifh)} not valid)"
|
| 601 |
+
raise SyntaxError(msg)
|
| 602 |
+
self._prefix = prefix if prefix is not None else ifh[:2]
|
| 603 |
+
if self._prefix == MM:
|
| 604 |
+
self._endian = ">"
|
| 605 |
+
elif self._prefix == II:
|
| 606 |
+
self._endian = "<"
|
| 607 |
+
else:
|
| 608 |
+
msg = "not a TIFF IFD"
|
| 609 |
+
raise SyntaxError(msg)
|
| 610 |
+
self._bigtiff = ifh[2] == 43
|
| 611 |
+
self.group = group
|
| 612 |
+
self.tagtype: dict[int, int] = {}
|
| 613 |
+
""" Dictionary of tag types """
|
| 614 |
+
self.reset()
|
| 615 |
+
self.next = (
|
| 616 |
+
self._unpack("Q", ifh[8:])[0]
|
| 617 |
+
if self._bigtiff
|
| 618 |
+
else self._unpack("L", ifh[4:])[0]
|
| 619 |
+
)
|
| 620 |
+
self._legacy_api = False
|
| 621 |
+
|
| 622 |
+
prefix = property(lambda self: self._prefix)
|
| 623 |
+
offset = property(lambda self: self._offset)
|
| 624 |
+
|
| 625 |
+
@property
|
| 626 |
+
def legacy_api(self) -> bool:
|
| 627 |
+
return self._legacy_api
|
| 628 |
+
|
| 629 |
+
@legacy_api.setter
|
| 630 |
+
def legacy_api(self, value: bool) -> NoReturn:
|
| 631 |
+
msg = "Not allowing setting of legacy api"
|
| 632 |
+
raise Exception(msg)
|
| 633 |
+
|
| 634 |
+
def reset(self) -> None:
|
| 635 |
+
self._tags_v1: dict[int, Any] = {} # will remain empty if legacy_api is false
|
| 636 |
+
self._tags_v2: dict[int, Any] = {} # main tag storage
|
| 637 |
+
self._tagdata: dict[int, bytes] = {}
|
| 638 |
+
self.tagtype = {} # added 2008-06-05 by Florian Hoech
|
| 639 |
+
self._next = None
|
| 640 |
+
self._offset: int | None = None
|
| 641 |
+
|
| 642 |
+
def __str__(self) -> str:
|
| 643 |
+
return str(dict(self))
|
| 644 |
+
|
| 645 |
+
def named(self) -> dict[str, Any]:
|
| 646 |
+
"""
|
| 647 |
+
:returns: dict of name|key: value
|
| 648 |
+
|
| 649 |
+
Returns the complete tag dictionary, with named tags where possible.
|
| 650 |
+
"""
|
| 651 |
+
return {
|
| 652 |
+
TiffTags.lookup(code, self.group).name: value
|
| 653 |
+
for code, value in self.items()
|
| 654 |
+
}
|
| 655 |
+
|
| 656 |
+
def __len__(self) -> int:
|
| 657 |
+
return len(set(self._tagdata) | set(self._tags_v2))
|
| 658 |
+
|
| 659 |
+
def __getitem__(self, tag: int) -> Any:
|
| 660 |
+
if tag not in self._tags_v2: # unpack on the fly
|
| 661 |
+
data = self._tagdata[tag]
|
| 662 |
+
typ = self.tagtype[tag]
|
| 663 |
+
size, handler = self._load_dispatch[typ]
|
| 664 |
+
self[tag] = handler(self, data, self.legacy_api) # check type
|
| 665 |
+
val = self._tags_v2[tag]
|
| 666 |
+
if self.legacy_api and not isinstance(val, (tuple, bytes)):
|
| 667 |
+
val = (val,)
|
| 668 |
+
return val
|
| 669 |
+
|
| 670 |
+
def __contains__(self, tag: object) -> bool:
|
| 671 |
+
return tag in self._tags_v2 or tag in self._tagdata
|
| 672 |
+
|
| 673 |
+
def __setitem__(self, tag: int, value: Any) -> None:
|
| 674 |
+
self._setitem(tag, value, self.legacy_api)
|
| 675 |
+
|
| 676 |
+
def _setitem(self, tag: int, value: Any, legacy_api: bool) -> None:
|
| 677 |
+
basetypes = (Number, bytes, str)
|
| 678 |
+
|
| 679 |
+
info = TiffTags.lookup(tag, self.group)
|
| 680 |
+
values = [value] if isinstance(value, basetypes) else value
|
| 681 |
+
|
| 682 |
+
if tag not in self.tagtype:
|
| 683 |
+
if info.type:
|
| 684 |
+
self.tagtype[tag] = info.type
|
| 685 |
+
else:
|
| 686 |
+
self.tagtype[tag] = TiffTags.UNDEFINED
|
| 687 |
+
if all(isinstance(v, IFDRational) for v in values):
|
| 688 |
+
for v in values:
|
| 689 |
+
assert isinstance(v, IFDRational)
|
| 690 |
+
if v < 0:
|
| 691 |
+
self.tagtype[tag] = TiffTags.SIGNED_RATIONAL
|
| 692 |
+
break
|
| 693 |
+
else:
|
| 694 |
+
self.tagtype[tag] = TiffTags.RATIONAL
|
| 695 |
+
elif all(isinstance(v, int) for v in values):
|
| 696 |
+
short = True
|
| 697 |
+
signed_short = True
|
| 698 |
+
long = True
|
| 699 |
+
for v in values:
|
| 700 |
+
assert isinstance(v, int)
|
| 701 |
+
if short and not (0 <= v < 2**16):
|
| 702 |
+
short = False
|
| 703 |
+
if signed_short and not (-(2**15) < v < 2**15):
|
| 704 |
+
signed_short = False
|
| 705 |
+
if long and v < 0:
|
| 706 |
+
long = False
|
| 707 |
+
if short:
|
| 708 |
+
self.tagtype[tag] = TiffTags.SHORT
|
| 709 |
+
elif signed_short:
|
| 710 |
+
self.tagtype[tag] = TiffTags.SIGNED_SHORT
|
| 711 |
+
elif long:
|
| 712 |
+
self.tagtype[tag] = TiffTags.LONG
|
| 713 |
+
else:
|
| 714 |
+
self.tagtype[tag] = TiffTags.SIGNED_LONG
|
| 715 |
+
elif all(isinstance(v, float) for v in values):
|
| 716 |
+
self.tagtype[tag] = TiffTags.DOUBLE
|
| 717 |
+
elif all(isinstance(v, str) for v in values):
|
| 718 |
+
self.tagtype[tag] = TiffTags.ASCII
|
| 719 |
+
elif all(isinstance(v, bytes) for v in values):
|
| 720 |
+
self.tagtype[tag] = TiffTags.BYTE
|
| 721 |
+
|
| 722 |
+
if self.tagtype[tag] == TiffTags.UNDEFINED:
|
| 723 |
+
values = [
|
| 724 |
+
v.encode("ascii", "replace") if isinstance(v, str) else v
|
| 725 |
+
for v in values
|
| 726 |
+
]
|
| 727 |
+
elif self.tagtype[tag] == TiffTags.RATIONAL:
|
| 728 |
+
values = [float(v) if isinstance(v, int) else v for v in values]
|
| 729 |
+
|
| 730 |
+
is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict)
|
| 731 |
+
if not is_ifd:
|
| 732 |
+
values = tuple(
|
| 733 |
+
info.cvt_enum(value) if isinstance(value, str) else value
|
| 734 |
+
for value in values
|
| 735 |
+
)
|
| 736 |
+
|
| 737 |
+
dest = self._tags_v1 if legacy_api else self._tags_v2
|
| 738 |
+
|
| 739 |
+
# Three branches:
|
| 740 |
+
# Spec'd length == 1, Actual length 1, store as element
|
| 741 |
+
# Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed.
|
| 742 |
+
# No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple.
|
| 743 |
+
# Don't mess with the legacy api, since it's frozen.
|
| 744 |
+
if not is_ifd and (
|
| 745 |
+
(info.length == 1)
|
| 746 |
+
or self.tagtype[tag] == TiffTags.BYTE
|
| 747 |
+
or (info.length is None and len(values) == 1 and not legacy_api)
|
| 748 |
+
):
|
| 749 |
+
# Don't mess with the legacy api, since it's frozen.
|
| 750 |
+
if legacy_api and self.tagtype[tag] in [
|
| 751 |
+
TiffTags.RATIONAL,
|
| 752 |
+
TiffTags.SIGNED_RATIONAL,
|
| 753 |
+
]: # rationals
|
| 754 |
+
values = (values,)
|
| 755 |
+
try:
|
| 756 |
+
(dest[tag],) = values
|
| 757 |
+
except ValueError:
|
| 758 |
+
# We've got a builtin tag with 1 expected entry
|
| 759 |
+
warnings.warn(
|
| 760 |
+
f"Metadata Warning, tag {tag} had too many entries: "
|
| 761 |
+
f"{len(values)}, expected 1"
|
| 762 |
+
)
|
| 763 |
+
dest[tag] = values[0]
|
| 764 |
+
|
| 765 |
+
else:
|
| 766 |
+
# Spec'd length > 1 or undefined
|
| 767 |
+
# Unspec'd, and length > 1
|
| 768 |
+
dest[tag] = values
|
| 769 |
+
|
| 770 |
+
def __delitem__(self, tag: int) -> None:
|
| 771 |
+
self._tags_v2.pop(tag, None)
|
| 772 |
+
self._tags_v1.pop(tag, None)
|
| 773 |
+
self._tagdata.pop(tag, None)
|
| 774 |
+
|
| 775 |
+
def __iter__(self) -> Iterator[int]:
|
| 776 |
+
return iter(set(self._tagdata) | set(self._tags_v2))
|
| 777 |
+
|
| 778 |
+
def _unpack(self, fmt: str, data: bytes) -> tuple[Any, ...]:
|
| 779 |
+
return struct.unpack(self._endian + fmt, data)
|
| 780 |
+
|
| 781 |
+
def _pack(self, fmt: str, *values: Any) -> bytes:
|
| 782 |
+
return struct.pack(self._endian + fmt, *values)
|
| 783 |
+
|
| 784 |
+
list(
|
| 785 |
+
map(
|
| 786 |
+
_register_basic,
|
| 787 |
+
[
|
| 788 |
+
(TiffTags.SHORT, "H", "short"),
|
| 789 |
+
(TiffTags.LONG, "L", "long"),
|
| 790 |
+
(TiffTags.SIGNED_BYTE, "b", "signed byte"),
|
| 791 |
+
(TiffTags.SIGNED_SHORT, "h", "signed short"),
|
| 792 |
+
(TiffTags.SIGNED_LONG, "l", "signed long"),
|
| 793 |
+
(TiffTags.FLOAT, "f", "float"),
|
| 794 |
+
(TiffTags.DOUBLE, "d", "double"),
|
| 795 |
+
(TiffTags.IFD, "L", "long"),
|
| 796 |
+
(TiffTags.LONG8, "Q", "long8"),
|
| 797 |
+
],
|
| 798 |
+
)
|
| 799 |
+
)
|
| 800 |
+
|
| 801 |
+
@_register_loader(1, 1) # Basic type, except for the legacy API.
|
| 802 |
+
def load_byte(self, data: bytes, legacy_api: bool = True) -> bytes:
|
| 803 |
+
return data
|
| 804 |
+
|
| 805 |
+
@_register_writer(1) # Basic type, except for the legacy API.
|
| 806 |
+
def write_byte(self, data: bytes | int | IFDRational) -> bytes:
|
| 807 |
+
if isinstance(data, IFDRational):
|
| 808 |
+
data = int(data)
|
| 809 |
+
if isinstance(data, int):
|
| 810 |
+
data = bytes((data,))
|
| 811 |
+
return data
|
| 812 |
+
|
| 813 |
+
@_register_loader(2, 1)
|
| 814 |
+
def load_string(self, data: bytes, legacy_api: bool = True) -> str:
|
| 815 |
+
if data.endswith(b"\0"):
|
| 816 |
+
data = data[:-1]
|
| 817 |
+
return data.decode("latin-1", "replace")
|
| 818 |
+
|
| 819 |
+
@_register_writer(2)
def write_string(self, value: str | bytes | int) -> bytes:
    """Encode an ASCII field and append the required NUL terminator."""
    # remerge of https://github.com/python-pillow/Pillow/pull/1416
    out = value
    if isinstance(out, int):
        out = str(out)
    if not isinstance(out, bytes):
        out = out.encode("ascii", "replace")
    return out + b"\0"
|
| 827 |
+
|
| 828 |
+
@_register_loader(5, 8)
def load_rational(
    self, data: bytes, legacy_api: bool = True
) -> tuple[tuple[int, int] | IFDRational, ...]:
    """Decode RATIONAL fields: pairs of unsigned 32-bit (num, denom)."""
    raw = self._unpack(f"{len(data) // 4}L", data)
    pairs = zip(raw[::2], raw[1::2])
    if legacy_api:
        return tuple((num, denom) for num, denom in pairs)
    return tuple(IFDRational(num, denom) for num, denom in pairs)
|
| 838 |
+
|
| 839 |
+
@_register_writer(5)
def write_rational(self, *values: IFDRational) -> bytes:
    """Encode rationals as unsigned 32-bit numerator/denominator pairs."""
    chunks = []
    for frac in values:
        limited = _limit_rational(frac, 2**32 - 1)
        chunks.append(self._pack("2L", *limited))
    return b"".join(chunks)
|
| 844 |
+
|
| 845 |
+
@_register_loader(7, 1)
def load_undefined(self, data: bytes, legacy_api: bool = True) -> bytes:
    """UNDEFINED fields pass through as raw bytes."""
    return data
|
| 848 |
+
|
| 849 |
+
@_register_writer(7)
def write_undefined(self, value: bytes | int | IFDRational) -> bytes:
    """Serialize an UNDEFINED field; numeric input becomes its ASCII text."""
    out = value
    if isinstance(out, IFDRational):
        out = int(out)
    if isinstance(out, int):
        out = str(out).encode("ascii", "replace")
    return out
|
| 856 |
+
|
| 857 |
+
@_register_loader(10, 8)
def load_signed_rational(
    self, data: bytes, legacy_api: bool = True
) -> tuple[tuple[int, int] | IFDRational, ...]:
    """Decode SRATIONAL fields: pairs of signed 32-bit (num, denom)."""
    raw = self._unpack(f"{len(data) // 4}l", data)
    pairs = zip(raw[::2], raw[1::2])
    if legacy_api:
        return tuple((num, denom) for num, denom in pairs)
    return tuple(IFDRational(num, denom) for num, denom in pairs)
|
| 867 |
+
|
| 868 |
+
@_register_writer(10)
def write_signed_rational(self, *values: IFDRational) -> bytes:
    """Encode rationals as signed 32-bit numerator/denominator pairs."""
    chunks = []
    for frac in values:
        limited = _limit_signed_rational(frac, 2**31 - 1, -(2**31))
        chunks.append(self._pack("2l", *limited))
    return b"".join(chunks)
|
| 874 |
+
|
| 875 |
+
def _ensure_read(self, fp: IO[bytes], size: int) -> bytes:
    """Read exactly *size* bytes from *fp*; raise OSError on a short read."""
    buf = fp.read(size)
    if len(buf) == size:
        return buf
    msg = (
        "Corrupt EXIF data. "
        f"Expecting to read {size} bytes but only got {len(buf)}. "
    )
    raise OSError(msg)
|
| 884 |
+
|
| 885 |
+
def load(self, fp: IO[bytes]) -> None:
    # Parse one IFD (tag directory) from *fp*, filling the raw-data and
    # tag-type dictionaries and recording the next IFD's offset in
    # self.next.  Read errors are reported as warnings, not raised.
    self.reset()
    self._offset = fp.tell()

    try:
        # Entry count: 8 bytes for BigTIFF, 2 for classic TIFF.
        tag_count = (
            self._unpack("Q", self._ensure_read(fp, 8))
            if self._bigtiff
            else self._unpack("H", self._ensure_read(fp, 2))
        )[0]
        for i in range(tag_count):
            # Each entry: tag id, field type, value count, inline value
            # (or offset).  BigTIFF entries are 20 bytes, classic 12.
            tag, typ, count, data = (
                self._unpack("HHQ8s", self._ensure_read(fp, 20))
                if self._bigtiff
                else self._unpack("HHL4s", self._ensure_read(fp, 12))
            )

            tagname = TiffTags.lookup(tag, self.group).name
            typname = TYPES.get(typ, "unknown")
            msg = f"tag: {tagname} ({tag}) - type: {typname} ({typ})"

            try:
                unit_size, handler = self._load_dispatch[typ]
            except KeyError:
                logger.debug("%s - unsupported type %s", msg, typ)
                continue  # ignore unsupported type
            size = count * unit_size
            if size > (8 if self._bigtiff else 4):
                # Value does not fit inline; the entry holds an offset
                # into the file where the actual data lives.
                here = fp.tell()
                (offset,) = self._unpack("Q" if self._bigtiff else "L", data)
                msg += f" Tag Location: {here} - Data Location: {offset}"
                fp.seek(offset)
                data = ImageFile._safe_read(fp, size)
                fp.seek(here)
            else:
                data = data[:size]

            if len(data) != size:
                # Truncated out-of-line data: skip the tag, keep parsing.
                warnings.warn(
                    "Possibly corrupt EXIF data. "
                    f"Expecting to read {size} bytes but only got {len(data)}."
                    f" Skipping tag {tag}"
                )
                logger.debug(msg)
                continue

            if not data:
                logger.debug(msg)
                continue

            # Store raw bytes; decoding happens lazily in __getitem__.
            self._tagdata[tag] = data
            self.tagtype[tag] = typ

            msg += " - value: "
            msg += f"<table: {size} bytes>" if size > 32 else repr(data)

            logger.debug(msg)

        # Offset of the next IFD in the chain (0 means this is the last).
        (self.next,) = (
            self._unpack("Q", self._ensure_read(fp, 8))
            if self._bigtiff
            else self._unpack("L", self._ensure_read(fp, 4))
        )
    except OSError as msg:
        warnings.warn(str(msg))
        return
|
| 951 |
+
|
| 952 |
+
def _get_ifh(self) -> bytes:
    """Build the TIFF file header: byte order, magic, first-IFD offset."""
    magic = 43 if self._bigtiff else 42
    header = self._prefix + self._pack("H", magic)
    if self._bigtiff:
        # BigTIFF adds the offset size (8) and a reserved zero word,
        # and stores the first-IFD offset as a 64-bit value.
        header += self._pack("HH", 8, 0)
        header += self._pack("Q", 16)
    else:
        header += self._pack("L", 8)
    return header
|
| 959 |
+
|
| 960 |
+
def tobytes(self, offset: int = 0) -> bytes:
    # Serialize this IFD.  *offset* is the absolute file position the
    # directory will be written at; it is needed to compute the offsets
    # recorded for values that don't fit inline in their entries.
    # FIXME What about tagdata?
    result = self._pack("Q" if self._bigtiff else "H", len(self._tags_v2))

    entries: list[tuple[int, int, int, bytes, bytes]] = []

    # Offset-field format and entry sizes differ for BigTIFF.
    fmt = "Q" if self._bigtiff else "L"
    fmt_size = 8 if self._bigtiff else 4
    # First byte position past the entry table and the next-IFD pointer:
    # out-of-line data is appended starting there.
    offset += (
        len(result) + len(self._tags_v2) * (20 if self._bigtiff else 12) + fmt_size
    )
    stripoffsets = None

    # pass 1: convert tags to binary format
    # always write tags in ascending order
    for tag, value in sorted(self._tags_v2.items()):
        if tag == STRIPOFFSETS:
            stripoffsets = len(entries)
        typ = self.tagtype[tag]
        logger.debug("Tag %s, Type: %s, Value: %s", tag, typ, repr(value))
        is_ifd = typ == TiffTags.LONG and isinstance(value, dict)
        if is_ifd:
            # Nested IFD: recursively serialize it at the current offset.
            ifd = ImageFileDirectory_v2(self._get_ifh(), group=tag)
            values = self._tags_v2[tag]
            for ifd_tag, ifd_value in values.items():
                ifd[ifd_tag] = ifd_value
            data = ifd.tobytes(offset)
        else:
            values = value if isinstance(value, tuple) else (value,)
            data = self._write_dispatch[typ](self, *values)

        tagname = TiffTags.lookup(tag, self.group).name
        typname = "ifd" if is_ifd else TYPES.get(typ, "unknown")
        msg = f"save: {tagname} ({tag}) - type: {typname} ({typ}) - value: "
        msg += f"<table: {len(data)} bytes>" if len(data) >= 16 else str(values)
        logger.debug(msg)

        # count is sum of lengths for string and arbitrary data
        if is_ifd:
            count = 1
        elif typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]:
            count = len(data)
        else:
            count = len(values)
        # figure out if data fits into the entry
        if len(data) <= fmt_size:
            entries.append((tag, typ, count, data.ljust(fmt_size, b"\0"), b""))
        else:
            entries.append((tag, typ, count, self._pack(fmt, offset), data))
            offset += (len(data) + 1) // 2 * 2  # pad to word

    # update strip offset data to point beyond auxiliary data
    if stripoffsets is not None:
        tag, typ, count, value, data = entries[stripoffsets]
        if data:
            # Offsets stored out of line: decode, shift, re-encode.
            size, handler = self._load_dispatch[typ]
            values = [val + offset for val in handler(self, data, self.legacy_api)]
            data = self._write_dispatch[typ](self, *values)
        else:
            # Single inline offset: shift it directly.
            value = self._pack(fmt, self._unpack(fmt, value)[0] + offset)
        entries[stripoffsets] = tag, typ, count, value, data

    # pass 2: write entries to file
    for tag, typ, count, value, data in entries:
        logger.debug("%s %s %s %s %s", tag, typ, count, repr(value), repr(data))
        result += self._pack(
            "HHQ8s" if self._bigtiff else "HHL4s", tag, typ, count, value
        )

    # -- overwrite here for multi-page --
    result += self._pack(fmt, 0)  # end of entries

    # pass 3: write auxiliary data to file
    for tag, typ, count, value, data in entries:
        result += data
        if len(data) & 1:
            # Word-align each out-of-line value.
            result += b"\0"

    return result
|
| 1039 |
+
|
| 1040 |
+
def save(self, fp: IO[bytes]) -> int:
    """Write this IFD to *fp* and return the file offset past the data."""
    if fp.tell() == 0:  # skip TIFF header on subsequent pages
        fp.write(self._get_ifh())

    start = fp.tell()
    serialized = self.tobytes(start)
    fp.write(serialized)
    return start + len(serialized)
|
| 1048 |
+
|
| 1049 |
+
|
| 1050 |
+
# Attach the accumulated loader/writer dispatch tables to the class and
# expose each handler under a conventionally named method (e.g.
# load_short, write_double); the module-level temporaries are removed.
ImageFileDirectory_v2._load_dispatch = _load_dispatch
ImageFileDirectory_v2._write_dispatch = _write_dispatch
for idx, name in TYPES.items():
    name = name.replace(" ", "_")
    setattr(ImageFileDirectory_v2, f"load_{name}", _load_dispatch[idx][1])
    setattr(ImageFileDirectory_v2, f"write_{name}", _write_dispatch[idx])
del _load_dispatch, _write_dispatch, idx, name
|
| 1057 |
+
|
| 1058 |
+
|
| 1059 |
+
# Legacy ImageFileDirectory support.
class ImageFileDirectory_v1(ImageFileDirectory_v2):
    """This class represents the **legacy** interface to a TIFF tag directory.

    Exposes a dictionary interface of the tags in the directory::

        ifd = ImageFileDirectory_v1()
        ifd[key] = 'Some Data'
        ifd.tagtype[key] = TiffTags.ASCII
        print(ifd[key])
        ('Some Data',)

    Also contains a dictionary of tag types as read from the tiff image file,
    :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`.

    Values are returned as a tuple.

    .. deprecated:: 3.0.0
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # v1 semantics: values are decoded with legacy_api=True.
        self._legacy_api = True

    # Expose the internal v1 dictionaries under their historical names.
    tags = property(lambda self: self._tags_v1)
    tagdata = property(lambda self: self._tagdata)

    # defined in ImageFileDirectory_v2
    tagtype: dict[int, int]
    """Dictionary of tag types"""

    @classmethod
    def from_v2(cls, original: ImageFileDirectory_v2) -> ImageFileDirectory_v1:
        """Returns an
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
        instance with the same data as is contained in the original
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
        instance.

        :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`

        """

        ifd = cls(prefix=original.prefix)
        # NOTE: shares (does not copy) the raw-data and type dicts.
        ifd._tagdata = original._tagdata
        ifd.tagtype = original.tagtype
        ifd.next = original.next  # an indicator for multipage tiffs
        return ifd

    def to_v2(self) -> ImageFileDirectory_v2:
        """Returns an
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
        instance with the same data as is contained in the original
        :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
        instance.

        :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`

        """

        ifd = ImageFileDirectory_v2(prefix=self.prefix)
        # Unlike from_v2, the dictionaries are copied so later edits do
        # not leak between the two instances.
        ifd._tagdata = dict(self._tagdata)
        ifd.tagtype = dict(self.tagtype)
        ifd._tags_v2 = dict(self._tags_v2)
        return ifd

    def __contains__(self, tag: object) -> bool:
        return tag in self._tags_v1 or tag in self._tagdata

    def __len__(self) -> int:
        return len(set(self._tagdata) | set(self._tags_v1))

    def __iter__(self) -> Iterator[int]:
        return iter(set(self._tagdata) | set(self._tags_v1))

    def __setitem__(self, tag: int, value: Any) -> None:
        # Keep the v1 and v2 views of the value in sync.
        for legacy_api in (False, True):
            self._setitem(tag, value, legacy_api)

    def __getitem__(self, tag: int) -> Any:
        if tag not in self._tags_v1:  # unpack on the fly
            data = self._tagdata[tag]
            typ = self.tagtype[tag]
            size, handler = self._load_dispatch[typ]
            for legacy in (False, True):
                self._setitem(tag, handler(self, data, legacy), legacy)
        val = self._tags_v1[tag]
        if not isinstance(val, (tuple, bytes)):
            # The legacy API always returns values wrapped in a tuple.
            val = (val,)
        return val
|
| 1149 |
+
|
| 1150 |
+
|
| 1151 |
+
# undone -- switch this pointer
# Public alias: the legacy v1 directory remains the default implementation.
ImageFileDirectory = ImageFileDirectory_v1
|
| 1153 |
+
|
| 1154 |
+
|
| 1155 |
+
##
# Image plugin for TIFF files.


class TiffImageFile(ImageFile.ImageFile):
    format = "TIFF"
    format_description = "Adobe TIFF"
    # Multi-frame TIFFs may be seeked after loading, so the exclusive
    # file pointer is not closed automatically after the first load.
    _close_exclusive_fp_after_loading = False
|
| 1163 |
+
|
| 1164 |
+
def __init__(
    self,
    fp: StrOrBytesPath | IO[bytes],
    filename: str | bytes | None = None,
) -> None:
    # Declare the tag-directory attributes before ImageFile.__init__
    # runs, since it triggers _open(), which assigns them.
    self.tag_v2: ImageFileDirectory_v2
    """ Image file directory (tag dictionary) """

    self.tag: ImageFileDirectory_v1
    """ Legacy tag entries """

    super().__init__(fp, filename)
|
| 1176 |
+
|
| 1177 |
+
def _open(self) -> None:
    """Open the first image in a TIFF file"""

    # Header
    assert self.fp is not None
    ifh = self.fp.read(8)
    if ifh[2] == 43:
        # BigTIFF magic (43): the header is 16 bytes, read the rest.
        ifh += self.fp.read(8)

    self.tag_v2 = ImageFileDirectory_v2(ifh)

    # setup frame pointers
    self.__first = self.__next = self.tag_v2.next
    self.__frame = -1
    self._fp = self.fp
    self._frame_pos: list[int] = []
    self._n_frames: int | None = None

    logger.debug("*** TiffImageFile._open ***")
    logger.debug("- __first: %s", self.__first)
    logger.debug("- ifh: %s", repr(ifh))  # Use repr to avoid str(bytes)

    # and load the first frame
    self._seek(0)
|
| 1201 |
+
|
| 1202 |
+
@property
def n_frames(self) -> int:
    """Total number of frames, scanning the IFD chain on first access."""
    if self._n_frames is None:
        saved_frame = self.tell()
        # Jump past the frames already discovered, then keep seeking
        # forward until _seek records the total frame count.
        self._seek(len(self._frame_pos))
        while self._n_frames is None:
            self._seek(self.tell() + 1)
        self.seek(saved_frame)
    assert self._n_frames is not None
    return self._n_frames
|
| 1213 |
+
|
| 1214 |
+
def seek(self, frame: int) -> None:
    """Select frame number *frame* as the current image."""
    if not self._seek_check(frame):
        return
    self._seek(frame)
    if self._im is None:
        return
    # Drop the cached core image if it no longer matches the new frame.
    stale = (
        self.im.size != self._tile_size
        or self.im.mode != self.mode
        or bool(self.readonly)
    )
    if stale:
        self._im = None
|
| 1225 |
+
|
| 1226 |
+
def _seek(self, frame: int) -> None:
    # Advance to *frame*, reading every intermediate IFD and caching its
    # file position in self._frame_pos so later seeks are direct.
    if isinstance(self._fp, DeferredError):
        raise self._fp.ex
    self.fp = self._fp

    while len(self._frame_pos) <= frame:
        if not self.__next:
            msg = "no more images in TIFF file"
            raise EOFError(msg)
        logger.debug(
            "Seeking to frame %s, on frame %s, __next %s, location: %s",
            frame,
            self.__frame,
            self.__next,
            self.fp.tell(),
        )
        if self.__next >= 2**63:
            # Offset beyond any valid signed 64-bit file position.
            msg = "Unable to seek to frame"
            raise ValueError(msg)
        self.fp.seek(self.__next)
        self._frame_pos.append(self.__next)
        logger.debug("Loading tags, location: %s", self.fp.tell())
        self.tag_v2.load(self.fp)
        if self.tag_v2.next in self._frame_pos:
            # This IFD has already been processed
            # Declare this to be the end of the image
            self.__next = 0
        else:
            self.__next = self.tag_v2.next
        if self.__next == 0:
            # Reached the end of the chain: the frame count is known now.
            self._n_frames = frame + 1
        if len(self._frame_pos) == 1:
            self.is_animated = self.__next != 0
        self.__frame += 1
    self.fp.seek(self._frame_pos[frame])
    self.tag_v2.load(self.fp)
    if XMP in self.tag_v2:
        xmp = self.tag_v2[XMP]
        if isinstance(xmp, tuple) and len(xmp) == 1:
            xmp = xmp[0]
        self.info["xmp"] = xmp
    elif "xmp" in self.info:
        # Stale XMP from a previous frame: remove it.
        del self.info["xmp"]
    self._reload_exif()
    # fill the legacy tag/ifd entries
    self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2)
    self.__frame = frame
    self._setup()
|
| 1274 |
+
|
| 1275 |
+
def tell(self) -> int:
    """Return the index of the currently selected frame."""
    current_frame = self.__frame
    return current_frame
|
| 1278 |
+
|
| 1279 |
+
def get_photoshop_blocks(self) -> dict[int, dict[str, bytes]]:
    """
    Returns a dictionary of Photoshop "Image Resource Blocks".
    The keys are the image resource ID. For more information, see
    https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577409_pgfId-1037727

    :returns: Photoshop "Image Resource Blocks" in a dictionary.
    """
    blocks = {}
    val = self.tag_v2.get(ExifTags.Base.ImageResources)
    if val:
        while val.startswith(b"8BIM"):
            # Resource id follows the 8BIM signature; renamed from "id"
            # so the builtin id() is not shadowed.
            resource_id = i16(val[4:6])
            # Pascal-style name field, padded to an even length.
            n = math.ceil((val[6] + 1) / 2) * 2
            size = i32(val[6 + n : 10 + n])
            data = val[10 + n : 10 + n + size]
            blocks[resource_id] = {"data": data}

            # Advance to the next block, keeping 2-byte alignment.
            val = val[math.ceil((10 + n + size) / 2) * 2 :]
    return blocks
|
| 1299 |
+
|
| 1300 |
+
def load(self) -> Image.core.PixelAccess | None:
    """Load pixel data, routing through libtiff when required."""
    use_libtiff = bool(self.tile) and self.use_load_libtiff
    if use_libtiff:
        return self._load_libtiff()
    return super().load()
|
| 1304 |
+
|
| 1305 |
+
def load_prepare(self) -> None:
    # Allocate the core image lazily, sized to the full tile and guarded
    # against decompression bombs, then defer to the base implementation.
    if self._im is None:
        Image._decompression_bomb_check(self._tile_size)
        self.im = Image.core.new(self.mode, self._tile_size)
    ImageFile.ImageFile.load_prepare(self)
|
| 1310 |
+
|
| 1311 |
+
def load_end(self) -> None:
    """Finalize loading: cache EXIF sub-IFDs and apply orientation."""
    # Only a single-frame image may release its exclusive fp after load.
    # This is the ImageFile.load path only; libtiff handles its own case.
    if not self.is_animated:
        self._close_exclusive_fp_after_loading = True

        # Pull the grouped IFDs out of the file before it can be closed.
        exif = self.getexif()
        for group_key in TiffTags.TAGS_V2_GROUPS:
            if group_key in exif:
                exif.get_ifd(group_key)

    ImageOps.exif_transpose(self, in_place=True)
    if ExifTags.Base.Orientation in self.tag_v2:
        del self.tag_v2[ExifTags.Base.Orientation]
|
| 1327 |
+
|
| 1328 |
+
def _load_libtiff(self) -> Image.core.PixelAccess | None:
    """Overload method triggered when we detect a compressed tiff
    Calls out to libtiff"""

    Image.Image.load(self)

    self.load_prepare()

    if not len(self.tile) == 1:
        msg = "Not exactly one tile"
        raise OSError(msg)

    # (self._compression, (extents tuple),
    #    0, (rawmode, self._compression, fp))
    extents = self.tile[0][1]
    args = self.tile[0][3]

    # To be nice on memory footprint, if there's a
    # file descriptor, use that instead of reading
    # into a string in python.
    assert self.fp is not None
    try:
        fp = hasattr(self.fp, "fileno") and self.fp.fileno()
        # flush the file descriptor, prevents error on pypy 2.4+
        # should also eliminate the need for fp.tell
        # in _seek
        if hasattr(self.fp, "flush"):
            self.fp.flush()
    except OSError:
        # io.BytesIO have a fileno, but returns an OSError if
        # it doesn't use a file descriptor.
        fp = False

    if fp:
        # Splice the OS-level descriptor into the decoder arguments in
        # place of the placeholder third element.
        assert isinstance(args, tuple)
        args_list = list(args)
        args_list[2] = fp
        args = tuple(args_list)

    decoder = Image._getdecoder(self.mode, "libtiff", args, self.decoderconfig)
    try:
        decoder.setimage(self.im, extents)
    except ValueError as e:
        msg = "Couldn't set the image"
        raise OSError(msg) from e

    close_self_fp = self._exclusive_fp and not self.is_animated
    if hasattr(self.fp, "getvalue"):
        # We've got a stringio like thing passed in. Yay for all in memory.
        # The decoder needs the entire file in one shot, so there's not
        # a lot we can do here other than give it the entire file.
        # unless we could do something like get the address of the
        # underlying string for stringio.
        #
        # Rearranging for supporting byteio items, since they have a fileno
        # that returns an OSError if there's no underlying fp. Easier to
        # deal with here by reordering.
        logger.debug("have getvalue. just sending in a string from getvalue")
        n, err = decoder.decode(self.fp.getvalue())
    elif fp:
        # we've got a actual file on disk, pass in the fp.
        logger.debug("have fileno, calling fileno version of the decoder.")
        if not close_self_fp:
            self.fp.seek(0)
        # Save and restore the file position, because libtiff will move it
        # outside of the Python runtime, and that will confuse
        # io.BufferedReader and possible others.
        # NOTE: This must use os.lseek(), and not fp.tell()/fp.seek(),
        # because the buffer read head already may not equal the actual
        # file position, and fp.seek() may just adjust it's internal
        # pointer and not actually seek the OS file handle.
        pos = os.lseek(fp, 0, os.SEEK_CUR)
        # 4 bytes, otherwise the trace might error out
        n, err = decoder.decode(b"fpfp")
        os.lseek(fp, pos, os.SEEK_SET)
    else:
        # we have something else.
        logger.debug("don't have fileno or getvalue. just reading")
        self.fp.seek(0)
        # UNDONE -- so much for that buffer size thing.
        n, err = decoder.decode(self.fp.read())

    self.tile = []
    self.readonly = 0

    self.load_end()

    if close_self_fp:
        self.fp.close()
        self.fp = None  # might be shared

    if err < 0:
        msg = f"decoder error {err}"
        raise OSError(msg)

    return Image.Image.load(self)
|
| 1424 |
+
|
| 1425 |
+
def _setup(self) -> None:
|
| 1426 |
+
"""Setup this image object based on current tags"""
|
| 1427 |
+
|
| 1428 |
+
if 0xBC01 in self.tag_v2:
|
| 1429 |
+
msg = "Windows Media Photo files not yet supported"
|
| 1430 |
+
raise OSError(msg)
|
| 1431 |
+
|
| 1432 |
+
# extract relevant tags
|
| 1433 |
+
self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)]
|
| 1434 |
+
self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1)
|
| 1435 |
+
|
| 1436 |
+
# photometric is a required tag, but not everyone is reading
|
| 1437 |
+
# the specification
|
| 1438 |
+
photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0)
|
| 1439 |
+
|
| 1440 |
+
# old style jpeg compression images most certainly are YCbCr
|
| 1441 |
+
if self._compression == "tiff_jpeg":
|
| 1442 |
+
photo = 6
|
| 1443 |
+
|
| 1444 |
+
fillorder = self.tag_v2.get(FILLORDER, 1)
|
| 1445 |
+
|
| 1446 |
+
logger.debug("*** Summary ***")
|
| 1447 |
+
logger.debug("- compression: %s", self._compression)
|
| 1448 |
+
logger.debug("- photometric_interpretation: %s", photo)
|
| 1449 |
+
logger.debug("- planar_configuration: %s", self._planar_configuration)
|
| 1450 |
+
logger.debug("- fill_order: %s", fillorder)
|
| 1451 |
+
logger.debug("- YCbCr subsampling: %s", self.tag_v2.get(YCBCRSUBSAMPLING))
|
| 1452 |
+
|
| 1453 |
+
# size
|
| 1454 |
+
try:
|
| 1455 |
+
xsize = self.tag_v2[IMAGEWIDTH]
|
| 1456 |
+
ysize = self.tag_v2[IMAGELENGTH]
|
| 1457 |
+
except KeyError as e:
|
| 1458 |
+
msg = "Missing dimensions"
|
| 1459 |
+
raise TypeError(msg) from e
|
| 1460 |
+
if not isinstance(xsize, int) or not isinstance(ysize, int):
|
| 1461 |
+
msg = "Invalid dimensions"
|
| 1462 |
+
raise ValueError(msg)
|
| 1463 |
+
self._tile_size = xsize, ysize
|
| 1464 |
+
orientation = self.tag_v2.get(ExifTags.Base.Orientation)
|
| 1465 |
+
if orientation in (5, 6, 7, 8):
|
| 1466 |
+
self._size = ysize, xsize
|
| 1467 |
+
else:
|
| 1468 |
+
self._size = xsize, ysize
|
| 1469 |
+
|
| 1470 |
+
logger.debug("- size: %s", self.size)
|
| 1471 |
+
|
| 1472 |
+
sample_format = self.tag_v2.get(SAMPLEFORMAT, (1,))
|
| 1473 |
+
if len(sample_format) > 1 and max(sample_format) == min(sample_format) == 1:
|
| 1474 |
+
# SAMPLEFORMAT is properly per band, so an RGB image will
|
| 1475 |
+
# be (1,1,1). But, we don't support per band pixel types,
|
| 1476 |
+
# and anything more than one band is a uint8. So, just
|
| 1477 |
+
# take the first element. Revisit this if adding support
|
| 1478 |
+
# for more exotic images.
|
| 1479 |
+
sample_format = (1,)
|
| 1480 |
+
|
| 1481 |
+
bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,))
|
| 1482 |
+
extra_tuple = self.tag_v2.get(EXTRASAMPLES, ())
|
| 1483 |
+
if photo in (2, 6, 8): # RGB, YCbCr, LAB
|
| 1484 |
+
bps_count = 3
|
| 1485 |
+
elif photo == 5: # CMYK
|
| 1486 |
+
bps_count = 4
|
| 1487 |
+
else:
|
| 1488 |
+
bps_count = 1
|
| 1489 |
+
bps_count += len(extra_tuple)
|
| 1490 |
+
bps_actual_count = len(bps_tuple)
|
| 1491 |
+
samples_per_pixel = self.tag_v2.get(
|
| 1492 |
+
SAMPLESPERPIXEL,
|
| 1493 |
+
3 if self._compression == "tiff_jpeg" and photo in (2, 6) else 1,
|
| 1494 |
+
)
|
| 1495 |
+
|
| 1496 |
+
if samples_per_pixel > MAX_SAMPLESPERPIXEL:
|
| 1497 |
+
# DOS check, samples_per_pixel can be a Long, and we extend the tuple below
|
| 1498 |
+
logger.error(
|
| 1499 |
+
"More samples per pixel than can be decoded: %s", samples_per_pixel
|
| 1500 |
+
)
|
| 1501 |
+
msg = "Invalid value for samples per pixel"
|
| 1502 |
+
raise SyntaxError(msg)
|
| 1503 |
+
|
| 1504 |
+
if samples_per_pixel < bps_actual_count:
|
| 1505 |
+
# If a file has more values in bps_tuple than expected,
|
| 1506 |
+
# remove the excess.
|
| 1507 |
+
bps_tuple = bps_tuple[:samples_per_pixel]
|
| 1508 |
+
elif samples_per_pixel > bps_actual_count and bps_actual_count == 1:
|
| 1509 |
+
# If a file has only one value in bps_tuple, when it should have more,
|
| 1510 |
+
# presume it is the same number of bits for all of the samples.
|
| 1511 |
+
bps_tuple = bps_tuple * samples_per_pixel
|
| 1512 |
+
|
| 1513 |
+
if len(bps_tuple) != samples_per_pixel:
|
| 1514 |
+
msg = "unknown data organization"
|
| 1515 |
+
raise SyntaxError(msg)
|
| 1516 |
+
|
| 1517 |
+
# mode: check photometric interpretation and bits per pixel
|
| 1518 |
+
key = (
|
| 1519 |
+
self.tag_v2.prefix,
|
| 1520 |
+
photo,
|
| 1521 |
+
sample_format,
|
| 1522 |
+
fillorder,
|
| 1523 |
+
bps_tuple,
|
| 1524 |
+
extra_tuple,
|
| 1525 |
+
)
|
| 1526 |
+
logger.debug("format key: %s", key)
|
| 1527 |
+
try:
|
| 1528 |
+
self._mode, rawmode = OPEN_INFO[key]
|
| 1529 |
+
except KeyError as e:
|
| 1530 |
+
logger.debug("- unsupported format")
|
| 1531 |
+
msg = "unknown pixel mode"
|
| 1532 |
+
raise SyntaxError(msg) from e
|
| 1533 |
+
|
| 1534 |
+
logger.debug("- raw mode: %s", rawmode)
|
| 1535 |
+
logger.debug("- pil mode: %s", self.mode)
|
| 1536 |
+
|
| 1537 |
+
self.info["compression"] = self._compression
|
| 1538 |
+
|
| 1539 |
+
xres = self.tag_v2.get(X_RESOLUTION, 1)
|
| 1540 |
+
yres = self.tag_v2.get(Y_RESOLUTION, 1)
|
| 1541 |
+
|
| 1542 |
+
if xres and yres:
|
| 1543 |
+
resunit = self.tag_v2.get(RESOLUTION_UNIT)
|
| 1544 |
+
if resunit == 2: # dots per inch
|
| 1545 |
+
self.info["dpi"] = (xres, yres)
|
| 1546 |
+
elif resunit == 3: # dots per centimeter. convert to dpi
|
| 1547 |
+
self.info["dpi"] = (xres * 2.54, yres * 2.54)
|
| 1548 |
+
elif resunit is None: # used to default to 1, but now 2)
|
| 1549 |
+
self.info["dpi"] = (xres, yres)
|
| 1550 |
+
# For backward compatibility,
|
| 1551 |
+
# we also preserve the old behavior
|
| 1552 |
+
self.info["resolution"] = xres, yres
|
| 1553 |
+
else: # No absolute unit of measurement
|
| 1554 |
+
self.info["resolution"] = xres, yres
|
| 1555 |
+
|
| 1556 |
+
# build tile descriptors
|
| 1557 |
+
x = y = layer = 0
|
| 1558 |
+
self.tile = []
|
| 1559 |
+
self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw"
|
| 1560 |
+
if self.use_load_libtiff:
|
| 1561 |
+
# Decoder expects entire file as one tile.
|
| 1562 |
+
# There's a buffer size limit in load (64k)
|
| 1563 |
+
# so large g4 images will fail if we use that
|
| 1564 |
+
# function.
|
| 1565 |
+
#
|
| 1566 |
+
# Setup the one tile for the whole image, then
|
| 1567 |
+
# use the _load_libtiff function.
|
| 1568 |
+
|
| 1569 |
+
# libtiff handles the fillmode for us, so 1;IR should
|
| 1570 |
+
# actually be 1;I. Including the R double reverses the
|
| 1571 |
+
# bits, so stripes of the image are reversed. See
|
| 1572 |
+
# https://github.com/python-pillow/Pillow/issues/279
|
| 1573 |
+
if fillorder == 2:
|
| 1574 |
+
# Replace fillorder with fillorder=1
|
| 1575 |
+
key = key[:3] + (1,) + key[4:]
|
| 1576 |
+
logger.debug("format key: %s", key)
|
| 1577 |
+
# this should always work, since all the
|
| 1578 |
+
# fillorder==2 modes have a corresponding
|
| 1579 |
+
# fillorder=1 mode
|
| 1580 |
+
self._mode, rawmode = OPEN_INFO[key]
|
| 1581 |
+
# YCbCr images with new jpeg compression with pixels in one plane
|
| 1582 |
+
# unpacked straight into RGB values
|
| 1583 |
+
if (
|
| 1584 |
+
photo == 6
|
| 1585 |
+
and self._compression == "jpeg"
|
| 1586 |
+
and self._planar_configuration == 1
|
| 1587 |
+
):
|
| 1588 |
+
rawmode = "RGB"
|
| 1589 |
+
# libtiff always returns the bytes in native order.
|
| 1590 |
+
# we're expecting image byte order. So, if the rawmode
|
| 1591 |
+
# contains I;16, we need to convert from native to image
|
| 1592 |
+
# byte order.
|
| 1593 |
+
elif rawmode == "I;16":
|
| 1594 |
+
rawmode = "I;16N"
|
| 1595 |
+
elif rawmode.endswith((";16B", ";16L")):
|
| 1596 |
+
rawmode = rawmode[:-1] + "N"
|
| 1597 |
+
|
| 1598 |
+
# Offset in the tile tuple is 0, we go from 0,0 to
|
| 1599 |
+
# w,h, and we only do this once -- eds
|
| 1600 |
+
a = (rawmode, self._compression, False, self.tag_v2.offset)
|
| 1601 |
+
self.tile.append(ImageFile._Tile("libtiff", (0, 0, xsize, ysize), 0, a))
|
| 1602 |
+
|
| 1603 |
+
elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2:
|
| 1604 |
+
# striped image
|
| 1605 |
+
if STRIPOFFSETS in self.tag_v2:
|
| 1606 |
+
offsets = self.tag_v2[STRIPOFFSETS]
|
| 1607 |
+
h = self.tag_v2.get(ROWSPERSTRIP, ysize)
|
| 1608 |
+
w = xsize
|
| 1609 |
+
else:
|
| 1610 |
+
# tiled image
|
| 1611 |
+
offsets = self.tag_v2[TILEOFFSETS]
|
| 1612 |
+
tilewidth = self.tag_v2.get(TILEWIDTH)
|
| 1613 |
+
h = self.tag_v2.get(TILELENGTH)
|
| 1614 |
+
if not isinstance(tilewidth, int) or not isinstance(h, int):
|
| 1615 |
+
msg = "Invalid tile dimensions"
|
| 1616 |
+
raise ValueError(msg)
|
| 1617 |
+
w = tilewidth
|
| 1618 |
+
|
| 1619 |
+
if w == xsize and h == ysize and self._planar_configuration != 2:
|
| 1620 |
+
# Every tile covers the image. Only use the last offset
|
| 1621 |
+
offsets = offsets[-1:]
|
| 1622 |
+
|
| 1623 |
+
for offset in offsets:
|
| 1624 |
+
if x + w > xsize:
|
| 1625 |
+
stride = w * sum(bps_tuple) / 8 # bytes per line
|
| 1626 |
+
else:
|
| 1627 |
+
stride = 0
|
| 1628 |
+
|
| 1629 |
+
tile_rawmode = rawmode
|
| 1630 |
+
if self._planar_configuration == 2:
|
| 1631 |
+
# each band on it's own layer
|
| 1632 |
+
tile_rawmode = rawmode[layer]
|
| 1633 |
+
# adjust stride width accordingly
|
| 1634 |
+
stride /= bps_count
|
| 1635 |
+
|
| 1636 |
+
args = (tile_rawmode, int(stride), 1)
|
| 1637 |
+
self.tile.append(
|
| 1638 |
+
ImageFile._Tile(
|
| 1639 |
+
self._compression,
|
| 1640 |
+
(x, y, min(x + w, xsize), min(y + h, ysize)),
|
| 1641 |
+
offset,
|
| 1642 |
+
args,
|
| 1643 |
+
)
|
| 1644 |
+
)
|
| 1645 |
+
x += w
|
| 1646 |
+
if x >= xsize:
|
| 1647 |
+
x, y = 0, y + h
|
| 1648 |
+
if y >= ysize:
|
| 1649 |
+
y = 0
|
| 1650 |
+
layer += 1
|
| 1651 |
+
else:
|
| 1652 |
+
logger.debug("- unsupported data organization")
|
| 1653 |
+
msg = "unknown data organization"
|
| 1654 |
+
raise SyntaxError(msg)
|
| 1655 |
+
|
| 1656 |
+
# Fix up info.
|
| 1657 |
+
if ICCPROFILE in self.tag_v2:
|
| 1658 |
+
self.info["icc_profile"] = self.tag_v2[ICCPROFILE]
|
| 1659 |
+
|
| 1660 |
+
# fixup palette descriptor
|
| 1661 |
+
|
| 1662 |
+
if self.mode in ["P", "PA"]:
|
| 1663 |
+
palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]]
|
| 1664 |
+
self.palette = ImagePalette.raw("RGB;L", b"".join(palette))
|
| 1665 |
+
|
| 1666 |
+
|
| 1667 |
+
#
|
| 1668 |
+
# --------------------------------------------------------------------
|
| 1669 |
+
# Write TIFF files
|
| 1670 |
+
|
| 1671 |
+
# little endian is default except for image modes with
|
| 1672 |
+
# explicit big endian byte-order
|
| 1673 |
+
|
| 1674 |
+
SAVE_INFO = {
    # mode => rawmode, byteorder, photometrics,
    # sampleformat, bitspersample, extra
    # byteorder: II = little endian, MM = big endian (matches mode suffix)
    # photometrics: TIFF PhotometricInterpretation tag value
    # extra: ExtraSamples tag value (None = no extra samples)
    "1": ("1", II, 1, 1, (1,), None),
    "L": ("L", II, 1, 1, (8,), None),
    "LA": ("LA", II, 1, 1, (8, 8), 2),
    "P": ("P", II, 3, 1, (8,), None),
    "PA": ("PA", II, 3, 1, (8, 8), 2),
    "I": ("I;32S", II, 1, 2, (32,), None),
    "I;16": ("I;16", II, 1, 1, (16,), None),
    "I;16L": ("I;16L", II, 1, 1, (16,), None),
    "F": ("F;32F", II, 1, 3, (32,), None),
    "RGB": ("RGB", II, 2, 1, (8, 8, 8), None),
    "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0),
    "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2),
    "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None),
    "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None),
    "LAB": ("LAB", II, 8, 1, (8, 8, 8), None),
    "I;16B": ("I;16B", MM, 1, 1, (16,), None),
}
|
| 1694 |
+
|
| 1695 |
+
|
| 1696 |
+
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Write a single TIFF frame for *im* to *fp*.

    Builds an ImageFileDirectory_v2 from the image mode, encoder options and
    any tags carried over from a source TIFF, then encodes either through
    libtiff (when compression is requested or WRITE_LIBTIFF is set) or through
    the pure-Python raw path.

    :param im: Image to save; its mode must be a key of SAVE_INFO.
    :param fp: Writable binary file object.
    :param filename: Destination name, passed through to the libtiff encoder.
    :raises OSError: If the mode cannot be written as TIFF, or the libtiff
        encoder reports an error.
    :raises ValueError: For an invalid ``quality`` setting.
    """
    try:
        rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode]
    except KeyError as e:
        msg = f"cannot write mode {im.mode} as TIFF"
        raise OSError(msg) from e

    encoderinfo = im.encoderinfo
    encoderconfig = im.encoderconfig

    ifd = ImageFileDirectory_v2(prefix=prefix)
    if encoderinfo.get("big_tiff"):
        ifd._bigtiff = True

    # Resolve compression: explicit option wins over the value inherited
    # from the opened image's info.
    try:
        compression = encoderinfo["compression"]
    except KeyError:
        compression = im.info.get("compression")
        if isinstance(compression, int):
            # compression value may be from BMP. Ignore it
            compression = None
    if compression is None:
        compression = "raw"
    elif compression == "tiff_jpeg":
        # OJPEG is obsolete, so use new-style JPEG compression instead
        compression = "jpeg"
    elif compression == "tiff_deflate":
        compression = "tiff_adobe_deflate"

    # Any non-raw compression forces the libtiff encoder.
    libtiff = WRITE_LIBTIFF or compression != "raw"

    # required for color libtiff images
    ifd[PLANAR_CONFIGURATION] = 1

    ifd[IMAGEWIDTH] = im.size[0]
    ifd[IMAGELENGTH] = im.size[1]

    # write any arbitrary tags passed in as an ImageFileDirectory
    if "tiffinfo" in encoderinfo:
        info = encoderinfo["tiffinfo"]
    elif "exif" in encoderinfo:
        info = encoderinfo["exif"]
        if isinstance(info, bytes):
            exif = Image.Exif()
            exif.load(info)
            info = exif
    else:
        info = {}
    logger.debug("Tiffinfo Keys: %s", list(info))
    if isinstance(info, ImageFileDirectory_v1):
        info = info.to_v2()
    for key in info:
        if isinstance(info, Image.Exif) and key in TiffTags.TAGS_V2_GROUPS:
            # Nested IFDs (e.g. GPS) are copied as sub-directories.
            ifd[key] = info.get_ifd(key)
        else:
            ifd[key] = info.get(key)
        try:
            ifd.tagtype[key] = info.tagtype[key]
        except Exception:
            pass  # might not be an IFD. Might not have populated type

    legacy_ifd = {}
    if hasattr(im, "tag"):
        legacy_ifd = im.tag.to_v2()

    # Tags from a source TIFF that may be re-emitted in the output;
    # tag_v2 entries take precedence over the legacy v1 tags.
    supplied_tags = {**legacy_ifd, **getattr(im, "tag_v2", {})}
    for tag in (
        # IFD offset that may not be correct in the saved image
        EXIFIFD,
        # Determined by the image format and should not be copied from legacy_ifd.
        SAMPLEFORMAT,
    ):
        if tag in supplied_tags:
            del supplied_tags[tag]

    # additions written by Greg Couch, gregc@cgl.ucsf.edu
    # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com
    if hasattr(im, "tag_v2"):
        # preserve tags from original TIFF image file
        for key in (
            RESOLUTION_UNIT,
            X_RESOLUTION,
            Y_RESOLUTION,
            IPTC_NAA_CHUNK,
            PHOTOSHOP_CHUNK,
            XMP,
        ):
            if key in im.tag_v2:
                if key == IPTC_NAA_CHUNK and im.tag_v2.tagtype[key] not in (
                    TiffTags.BYTE,
                    TiffTags.UNDEFINED,
                ):
                    # IPTC data of an unexpected tag type is dropped entirely.
                    del supplied_tags[key]
                else:
                    ifd[key] = im.tag_v2[key]
                    ifd.tagtype[key] = im.tag_v2.tagtype[key]

    # preserve ICC profile (should also work when saving other formats
    # which support profiles as TIFF) -- 2008-06-06 Florian Hoech
    icc = encoderinfo.get("icc_profile", im.info.get("icc_profile"))
    if icc:
        ifd[ICCPROFILE] = icc

    # Map friendly encoder option names onto TIFF tags.
    for key, name in [
        (IMAGEDESCRIPTION, "description"),
        (X_RESOLUTION, "resolution"),
        (Y_RESOLUTION, "resolution"),
        (X_RESOLUTION, "x_resolution"),
        (Y_RESOLUTION, "y_resolution"),
        (RESOLUTION_UNIT, "resolution_unit"),
        (SOFTWARE, "software"),
        (DATE_TIME, "date_time"),
        (ARTIST, "artist"),
        (COPYRIGHT, "copyright"),
    ]:
        if name in encoderinfo:
            ifd[key] = encoderinfo[name]

    dpi = encoderinfo.get("dpi")
    if dpi:
        ifd[RESOLUTION_UNIT] = 2  # 2 = inches
        ifd[X_RESOLUTION] = dpi[0]
        ifd[Y_RESOLUTION] = dpi[1]

    if bits != (1,):
        ifd[BITSPERSAMPLE] = bits
        if len(bits) != 1:
            ifd[SAMPLESPERPIXEL] = len(bits)
    if extra is not None:
        ifd[EXTRASAMPLES] = extra
    if format != 1:
        ifd[SAMPLEFORMAT] = format

    if PHOTOMETRIC_INTERPRETATION not in ifd:
        ifd[PHOTOMETRIC_INTERPRETATION] = photo
    elif im.mode in ("1", "L") and ifd[PHOTOMETRIC_INTERPRETATION] == 0:
        # Caller asked for WhiteIsZero; invert the pixel data so the stored
        # bytes match that interpretation.
        if im.mode == "1":
            inverted_im = im.copy()
            px = inverted_im.load()
            if px is not None:
                for y in range(inverted_im.height):
                    for x in range(inverted_im.width):
                        px[x, y] = 0 if px[x, y] == 255 else 255
                im = inverted_im
        else:
            im = ImageOps.invert(im)

    if im.mode in ["P", "PA"]:
        # TIFF colormaps are 16-bit per channel, 256 entries per channel.
        lut = im.im.getpalette("RGB", "RGB;L")
        colormap = []
        colors = len(lut) // 3
        for i in range(3):
            colormap += [v * 256 for v in lut[colors * i : colors * (i + 1)]]
            colormap += [0] * (256 - colors)
        ifd[COLORMAP] = colormap
    # data orientation
    w, h = ifd[IMAGEWIDTH], ifd[IMAGELENGTH]
    stride = len(bits) * ((w * bits[0] + 7) // 8)
    if ROWSPERSTRIP not in ifd:
        # aim for given strip size (64 KB by default) when using libtiff writer
        if libtiff:
            im_strip_size = encoderinfo.get("strip_size", STRIP_SIZE)
            rows_per_strip = 1 if stride == 0 else min(im_strip_size // stride, h)
            # JPEG encoder expects multiple of 8 rows
            if compression == "jpeg":
                rows_per_strip = min(((rows_per_strip + 7) // 8) * 8, h)
        else:
            rows_per_strip = h
        if rows_per_strip == 0:
            rows_per_strip = 1
        ifd[ROWSPERSTRIP] = rows_per_strip
    strip_byte_counts = 1 if stride == 0 else stride * ifd[ROWSPERSTRIP]
    strips_per_image = (h + ifd[ROWSPERSTRIP] - 1) // ifd[ROWSPERSTRIP]
    if strip_byte_counts >= 2**16:
        # Count no longer fits in a SHORT; force LONG storage.
        ifd.tagtype[STRIPBYTECOUNTS] = TiffTags.LONG
    # Last strip may be shorter than the rest.
    ifd[STRIPBYTECOUNTS] = (strip_byte_counts,) * (strips_per_image - 1) + (
        stride * h - strip_byte_counts * (strips_per_image - 1),
    )
    ifd[STRIPOFFSETS] = tuple(
        range(0, strip_byte_counts * strips_per_image, strip_byte_counts)
    )  # this is adjusted by IFD writer
    # no compression by default:
    ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1)

    if im.mode == "YCbCr":
        for tag, default_value in {
            YCBCRSUBSAMPLING: (1, 1),
            REFERENCEBLACKWHITE: (0, 255, 128, 255, 128, 255),
        }.items():
            ifd.setdefault(tag, default_value)

    # Tile tags never apply to the strip-based output we generate.
    blocklist = [TILEWIDTH, TILELENGTH, TILEOFFSETS, TILEBYTECOUNTS]
    if libtiff:
        if "quality" in encoderinfo:
            quality = encoderinfo["quality"]
            if not isinstance(quality, int) or quality < 0 or quality > 100:
                msg = "Invalid quality setting"
                raise ValueError(msg)
            if compression != "jpeg":
                msg = "quality setting only supported for 'jpeg' compression"
                raise ValueError(msg)
            ifd[JPEGQUALITY] = quality

        logger.debug("Saving using libtiff encoder")
        logger.debug("Items: %s", sorted(ifd.items()))
        _fp = 0
        if hasattr(fp, "fileno"):
            # Hand libtiff a real file descriptor when possible so it can
            # write directly; otherwise encode to memory and copy below.
            try:
                fp.seek(0)
                _fp = fp.fileno()
            except io.UnsupportedOperation:
                pass

        # optional types for non core tags
        types = {}
        # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library
        # based on the data in the strip.
        # OSUBFILETYPE is deprecated.
        # The other tags expect arrays with a certain length (fixed or depending on
        # BITSPERSAMPLE, etc), passing arrays with a different length will result in
        # segfaults. Block these tags until we add extra validation.
        # SUBIFD may also cause a segfault.
        blocklist += [
            OSUBFILETYPE,
            REFERENCEBLACKWHITE,
            STRIPBYTECOUNTS,
            STRIPOFFSETS,
            TRANSFERFUNCTION,
            SUBIFD,
        ]

        # bits per sample is a single short in the tiff directory, not a list.
        atts: dict[int, Any] = {BITSPERSAMPLE: bits[0]}
        # Merge the ones that we have with (optional) more bits from
        # the original file, e.g x,y resolution so that we can
        # save(load('')) == original file.
        for tag, value in itertools.chain(ifd.items(), supplied_tags.items()):
            # Libtiff can only process certain core items without adding
            # them to the custom dictionary.
            # Custom items are supported for int, float, unicode, string and byte
            # values. Other types and tuples require a tagtype.
            if tag not in TiffTags.LIBTIFF_CORE:
                if tag in TiffTags.TAGS_V2_GROUPS:
                    types[tag] = TiffTags.LONG8
                elif tag in ifd.tagtype:
                    types[tag] = ifd.tagtype[tag]
                elif isinstance(value, (int, float, str, bytes)) or (
                    isinstance(value, tuple)
                    and all(isinstance(v, (int, float, IFDRational)) for v in value)
                ):
                    type = TiffTags.lookup(tag).type
                    if type:
                        types[tag] = type
            if tag not in atts and tag not in blocklist:
                if isinstance(value, str):
                    # Libtiff wants NUL-terminated ASCII strings.
                    atts[tag] = value.encode("ascii", "replace") + b"\0"
                elif isinstance(value, IFDRational):
                    atts[tag] = float(value)
                else:
                    atts[tag] = value

        if SAMPLEFORMAT in atts and len(atts[SAMPLEFORMAT]) == 1:
            atts[SAMPLEFORMAT] = atts[SAMPLEFORMAT][0]

        logger.debug("Converted items: %s", sorted(atts.items()))

        # libtiff always expects the bytes in native order.
        # we're storing image byte order. So, if the rawmode
        # contains I;16, we need to convert from native to image
        # byte order.
        if im.mode in ("I;16", "I;16B", "I;16L"):
            rawmode = "I;16N"

        # Pass tags as sorted list so that the tags are set in a fixed order.
        # This is required by libtiff for some tags. For example, the JPEGQUALITY
        # pseudo tag requires that the COMPRESS tag was already set.
        tags = list(atts.items())
        tags.sort()
        a = (rawmode, compression, _fp, filename, tags, types)
        encoder = Image._getencoder(im.mode, "libtiff", a, encoderconfig)
        encoder.setimage(im.im, (0, 0) + im.size)
        while True:
            errcode, data = encoder.encode(ImageFile.MAXBLOCK)[1:]
            if not _fp:
                # No fd handed to libtiff; copy its output ourselves.
                fp.write(data)
            if errcode:
                break
        if errcode < 0:
            msg = f"encoder error {errcode} when writing image file"
            raise OSError(msg)

    else:
        for tag in blocklist:
            del ifd[tag]
        offset = ifd.save(fp)

        ImageFile._save(
            im,
            fp,
            [ImageFile._Tile("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))],
        )

    # -- helper for multi-page save --
    if "_debug_multipage" in encoderinfo:
        # just to access o32 and o16 (using correct byte order)
        setattr(im, "_debug_multipage", ifd)
|
| 2002 |
+
|
| 2003 |
+
|
| 2004 |
+
class AppendingTiffWriter(io.BytesIO):
    """File-like wrapper that appends TIFF frames to an existing file.

    Each new frame is written at a 16-byte-aligned offset after the current
    end of file; ``finalize`` then patches the previous IFD chain pointer and
    rewrites all offset-valued tags of the appended page to be absolute.
    Supports both classic TIFF and BigTIFF layouts.
    """

    # Byte size of each TIFF field type, indexed by the field-type code.
    fieldSizes = [
        0,  # None
        1,  # byte
        1,  # ascii
        2,  # short
        4,  # long
        8,  # rational
        1,  # sbyte
        1,  # undefined
        2,  # sshort
        4,  # slong
        8,  # srational
        4,  # float
        8,  # double
        4,  # ifd
        2,  # unicode
        4,  # complex
        8,  # long8
    ]

    # Tags whose values are file offsets and therefore must be rebased
    # when a page is moved to the end of the file.
    Tags = {
        273,  # StripOffsets
        288,  # FreeOffsets
        324,  # TileOffsets
        519,  # JPEGQTables
        520,  # JPEGDCTables
        521,  # JPEGACTables
    }

    def __init__(self, fn: StrOrBytesPath | IO[bytes], new: bool = False) -> None:
        # Accept either a path (opened/closed by us) or an open binary file
        # object (owned by the caller).
        self.f: IO[bytes]
        if is_path(fn):
            self.name = fn
            self.close_fp = True
            try:
                # r+b keeps existing frames unless the caller asked for new.
                self.f = open(fn, "w+b" if new else "r+b")
            except OSError:
                self.f = open(fn, "w+b")
        else:
            self.f = cast(IO[bytes], fn)
            self.close_fp = False
        self.beginning = self.f.tell()
        self.setup()

    def setup(self) -> None:
        """Prepare to write the next frame: validate the header, detect
        endianness/BigTIFF, and position at the end of the file."""
        # Reset everything.
        self.f.seek(self.beginning, os.SEEK_SET)

        self.whereToWriteNewIFDOffset: int | None = None
        self.offsetOfNewPage = 0

        self.IIMM = iimm = self.f.read(4)
        # BigTIFF magic number is 43 (0x2b) instead of classic 42.
        self._bigtiff = b"\x2b" in iimm
        if not iimm:
            # empty file - first page
            self.isFirst = True
            return

        self.isFirst = False
        if iimm not in PREFIXES:
            msg = "Invalid TIFF file header"
            raise RuntimeError(msg)

        self.setEndian("<" if iimm.startswith(II) else ">")

        if self._bigtiff:
            # Skip the BigTIFF offset-size/reserved words.
            self.f.seek(4, os.SEEK_CUR)
        self.skipIFDs()
        self.goToEnd()

    def finalize(self) -> None:
        """Link the newly written page into the IFD chain and rebase its
        offset-valued tags to absolute file positions."""
        if self.isFirst:
            return

        # fix offsets
        self.f.seek(self.offsetOfNewPage)

        iimm = self.f.read(4)
        if not iimm:
            # Make it easy to finish a frame without committing to a new one.
            return

        if iimm != self.IIMM:
            msg = "IIMM of new page doesn't match IIMM of first page"
            raise RuntimeError(msg)

        if self._bigtiff:
            self.f.seek(4, os.SEEK_CUR)
        ifd_offset = self._read(8 if self._bigtiff else 4)
        # The page's own IFD offset is relative to the page start; make it
        # absolute and store it in the previous chain terminator.
        ifd_offset += self.offsetOfNewPage
        assert self.whereToWriteNewIFDOffset is not None
        self.f.seek(self.whereToWriteNewIFDOffset)
        self._write(ifd_offset, 8 if self._bigtiff else 4)
        self.f.seek(ifd_offset)
        self.fixIFD()

    def newFrame(self) -> None:
        # Call this to finish a frame.
        self.finalize()
        self.setup()

    def __enter__(self) -> AppendingTiffWriter:
        return self

    def __exit__(self, *args: object) -> None:
        if self.close_fp:
            self.close()

    def tell(self) -> int:
        # Position relative to the start of the page being written.
        return self.f.tell() - self.offsetOfNewPage

    def seek(self, offset: int, whence: int = io.SEEK_SET) -> int:
        """
        :param offset: Distance to seek.
        :param whence: Whether the distance is relative to the start,
            end or current position.
        :returns: The resulting position, relative to the start.
        """
        if whence == os.SEEK_SET:
            # Absolute seeks are interpreted relative to the new page.
            offset += self.offsetOfNewPage

        self.f.seek(offset, whence)
        return self.tell()

    def goToEnd(self) -> None:
        """Seek to EOF and record the (16-byte-aligned) start of the next page."""
        self.f.seek(0, os.SEEK_END)
        pos = self.f.tell()

        # pad to 16 byte boundary
        pad_bytes = 16 - pos % 16
        if 0 < pad_bytes < 16:
            self.f.write(bytes(pad_bytes))
        self.offsetOfNewPage = self.f.tell()

    def setEndian(self, endian: str) -> None:
        # Pre-build the struct format strings for this byte order.
        self.endian = endian
        self.longFmt = f"{self.endian}L"
        self.shortFmt = f"{self.endian}H"
        self.tagFormat = f"{self.endian}HH" + ("Q" if self._bigtiff else "L")

    def skipIFDs(self) -> None:
        """Walk the IFD chain to its end, remembering where the terminating
        zero offset lives so finalize() can overwrite it."""
        while True:
            ifd_offset = self._read(8 if self._bigtiff else 4)
            if ifd_offset == 0:
                self.whereToWriteNewIFDOffset = self.f.tell() - (
                    8 if self._bigtiff else 4
                )
                break

            self.f.seek(ifd_offset)
            num_tags = self._read(8 if self._bigtiff else 2)
            # Skip over the tag entries to the next-IFD pointer.
            self.f.seek(num_tags * (20 if self._bigtiff else 12), os.SEEK_CUR)

    def write(self, data: Buffer, /) -> int:
        return self.f.write(data)

    def _fmt(self, field_size: int) -> str:
        # struct format character for a 2/4/8-byte unsigned integer.
        try:
            return {2: "H", 4: "L", 8: "Q"}[field_size]
        except KeyError:
            msg = "offset is not supported"
            raise RuntimeError(msg)

    def _read(self, field_size: int) -> int:
        (value,) = struct.unpack(
            self.endian + self._fmt(field_size), self.f.read(field_size)
        )
        return value

    def readShort(self) -> int:
        return self._read(2)

    def readLong(self) -> int:
        return self._read(4)

    @staticmethod
    def _verify_bytes_written(bytes_written: int | None, expected: int) -> None:
        if bytes_written is not None and bytes_written != expected:
            msg = f"wrote only {bytes_written} bytes but wanted {expected}"
            raise RuntimeError(msg)

    def _rewriteLast(
        self, value: int, field_size: int, new_field_size: int = 0
    ) -> None:
        """Seek back over the value just read and overwrite it in place,
        optionally widening it to *new_field_size* bytes."""
        self.f.seek(-field_size, os.SEEK_CUR)
        if not new_field_size:
            new_field_size = field_size
        bytes_written = self.f.write(
            struct.pack(self.endian + self._fmt(new_field_size), value)
        )
        self._verify_bytes_written(bytes_written, new_field_size)

    def rewriteLastShortToLong(self, value: int) -> None:
        self._rewriteLast(value, 2, 4)

    def rewriteLastShort(self, value: int) -> None:
        return self._rewriteLast(value, 2)

    def rewriteLastLong(self, value: int) -> None:
        return self._rewriteLast(value, 4)

    def _write(self, value: int, field_size: int) -> None:
        bytes_written = self.f.write(
            struct.pack(self.endian + self._fmt(field_size), value)
        )
        self._verify_bytes_written(bytes_written, field_size)

    def writeShort(self, value: int) -> None:
        self._write(value, 2)

    def writeLong(self, value: int) -> None:
        self._write(value, 4)

    def close(self) -> None:
        self.finalize()
        if self.close_fp:
            self.f.close()

    def fixIFD(self) -> None:
        """Rebase every offset in the current IFD (value pointers and the
        offset-valued tags listed in ``Tags``) by ``offsetOfNewPage``."""
        num_tags = self._read(8 if self._bigtiff else 2)

        for i in range(num_tags):
            tag, field_type, count = struct.unpack(
                self.tagFormat, self.f.read(12 if self._bigtiff else 8)
            )

            field_size = self.fieldSizes[field_type]
            total_size = field_size * count
            fmt_size = 8 if self._bigtiff else 4
            # Values that fit in the entry are stored inline ("local");
            # larger values are stored elsewhere via an offset.
            is_local = total_size <= fmt_size
            if not is_local:
                offset = self._read(fmt_size) + self.offsetOfNewPage
                self._rewriteLast(offset, fmt_size)

            if tag in self.Tags:
                cur_pos = self.f.tell()

                logger.debug(
                    "fixIFD: %s (%d) - type: %s (%d) - type size: %d - count: %d",
                    TiffTags.lookup(tag).name,
                    tag,
                    TYPES.get(field_type, "unknown"),
                    field_type,
                    field_size,
                    count,
                )

                if is_local:
                    self._fixOffsets(count, field_size)
                    self.f.seek(cur_pos + fmt_size)
                else:
                    self.f.seek(offset)
                    self._fixOffsets(count, field_size)
                    self.f.seek(cur_pos)

            elif is_local:
                # skip the locally stored value that is not an offset
                self.f.seek(fmt_size, os.SEEK_CUR)

    def _fixOffsets(self, count: int, field_size: int) -> None:
        """Add ``offsetOfNewPage`` to *count* offsets of *field_size* bytes
        at the current position, widening the field type if a rebased
        offset no longer fits."""
        for i in range(count):
            offset = self._read(field_size)
            offset += self.offsetOfNewPage

            new_field_size = 0
            if self._bigtiff and field_size in (2, 4) and offset >= 2**32:
                # offset is now too large - we must convert long to long8
                new_field_size = 8
            elif field_size == 2 and offset >= 2**16:
                # offset is now too large - we must convert short to long
                new_field_size = 4
            if new_field_size:
                if count != 1:
                    msg = "not implemented"
                    raise RuntimeError(msg)  # XXX TODO

                # simple case - the offset is just one and therefore it is
                # local (not referenced with another offset)
                self._rewriteLast(offset, field_size, new_field_size)
                # Move back past the new offset, past 'count', and before 'field_type'
                rewind = -new_field_size - 4 - 2
                self.f.seek(rewind, os.SEEK_CUR)
                self.writeShort(new_field_size)  # rewrite the type
                self.f.seek(2 - rewind, os.SEEK_CUR)
            else:
                self._rewriteLast(offset, field_size)

    def fixOffsets(
        self, count: int, isShort: bool = False, isLong: bool = False
    ) -> None:
        # Public wrapper kept for backward compatibility with the old
        # boolean-flag signature.
        if isShort:
            field_size = 2
        elif isLong:
            field_size = 4
        else:
            field_size = 0
        return self._fixOffsets(count, field_size)
|
| 2302 |
+
|
| 2303 |
+
|
| 2304 |
+
def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Save *im* plus any ``append_images`` as a multi-frame TIFF.

    Falls back to a plain single-frame save when the image has no extra
    frames and nothing to append. The image's current frame position is
    restored afterwards.
    """
    extra_images = list(im.encoderinfo.get("append_images", []))
    if not hasattr(im, "n_frames") and not extra_images:
        # Nothing multi-frame about this save; use the simple path.
        return _save(im, fp, filename)

    original_frame = im.tell()
    try:
        with AppendingTiffWriter(fp) as writer:
            for current in [im] + extra_images:
                # Borrow defaults from the primary image; the previous
                # encoderinfo is restored once this image is written.
                previous_encoderinfo = current._attach_default_encoderinfo(im)
                if not hasattr(current, "encoderconfig"):
                    current.encoderconfig = ()
                frame_count = getattr(current, "n_frames", 1)

                for frame in range(frame_count):
                    current.seek(frame)
                    current.load()
                    _save(current, writer, filename)
                    writer.newFrame()
                current.encoderinfo = previous_encoderinfo
    finally:
        im.seek(original_frame)
|
| 2326 |
+
|
| 2327 |
+
|
| 2328 |
+
#
|
| 2329 |
+
# --------------------------------------------------------------------
|
| 2330 |
+
# Register
|
| 2331 |
+
|
| 2332 |
+
# Register the TIFF reader, single- and multi-frame writers, file
# extensions and MIME type with the Image plugin machinery.
Image.register_open(TiffImageFile.format, TiffImageFile, _accept)
Image.register_save(TiffImageFile.format, _save)
Image.register_save_all(TiffImageFile.format, _save_all)

Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"])

Image.register_mime(TiffImageFile.format, "image/tiff")
|
lib/python3.13/site-packages/PIL/WmfImagePlugin.py
ADDED
|
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# The Python Imaging Library
|
| 3 |
+
# $Id$
|
| 4 |
+
#
|
| 5 |
+
# WMF stub codec
|
| 6 |
+
#
|
| 7 |
+
# history:
|
| 8 |
+
# 1996-12-14 fl Created
|
| 9 |
+
# 2004-02-22 fl Turned into a stub driver
|
| 10 |
+
# 2004-02-23 fl Added EMF support
|
| 11 |
+
#
|
| 12 |
+
# Copyright (c) Secret Labs AB 1997-2004. All rights reserved.
|
| 13 |
+
# Copyright (c) Fredrik Lundh 1996.
|
| 14 |
+
#
|
| 15 |
+
# See the README file for information on usage and redistribution.
|
| 16 |
+
#
|
| 17 |
+
# WMF/EMF reference documentation:
|
| 18 |
+
# https://winprotocoldoc.blob.core.windows.net/productionwindowsarchives/MS-WMF/[MS-WMF].pdf
|
| 19 |
+
# http://wvware.sourceforge.net/caolan/index.html
|
| 20 |
+
# http://wvware.sourceforge.net/caolan/ora-wmf.html
|
| 21 |
+
from __future__ import annotations
|
| 22 |
+
|
| 23 |
+
from typing import IO
|
| 24 |
+
|
| 25 |
+
from . import Image, ImageFile
|
| 26 |
+
from ._binary import i16le as word
|
| 27 |
+
from ._binary import si16le as short
|
| 28 |
+
from ._binary import si32le as _long
|
| 29 |
+
|
| 30 |
+
_handler = None
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def register_handler(handler: ImageFile.StubHandler | None) -> None:
    """
    Install application-specific WMF image handler.

    :param handler: Handler object, or ``None`` to remove any previously
        installed handler.
    """
    global _handler
    _handler = handler
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
if hasattr(Image.core, "drawwmf"):
    # install default handler (windows only)

    class WmfHandler(ImageFile.StubHandler):
        """Default handler that renders metafiles via the native drawwmf call."""

        def open(self, im: ImageFile.StubImageFile) -> None:
            im._mode = "RGB"
            self.bbox = im.info["wmf_bbox"]

        def load(self, im: ImageFile.StubImageFile) -> Image.Image:
            im.fp.seek(0)  # rewind
            return Image.frombytes(
                "RGB",
                im.size,
                Image.core.drawwmf(im.fp.read(), im.size, self.bbox),
                "raw",
                "BGR",
                # DIB scanlines are padded to 4-byte boundaries
                (im.size[0] * 3 + 3) & -4,
                # negative orientation: the DIB is stored bottom-up
                -1,
            )

    register_handler(WmfHandler())
|
| 64 |
+
|
| 65 |
+
#
|
| 66 |
+
# --------------------------------------------------------------------
|
| 67 |
+
# Read WMF file
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def _accept(prefix: bytes) -> bool:
|
| 71 |
+
return prefix.startswith((b"\xd7\xcd\xc6\x9a\x00\x00", b"\x01\x00\x00\x00"))
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
##
|
| 75 |
+
# Image plugin for Windows metafiles.
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class WmfStubImageFile(ImageFile.StubImageFile):
    """Stub image plugin for Windows metafiles (WMF) and enhanced metafiles (EMF)."""

    format = "WMF"
    format_description = "Windows Metafile"

    def _open(self) -> None:
        """Parse the metafile header, setting mode, size and header metadata.

        :raises ValueError: if a placeable WMF header declares 0 units per inch.
        :raises SyntaxError: if the header is not a supported WMF/EMF variant.
        """
        # check placeable header
        s = self.fp.read(44)

        if s.startswith(b"\xd7\xcd\xc6\x9a\x00\x00"):
            # placeable windows metafile

            # get units per inch
            inch = word(s, 14)
            if inch == 0:
                msg = "Invalid inch"
                raise ValueError(msg)
            self._inch: tuple[float, float] = inch, inch

            # get bounding box (signed 16-bit coordinates in metafile units)
            x0 = short(s, 6)
            y0 = short(s, 8)
            x1 = short(s, 10)
            y1 = short(s, 12)

            # normalize size to 72 dots per inch
            self.info["dpi"] = 72
            size = (
                (x1 - x0) * self.info["dpi"] // inch,
                (y1 - y0) * self.info["dpi"] // inch,
            )

            self.info["wmf_bbox"] = x0, y0, x1, y1

            # sanity check (standard metafile header)
            if s[22:26] != b"\x01\x00\t\x00":
                msg = "Unsupported WMF file format"
                raise SyntaxError(msg)

        elif s.startswith(b"\x01\x00\x00\x00") and s[40:44] == b" EMF":
            # enhanced metafile

            # get bounding box (signed 32-bit device units)
            x0 = _long(s, 8)
            y0 = _long(s, 12)
            x1 = _long(s, 16)
            y1 = _long(s, 20)

            # get frame (in 0.01 millimeter units)
            frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36)

            size = x1 - x0, y1 - y0

            # calculate dots per inch from bbox and frame
            # (2540 hundredths of a millimeter per inch)
            xdpi = 2540.0 * (x1 - x0) / (frame[2] - frame[0])
            ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1])

            self.info["wmf_bbox"] = x0, y0, x1, y1

            if xdpi == ydpi:
                self.info["dpi"] = xdpi
            else:
                self.info["dpi"] = xdpi, ydpi
            self._inch = xdpi, ydpi

        else:
            msg = "Unsupported file format"
            raise SyntaxError(msg)

        self._mode = "RGB"
        self._size = size

        loader = self._load()
        if loader:
            loader.open(self)

    def _load(self) -> ImageFile.StubHandler | None:
        # Return whichever handler was installed via register_handler(), if any.
        return _handler

    def load(
        self, dpi: float | tuple[float, float] | None = None
    ) -> Image.core.PixelAccess | None:
        """Load the image, optionally re-scaling the size to the given *dpi*.

        :param dpi: target resolution; a single number applies to both axes.
        """
        if dpi is not None:
            self.info["dpi"] = dpi
            x0, y0, x1, y1 = self.info["wmf_bbox"]
            if not isinstance(dpi, tuple):
                dpi = dpi, dpi
            self._size = (
                int((x1 - x0) * dpi[0] / self._inch[0]),
                int((y1 - y0) * dpi[1] / self._inch[1]),
            )
        return super().load()
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
    """Delegate saving to the installed WMF handler.

    :raises OSError: if no handler is installed or it cannot save.
    """
    handler = _handler
    if handler is None or not hasattr(handler, "save"):
        msg = "WMF save handler not installed"
        raise OSError(msg)
    handler.save(im, fp, filename)
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
#
|
| 179 |
+
# --------------------------------------------------------------------
|
| 180 |
+
# Registry stuff
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
# Register the stub plugin so Image.open()/save() can dispatch metafiles.
Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept)
Image.register_save(WmfStubImageFile.format, _save)

# Both classic (.wmf) and enhanced (.emf) metafiles map to this plugin.
Image.register_extensions(WmfStubImageFile.format, [".wmf", ".emf"])
|
lib/python3.13/site-packages/PIL/_imagingft.pyi
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections.abc import Callable
|
| 2 |
+
from typing import Any
|
| 3 |
+
|
| 4 |
+
from . import ImageFont, _imaging
|
| 5 |
+
|
| 6 |
+
class Font:
    """Type stub for the FreeType font object exposed by the _imagingft C extension."""

    @property
    def family(self) -> str | None: ...
    @property
    def style(self) -> str | None: ...
    @property
    def ascent(self) -> int: ...
    @property
    def descent(self) -> int: ...
    @property
    def height(self) -> int: ...
    @property
    def x_ppem(self) -> int: ...
    @property
    def y_ppem(self) -> int: ...
    @property
    def glyphs(self) -> int: ...
    def render(
        self,
        string: str | bytes,
        fill: Callable[[int, int], _imaging.ImagingCore],
        mode: str,
        dir: str | None,
        features: list[str] | None,
        lang: str | None,
        stroke_width: float,
        stroke_filled: bool,
        anchor: str | None,
        foreground_ink_long: int,
        start: tuple[float, float],
        /,
    ) -> tuple[_imaging.ImagingCore, tuple[int, int]]: ...
    def getsize(
        self,
        string: str | bytes | bytearray,
        mode: str,
        dir: str | None,
        features: list[str] | None,
        lang: str | None,
        anchor: str | None,
        /,
    ) -> tuple[tuple[int, int], tuple[int, int]]: ...
    def getlength(
        self,
        string: str | bytes,
        mode: str,
        dir: str | None,
        features: list[str] | None,
        lang: str | None,
        /,
    ) -> float: ...
    def getvarnames(self) -> list[bytes]: ...
    def getvaraxes(self) -> list[ImageFont.Axis]: ...
    def setvarname(self, instance_index: int, /) -> None: ...
    def setvaraxes(self, axes: list[float], /) -> None: ...
|
| 61 |
+
|
| 62 |
+
# Factory implemented in C: loads a FreeType face from a path or raw bytes.
def getfont(
    filename: str | bytes,
    size: float,
    index: int,
    encoding: str,
    font_bytes: bytes,
    layout_engine: int,
) -> Font: ...
|
| 70 |
+
# Fallback for extension-module attributes not statically declared above.
def __getattr__(name: str) -> Any: ...
|
lib/python3.13/site-packages/PIL/_imagingmorph.pyi
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any
|
| 2 |
+
|
| 3 |
+
# Catch-all stub: the _imagingmorph C extension's attributes are untyped.
def __getattr__(name: str) -> Any: ...
|
lib/python3.13/site-packages/PIL/_imagingtk.cpython-313-x86_64-linux-gnu.so
ADDED
|
Binary file (47.1 kB). View file
|
|
|
lib/python3.13/site-packages/PIL/_tkinter_finder.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Find compiled module linking to Tcl / Tk libraries"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import sys
|
| 6 |
+
import tkinter
|
| 7 |
+
|
| 8 |
+
# Access the private C extension module backing tkinter.
tk = getattr(tkinter, "_tkinter")

try:
    if hasattr(sys, "pypy_find_executable"):
        # PyPy exposes the Tcl/Tk bindings through a cffi module.
        TKINTER_LIB = tk.tklib_cffi.__file__
    else:
        TKINTER_LIB = tk.__file__
except AttributeError:
    # _tkinter may be compiled directly into Python, in which case __file__ is
    # not available. load_tkinter_funcs will check the binary first in any case.
    TKINTER_LIB = None

tk_version = str(tkinter.TkVersion)
|
lib/python3.13/site-packages/PIL/_typing.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from collections.abc import Sequence
|
| 6 |
+
from typing import Any, Protocol, TypeVar
|
| 7 |
+
|
| 8 |
+
# Manual flag (instead of typing.TYPE_CHECKING) so the heavyweight imports
# below are only seen by static type checkers, never at runtime.
TYPE_CHECKING = False
if TYPE_CHECKING:
    from numbers import _IntegralLike as IntegralLike

    try:
        import numpy.typing as npt

        NumpyArray = npt.NDArray[Any]
    except ImportError:
        pass

if sys.version_info >= (3, 13):
    from types import CapsuleType
else:
    # No public capsule type before 3.13; fall back to plain object.
    CapsuleType = object

if sys.version_info >= (3, 12):
    from collections.abc import Buffer
else:
    # collections.abc.Buffer is new in 3.12; accept anything on older versions.
    Buffer = Any


# Ink specification: a number, a per-band tuple, or a color name string.
_Ink = float | tuple[int, ...] | str

# Flat sequence of coordinates, or a sequence of coordinate pairs.
Coords = Sequence[float] | Sequence[Sequence[float]]


_T_co = TypeVar("_T_co", covariant=True)


class SupportsRead(Protocol[_T_co]):
    """Structural type for objects exposing a file-like ``read`` method."""

    def read(self, length: int = ..., /) -> _T_co: ...


StrOrBytesPath = str | bytes | os.PathLike[str] | os.PathLike[bytes]


__all__ = ["Buffer", "IntegralLike", "StrOrBytesPath", "SupportsRead"]
|
lib/python3.13/site-packages/PIL/py.typed
ADDED
|
File without changes
|
lib/python3.13/site-packages/blake3/__init__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .blake3 import *

# Re-export the compiled extension's docstring and public names so the
# package behaves like the extension module itself.
__doc__ = blake3.__doc__
if hasattr(blake3, "__all__"):
    __all__ = blake3.__all__
|
lib/python3.13/site-packages/blake3/__init__.pyi
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from os import PathLike
|
| 2 |
+
import sys
|
| 3 |
+
if sys.version_info >= (3, 12):
|
| 4 |
+
from collections.abc import Buffer
|
| 5 |
+
else:
|
| 6 |
+
from typing_extensions import Buffer
|
| 7 |
+
|
| 8 |
+
__version__: str = ...

class blake3:
    """Type stub for the BLAKE3 hasher (hashlib-style interface)."""

    name: str
    digest_size: int
    block_size: int
    key_size: int
    AUTO: int
    def __init__(
        self,
        data: Buffer = ...,
        /,
        *,
        key: Buffer = ...,
        derive_key_context: str = ...,
        max_threads: int = ...,
        usedforsecurity: bool = ...,
    ): ...
    def update(self, data: Buffer, /) -> blake3: ...
    def update_mmap(self, path: str | PathLike[str]) -> blake3: ...
    def copy(self) -> blake3: ...
    def reset(self) -> None: ...
    def digest(self, length: int = ..., *, seek: int = ...) -> bytes: ...
    def hexdigest(self, length: int = ..., *, seek: int = ...) -> str: ...
|
lib/python3.13/site-packages/blake3/py.typed
ADDED
|
File without changes
|
lib/python3.13/site-packages/cbor2/__init__.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any
|
| 2 |
+
|
| 3 |
+
from ._decoder import CBORDecoder as CBORDecoder
|
| 4 |
+
from ._decoder import load as load
|
| 5 |
+
from ._decoder import loads as loads
|
| 6 |
+
from ._encoder import CBOREncoder as CBOREncoder
|
| 7 |
+
from ._encoder import dump as dump
|
| 8 |
+
from ._encoder import dumps as dumps
|
| 9 |
+
from ._encoder import shareable_encoder as shareable_encoder
|
| 10 |
+
from ._types import CBORDecodeEOF as CBORDecodeEOF
|
| 11 |
+
from ._types import CBORDecodeError as CBORDecodeError
|
| 12 |
+
from ._types import CBORDecodeValueError as CBORDecodeValueError
|
| 13 |
+
from ._types import CBOREncodeError as CBOREncodeError
|
| 14 |
+
from ._types import CBOREncodeTypeError as CBOREncodeTypeError
|
| 15 |
+
from ._types import CBOREncodeValueError as CBOREncodeValueError
|
| 16 |
+
from ._types import CBORError as CBORError
|
| 17 |
+
from ._types import CBORSimpleValue as CBORSimpleValue
|
| 18 |
+
from ._types import CBORTag as CBORTag
|
| 19 |
+
from ._types import FrozenDict as FrozenDict
|
| 20 |
+
from ._types import undefined as undefined
|
| 21 |
+
|
| 22 |
+
try:
    from _cbor2 import *  # noqa: F403
except ImportError:
    # Couldn't import the optimized C version; ignore the failure and leave the
    # pure Python implementations in place.

    # Re-export imports so they look like they live directly in this package
    key: str
    value: Any
    for key, value in list(locals().items()):
        if callable(value) and getattr(value, "__module__", "").startswith("cbor2."):
            value.__module__ = __name__
else:
    # The pure Python implementations are replaced with the optimized C
    # variants, but we still need to create the encoder dictionaries for the C
    # variant here (this is much simpler than doing so in C, and doesn't affect
    # overall performance as it's a one-off initialization cost).
    def _init_cbor2() -> None:
        from collections import OrderedDict

        import _cbor2

        from ._encoder import canonical_encoders, default_encoders
        from ._types import CBORSimpleValue, CBORTag, undefined

        # For each pure-Python key type, substitute the _cbor2 counterpart and
        # look up the equivalently named encoder method on the C encoder class.
        _cbor2.default_encoders = OrderedDict(
            [
                (
                    (
                        _cbor2.CBORSimpleValue
                        if type_ is CBORSimpleValue
                        else _cbor2.CBORTag
                        if type_ is CBORTag
                        else type(_cbor2.undefined)
                        if type_ is type(undefined)
                        else type_
                    ),
                    getattr(_cbor2.CBOREncoder, method.__name__),
                )
                for type_, method in default_encoders.items()
            ]
        )
        _cbor2.canonical_encoders = OrderedDict(
            [
                (
                    (
                        _cbor2.CBORSimpleValue
                        if type_ is CBORSimpleValue
                        else _cbor2.CBORTag
                        if type_ is CBORTag
                        else type(_cbor2.undefined)
                        if type_ is type(undefined)
                        else type_
                    ),
                    getattr(_cbor2.CBOREncoder, method.__name__),
                )
                for type_, method in canonical_encoders.items()
            ]
        )

    _init_cbor2()
    del _init_cbor2
|
lib/python3.13/site-packages/cbor2/_decoder.py
ADDED
|
@@ -0,0 +1,869 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import re
|
| 4 |
+
import struct
|
| 5 |
+
import sys
|
| 6 |
+
from codecs import getincrementaldecoder
|
| 7 |
+
from collections.abc import Callable, Mapping, Sequence
|
| 8 |
+
from datetime import date, datetime, timedelta, timezone
|
| 9 |
+
from io import BytesIO
|
| 10 |
+
from typing import IO, TYPE_CHECKING, Any, TypeVar, cast, overload
|
| 11 |
+
|
| 12 |
+
from ._types import (
|
| 13 |
+
CBORDecodeEOF,
|
| 14 |
+
CBORDecodeValueError,
|
| 15 |
+
CBORSimpleValue,
|
| 16 |
+
CBORTag,
|
| 17 |
+
FrozenDict,
|
| 18 |
+
break_marker,
|
| 19 |
+
undefined,
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
if TYPE_CHECKING:
|
| 23 |
+
from decimal import Decimal
|
| 24 |
+
from email.message import Message
|
| 25 |
+
from fractions import Fraction
|
| 26 |
+
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
| 27 |
+
from typing import Literal
|
| 28 |
+
from uuid import UUID
|
| 29 |
+
|
| 30 |
+
T = TypeVar("T")

# RFC 3339 timestamp: date, "T", time, optional fractional seconds (captured
# to microsecond precision, extra digits discarded), then either "Z" or a
# numeric UTC offset.
timestamp_re = re.compile(
    r"^(\d{4})-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)" r"(?:\.(\d{1,6})\d*)?(?:Z|([+-])(\d\d):(\d\d))$"
)
incremental_utf8_decoder = getincrementaldecoder("utf-8")
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class CBORDecoder:
|
| 39 |
+
"""
|
| 40 |
+
The CBORDecoder class implements a fully featured `CBOR`_ decoder with
|
| 41 |
+
several extensions for handling shared references, big integers, rational
|
| 42 |
+
numbers and so on. Typically the class is not used directly, but the
|
| 43 |
+
:func:`load` and :func:`loads` functions are called to indirectly construct
|
| 44 |
+
and use the class.
|
| 45 |
+
|
| 46 |
+
When the class is constructed manually, the main entry points are
|
| 47 |
+
:meth:`decode` and :meth:`decode_from_bytes`.
|
| 48 |
+
|
| 49 |
+
.. _CBOR: https://cbor.io/
|
| 50 |
+
"""
|
| 51 |
+
|
| 52 |
+
__slots__ = (
|
| 53 |
+
"_tag_hook",
|
| 54 |
+
"_object_hook",
|
| 55 |
+
"_share_index",
|
| 56 |
+
"_shareables",
|
| 57 |
+
"_fp",
|
| 58 |
+
"_fp_read",
|
| 59 |
+
"_immutable",
|
| 60 |
+
"_str_errors",
|
| 61 |
+
"_stringref_namespace",
|
| 62 |
+
)
|
| 63 |
+
|
| 64 |
+
_fp: IO[bytes]
|
| 65 |
+
_fp_read: Callable[[int], bytes]
|
| 66 |
+
|
| 67 |
+
def __init__(
|
| 68 |
+
self,
|
| 69 |
+
fp: IO[bytes],
|
| 70 |
+
tag_hook: Callable[[CBORDecoder, CBORTag], Any] | None = None,
|
| 71 |
+
object_hook: Callable[[CBORDecoder, dict[Any, Any]], Any] | None = None,
|
| 72 |
+
str_errors: Literal["strict", "error", "replace"] = "strict",
|
| 73 |
+
):
|
| 74 |
+
"""
|
| 75 |
+
:param fp:
|
| 76 |
+
the file to read from (any file-like object opened for reading in binary
|
| 77 |
+
mode)
|
| 78 |
+
:param tag_hook:
|
| 79 |
+
callable that takes 2 arguments: the decoder instance, and the
|
| 80 |
+
:class:`.CBORTag` to be decoded. This callback is invoked for any tags
|
| 81 |
+
for which there is no built-in decoder. The return value is substituted
|
| 82 |
+
for the :class:`.CBORTag` object in the deserialized output
|
| 83 |
+
:param object_hook:
|
| 84 |
+
callable that takes 2 arguments: the decoder instance, and a
|
| 85 |
+
dictionary. This callback is invoked for each deserialized
|
| 86 |
+
:class:`dict` object. The return value is substituted for the dict in
|
| 87 |
+
the deserialized output.
|
| 88 |
+
:param str_errors:
|
| 89 |
+
determines how to handle unicode decoding errors (see the `Error Handlers`_
|
| 90 |
+
section in the standard library documentation for details)
|
| 91 |
+
|
| 92 |
+
.. _Error Handlers: https://docs.python.org/3/library/codecs.html#error-handlers
|
| 93 |
+
|
| 94 |
+
"""
|
| 95 |
+
self.fp = fp
|
| 96 |
+
self.tag_hook = tag_hook
|
| 97 |
+
self.object_hook = object_hook
|
| 98 |
+
self.str_errors = str_errors
|
| 99 |
+
self._share_index: int | None = None
|
| 100 |
+
self._shareables: list[object] = []
|
| 101 |
+
self._stringref_namespace: list[str | bytes] | None = None
|
| 102 |
+
self._immutable = False
|
| 103 |
+
|
| 104 |
+
@property
|
| 105 |
+
def immutable(self) -> bool:
|
| 106 |
+
"""
|
| 107 |
+
Used by decoders to check if the calling context requires an immutable
|
| 108 |
+
type. Object_hook or tag_hook should raise an exception if this flag
|
| 109 |
+
is set unless the result can be safely used as a dict key.
|
| 110 |
+
"""
|
| 111 |
+
return self._immutable
|
| 112 |
+
|
| 113 |
+
@property
|
| 114 |
+
def fp(self) -> IO[bytes]:
|
| 115 |
+
return self._fp
|
| 116 |
+
|
| 117 |
+
@fp.setter
|
| 118 |
+
def fp(self, value: IO[bytes]) -> None:
|
| 119 |
+
try:
|
| 120 |
+
if not callable(value.read):
|
| 121 |
+
raise ValueError("fp.read is not callable")
|
| 122 |
+
except AttributeError:
|
| 123 |
+
raise ValueError("fp object has no read method")
|
| 124 |
+
else:
|
| 125 |
+
self._fp = value
|
| 126 |
+
self._fp_read = value.read
|
| 127 |
+
|
| 128 |
+
@property
|
| 129 |
+
def tag_hook(self) -> Callable[[CBORDecoder, CBORTag], Any] | None:
|
| 130 |
+
return self._tag_hook
|
| 131 |
+
|
| 132 |
+
@tag_hook.setter
|
| 133 |
+
def tag_hook(self, value: Callable[[CBORDecoder, CBORTag], Any] | None) -> None:
|
| 134 |
+
if value is None or callable(value):
|
| 135 |
+
self._tag_hook = value
|
| 136 |
+
else:
|
| 137 |
+
raise ValueError("tag_hook must be None or a callable")
|
| 138 |
+
|
| 139 |
+
@property
|
| 140 |
+
def object_hook(self) -> Callable[[CBORDecoder, dict[Any, Any]], Any] | None:
|
| 141 |
+
return self._object_hook
|
| 142 |
+
|
| 143 |
+
@object_hook.setter
|
| 144 |
+
def object_hook(self, value: Callable[[CBORDecoder, Mapping[Any, Any]], Any] | None) -> None:
|
| 145 |
+
if value is None or callable(value):
|
| 146 |
+
self._object_hook = value
|
| 147 |
+
else:
|
| 148 |
+
raise ValueError("object_hook must be None or a callable")
|
| 149 |
+
|
| 150 |
+
@property
|
| 151 |
+
def str_errors(self) -> Literal["strict", "error", "replace"]:
|
| 152 |
+
return self._str_errors
|
| 153 |
+
|
| 154 |
+
@str_errors.setter
|
| 155 |
+
def str_errors(self, value: Literal["strict", "error", "replace"]) -> None:
|
| 156 |
+
if value in ("strict", "error", "replace"):
|
| 157 |
+
self._str_errors = value
|
| 158 |
+
else:
|
| 159 |
+
raise ValueError(
|
| 160 |
+
f"invalid str_errors value {value!r} (must be one of 'strict', "
|
| 161 |
+
"'error', or 'replace')"
|
| 162 |
+
)
|
| 163 |
+
|
| 164 |
+
def set_shareable(self, value: T) -> T:
|
| 165 |
+
"""
|
| 166 |
+
Set the shareable value for the last encountered shared value marker,
|
| 167 |
+
if any. If the current shared index is ``None``, nothing is done.
|
| 168 |
+
|
| 169 |
+
:param value: the shared value
|
| 170 |
+
:returns: the shared value to permit chaining
|
| 171 |
+
"""
|
| 172 |
+
if self._share_index is not None:
|
| 173 |
+
self._shareables[self._share_index] = value
|
| 174 |
+
|
| 175 |
+
return value
|
| 176 |
+
|
| 177 |
+
def _stringref_namespace_add(self, string: str | bytes, length: int) -> None:
|
| 178 |
+
if self._stringref_namespace is not None:
|
| 179 |
+
next_index = len(self._stringref_namespace)
|
| 180 |
+
if next_index < 24:
|
| 181 |
+
is_referenced = length >= 3
|
| 182 |
+
elif next_index < 256:
|
| 183 |
+
is_referenced = length >= 4
|
| 184 |
+
elif next_index < 65536:
|
| 185 |
+
is_referenced = length >= 5
|
| 186 |
+
elif next_index < 4294967296:
|
| 187 |
+
is_referenced = length >= 7
|
| 188 |
+
else:
|
| 189 |
+
is_referenced = length >= 11
|
| 190 |
+
|
| 191 |
+
if is_referenced:
|
| 192 |
+
self._stringref_namespace.append(string)
|
| 193 |
+
|
| 194 |
+
def read(self, amount: int) -> bytes:
|
| 195 |
+
"""
|
| 196 |
+
Read bytes from the data stream.
|
| 197 |
+
|
| 198 |
+
:param int amount: the number of bytes to read
|
| 199 |
+
"""
|
| 200 |
+
data = self._fp_read(amount)
|
| 201 |
+
if len(data) < amount:
|
| 202 |
+
raise CBORDecodeEOF(
|
| 203 |
+
f"premature end of stream (expected to read {amount} bytes, got {len(data)} "
|
| 204 |
+
"instead)"
|
| 205 |
+
)
|
| 206 |
+
|
| 207 |
+
return data
|
| 208 |
+
|
| 209 |
+
def _decode(self, immutable: bool = False, unshared: bool = False) -> Any:
|
| 210 |
+
if immutable:
|
| 211 |
+
old_immutable = self._immutable
|
| 212 |
+
self._immutable = True
|
| 213 |
+
if unshared:
|
| 214 |
+
old_index = self._share_index
|
| 215 |
+
self._share_index = None
|
| 216 |
+
try:
|
| 217 |
+
initial_byte = self.read(1)[0]
|
| 218 |
+
major_type = initial_byte >> 5
|
| 219 |
+
subtype = initial_byte & 31
|
| 220 |
+
decoder = major_decoders[major_type]
|
| 221 |
+
return decoder(self, subtype)
|
| 222 |
+
finally:
|
| 223 |
+
if immutable:
|
| 224 |
+
self._immutable = old_immutable
|
| 225 |
+
if unshared:
|
| 226 |
+
self._share_index = old_index
|
| 227 |
+
|
| 228 |
+
def decode(self) -> object:
|
| 229 |
+
"""
|
| 230 |
+
Decode the next value from the stream.
|
| 231 |
+
|
| 232 |
+
:raises CBORDecodeError: if there is any problem decoding the stream
|
| 233 |
+
"""
|
| 234 |
+
return self._decode()
|
| 235 |
+
|
| 236 |
+
def decode_from_bytes(self, buf: bytes) -> object:
|
| 237 |
+
"""
|
| 238 |
+
Wrap the given bytestring as a file and call :meth:`decode` with it as
|
| 239 |
+
the argument.
|
| 240 |
+
|
| 241 |
+
This method was intended to be used from the ``tag_hook`` hook when an
|
| 242 |
+
object needs to be decoded separately from the rest but while still
|
| 243 |
+
taking advantage of the shared value registry.
|
| 244 |
+
"""
|
| 245 |
+
with BytesIO(buf) as fp:
|
| 246 |
+
old_fp = self.fp
|
| 247 |
+
self.fp = fp
|
| 248 |
+
retval = self._decode()
|
| 249 |
+
self.fp = old_fp
|
| 250 |
+
return retval
|
| 251 |
+
|
| 252 |
+
@overload
|
| 253 |
+
def _decode_length(self, subtype: int) -> int: ...
|
| 254 |
+
|
| 255 |
+
@overload
|
| 256 |
+
def _decode_length(self, subtype: int, allow_indefinite: Literal[True]) -> int | None: ...
|
| 257 |
+
|
| 258 |
+
def _decode_length(self, subtype: int, allow_indefinite: bool = False) -> int | None:
|
| 259 |
+
if subtype < 24:
|
| 260 |
+
return subtype
|
| 261 |
+
elif subtype == 24:
|
| 262 |
+
return self.read(1)[0]
|
| 263 |
+
elif subtype == 25:
|
| 264 |
+
return cast(int, struct.unpack(">H", self.read(2))[0])
|
| 265 |
+
elif subtype == 26:
|
| 266 |
+
return cast(int, struct.unpack(">L", self.read(4))[0])
|
| 267 |
+
elif subtype == 27:
|
| 268 |
+
return cast(int, struct.unpack(">Q", self.read(8))[0])
|
| 269 |
+
elif subtype == 31 and allow_indefinite:
|
| 270 |
+
return None
|
| 271 |
+
else:
|
| 272 |
+
raise CBORDecodeValueError(f"unknown unsigned integer subtype 0x{subtype:x}")
|
| 273 |
+
|
| 274 |
+
def decode_uint(self, subtype: int) -> int:
    # Major tag 0: unsigned integer; the subtype carries the value or
    # selects the width of the following big-endian integer.
    return self.set_shareable(self._decode_length(subtype))
|
| 277 |
+
|
| 278 |
+
def decode_negint(self, subtype: int) -> int:
    # Major tag 1: negative integer, encoded on the wire as (-1 - n).
    return self.set_shareable(-self._decode_length(subtype) - 1)
|
| 281 |
+
|
| 282 |
+
def decode_bytestring(self, subtype: int) -> bytes:
    """Decode a byte string (major tag 2), definite or indefinite length."""
    # Major tag 2
    length = self._decode_length(subtype, allow_indefinite=True)
    if length is None:
        # Indefinite length: concatenate definite-length chunks until the
        # 0xFF "break" stop code.
        buf: list[bytes] = []
        while True:
            initial_byte = self.read(1)[0]
            if initial_byte == 0xFF:
                result = b"".join(buf)
                break
            elif initial_byte >> 5 == 2:
                # Chunks must themselves be definite-length byte strings.
                length = self._decode_length(initial_byte & 0x1F)
                if length is None or length > sys.maxsize:
                    raise CBORDecodeValueError(
                        f"invalid length for indefinite bytestring chunk 0x{length:x}"
                    )
                value = self.read(length)
                buf.append(value)
            else:
                raise CBORDecodeValueError(
                    "non-bytestring found in indefinite length bytestring"
                )
    else:
        if length > sys.maxsize:
            raise CBORDecodeValueError(f"invalid length for bytestring 0x{length:x}")
        elif length <= 65536:
            result = self.read(length)
        else:
            # Read large bytestrings 65536 (2 ** 16) bytes at a time
            left = length
            buffer = bytearray()
            while left:
                chunk_size = min(left, 65536)
                buffer.extend(self.read(chunk_size))
                left -= chunk_size

            result = bytes(buffer)

    # Register the string for stringref (tag 25) lookups, if a namespace
    # (tag 256) is active.
    self._stringref_namespace_add(result, length)

    return self.set_shareable(result)
|
| 324 |
+
|
| 325 |
+
def decode_string(self, subtype: int) -> str:
    """Decode a UTF-8 text string (major tag 3), definite or indefinite length."""
    # Major tag 3
    length = self._decode_length(subtype, allow_indefinite=True)
    if length is None:
        # Indefinite length
        # NOTE: It may seem redundant to repeat this code to handle UTF-8
        # strings but there is a reason to do this separately to
        # byte-strings. Specifically, the CBOR spec states (in sec. 2.2):
        #
        #     Text strings with indefinite lengths act the same as byte
        #     strings with indefinite lengths, except that all their chunks
        #     MUST be definite-length text strings. Note that this implies
        #     that the bytes of a single UTF-8 character cannot be spread
        #     between chunks: a new chunk can only be started at a
        #     character boundary.
        #
        # This precludes using the indefinite bytestring decoder above as
        # that would happily ignore UTF-8 characters split across chunks.
        buf: list[str] = []
        while True:
            initial_byte = self.read(1)[0]
            if initial_byte == 0xFF:
                # Break stop code: end of chunk sequence.
                result = "".join(buf)
                break
            elif initial_byte >> 5 == 3:
                # Chunks must be definite-length text strings.
                length = self._decode_length(initial_byte & 0x1F)
                if length is None or length > sys.maxsize:
                    raise CBORDecodeValueError(
                        f"invalid length for indefinite string chunk 0x{length:x}"
                    )

                try:
                    value = self.read(length).decode("utf-8", self._str_errors)
                except UnicodeDecodeError as exc:
                    raise CBORDecodeValueError("error decoding unicode string") from exc

                buf.append(value)
            else:
                raise CBORDecodeValueError("non-string found in indefinite length string")
    else:
        if length > sys.maxsize:
            raise CBORDecodeValueError(f"invalid length for string 0x{length:x}")

        if length <= 65536:
            try:
                result = self.read(length).decode("utf-8", self._str_errors)
            except UnicodeDecodeError as exc:
                raise CBORDecodeValueError("error decoding unicode string") from exc
        else:
            # Read and decode large text strings 65536 (2 ** 16) bytes at a time;
            # the incremental decoder copes with multi-byte characters that
            # straddle chunk boundaries.
            codec = incremental_utf8_decoder(self._str_errors)
            left = length
            result = ""
            while left:
                chunk_size = min(left, 65536)
                final = left <= chunk_size
                try:
                    result += codec.decode(self.read(chunk_size), final)
                except UnicodeDecodeError as exc:
                    raise CBORDecodeValueError("error decoding unicode string") from exc

                left -= chunk_size

    # Register the string for stringref (tag 25) lookups, if a namespace
    # (tag 256) is active.
    self._stringref_namespace_add(result, length)

    return self.set_shareable(result)
|
| 391 |
+
|
| 392 |
+
def decode_array(self, subtype: int) -> Sequence[Any]:
    """Decode an array (major tag 4) as a list, or a tuple in immutable contexts."""
    # Major tag 4
    length = self._decode_length(subtype, allow_indefinite=True)
    if length is None:
        # Indefinite length: decode items until the break marker.
        items: list[Any] = []
        if not self._immutable:
            # Register the (still-empty) list up front so cyclic references
            # back into this array can resolve while items are decoded.
            self.set_shareable(items)
        while True:
            value = self._decode()
            if value is break_marker:
                break
            else:
                items.append(value)
    else:
        if length > sys.maxsize:
            raise CBORDecodeValueError(f"invalid length for array 0x{length:x}")

        items = []
        if not self._immutable:
            self.set_shareable(items)

        for index in range(length):
            items.append(self._decode())

    if self._immutable:
        # Immutable context (e.g. the array is used as a map key): freeze
        # into a tuple and register that as the shared value instead.
        items_tuple = tuple(items)
        self.set_shareable(items_tuple)
        return items_tuple

    return items
|
| 423 |
+
|
| 424 |
+
def decode_map(self, subtype: int) -> Mapping[Any, Any]:
    """Decode a map (major tag 5) as a dict, or a FrozenDict in immutable contexts."""
    # Major tag 5
    length = self._decode_length(subtype, allow_indefinite=True)
    if length is None:
        # Indefinite length: read key/value pairs until the break marker.
        dictionary: dict[Any, Any] = {}
        self.set_shareable(dictionary)
        while True:
            key = self._decode(immutable=True, unshared=True)
            if key is break_marker:
                break
            else:
                dictionary[key] = self._decode(unshared=True)
    else:
        dictionary = {}
        self.set_shareable(dictionary)
        for _ in range(length):
            key = self._decode(immutable=True, unshared=True)
            dictionary[key] = self._decode(unshared=True)

    if self._object_hook:
        dictionary = self._object_hook(self, dictionary)
        self.set_shareable(dictionary)
    elif self._immutable:
        frozen_dict = FrozenDict(dictionary)
        # Fix: register the object that is actually returned, so shared
        # references (tag 29) resolve to the same FrozenDict instance.
        # Previously the discarded mutable dict was registered instead.
        self.set_shareable(frozen_dict)
        return frozen_dict

    return dictionary
|
| 453 |
+
|
| 454 |
+
def decode_semantic(self, subtype: int) -> Any:
    """Decode a tagged value (major tag 6), dispatching on the tag number."""
    tagnum = self._decode_length(subtype)
    handler = semantic_decoders.get(tagnum)
    if handler is not None:
        # A built-in decoder exists for this tag number.
        return handler(self)

    # Unknown tag: wrap the payload in a CBORTag. The tag is registered
    # before its payload is decoded so cyclic references can resolve.
    tag = CBORTag(tagnum, None)
    self.set_shareable(tag)
    tag.value = self._decode(unshared=True)
    if self._tag_hook:
        tag = self._tag_hook(self, tag)

    return self.set_shareable(tag)
|
| 467 |
+
|
| 468 |
+
def decode_special(self, subtype: int) -> Any:
    """Decode a major-type-7 value (simple values, floats, break marker)."""
    if subtype < 20:
        # Simple value embedded directly in the initial byte.
        # XXX Set shareable?
        return CBORSimpleValue(subtype)

    try:
        handler = special_decoders[subtype]
    except KeyError as e:
        raise CBORDecodeValueError(
            f"Undefined Reserved major type 7 subtype 0x{subtype:x}"
        ) from e

    return handler(self)
|
| 481 |
+
|
| 482 |
+
#
|
| 483 |
+
# Semantic decoders (major tag 6)
|
| 484 |
+
#
|
| 485 |
+
def decode_epoch_date(self) -> date:
    # Semantic tag 100: days since 1970-01-01. 719163 is the proleptic
    # Gregorian ordinal of the Unix epoch date.
    value = self._decode()
    return self.set_shareable(date.fromordinal(value + 719163))
|
| 489 |
+
|
| 490 |
+
def decode_date_string(self) -> date:
    # Semantic tag 1004: full-date string in RFC 3339 form ("YYYY-MM-DD").
    value = self._decode()
    return self.set_shareable(date.fromisoformat(value))
|
| 494 |
+
|
| 495 |
+
def decode_datetime_string(self) -> datetime:
    """Decode an RFC 3339 date/time string (semantic tag 0) as an aware datetime."""
    # Semantic tag 0
    value = self._decode()
    match = timestamp_re.match(value)
    if match:
        (
            year,
            month,
            day,
            hour,
            minute,
            second,
            secfrac,
            offset_sign,
            offset_h,
            offset_m,
        ) = match.groups()
        if secfrac is None:
            microsecond = 0
        else:
            # Left-align and zero-pad the fractional seconds to six digits
            # so e.g. ".5" becomes 500000 microseconds.
            microsecond = int(f"{secfrac:<06}")

        if offset_h:
            # Explicit numeric UTC offset such as "+01:30" or "-05:00".
            if offset_sign == "-":
                sign = -1
            else:
                sign = 1
            hours = int(offset_h) * sign
            minutes = int(offset_m) * sign
            tz = timezone(timedelta(hours=hours, minutes=minutes))
        else:
            # No numeric offset captured (e.g. a "Z" suffix): treat as UTC.
            tz = timezone.utc

        return self.set_shareable(
            datetime(
                int(year),
                int(month),
                int(day),
                int(hour),
                int(minute),
                int(second),
                microsecond,
                tz,
            )
        )
    else:
        raise CBORDecodeValueError(f"invalid datetime string: {value!r}")
|
| 542 |
+
|
| 543 |
+
def decode_epoch_datetime(self) -> datetime:
    """Decode a POSIX timestamp (semantic tag 1) as an aware UTC datetime."""
    timestamp = self._decode()

    try:
        decoded = datetime.fromtimestamp(timestamp, timezone.utc)
    except (OverflowError, OSError, ValueError) as exc:
        raise CBORDecodeValueError("error decoding datetime from epoch") from exc

    return self.set_shareable(decoded)
|
| 553 |
+
|
| 554 |
+
def decode_positive_bignum(self) -> int:
    """Decode an arbitrary-size unsigned integer (semantic tag 2)."""
    value = self._decode()
    if not isinstance(value, bytes):
        raise CBORDecodeValueError("invalid bignum value " + str(value))

    # int.from_bytes handles the RFC 8949 edge case of a zero-length
    # payload (which denotes 0); the previous int(hexlify(b""), 16)
    # raised ValueError on empty input.
    return self.set_shareable(int.from_bytes(value, "big"))
|
| 563 |
+
|
| 564 |
+
def decode_negative_bignum(self) -> int:
    # Semantic tag 3: negative bignum, defined as (-1 - n) where n is the
    # unsigned bignum payload (same wire format as tag 2).
    return self.set_shareable(-self.decode_positive_bignum() - 1)
|
| 567 |
+
|
| 568 |
+
def decode_fraction(self) -> Decimal:
    """Decode a decimal fraction (semantic tag 4): sig * 10**exp as a Decimal."""
    from decimal import Decimal

    try:
        exp, sig = self._decode()
    except (TypeError, ValueError) as e:
        raise CBORDecodeValueError("Incorrect tag 4 payload") from e

    # Rebuild the Decimal from the significand's sign/digits with the
    # decoded exponent substituted in.
    sign, digits, _ = Decimal(sig).as_tuple()
    return self.set_shareable(Decimal((sign, digits, exp)))
|
| 578 |
+
|
| 579 |
+
def decode_bigfloat(self) -> Decimal:
    """Decode a bigfloat (semantic tag 5): sig * 2**exp as a Decimal."""
    from decimal import Decimal

    try:
        exp, sig = self._decode()
    except (TypeError, ValueError) as e:
        raise CBORDecodeValueError("Incorrect tag 5 payload") from e

    scale = 2 ** Decimal(exp)
    return self.set_shareable(Decimal(sig) * scale)
|
| 589 |
+
|
| 590 |
+
def decode_stringref(self) -> str | bytes:
    # Semantic tag 25: back-reference to a previously seen string in the
    # active stringref namespace (established by tag 256).
    if self._stringref_namespace is None:
        raise CBORDecodeValueError("string reference outside of namespace")

    index: int = self._decode()
    try:
        value = self._stringref_namespace[index]
    except IndexError:
        raise CBORDecodeValueError("string reference %d not found" % index)

    return value
|
| 602 |
+
|
| 603 |
+
def decode_shareable(self) -> object:
    # Semantic tag 28: mark the next value as shareable. A slot is reserved
    # up front so nested sharedrefs (tag 29) pointing back at this value
    # can resolve while it is still being decoded.
    old_index = self._share_index
    self._share_index = len(self._shareables)
    self._shareables.append(None)
    try:
        return self._decode()
    finally:
        # Always restore the previous share index, even on decode errors.
        self._share_index = old_index
|
| 612 |
+
|
| 613 |
+
def decode_sharedref(self) -> Any:
    # Semantic tag 29: reference to a value previously marked shareable
    # (tag 28). The payload is the index into self._shareables.
    value = self._decode(unshared=True)
    try:
        shared = self._shareables[value]
    except IndexError:
        raise CBORDecodeValueError("shared reference %d not found" % value)

    if shared is None:
        # Slot was reserved but its value has not finished decoding in a
        # way that could populate it (e.g. reference to an immutable value).
        raise CBORDecodeValueError("shared value %d has not been initialized" % value)
    else:
        return shared
|
| 625 |
+
|
| 626 |
+
def decode_complex(self) -> complex:
    """Decode a complex number (semantic tag 43000) from a (real, imag) pair."""
    payload = self._decode(immutable=True, unshared=True)
    try:
        value = complex(*payload)
    except TypeError as exc:
        if isinstance(payload, tuple):
            raise CBORDecodeValueError("error decoding complex") from exc
        raise CBORDecodeValueError(
            "error decoding complex: input value was not a tuple"
        ) from None

    return self.set_shareable(value)
|
| 640 |
+
|
| 641 |
+
def decode_rational(self) -> Fraction:
    """Decode a rational number (semantic tag 30) from a (numerator, denominator) pair."""
    from fractions import Fraction

    payload = self._decode(immutable=True, unshared=True)
    try:
        value = Fraction(*payload)
    except (TypeError, ZeroDivisionError) as exc:
        if isinstance(payload, tuple):
            raise CBORDecodeValueError("error decoding rational") from exc
        raise CBORDecodeValueError(
            "error decoding rational: input value was not a tuple"
        ) from None

    return self.set_shareable(value)
|
| 657 |
+
|
| 658 |
+
def decode_regexp(self) -> re.Pattern[str]:
    """Decode a regular expression (semantic tag 35) into a compiled pattern."""
    pattern = self._decode()
    try:
        compiled = re.compile(pattern)
    except re.error as exc:
        raise CBORDecodeValueError("error decoding regular expression") from exc

    return self.set_shareable(compiled)
|
| 666 |
+
|
| 667 |
+
def decode_mime(self) -> Message:
    """Decode a MIME message (semantic tag 36) into an email Message."""
    from email.parser import Parser

    text = self._decode()
    try:
        message = Parser().parsestr(text)
    except TypeError as exc:
        raise CBORDecodeValueError("error decoding MIME message") from exc

    return self.set_shareable(message)
|
| 677 |
+
|
| 678 |
+
def decode_uuid(self) -> UUID:
    """Decode a UUID (semantic tag 37) from its 16-byte binary form."""
    from uuid import UUID

    raw = self._decode()
    try:
        value = UUID(bytes=raw)
    except (TypeError, ValueError) as exc:
        raise CBORDecodeValueError("error decoding UUID value") from exc

    return self.set_shareable(value)
|
| 688 |
+
|
| 689 |
+
def decode_stringref_namespace(self) -> Any:
    """Decode a stringref namespace (semantic tag 256)."""
    old_namespace = self._stringref_namespace
    self._stringref_namespace = []
    try:
        return self._decode()
    finally:
        # Restore the enclosing namespace even when decoding raises, so the
        # decoder is left consistent (mirrors decode_shareable; the original
        # skipped the restore on error).
        self._stringref_namespace = old_namespace
|
| 696 |
+
|
| 697 |
+
def decode_set(self) -> set[Any] | frozenset[Any]:
    """Decode a set (semantic tag 258); a frozenset in immutable contexts."""
    elements = self._decode(immutable=True)
    # Map keys / set members must be hashable, hence frozenset there.
    factory = frozenset if self._immutable else set
    return self.set_shareable(factory(elements))
|
| 703 |
+
|
| 704 |
+
def decode_ipaddress(self) -> IPv4Address | IPv6Address | CBORTag:
    """Decode an IP address (semantic tag 260) from 4/16 (address) or 6 (MAC) bytes."""
    from ipaddress import ip_address

    buf = self.decode()
    if isinstance(buf, bytes):
        if len(buf) in (4, 16):
            # IPv4 (4 bytes) or IPv6 (16 bytes).
            return self.set_shareable(ip_address(buf))
        if len(buf) == 6:
            # MAC address: no stdlib type, keep it wrapped in the tag.
            return self.set_shareable(CBORTag(260, buf))

    raise CBORDecodeValueError(f"invalid ipaddress value {buf!r}")
|
| 718 |
+
|
| 719 |
+
def decode_ipnetwork(self) -> IPv4Network | IPv6Network:
    """Decode an IP network (semantic tag 261) from a single-entry {address: prefixlen} map."""
    from ipaddress import ip_network

    net_map = self.decode()
    if isinstance(net_map, Mapping) and len(net_map) == 1:
        # The single (packed address, prefix length) pair is exactly the
        # 2-tuple form accepted by ip_network.
        net = next(iter(net_map.items()))
        try:
            return self.set_shareable(ip_network(net, strict=False))
        except (TypeError, ValueError):
            pass

    raise CBORDecodeValueError(f"invalid ipnetwork value {net_map!r}")
|
| 732 |
+
|
| 733 |
+
def decode_self_describe_cbor(self) -> Any:
    # Semantic tag 55799 only identifies the payload as CBOR; it adds no
    # semantics of its own, so simply decode and return the payload.
    return self._decode()
|
| 736 |
+
|
| 737 |
+
#
|
| 738 |
+
# Special decoders (major tag 7)
|
| 739 |
+
#
|
| 740 |
+
|
| 741 |
+
def decode_simple_value(self) -> CBORSimpleValue:
    # Major type 7, subtype 24: simple value carried in the following byte.
    # XXX Set shareable?
    return CBORSimpleValue(self.read(1)[0])
|
| 744 |
+
|
| 745 |
+
def decode_float16(self) -> float:
    # IEEE 754 half precision, big endian (">e": 2 bytes).
    return self.set_shareable(cast(float, struct.unpack(">e", self.read(2))[0]))
|
| 747 |
+
|
| 748 |
+
def decode_float32(self) -> float:
    # IEEE 754 single precision, big endian (">f": 4 bytes).
    return self.set_shareable(cast(float, struct.unpack(">f", self.read(4))[0]))
|
| 750 |
+
|
| 751 |
+
def decode_float64(self) -> float:
    # IEEE 754 double precision, big endian (">d": 8 bytes).
    return self.set_shareable(cast(float, struct.unpack(">d", self.read(8))[0]))
|
| 753 |
+
|
| 754 |
+
|
| 755 |
+
# Dispatch table: CBOR major type (initial byte >> 5) -> decoder method.
major_decoders: dict[int, Callable[[CBORDecoder, int], Any]] = {
    0: CBORDecoder.decode_uint,
    1: CBORDecoder.decode_negint,
    2: CBORDecoder.decode_bytestring,
    3: CBORDecoder.decode_string,
    4: CBORDecoder.decode_array,
    5: CBORDecoder.decode_map,
    6: CBORDecoder.decode_semantic,
    7: CBORDecoder.decode_special,
}

# Dispatch table for major type 7 subtypes (simple values, floats, break).
special_decoders: dict[int, Callable[[CBORDecoder], Any]] = {
    20: lambda self: False,
    21: lambda self: True,
    22: lambda self: None,
    23: lambda self: undefined,
    24: CBORDecoder.decode_simple_value,
    25: CBORDecoder.decode_float16,
    26: CBORDecoder.decode_float32,
    27: CBORDecoder.decode_float64,
    31: lambda self: break_marker,
}

# Dispatch table for recognized semantic tags (major type 6).
semantic_decoders: dict[int, Callable[[CBORDecoder], Any]] = {
    0: CBORDecoder.decode_datetime_string,
    1: CBORDecoder.decode_epoch_datetime,
    2: CBORDecoder.decode_positive_bignum,
    3: CBORDecoder.decode_negative_bignum,
    4: CBORDecoder.decode_fraction,
    5: CBORDecoder.decode_bigfloat,
    25: CBORDecoder.decode_stringref,
    28: CBORDecoder.decode_shareable,
    29: CBORDecoder.decode_sharedref,
    30: CBORDecoder.decode_rational,
    35: CBORDecoder.decode_regexp,
    36: CBORDecoder.decode_mime,
    37: CBORDecoder.decode_uuid,
    100: CBORDecoder.decode_epoch_date,
    256: CBORDecoder.decode_stringref_namespace,
    258: CBORDecoder.decode_set,
    260: CBORDecoder.decode_ipaddress,
    261: CBORDecoder.decode_ipnetwork,
    1004: CBORDecoder.decode_date_string,
    43000: CBORDecoder.decode_complex,
    55799: CBORDecoder.decode_self_describe_cbor,
}
|
| 801 |
+
|
| 802 |
+
|
| 803 |
+
def loads(
    s: bytes | bytearray | memoryview,
    tag_hook: Callable[[CBORDecoder, CBORTag], Any] | None = None,
    object_hook: Callable[[CBORDecoder, dict[Any, Any]], Any] | None = None,
    str_errors: Literal["strict", "error", "replace"] = "strict",
) -> Any:
    """
    Deserialize an object from a bytestring.

    :param bytes s:
        the bytestring to deserialize
    :param tag_hook:
        callable invoked as ``tag_hook(decoder, tag)`` for every
        :class:`.CBORTag` with no built-in decoder; its return value replaces
        the tag in the deserialized output
    :param object_hook:
        callable invoked as ``object_hook(decoder, dict)`` for every decoded
        dictionary; its return value replaces the dict in the deserialized
        output
    :param str_errors:
        how to handle unicode decoding errors (see the `Error Handlers`_
        section in the standard library documentation for details)
    :return:
        the deserialized object

    .. _Error Handlers: https://docs.python.org/3/library/codecs.html#error-handlers

    """
    with BytesIO(s) as fp:
        decoder = CBORDecoder(
            fp, tag_hook=tag_hook, object_hook=object_hook, str_errors=str_errors
        )
        return decoder.decode()
|
| 836 |
+
|
| 837 |
+
|
| 838 |
+
def load(
    fp: IO[bytes],
    tag_hook: Callable[[CBORDecoder, CBORTag], Any] | None = None,
    object_hook: Callable[[CBORDecoder, dict[Any, Any]], Any] | None = None,
    str_errors: Literal["strict", "error", "replace"] = "strict",
) -> Any:
    """
    Deserialize an object from an open file.

    :param fp:
        the file to read from (any file-like object opened for reading in
        binary mode)
    :param tag_hook:
        callable invoked as ``tag_hook(decoder, tag)`` for every
        :class:`.CBORTag` with no built-in decoder; its return value replaces
        the tag in the deserialized output
    :param object_hook:
        callable invoked as ``object_hook(decoder, dict)`` for every decoded
        dictionary; its return value replaces the dict in the deserialized
        output
    :param str_errors:
        how to handle unicode decoding errors (see the `Error Handlers`_
        section in the standard library documentation for details)
    :return:
        the deserialized object

    .. _Error Handlers: https://docs.python.org/3/library/codecs.html#error-handlers

    """
    decoder = CBORDecoder(
        fp, tag_hook=tag_hook, object_hook=object_hook, str_errors=str_errors
    )
    return decoder.decode()
|
lib/python3.13/site-packages/cbor2/_encoder.py
ADDED
|
@@ -0,0 +1,838 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
import re
|
| 5 |
+
import struct
|
| 6 |
+
import sys
|
| 7 |
+
from collections import OrderedDict, defaultdict
|
| 8 |
+
from collections.abc import Callable, Generator, Mapping, Sequence, Set
|
| 9 |
+
from contextlib import contextmanager
|
| 10 |
+
from datetime import date, datetime, time, tzinfo
|
| 11 |
+
from functools import wraps
|
| 12 |
+
from io import BytesIO
|
| 13 |
+
from sys import modules
|
| 14 |
+
from typing import IO, TYPE_CHECKING, Any, cast
|
| 15 |
+
|
| 16 |
+
from ._types import (
|
| 17 |
+
CBOREncodeTypeError,
|
| 18 |
+
CBOREncodeValueError,
|
| 19 |
+
CBORSimpleValue,
|
| 20 |
+
CBORTag,
|
| 21 |
+
FrozenDict,
|
| 22 |
+
UndefinedType,
|
| 23 |
+
undefined,
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
if TYPE_CHECKING:
|
| 27 |
+
from decimal import Decimal
|
| 28 |
+
from email.message import Message
|
| 29 |
+
from fractions import Fraction
|
| 30 |
+
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
| 31 |
+
from uuid import UUID
|
| 32 |
+
|
| 33 |
+
if sys.version_info >= (3, 12):
|
| 34 |
+
from collections.abc import Buffer
|
| 35 |
+
else:
|
| 36 |
+
from typing_extensions import Buffer
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def shareable_encoder(
    func: Callable[[CBOREncoder, Any], None],
) -> Callable[[CBOREncoder, Any], None]:
    """
    Wrap *func* so that cyclic data structures are handled gracefully.

    With value sharing enabled, the wrapped call marks the value as shared
    in the data stream on first encounter; on subsequent encounters a
    reference marker is written instead and *func* is not invoked again.
    With value sharing disabled, only infinite-recursion protection is
    performed.

    :rtype: Callable[[cbor2.CBOREncoder, Any], None]
    """

    @wraps(func)
    def delegate(encoder: CBOREncoder, value: Any) -> None:
        # All sharing bookkeeping lives on the encoder itself.
        encoder.encode_shared(func, value)

    return delegate
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def container_encoder(
    func: Callable[[CBOREncoder, Any], Any],
) -> Callable[[CBOREncoder, Any], Any]:
    """
    Wrap *func*, an encoder for a container holding child values, so that
    cyclic or duplicate references - to the container itself or to strings
    within it - are handled efficiently.

    With value sharing enabled, the value is marked shared in the data
    stream on first encounter; later encounters emit a reference marker
    instead of calling *func* again. With value sharing disabled, only
    infinite-recursion protection is performed.

    With string referencing enabled, and when this is the first use of this
    method while encoding a value, repeated references to long strings and
    bytearrays are replaced by references to their first occurrence; with it
    disabled, all strings and bytearrays are encoded directly.
    """

    @wraps(func)
    def delegate(encoder: CBOREncoder, value: Any) -> None:
        # All sharing/stringref bookkeeping lives on the encoder itself.
        encoder.encode_container(func, value)

    return delegate
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class CBOREncoder:
    """
    The CBOREncoder class implements a fully featured `CBOR`_ encoder with
    several extensions for handling shared references, big integers, rational
    numbers and so on. Typically the class is not used directly, but the
    :func:`dump` and :func:`dumps` functions are called to indirectly construct
    and use the class.

    When the class is constructed manually, the main entry points are
    :meth:`encode` and :meth:`encode_to_bytes`.

    .. _CBOR: https://cbor.io/
    """

    # __slots__ keeps per-instance memory low; every attribute used below
    # must be declared here.
    __slots__ = (
        "datetime_as_timestamp",
        "date_as_datetime",
        "_timezone",
        "_default",
        "value_sharing",
        "_fp",
        "_fp_write",
        "_shared_containers",
        "_encoders",
        "_canonical",
        "string_referencing",
        "string_namespacing",
        "_string_references",
        "indefinite_containers",
    )

    _fp: IO[bytes]
    # Cached bound write method of _fp; refreshed by the ``fp`` setter so the
    # hot path avoids an attribute lookup per write.
    _fp_write: Callable[[Buffer], int]

    def __init__(
        self,
        fp: IO[bytes],
        datetime_as_timestamp: bool = False,
        timezone: tzinfo | None = None,
        value_sharing: bool = False,
        default: Callable[[CBOREncoder, Any], Any] | None = None,
        canonical: bool = False,
        date_as_datetime: bool = False,
        string_referencing: bool = False,
        indefinite_containers: bool = False,
    ):
        """
        :param fp:
            the file to write to (any file-like object opened for writing in binary
            mode)
        :param datetime_as_timestamp:
            set to ``True`` to serialize datetimes as UNIX timestamps (this makes
            datetimes more concise on the wire, but loses the timezone information)
        :param timezone:
            the default timezone to use for serializing naive datetimes; if this is not
            specified naive datetimes will throw a :exc:`ValueError` when encoding is
            attempted
        :param value_sharing:
            set to ``True`` to allow more efficient serializing of repeated values and,
            more importantly, cyclic data structures, at the cost of extra line overhead
        :param default:
            a callable that is called by the encoder with two arguments (the encoder
            instance and the value being encoded) when no suitable encoder has been
            found, and should use the methods on the encoder to encode any objects it
            wants to add to the data stream
        :param canonical:
            when ``True``, use "canonical" CBOR representation; this typically involves
            sorting maps, sets, etc. into a pre-determined order ensuring that
            serializations are comparable without decoding
        :param date_as_datetime:
            set to ``True`` to serialize date objects as datetimes (CBOR tag 0), which
            was the default behavior in previous releases (cbor2 <= 4.1.2).
        :param string_referencing:
            set to ``True`` to allow more efficient serializing of repeated string
            values
        :param indefinite_containers:
            encode containers as indefinite (use stop code instead of specifying length)

        """
        self.fp = fp
        self.datetime_as_timestamp = datetime_as_timestamp
        self.date_as_datetime = date_as_datetime
        self.timezone = timezone
        self.value_sharing = value_sharing
        self.string_referencing = string_referencing
        # Namespacing starts out mirroring string_referencing: the outermost
        # container gets a string-reference namespace tag (256) when enabled.
        self.string_namespacing = string_referencing
        self.indefinite_containers = indefinite_containers
        self.default = default
        self._canonical = canonical
        self._shared_containers: dict[
            int, tuple[object, int | None]
        ] = {}  # indexes used for value sharing
        self._string_references: dict[str | bytes, int] = {}  # indexes used for string references
        self._encoders = default_encoders.copy()
        if canonical:
            # Canonical encoders override the defaults for the types they cover.
            self._encoders.update(canonical_encoders)

    def _find_encoder(self, obj_type: type) -> Callable[[CBOREncoder, Any], None] | None:
        """
        Resolve an encoder for *obj_type* by scanning the registry.

        Registry keys are either types or deferred ``(module_name, type_name)``
        tuples; a deferred key is resolved (and replaced by the real type) only
        if its module has already been imported. The first registry entry whose
        type is a superclass of *obj_type* is cached under *obj_type* itself so
        subsequent lookups hit the dict directly. Returns ``None`` when no
        encoder matches.
        """
        # Iterate over a snapshot since we mutate self._encoders in the loop.
        for type_or_tuple, enc in list(self._encoders.items()):
            if type(type_or_tuple) is tuple:
                try:
                    modname, typename = type_or_tuple
                except (TypeError, ValueError):
                    raise CBOREncodeValueError(
                        f"invalid deferred encoder type {type_or_tuple!r} (must be a "
                        "2-tuple of module name and type name, e.g. "
                        "('collections', 'defaultdict'))"
                    )

                # Only resolve if the module is already in sys.modules; this
                # avoids importing modules the application never used.
                imported_type = getattr(modules.get(modname), typename, None)
                if imported_type is not None:
                    del self._encoders[type_or_tuple]
                    self._encoders[imported_type] = enc
                    type_ = imported_type
                else:  # pragma: nocover
                    continue
            else:
                type_ = type_or_tuple

            if issubclass(obj_type, type_):
                # Cache the match for the concrete type for future lookups.
                self._encoders[obj_type] = enc
                return enc

        return None

    @property
    def fp(self) -> IO[bytes]:
        """The output stream (any binary file-like object with a ``write`` method)."""
        return self._fp

    @fp.setter
    def fp(self, value: IO[bytes]) -> None:
        try:
            if not callable(value.write):
                raise ValueError("fp.write is not callable")
        except AttributeError:
            raise ValueError("fp object has no write method")
        else:
            self._fp = value
            # Cache the bound write method for the hot encoding paths.
            self._fp_write = value.write

    @property
    def timezone(self) -> tzinfo | None:
        """Default timezone applied to naive datetimes (or ``None``)."""
        return self._timezone

    @timezone.setter
    def timezone(self, value: tzinfo | None) -> None:
        if value is None or isinstance(value, tzinfo):
            self._timezone = value
        else:
            raise ValueError("timezone must be None or a tzinfo instance")

    @property
    def default(self) -> Callable[[CBOREncoder, Any], Any] | None:
        """Fallback hook called when no registered encoder matches a value's type."""
        return self._default

    @default.setter
    def default(self, value: Callable[[CBOREncoder, Any], Any] | None) -> None:
        if value is None or callable(value):
            self._default = value
        else:
            raise ValueError("default must be None or a callable")

    @property
    def canonical(self) -> bool:
        """Whether canonical CBOR representation is in effect (read-only)."""
        return self._canonical

    @contextmanager
    def disable_value_sharing(self) -> Generator[None]:
        """
        Disable value sharing in the encoder for the duration of the context
        block.
        """
        old_value_sharing = self.value_sharing
        self.value_sharing = False
        yield
        self.value_sharing = old_value_sharing

    @contextmanager
    def disable_string_referencing(self) -> Generator[None]:
        """
        Disable tracking of string references for the duration of the
        context block.
        """
        old_string_referencing = self.string_referencing
        self.string_referencing = False
        yield
        self.string_referencing = old_string_referencing

    @contextmanager
    def disable_string_namespacing(self) -> Generator[None]:
        """
        Disable generation of new string namespaces for the duration of the
        context block.
        """
        old_string_namespacing = self.string_namespacing
        self.string_namespacing = False
        yield
        self.string_namespacing = old_string_namespacing

    def write(self, data: bytes) -> None:
        """
        Write bytes to the data stream.

        :param bytes data:
            the bytes to write
        """
        self._fp_write(data)

    def encode(self, obj: Any) -> None:
        """
        Encode the given object using CBOR.

        :param obj:
            the object to encode
        :raises CBOREncodeTypeError:
            if no encoder is registered for the object's type and no
            ``default`` hook is set
        """
        obj_type = obj.__class__
        # Fast dict hit first, then registry scan, then the user default hook.
        encoder = self._encoders.get(obj_type) or self._find_encoder(obj_type) or self._default
        if not encoder:
            raise CBOREncodeTypeError(f"cannot serialize type {obj_type.__name__}")

        encoder(self, obj)

    def encode_to_bytes(self, obj: Any) -> bytes:
        """
        Encode the given object to a byte buffer and return its value as bytes.

        This method was intended to be used from the ``default`` hook when an
        object needs to be encoded separately from the rest but while still
        taking advantage of the shared value registry.
        """
        with BytesIO() as fp:
            # Temporarily redirect output to an in-memory buffer.
            old_fp = self.fp
            self.fp = fp
            self.encode(obj)
            self.fp = old_fp
            return fp.getvalue()

    def encode_container(self, encoder: Callable[[CBOREncoder, Any], Any], value: Any) -> None:
        """
        Encode a container value, opening a string-reference namespace (tag
        256) around it when namespacing is active, then delegating to
        :meth:`encode_shared` for value-sharing / recursion handling.
        """
        if self.string_namespacing:
            # Create a new string reference domain
            self.encode_length(6, 256)

        # Nested containers must not open their own namespace.
        with self.disable_string_namespacing():
            self.encode_shared(encoder, value)

    def encode_shared(self, encoder: Callable[[CBOREncoder, Any], Any], value: Any) -> None:
        """
        Encode *value* with value-sharing semantics.

        First sighting: emit tag 0x1C ("shareable") and encode the value (when
        sharing is on), or just track it for cycle detection (when off).
        Repeat sighting: emit tag 0x1D ("sharedref") plus the stored index, or
        raise if sharing is off (a repeat here means a cycle).
        """
        value_id = id(value)
        try:
            index = self._shared_containers[id(value)][1]
        except KeyError:
            if self.value_sharing:
                # Mark the container as shareable
                self._shared_containers[value_id] = (
                    value,
                    len(self._shared_containers),
                )
                self.encode_length(6, 0x1C)
                encoder(self, value)
            else:
                # Track only for the duration of this encode so a re-entry
                # (i.e. a cycle) is detectable; index None marks "no sharing".
                self._shared_containers[value_id] = (value, None)
                try:
                    encoder(self, value)
                finally:
                    del self._shared_containers[value_id]
        else:
            if self.value_sharing:
                # Generate a reference to the previous index instead of
                # encoding this again
                self.encode_length(6, 0x1D)
                self.encode_int(cast(int, index))
            else:
                raise CBOREncodeValueError(
                    "cyclic data structure detected but value sharing is disabled"
                )

    def _stringref(self, value: str | bytes) -> bool:
        """
        Try to encode the string or bytestring as a reference.

        Returns True if a reference was generated, False if the string
        must still be emitted.
        """
        try:
            index = self._string_references[value]
            self.encode_semantic(CBORTag(25, index))
            return True
        except KeyError:
            length = len(value)
            next_index = len(self._string_references)
            # A string only earns an index when it is longer than the
            # reference that would replace it; the index breakpoints mirror
            # the integer-width breakpoints in encode_length().
            if next_index < 24:
                is_referenced = length >= 3
            elif next_index < 256:
                is_referenced = length >= 4
            elif next_index < 65536:
                is_referenced = length >= 5
            elif next_index < 4294967296:
                is_referenced = length >= 7
            else:
                is_referenced = length >= 11

            if is_referenced:
                self._string_references[value] = next_index

            return False

    def encode_length(self, major_tag: int, length: int | None) -> None:
        """
        Emit the initial byte(s) for *major_tag* (0-7) with the given length /
        value, choosing the smallest integer width that fits; ``None`` emits
        the indefinite-length marker (additional info 31).
        """
        major_tag <<= 5
        if length is None:  # Indefinite
            self._fp_write(struct.pack(">B", major_tag | 31))
        elif length < 24:
            # Small lengths are embedded directly in the initial byte.
            self._fp_write(struct.pack(">B", major_tag | length))
        elif length < 256:
            self._fp_write(struct.pack(">BB", major_tag | 24, length))
        elif length < 65536:
            self._fp_write(struct.pack(">BH", major_tag | 25, length))
        elif length < 4294967296:
            self._fp_write(struct.pack(">BL", major_tag | 26, length))
        else:
            self._fp_write(struct.pack(">BQ", major_tag | 27, length))

    def encode_break(self) -> None:
        # Break stop code for indefinite containers
        self._fp_write(struct.pack(">B", (7 << 5) | 31))

    def encode_int(self, value: int) -> None:
        """Encode an integer (major types 0/1, or bignum tags 2/3 for >64-bit)."""
        # Big integers (2 ** 64 and over)
        if value >= 18446744073709551616 or value < -18446744073709551616:
            if value >= 0:
                major_type = 0x02
            else:
                major_type = 0x03
                # Negative bignums encode the value -(n + 1).
                value = -value - 1

            payload = value.to_bytes((value.bit_length() + 7) // 8, "big")
            self.encode_semantic(CBORTag(major_type, payload))
        elif value >= 0:
            self.encode_length(0, value)
        else:
            # Major type 1 stores -(n + 1).
            self.encode_length(1, -(value + 1))

    def encode_bytestring(self, value: bytes) -> None:
        """Encode a byte string (major type 2), as a stringref if possible."""
        if self.string_referencing:
            if self._stringref(value):
                return

        self.encode_length(2, len(value))
        self._fp_write(value)

    def encode_bytearray(self, value: bytearray) -> None:
        # bytearrays are encoded identically to bytes.
        self.encode_bytestring(bytes(value))

    def encode_string(self, value: str) -> None:
        """Encode a text string (major type 3, UTF-8), as a stringref if possible."""
        if self.string_referencing:
            if self._stringref(value):
                return

        encoded = value.encode("utf-8")
        self.encode_length(3, len(encoded))
        self._fp_write(encoded)

    @container_encoder
    def encode_array(self, value: Sequence[Any]) -> None:
        """Encode a sequence (major type 4)."""
        self.encode_length(4, len(value) if not self.indefinite_containers else None)
        for item in value:
            self.encode(item)

        if self.indefinite_containers:
            self.encode_break()

    @container_encoder
    def encode_map(self, value: Mapping[Any, Any]) -> None:
        """Encode a mapping (major type 5) in its natural iteration order."""
        self.encode_length(5, len(value) if not self.indefinite_containers else None)
        for key, val in value.items():
            self.encode(key)
            self.encode(val)

        if self.indefinite_containers:
            self.encode_break()

    def encode_sortable_key(self, value: Any) -> tuple[int, bytes]:
        """
        Takes a key and calculates the length of its optimal byte
        representation, along with the representation itself. This is used as
        the sorting key in CBOR's canonical representations.
        """
        # String referencing must be off here: the bytes produced for sorting
        # must not consume or depend on the reference table.
        with self.disable_string_referencing():
            encoded = self.encode_to_bytes(value)
            return len(encoded), encoded

    @container_encoder
    def encode_canonical_map(self, value: Mapping[Any, Any]) -> None:
        """Reorder keys according to Canonical CBOR specification"""
        # NOTE: the loop below rebinds ``value`` to each map value; the
        # generator has already captured value.items() by that point.
        keyed_keys = ((self.encode_sortable_key(key), key, value) for key, value in value.items())
        self.encode_length(5, len(value) if not self.indefinite_containers else None)
        for sortkey, realkey, value in sorted(keyed_keys):
            if self.string_referencing:
                # String referencing requires that the order encoded is
                # the same as the order emitted so string references are
                # generated after an order is determined
                self.encode(realkey)
            else:
                # Reuse the bytes already produced for sorting.
                self._fp_write(sortkey[1])
            self.encode(value)

        if self.indefinite_containers:
            self.encode_break()

    def encode_semantic(self, value: CBORTag) -> None:
        """Encode a tagged value (major type 6)."""
        # Nested string reference domains are distinct
        old_string_referencing = self.string_referencing
        old_string_references = self._string_references
        if value.tag == 256:
            # Tag 256 opens a fresh string-reference namespace.
            self.string_referencing = True
            self._string_references = {}

        self.encode_length(6, value.tag)
        self.encode(value.value)

        self.string_referencing = old_string_referencing
        self._string_references = old_string_references

    #
    # Semantic encoders (major tag 6)
    #

    def encode_datetime(self, value: datetime) -> None:
        # Semantic tag 0
        if not value.tzinfo:
            if self._timezone:
                value = value.replace(tzinfo=self._timezone)
            else:
                raise CBOREncodeValueError(
                    f"naive datetime {value!r} encountered and no default timezone " "has been set"
                )

        if self.datetime_as_timestamp:
            from calendar import timegm

            if not value.microsecond:
                # Keep whole-second timestamps integral for a compact encoding.
                timestamp: float = timegm(value.utctimetuple())
            else:
                timestamp = timegm(value.utctimetuple()) + value.microsecond / 1000000

            self.encode_semantic(CBORTag(1, timestamp))
        else:
            datestring = value.isoformat().replace("+00:00", "Z")
            self.encode_semantic(CBORTag(0, datestring))

    def encode_date(self, value: date) -> None:
        # Semantic tag 100
        if self.date_as_datetime:
            value = datetime.combine(value, time()).replace(tzinfo=self._timezone)
            self.encode_datetime(value)
        elif self.datetime_as_timestamp:
            # 719163 == date(1970, 1, 1).toordinal(), i.e. the UNIX epoch.
            days_since_epoch = value.toordinal() - 719163
            self.encode_semantic(CBORTag(100, days_since_epoch))
        else:
            datestring = value.isoformat()
            self.encode_semantic(CBORTag(1004, datestring))

    def encode_decimal(self, value: Decimal) -> None:
        # Semantic tag 4
        if value.is_nan():
            # half-precision NaN
            self._fp_write(b"\xf9\x7e\x00")
        elif value.is_infinite():
            # half-precision +/- infinity
            self._fp_write(b"\xf9\x7c\x00" if value > 0 else b"\xf9\xfc\x00")
        else:
            # Rebuild the significand from the decimal digit tuple.
            dt = value.as_tuple()
            sig = 0
            for digit in dt.digits:
                sig = (sig * 10) + digit
            if dt.sign:
                sig = -sig
            with self.disable_value_sharing():
                self.encode_semantic(CBORTag(4, [dt.exponent, sig]))

    def encode_stringref(self, value: str | bytes) -> None:
        # Semantic tag 25
        if not self._stringref(value):
            self.encode(value)

    def encode_rational(self, value: Fraction) -> None:
        # Semantic tag 30
        with self.disable_value_sharing():
            self.encode_semantic(CBORTag(30, [value.numerator, value.denominator]))

    def encode_regexp(self, value: re.Pattern[str]) -> None:
        # Semantic tag 35
        self.encode_semantic(CBORTag(35, str(value.pattern)))

    def encode_mime(self, value: Message) -> None:
        # Semantic tag 36
        self.encode_semantic(CBORTag(36, value.as_string()))

    def encode_uuid(self, value: UUID) -> None:
        # Semantic tag 37
        self.encode_semantic(CBORTag(37, value.bytes))

    def encode_stringref_namespace(self, value: Any) -> None:
        # Semantic tag 256
        with self.disable_string_namespacing():
            self.encode_semantic(CBORTag(256, value))

    def encode_set(self, value: Set[Any]) -> None:
        # Semantic tag 258
        self.encode_semantic(CBORTag(258, tuple(value)))

    def encode_canonical_set(self, value: Set[Any]) -> None:
        # Semantic tag 258
        # Sort members by their canonical byte representation.
        values = sorted((self.encode_sortable_key(key), key) for key in value)
        self.encode_semantic(CBORTag(258, [key[1] for key in values]))

    def encode_ipaddress(self, value: IPv4Address | IPv6Address) -> None:
        # Semantic tag 260
        self.encode_semantic(CBORTag(260, value.packed))

    def encode_ipnetwork(self, value: IPv4Network | IPv6Network) -> None:
        # Semantic tag 261
        self.encode_semantic(CBORTag(261, {value.network_address.packed: value.prefixlen}))

    #
    # Special encoders (major tag 7)
    #

    def encode_simple_value(self, value: CBORSimpleValue) -> None:
        """Encode a CBOR simple value (major type 7, values 0-255)."""
        if value.value < 24:
            self._fp_write(struct.pack(">B", 0xE0 | value.value))
        else:
            self._fp_write(struct.pack(">BB", 0xF8, value.value))

    def encode_float(self, value: float) -> None:
        # Handle special values efficiently
        if math.isnan(value):
            self._fp_write(b"\xf9\x7e\x00")
        elif math.isinf(value):
            self._fp_write(b"\xf9\x7c\x00" if value > 0 else b"\xf9\xfc\x00")
        else:
            # Always double precision (0xFB) in non-canonical mode.
            self._fp_write(struct.pack(">Bd", 0xFB, value))

    def encode_complex(self, value: complex) -> None:
        # Semantic tag 43000
        with self.disable_value_sharing():
            self.encode_semantic(CBORTag(43000, [value.real, value.imag]))

    def encode_minimal_float(self, value: float) -> None:
        """Encode a float in the shortest width that round-trips exactly."""
        # Handle special values efficiently
        if math.isnan(value):
            self._fp_write(b"\xf9\x7e\x00")
        elif math.isinf(value):
            self._fp_write(b"\xf9\x7c\x00" if value > 0 else b"\xf9\xfc\x00")
        else:
            # Try each encoding in turn from longest to shortest
            encoded = struct.pack(">Bd", 0xFB, value)
            for format, tag in [(">Bf", 0xFA), (">Be", 0xF9)]:
                try:
                    new_encoded = struct.pack(format, tag, value)
                    # Check if encoding as low-byte float loses precision
                    if struct.unpack(format, new_encoded)[1] == value:
                        encoded = new_encoded
                    else:
                        break
                except OverflowError:
                    # Value does not fit the narrower format.
                    break

            self._fp_write(encoded)

    def encode_boolean(self, value: bool) -> None:
        self._fp_write(b"\xf5" if value else b"\xf4")

    def encode_none(self, value: None) -> None:
        self._fp_write(b"\xf6")

    def encode_undefined(self, value: UndefinedType) -> None:
        self._fp_write(b"\xf7")
|
| 672 |
+
|
| 673 |
+
|
| 674 |
+
# Registry mapping Python types to their CBOREncoder methods. Keys are either
# concrete types or deferred ``(module_name, type_name)`` tuples, which are
# resolved lazily by CBOREncoder._find_encoder so that modules like ``uuid``
# or ``ipaddress`` are not imported unless the application already uses them.
default_encoders: dict[type | tuple[str, str], Callable[[CBOREncoder, Any], None]] = {
    bytes: CBOREncoder.encode_bytestring,
    bytearray: CBOREncoder.encode_bytearray,
    str: CBOREncoder.encode_string,
    int: CBOREncoder.encode_int,
    float: CBOREncoder.encode_float,
    complex: CBOREncoder.encode_complex,
    ("decimal", "Decimal"): CBOREncoder.encode_decimal,
    bool: CBOREncoder.encode_boolean,
    type(None): CBOREncoder.encode_none,
    tuple: CBOREncoder.encode_array,
    list: CBOREncoder.encode_array,
    dict: CBOREncoder.encode_map,
    defaultdict: CBOREncoder.encode_map,
    OrderedDict: CBOREncoder.encode_map,
    FrozenDict: CBOREncoder.encode_map,
    type(undefined): CBOREncoder.encode_undefined,
    datetime: CBOREncoder.encode_datetime,
    date: CBOREncoder.encode_date,
    re.Pattern: CBOREncoder.encode_regexp,
    ("fractions", "Fraction"): CBOREncoder.encode_rational,
    ("email.message", "Message"): CBOREncoder.encode_mime,
    ("uuid", "UUID"): CBOREncoder.encode_uuid,
    ("ipaddress", "IPv4Address"): CBOREncoder.encode_ipaddress,
    ("ipaddress", "IPv6Address"): CBOREncoder.encode_ipaddress,
    ("ipaddress", "IPv4Network"): CBOREncoder.encode_ipnetwork,
    ("ipaddress", "IPv6Network"): CBOREncoder.encode_ipnetwork,
    CBORSimpleValue: CBOREncoder.encode_simple_value,
    CBORTag: CBOREncoder.encode_semantic,
    set: CBOREncoder.encode_set,
    frozenset: CBOREncoder.encode_set,
}
|
| 706 |
+
|
| 707 |
+
|
| 708 |
+
# Overrides applied on top of ``default_encoders`` when the encoder is
# constructed with ``canonical=True``: floats shrink to the smallest exact
# width, and maps/sets are emitted in a deterministic sorted order.
canonical_encoders: dict[type | tuple[str, str], Callable[[CBOREncoder, Any], None]] = {
    float: CBOREncoder.encode_minimal_float,
    dict: CBOREncoder.encode_canonical_map,
    defaultdict: CBOREncoder.encode_canonical_map,
    OrderedDict: CBOREncoder.encode_canonical_map,
    FrozenDict: CBOREncoder.encode_canonical_map,
    set: CBOREncoder.encode_canonical_set,
    frozenset: CBOREncoder.encode_canonical_set,
}
|
| 717 |
+
|
| 718 |
+
|
| 719 |
+
def dumps(
    obj: object,
    datetime_as_timestamp: bool = False,
    timezone: tzinfo | None = None,
    value_sharing: bool = False,
    default: Callable[[CBOREncoder, Any], None] | None = None,
    canonical: bool = False,
    date_as_datetime: bool = False,
    string_referencing: bool = False,
    indefinite_containers: bool = False,
) -> bytes:
    """
    Serialize an object to a bytestring.

    :param obj:
        the object to serialize
    :param datetime_as_timestamp:
        set to ``True`` to serialize datetimes as UNIX timestamps (this makes datetimes
        more concise on the wire, but loses the timezone information)
    :param timezone:
        the default timezone to use for serializing naive datetimes; if this is not
        specified naive datetimes will throw a :exc:`ValueError` when encoding is
        attempted
    :param value_sharing:
        set to ``True`` to allow more efficient serializing of repeated values
        and, more importantly, cyclic data structures, at the cost of extra
        line overhead
    :param default:
        a callable that is called by the encoder with two arguments (the encoder
        instance and the value being encoded) when no suitable encoder has been found,
        and should use the methods on the encoder to encode any objects it wants to add
        to the data stream
    :param canonical:
        when ``True``, use "canonical" CBOR representation; this typically involves
        sorting maps, sets, etc. into a pre-determined order ensuring that
        serializations are comparable without decoding
    :param date_as_datetime:
        set to ``True`` to serialize date objects as datetimes (CBOR tag 0), which was
        the default behavior in previous releases (cbor2 <= 4.1.2).
    :param string_referencing:
        set to ``True`` to allow more efficient serializing of repeated string values
    :param indefinite_containers:
        encode containers as indefinite (use stop code instead of specifying length)
    :return: the serialized output

    """
    # Collect the keyword options once so the encoder construction below
    # stays readable.
    options = dict(
        datetime_as_timestamp=datetime_as_timestamp,
        timezone=timezone,
        value_sharing=value_sharing,
        default=default,
        canonical=canonical,
        date_as_datetime=date_as_datetime,
        string_referencing=string_referencing,
        indefinite_containers=indefinite_containers,
    )
    with BytesIO() as buffer:
        encoder = CBOREncoder(buffer, **options)
        encoder.encode(obj)
        return buffer.getvalue()
|
| 778 |
+
|
| 779 |
+
|
| 780 |
+
def dump(
    obj: object,
    fp: IO[bytes],
    datetime_as_timestamp: bool = False,
    timezone: tzinfo | None = None,
    value_sharing: bool = False,
    default: Callable[[CBOREncoder, Any], None] | None = None,
    canonical: bool = False,
    date_as_datetime: bool = False,
    string_referencing: bool = False,
    indefinite_containers: bool = False,
) -> None:
    """
    Serialize an object to a file.

    :param obj:
        the object to serialize
    :param fp:
        the file to write to (any file-like object opened for writing in binary mode)
    :param datetime_as_timestamp:
        set to ``True`` to serialize datetimes as UNIX timestamps (this makes datetimes
        more concise on the wire, but loses the timezone information)
    :param timezone:
        the default timezone to use for serializing naive datetimes; if this is not
        specified naive datetimes will throw a :exc:`ValueError` when encoding is
        attempted
    :param value_sharing:
        set to ``True`` to allow more efficient serializing of repeated values
        and, more importantly, cyclic data structures, at the cost of extra
        line overhead
    :param default:
        a callable that is called by the encoder with two arguments (the encoder
        instance and the value being encoded) when no suitable encoder has been found,
        and should use the methods on the encoder to encode any objects it wants to add
        to the data stream
    :param canonical:
        when ``True``, use "canonical" CBOR representation; this typically involves
        sorting maps, sets, etc. into a pre-determined order ensuring that
        serializations are comparable without decoding
    :param date_as_datetime:
        set to ``True`` to serialize date objects as datetimes (CBOR tag 0), which was
        the default behavior in previous releases (cbor2 <= 4.1.2).
    :param string_referencing:
        set to ``True`` to allow more efficient serializing of repeated string values
    :param indefinite_containers:
        encode containers as indefinite (use stop code instead of specifying length)

    """
    # Build a one-shot encoder bound to the caller's stream and run it.
    encoder = CBOREncoder(
        fp,
        datetime_as_timestamp=datetime_as_timestamp,
        timezone=timezone,
        value_sharing=value_sharing,
        default=default,
        canonical=canonical,
        date_as_datetime=date_as_datetime,
        string_referencing=string_referencing,
        indefinite_containers=indefinite_containers,
    )
    encoder.encode(obj)
|
lib/python3.13/site-packages/cbor2/_types.py
ADDED
|
@@ -0,0 +1,231 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import threading
|
| 4 |
+
from collections import namedtuple
|
| 5 |
+
from collections.abc import Iterable, Iterator, Mapping
|
| 6 |
+
from functools import total_ordering
|
| 7 |
+
from reprlib import recursive_repr
|
| 8 |
+
from typing import Any, TypeVar
|
| 9 |
+
|
| 10 |
+
KT = TypeVar("KT")
|
| 11 |
+
VT_co = TypeVar("VT_co", covariant=True)
|
| 12 |
+
|
| 13 |
+
thread_locals = threading.local()
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class CBORError(Exception):
|
| 17 |
+
"""Base class for errors that occur during CBOR encoding or decoding."""
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class CBOREncodeError(CBORError):
|
| 21 |
+
"""Raised for exceptions occurring during CBOR encoding."""
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class CBOREncodeTypeError(CBOREncodeError, TypeError):
|
| 25 |
+
"""Raised when attempting to encode a type that cannot be serialized."""
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class CBOREncodeValueError(CBOREncodeError, ValueError):
|
| 29 |
+
"""Raised when the CBOR encoder encounters an invalid value."""
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class CBORDecodeError(CBORError):
|
| 33 |
+
"""Raised for exceptions occurring during CBOR decoding."""
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class CBORDecodeValueError(CBORDecodeError, ValueError):
|
| 37 |
+
"""Raised when the CBOR stream being decoded contains an invalid value."""
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class CBORDecodeEOF(CBORDecodeError, EOFError):
|
| 41 |
+
"""Raised when decoding unexpectedly reaches EOF."""
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@total_ordering
|
| 45 |
+
class CBORTag:
|
| 46 |
+
"""
|
| 47 |
+
Represents a CBOR semantic tag.
|
| 48 |
+
|
| 49 |
+
:param int tag: tag number
|
| 50 |
+
:param value: encapsulated value (any object)
|
| 51 |
+
"""
|
| 52 |
+
|
| 53 |
+
__slots__ = "tag", "value"
|
| 54 |
+
|
| 55 |
+
def __init__(self, tag: str | int, value: Any) -> None:
|
| 56 |
+
if not isinstance(tag, int) or tag not in range(2**64):
|
| 57 |
+
raise TypeError("CBORTag tags must be positive integers less than 2**64")
|
| 58 |
+
self.tag = tag
|
| 59 |
+
self.value = value
|
| 60 |
+
|
| 61 |
+
def __eq__(self, other: object) -> bool:
|
| 62 |
+
if isinstance(other, CBORTag):
|
| 63 |
+
return (self.tag, self.value) == (other.tag, other.value)
|
| 64 |
+
|
| 65 |
+
return NotImplemented
|
| 66 |
+
|
| 67 |
+
def __le__(self, other: object) -> bool:
|
| 68 |
+
if isinstance(other, CBORTag):
|
| 69 |
+
return (self.tag, self.value) <= (other.tag, other.value)
|
| 70 |
+
|
| 71 |
+
return NotImplemented
|
| 72 |
+
|
| 73 |
+
@recursive_repr()
|
| 74 |
+
def __repr__(self) -> str:
|
| 75 |
+
return f"CBORTag({self.tag}, {self.value!r})"
|
| 76 |
+
|
| 77 |
+
def __hash__(self) -> int:
|
| 78 |
+
self_id = id(self)
|
| 79 |
+
try:
|
| 80 |
+
running_hashes = thread_locals.running_hashes
|
| 81 |
+
except AttributeError:
|
| 82 |
+
running_hashes = thread_locals.running_hashes = set()
|
| 83 |
+
|
| 84 |
+
if self_id in running_hashes:
|
| 85 |
+
raise RuntimeError(
|
| 86 |
+
"This CBORTag is not hashable because it contains a reference to itself"
|
| 87 |
+
)
|
| 88 |
+
|
| 89 |
+
running_hashes.add(self_id)
|
| 90 |
+
try:
|
| 91 |
+
return hash((self.tag, self.value))
|
| 92 |
+
finally:
|
| 93 |
+
running_hashes.remove(self_id)
|
| 94 |
+
if not running_hashes:
|
| 95 |
+
del thread_locals.running_hashes
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class CBORSimpleValue(namedtuple("CBORSimpleValue", ["value"])):
|
| 99 |
+
"""
|
| 100 |
+
Represents a CBOR "simple value".
|
| 101 |
+
|
| 102 |
+
:param int value: the value (0-255)
|
| 103 |
+
"""
|
| 104 |
+
|
| 105 |
+
__slots__ = ()
|
| 106 |
+
|
| 107 |
+
value: int
|
| 108 |
+
|
| 109 |
+
def __hash__(self) -> int:
|
| 110 |
+
return hash(self.value)
|
| 111 |
+
|
| 112 |
+
def __new__(cls, value: int) -> CBORSimpleValue:
|
| 113 |
+
if value < 0 or value > 255 or 23 < value < 32:
|
| 114 |
+
raise TypeError("simple value out of range (0..23, 32..255)")
|
| 115 |
+
|
| 116 |
+
return super().__new__(cls, value)
|
| 117 |
+
|
| 118 |
+
def __eq__(self, other: object) -> bool:
|
| 119 |
+
if isinstance(other, int):
|
| 120 |
+
return self.value == other
|
| 121 |
+
elif isinstance(other, CBORSimpleValue):
|
| 122 |
+
return self.value == other.value
|
| 123 |
+
|
| 124 |
+
return NotImplemented
|
| 125 |
+
|
| 126 |
+
def __ne__(self, other: object) -> bool:
|
| 127 |
+
if isinstance(other, int):
|
| 128 |
+
return self.value != other
|
| 129 |
+
elif isinstance(other, CBORSimpleValue):
|
| 130 |
+
return self.value != other.value
|
| 131 |
+
|
| 132 |
+
return NotImplemented
|
| 133 |
+
|
| 134 |
+
def __lt__(self, other: object) -> bool:
|
| 135 |
+
if isinstance(other, int):
|
| 136 |
+
return self.value < other
|
| 137 |
+
elif isinstance(other, CBORSimpleValue):
|
| 138 |
+
return self.value < other.value
|
| 139 |
+
|
| 140 |
+
return NotImplemented
|
| 141 |
+
|
| 142 |
+
def __le__(self, other: object) -> bool:
|
| 143 |
+
if isinstance(other, int):
|
| 144 |
+
return self.value <= other
|
| 145 |
+
elif isinstance(other, CBORSimpleValue):
|
| 146 |
+
return self.value <= other.value
|
| 147 |
+
|
| 148 |
+
return NotImplemented
|
| 149 |
+
|
| 150 |
+
def __ge__(self, other: object) -> bool:
|
| 151 |
+
if isinstance(other, int):
|
| 152 |
+
return self.value >= other
|
| 153 |
+
elif isinstance(other, CBORSimpleValue):
|
| 154 |
+
return self.value >= other.value
|
| 155 |
+
|
| 156 |
+
return NotImplemented
|
| 157 |
+
|
| 158 |
+
def __gt__(self, other: object) -> bool:
|
| 159 |
+
if isinstance(other, int):
|
| 160 |
+
return self.value > other
|
| 161 |
+
elif isinstance(other, CBORSimpleValue):
|
| 162 |
+
return self.value > other.value
|
| 163 |
+
|
| 164 |
+
return NotImplemented
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
class FrozenDict(Mapping[KT, VT_co]):
|
| 168 |
+
"""
|
| 169 |
+
A hashable, immutable mapping type.
|
| 170 |
+
|
| 171 |
+
The arguments to ``FrozenDict`` are processed just like those to ``dict``.
|
| 172 |
+
"""
|
| 173 |
+
|
| 174 |
+
def __init__(self, *args: Mapping[KT, VT_co] | Iterable[tuple[KT, VT_co]]) -> None:
|
| 175 |
+
self._d: dict[KT, VT_co] = dict(*args)
|
| 176 |
+
self._hash: int | None = None
|
| 177 |
+
|
| 178 |
+
def __iter__(self) -> Iterator[KT]:
|
| 179 |
+
return iter(self._d)
|
| 180 |
+
|
| 181 |
+
def __len__(self) -> int:
|
| 182 |
+
return len(self._d)
|
| 183 |
+
|
| 184 |
+
def __getitem__(self, key: KT) -> VT_co:
|
| 185 |
+
return self._d[key]
|
| 186 |
+
|
| 187 |
+
def __repr__(self) -> str:
|
| 188 |
+
return f"{self.__class__.__name__}({self._d})"
|
| 189 |
+
|
| 190 |
+
def __hash__(self) -> int:
|
| 191 |
+
if self._hash is None:
|
| 192 |
+
self._hash = hash((frozenset(self), frozenset(self.values())))
|
| 193 |
+
|
| 194 |
+
return self._hash
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
class UndefinedType:
|
| 198 |
+
__slots__ = ()
|
| 199 |
+
|
| 200 |
+
def __new__(cls: type[UndefinedType]) -> UndefinedType:
|
| 201 |
+
try:
|
| 202 |
+
return undefined
|
| 203 |
+
except NameError:
|
| 204 |
+
return super().__new__(cls)
|
| 205 |
+
|
| 206 |
+
def __repr__(self) -> str:
|
| 207 |
+
return "undefined"
|
| 208 |
+
|
| 209 |
+
def __bool__(self) -> bool:
|
| 210 |
+
return False
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
class BreakMarkerType:
|
| 214 |
+
__slots__ = ()
|
| 215 |
+
|
| 216 |
+
def __new__(cls: type[BreakMarkerType]) -> BreakMarkerType:
|
| 217 |
+
try:
|
| 218 |
+
return break_marker
|
| 219 |
+
except NameError:
|
| 220 |
+
return super().__new__(cls)
|
| 221 |
+
|
| 222 |
+
def __repr__(self) -> str:
|
| 223 |
+
return "break_marker"
|
| 224 |
+
|
| 225 |
+
def __bool__(self) -> bool:
|
| 226 |
+
return True
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
#: Represents the "undefined" value.
|
| 230 |
+
undefined = UndefinedType()
|
| 231 |
+
break_marker = BreakMarkerType()
|
lib/python3.13/site-packages/cbor2/encoder.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from warnings import warn
|
| 2 |
+
|
| 3 |
+
from ._encoder import CBOREncoder as CBOREncoder
|
| 4 |
+
from ._encoder import dump as dump
|
| 5 |
+
from ._encoder import dumps as dumps
|
| 6 |
+
from ._encoder import shareable_encoder as shareable_encoder
|
| 7 |
+
|
| 8 |
+
warn(
|
| 9 |
+
"The cbor2.encoder module has been deprecated. Instead import everything directly from cbor2."
|
| 10 |
+
)
|
lib/python3.13/site-packages/cbor2/py.typed
ADDED
|
File without changes
|
lib/python3.13/site-packages/cbor2/types.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from warnings import warn
|
| 2 |
+
|
| 3 |
+
from ._types import CBORDecodeEOF as CBORDecodeEOF
|
| 4 |
+
from ._types import CBORDecodeError as CBORDecodeError
|
| 5 |
+
from ._types import CBORDecodeValueError as CBORDecodeValueError
|
| 6 |
+
from ._types import CBOREncodeError as CBOREncodeError
|
| 7 |
+
from ._types import CBOREncodeTypeError as CBOREncodeTypeError
|
| 8 |
+
from ._types import CBOREncodeValueError as CBOREncodeValueError
|
| 9 |
+
from ._types import CBORError as CBORError
|
| 10 |
+
from ._types import CBORSimpleValue as CBORSimpleValue
|
| 11 |
+
from ._types import CBORTag as CBORTag
|
| 12 |
+
from ._types import FrozenDict as FrozenDict
|
| 13 |
+
from ._types import undefined as undefined
|
| 14 |
+
|
| 15 |
+
warn("The cbor2.types module has been deprecated. Instead import everything directly from cbor2.")
|
lib/python3.13/site-packages/click-8.3.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
uv
|
lib/python3.13/site-packages/click-8.3.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
click-8.3.1.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
|
| 2 |
+
click-8.3.1.dist-info/METADATA,sha256=XZeBrMAE0ghTE88SjfrSDuSyNCpBPplxJR1tbwD9oZg,2621
|
| 3 |
+
click-8.3.1.dist-info/RECORD,,
|
| 4 |
+
click-8.3.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 5 |
+
click-8.3.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
|
| 6 |
+
click-8.3.1.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475
|
| 7 |
+
click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473
|
| 8 |
+
click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693
|
| 9 |
+
click/_termui_impl.py,sha256=rgCb3On8X5A4200rA5L6i13u5iapmFer7sru57Jy6zA,27093
|
| 10 |
+
click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400
|
| 11 |
+
click/_utils.py,sha256=kZwtTf5gMuCilJJceS2iTCvRvCY-0aN5rJq8gKw7p8g,943
|
| 12 |
+
click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465
|
| 13 |
+
click/core.py,sha256=U6Bfxt8GkjNDqyJ0HqXvluJHtyZ4sY5USAvM1Cdq7mQ,132105
|
| 14 |
+
click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461
|
| 15 |
+
click/exceptions.py,sha256=8utf8w6V5hJXMnO_ic1FNrtbwuEn1NUu1aDwV8UqnG4,9954
|
| 16 |
+
click/formatting.py,sha256=RVfwwr0rwWNpgGr8NaHodPzkIr7_tUyVh_nDdanLMNc,9730
|
| 17 |
+
click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923
|
| 18 |
+
click/parser.py,sha256=Q31pH0FlQZEq-UXE_ABRzlygEfvxPTuZbWNh4xfXmzw,19010
|
| 19 |
+
click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 20 |
+
click/shell_completion.py,sha256=Cc4GQUFuWpfQBa9sF5qXeeYI7n3tI_1k6ZdSn4BZbT0,20994
|
| 21 |
+
click/termui.py,sha256=hqCEjNndU-nzW08nRAkBaVgfZp_FdCA9KxfIWlKYaMc,31037
|
| 22 |
+
click/testing.py,sha256=EERbzcl1br0mW0qBS9EqkknfNfXB9WQEW0ELIpkvuSs,19102
|
| 23 |
+
click/types.py,sha256=ek54BNSFwPKsqtfT7jsqcc4WHui8AIFVMKM4oVZIXhc,39927
|
| 24 |
+
click/utils.py,sha256=gCUoewdAhA-QLBUUHxrLh4uj6m7T1WjZZMNPvR0I7YA,20257
|
lib/python3.13/site-packages/dill-0.4.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
uv
|
lib/python3.13/site-packages/dill-0.4.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2004-2016 California Institute of Technology.
|
| 2 |
+
Copyright (c) 2016-2025 The Uncertainty Quantification Foundation.
|
| 3 |
+
All rights reserved.
|
| 4 |
+
|
| 5 |
+
This software is available subject to the conditions and terms laid
|
| 6 |
+
out below. By downloading and using this software you are agreeing
|
| 7 |
+
to the following conditions.
|
| 8 |
+
|
| 9 |
+
Redistribution and use in source and binary forms, with or without
|
| 10 |
+
modification, are permitted provided that the following conditions
|
| 11 |
+
are met:
|
| 12 |
+
|
| 13 |
+
- Redistributions of source code must retain the above copyright
|
| 14 |
+
notice, this list of conditions and the following disclaimer.
|
| 15 |
+
|
| 16 |
+
- Redistributions in binary form must reproduce the above copyright
|
| 17 |
+
notice, this list of conditions and the following disclaimer in the
|
| 18 |
+
documentation and/or other materials provided with the distribution.
|
| 19 |
+
|
| 20 |
+
- Neither the names of the copyright holders nor the names of any of
|
| 21 |
+
the contributors may be used to endorse or promote products derived
|
| 22 |
+
from this software without specific prior written permission.
|
| 23 |
+
|
| 24 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 25 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
|
| 26 |
+
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
| 27 |
+
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
|
| 28 |
+
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
| 29 |
+
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
| 30 |
+
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
| 31 |
+
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
| 32 |
+
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
| 33 |
+
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
| 34 |
+
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 35 |
+
|
lib/python3.13/site-packages/dill-0.4.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,281 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: dill
|
| 3 |
+
Version: 0.4.0
|
| 4 |
+
Summary: serialize all of Python
|
| 5 |
+
Home-page: https://github.com/uqfoundation/dill
|
| 6 |
+
Download-URL: https://pypi.org/project/dill/#files
|
| 7 |
+
Author: Mike McKerns
|
| 8 |
+
Author-email: mmckerns@uqfoundation.org
|
| 9 |
+
Maintainer: Mike McKerns
|
| 10 |
+
Maintainer-email: mmckerns@uqfoundation.org
|
| 11 |
+
License: BSD-3-Clause
|
| 12 |
+
Project-URL: Documentation, http://dill.rtfd.io
|
| 13 |
+
Project-URL: Source Code, https://github.com/uqfoundation/dill
|
| 14 |
+
Project-URL: Bug Tracker, https://github.com/uqfoundation/dill/issues
|
| 15 |
+
Platform: Linux
|
| 16 |
+
Platform: Windows
|
| 17 |
+
Platform: Mac
|
| 18 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 19 |
+
Classifier: Intended Audience :: Developers
|
| 20 |
+
Classifier: Intended Audience :: Science/Research
|
| 21 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 22 |
+
Classifier: Programming Language :: Python :: 3
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 29 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 30 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 31 |
+
Classifier: Topic :: Scientific/Engineering
|
| 32 |
+
Classifier: Topic :: Software Development
|
| 33 |
+
Requires-Python: >=3.8
|
| 34 |
+
License-File: LICENSE
|
| 35 |
+
Provides-Extra: graph
|
| 36 |
+
Requires-Dist: objgraph >=1.7.2 ; extra == 'graph'
|
| 37 |
+
Provides-Extra: profile
|
| 38 |
+
Requires-Dist: gprof2dot >=2022.7.29 ; extra == 'profile'
|
| 39 |
+
Provides-Extra: readline
|
| 40 |
+
|
| 41 |
+
-----------------------------
|
| 42 |
+
dill: serialize all of Python
|
| 43 |
+
-----------------------------
|
| 44 |
+
|
| 45 |
+
About Dill
|
| 46 |
+
==========
|
| 47 |
+
|
| 48 |
+
``dill`` extends Python's ``pickle`` module for serializing and de-serializing
|
| 49 |
+
Python objects to the majority of the built-in Python types. Serialization
|
| 50 |
+
is the process of converting an object to a byte stream, and the inverse
|
| 51 |
+
of which is converting a byte stream back to a Python object hierarchy.
|
| 52 |
+
|
| 53 |
+
``dill`` provides the user the same interface as the ``pickle`` module, and
|
| 54 |
+
also includes some additional features. In addition to pickling Python
|
| 55 |
+
objects, ``dill`` provides the ability to save the state of an interpreter
|
| 56 |
+
session in a single command. Hence, it would be feasible to save an
|
| 57 |
+
interpreter session, close the interpreter, ship the pickled file to
|
| 58 |
+
another computer, open a new interpreter, unpickle the session and
|
| 59 |
+
thus continue from the 'saved' state of the original interpreter
|
| 60 |
+
session.
|
| 61 |
+
|
| 62 |
+
``dill`` can be used to store Python objects to a file, but the primary
|
| 63 |
+
usage is to send Python objects across the network as a byte stream.
|
| 64 |
+
``dill`` is quite flexible, and allows arbitrary user defined classes
|
| 65 |
+
and functions to be serialized. Thus ``dill`` is not intended to be
|
| 66 |
+
secure against erroneously or maliciously constructed data. It is
|
| 67 |
+
left to the user to decide whether the data they unpickle is from
|
| 68 |
+
a trustworthy source.
|
| 69 |
+
|
| 70 |
+
``dill`` is part of ``pathos``, a Python framework for heterogeneous computing.
|
| 71 |
+
``dill`` is in active development, so any user feedback, bug reports, comments,
|
| 72 |
+
or suggestions are highly appreciated. A list of issues is located at
|
| 73 |
+
https://github.com/uqfoundation/dill/issues, with a legacy list maintained at
|
| 74 |
+
https://uqfoundation.github.io/project/pathos/query.
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
Major Features
|
| 78 |
+
==============
|
| 79 |
+
|
| 80 |
+
``dill`` can pickle the following standard types:
|
| 81 |
+
|
| 82 |
+
- none, type, bool, int, float, complex, bytes, str,
|
| 83 |
+
- tuple, list, dict, file, buffer, builtin,
|
| 84 |
+
- Python classes, namedtuples, dataclasses, metaclasses,
|
| 85 |
+
- instances of classes,
|
| 86 |
+
- set, frozenset, array, functions, exceptions
|
| 87 |
+
|
| 88 |
+
``dill`` can also pickle more 'exotic' standard types:
|
| 89 |
+
|
| 90 |
+
- functions with yields, nested functions, lambdas,
|
| 91 |
+
- cell, method, unboundmethod, module, code, methodwrapper,
|
| 92 |
+
- methoddescriptor, getsetdescriptor, memberdescriptor, wrapperdescriptor,
|
| 93 |
+
- dictproxy, slice, notimplemented, ellipsis, quit
|
| 94 |
+
|
| 95 |
+
``dill`` cannot yet pickle these standard types:
|
| 96 |
+
|
| 97 |
+
- frame, generator, traceback
|
| 98 |
+
|
| 99 |
+
``dill`` also provides the capability to:
|
| 100 |
+
|
| 101 |
+
- save and load Python interpreter sessions
|
| 102 |
+
- save and extract the source code from functions and classes
|
| 103 |
+
- interactively diagnose pickling errors
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
Current Release
|
| 107 |
+
===============
|
| 108 |
+
|
| 109 |
+
The latest released version of ``dill`` is available from:
|
| 110 |
+
|
| 111 |
+
https://pypi.org/project/dill
|
| 112 |
+
|
| 113 |
+
``dill`` is distributed under a 3-clause BSD license.
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
Development Version
|
| 117 |
+
===================
|
| 118 |
+
|
| 119 |
+
You can get the latest development version with all the shiny new features at:
|
| 120 |
+
|
| 121 |
+
https://github.com/uqfoundation
|
| 122 |
+
|
| 123 |
+
If you have a new contribution, please submit a pull request.
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
Installation
|
| 127 |
+
============
|
| 128 |
+
|
| 129 |
+
``dill`` can be installed with ``pip``::
|
| 130 |
+
|
| 131 |
+
$ pip install dill
|
| 132 |
+
|
| 133 |
+
To optionally include the ``objgraph`` diagnostic tool in the install::
|
| 134 |
+
|
| 135 |
+
$ pip install dill[graph]
|
| 136 |
+
|
| 137 |
+
To optionally include the ``gprof2dot`` diagnostic tool in the install::
|
| 138 |
+
|
| 139 |
+
$ pip install dill[profile]
|
| 140 |
+
|
| 141 |
+
For windows users, to optionally install session history tools::
|
| 142 |
+
|
| 143 |
+
$ pip install dill[readline]
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
Requirements
|
| 147 |
+
============
|
| 148 |
+
|
| 149 |
+
``dill`` requires:
|
| 150 |
+
|
| 151 |
+
- ``python`` (or ``pypy``), **>=3.8**
|
| 152 |
+
- ``setuptools``, **>=42**
|
| 153 |
+
|
| 154 |
+
Optional requirements:
|
| 155 |
+
|
| 156 |
+
- ``objgraph``, **>=1.7.2**
|
| 157 |
+
- ``gprof2dot``, **>=2022.7.29**
|
| 158 |
+
- ``pyreadline``, **>=1.7.1** (on windows)
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
Basic Usage
|
| 162 |
+
===========
|
| 163 |
+
|
| 164 |
+
``dill`` is a drop-in replacement for ``pickle``. Existing code can be
|
| 165 |
+
updated to allow complete pickling using::
|
| 166 |
+
|
| 167 |
+
>>> import dill as pickle
|
| 168 |
+
|
| 169 |
+
or::
|
| 170 |
+
|
| 171 |
+
>>> from dill import dumps, loads
|
| 172 |
+
|
| 173 |
+
``dumps`` converts the object to a unique byte string, and ``loads`` performs
|
| 174 |
+
the inverse operation::
|
| 175 |
+
|
| 176 |
+
>>> squared = lambda x: x**2
|
| 177 |
+
>>> loads(dumps(squared))(3)
|
| 178 |
+
9
|
| 179 |
+
|
| 180 |
+
There are a number of options to control serialization which are provided
|
| 181 |
+
as keyword arguments to several ``dill`` functions:
|
| 182 |
+
|
| 183 |
+
* with *protocol*, the pickle protocol level can be set. This uses the
|
| 184 |
+
same value as the ``pickle`` module, *DEFAULT_PROTOCOL*.
|
| 185 |
+
* with *byref=True*, ``dill`` to behave a lot more like pickle with
|
| 186 |
+
certain objects (like modules) pickled by reference as opposed to
|
| 187 |
+
attempting to pickle the object itself.
|
| 188 |
+
* with *recurse=True*, objects referred to in the global dictionary are
|
| 189 |
+
recursively traced and pickled, instead of the default behavior of
|
| 190 |
+
attempting to store the entire global dictionary.
|
| 191 |
+
* with *fmode*, the contents of the file can be pickled along with the file
|
| 192 |
+
handle, which is useful if the object is being sent over the wire to a
|
| 193 |
+
remote system which does not have the original file on disk. Options are
|
| 194 |
+
*HANDLE_FMODE* for just the handle, *CONTENTS_FMODE* for the file content
|
| 195 |
+
and *FILE_FMODE* for content and handle.
|
| 196 |
+
* with *ignore=False*, objects reconstructed with types defined in the
|
| 197 |
+
top-level script environment use the existing type in the environment
|
| 198 |
+
rather than a possibly different reconstructed type.
|
| 199 |
+
|
| 200 |
+
The default serialization can also be set globally in *dill.settings*.
|
| 201 |
+
Thus, we can modify how ``dill`` handles references to the global dictionary
|
| 202 |
+
locally or globally::
|
| 203 |
+
|
| 204 |
+
>>> import dill.settings
|
| 205 |
+
>>> dumps(absolute) == dumps(absolute, recurse=True)
|
| 206 |
+
False
|
| 207 |
+
>>> dill.settings['recurse'] = True
|
| 208 |
+
>>> dumps(absolute) == dumps(absolute, recurse=True)
|
| 209 |
+
True
|
| 210 |
+
|
| 211 |
+
``dill`` also includes source code inspection, as an alternate to pickling::
|
| 212 |
+
|
| 213 |
+
>>> import dill.source
|
| 214 |
+
>>> print(dill.source.getsource(squared))
|
| 215 |
+
squared = lambda x:x**2
|
| 216 |
+
|
| 217 |
+
To aid in debugging pickling issues, use *dill.detect* which provides
|
| 218 |
+
tools like pickle tracing::
|
| 219 |
+
|
| 220 |
+
>>> import dill.detect
|
| 221 |
+
>>> with dill.detect.trace():
|
| 222 |
+
>>> dumps(squared)
|
| 223 |
+
┬ F1: <function <lambda> at 0x7fe074f8c280>
|
| 224 |
+
├┬ F2: <function _create_function at 0x7fe074c49c10>
|
| 225 |
+
│└ # F2 [34 B]
|
| 226 |
+
├┬ Co: <code object <lambda> at 0x7fe07501eb30, file "<stdin>", line 1>
|
| 227 |
+
│├┬ F2: <function _create_code at 0x7fe074c49ca0>
|
| 228 |
+
││└ # F2 [19 B]
|
| 229 |
+
│└ # Co [87 B]
|
| 230 |
+
├┬ D1: <dict object at 0x7fe0750d4680>
|
| 231 |
+
│└ # D1 [22 B]
|
| 232 |
+
├┬ D2: <dict object at 0x7fe074c5a1c0>
|
| 233 |
+
│└ # D2 [2 B]
|
| 234 |
+
├┬ D2: <dict object at 0x7fe074f903c0>
|
| 235 |
+
│├┬ D2: <dict object at 0x7fe074f8ebc0>
|
| 236 |
+
││└ # D2 [2 B]
|
| 237 |
+
│└ # D2 [23 B]
|
| 238 |
+
└ # F1 [180 B]
|
| 239 |
+
|
| 240 |
+
With trace, we see how ``dill`` stored the lambda (``F1``) by first storing
|
| 241 |
+
``_create_function``, the underlying code object (``Co``) and ``_create_code``
|
| 242 |
+
(which is used to handle code objects), then we handle the reference to
|
| 243 |
+
the global dict (``D2``) plus other dictionaries (``D1`` and ``D2``) that
|
| 244 |
+
save the lambda object's state. A ``#`` marks when the object is actually stored.
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
More Information
|
| 248 |
+
================
|
| 249 |
+
|
| 250 |
+
Probably the best way to get started is to look at the documentation at
|
| 251 |
+
http://dill.rtfd.io. Also see ``dill.tests`` for a set of scripts that
|
| 252 |
+
demonstrate how ``dill`` can serialize different Python objects. You can
|
| 253 |
+
run the test suite with ``python -m dill.tests``. The contents of any
|
| 254 |
+
pickle file can be examined with ``undill``. As ``dill`` conforms to
|
| 255 |
+
the ``pickle`` interface, the examples and documentation found at
|
| 256 |
+
http://docs.python.org/library/pickle.html also apply to ``dill``
|
| 257 |
+
if one will ``import dill as pickle``. The source code is also generally
|
| 258 |
+
well documented, so further questions may be resolved by inspecting the
|
| 259 |
+
code itself. Please feel free to submit a ticket on github, or ask a
|
| 260 |
+
question on stackoverflow (**@Mike McKerns**).
|
| 261 |
+
If you would like to share how you use ``dill`` in your work, please send
|
| 262 |
+
an email (to **mmckerns at uqfoundation dot org**).
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
Citation
|
| 266 |
+
========
|
| 267 |
+
|
| 268 |
+
If you use ``dill`` to do research that leads to publication, we ask that you
|
| 269 |
+
acknowledge use of ``dill`` by citing the following in your publication::
|
| 270 |
+
|
| 271 |
+
M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis,
|
| 272 |
+
"Building a framework for predictive science", Proceedings of
|
| 273 |
+
the 10th Python in Science Conference, 2011;
|
| 274 |
+
http://arxiv.org/pdf/1202.1056
|
| 275 |
+
|
| 276 |
+
Michael McKerns and Michael Aivazis,
|
| 277 |
+
"pathos: a framework for heterogeneous computing", 2010- ;
|
| 278 |
+
https://uqfoundation.github.io/project/pathos
|
| 279 |
+
|
| 280 |
+
Please see https://uqfoundation.github.io/project/pathos or
|
| 281 |
+
http://arxiv.org/pdf/1202.1056 for further information.
|
lib/python3.13/site-packages/dill-0.4.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/get_gprof,sha256=RJK4bpM1yFRyWYQSCbfkDncOYNBDNQvj0orFcxRNvXM,2492
|
| 2 |
+
../../../bin/get_objgraph,sha256=xeSieqv3p1z_ySPamewOYStkTSXwAq8ZcyBqTAZRbd8,1686
|
| 3 |
+
../../../bin/undill,sha256=sRpjEOjvR2Cf4w19MNcdixMDyJV4lWBxgYhuIBPwHDk,622
|
| 4 |
+
dill-0.4.0.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
|
| 5 |
+
dill-0.4.0.dist-info/LICENSE,sha256=mYpIzbuubSrNbyOVUajkpFDpZ-udj9jiQsNOnmTkDiY,1790
|
| 6 |
+
dill-0.4.0.dist-info/METADATA,sha256=Zzr1eKtuTvrriL-H_4DmN7fxjB9tENzGRz0DwAI7u5Q,10174
|
| 7 |
+
dill-0.4.0.dist-info/RECORD,,
|
| 8 |
+
dill-0.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 9 |
+
dill-0.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
| 10 |
+
dill-0.4.0.dist-info/top_level.txt,sha256=HLSIyYIjQzJiBvs3_-16ntezE3j6mWGTW0DT1xDd7X0,5
|
| 11 |
+
dill/__diff.py,sha256=yLCKcd0GkDxR86wdE0SG2qw5Yxhon6qHeOeI1li8Vrc,7146
|
| 12 |
+
dill/__info__.py,sha256=sD2tcjIAuRmMAIqBtF4-JWK7VTCIoQx3axUNBQb-qPw,10756
|
| 13 |
+
dill/__init__.py,sha256=u5X9cMJo3zAtpGN4MUATU71s5wNvnIbXsZXi29DyvXE,3798
|
| 14 |
+
dill/_dill.py,sha256=DX51DqW1DXL_5VdZ-zF5w9avxiVOsNh6cNwMkd39Fwo,91313
|
| 15 |
+
dill/_objects.py,sha256=wPETr0_Gtj__cFAIEO6FRSC_vpBYexmlDiHl_d8geJU,19740
|
| 16 |
+
dill/_shims.py,sha256=mGN22u7TeK-keDvhGvWZVvCm-GW0TY04MXW-Hncbh7c,6635
|
| 17 |
+
dill/detect.py,sha256=L0eWBEVzqMV6nf8gd5R4G-cytzJEjBNTKAlsVVkJRlY,11207
|
| 18 |
+
dill/logger.py,sha256=WNX59lxDHuZnSMKZ294i0uCDTmbneWPLu79Be7mOa-Q,11143
|
| 19 |
+
dill/objtypes.py,sha256=NvSqfHX6AY2-QOVUv0hEda_GEcx_uiuGPQvILJu8TyY,736
|
| 20 |
+
dill/pointers.py,sha256=Q8AYqhdcvdq9X4HZ46lihM2bnZqGZG2vVoGpRxTHRNE,4467
|
| 21 |
+
dill/session.py,sha256=qc41kfzXT9MIx1KGSEIVbUsDKBHErhptpeQzwcGspkU,23541
|
| 22 |
+
dill/settings.py,sha256=hmtpAbplWE-HHxRHLPEkOxpEqWoCMTMn_FRa-1seao4,630
|
| 23 |
+
dill/source.py,sha256=aaK9d3J7yNQuVw15ESRXxVqE8DxYHf1TfSOBEQYcZmU,45507
|
| 24 |
+
dill/temp.py,sha256=QhkBKO5eNsEwSHdhiw5nCzALchAsl1yWQ1apZlVXY7w,8027
|
| 25 |
+
dill/tests/__init__.py,sha256=5z3npvfFh6Xw5xa8ML-AQk3PDyTc3MIngJlUVVf35vk,479
|
| 26 |
+
dill/tests/__main__.py,sha256=i9A86Q2NGoEhO-080sgnv2-TXxTRoH-gN2RTaPvpGvA,899
|
| 27 |
+
dill/tests/test_abc.py,sha256=OT50obWxTiqk2eZLrbfHGPU9_Wr30wFTQ1Z8CfsdClU,4227
|
| 28 |
+
dill/tests/test_check.py,sha256=OT6iu8R1BzZFQdiR3YgPoKKV7KJ2AYLJP62zTgsThdo,1396
|
| 29 |
+
dill/tests/test_classdef.py,sha256=M3zzqKruuqIFTZDUqKYbjMtJmVkI59N0DxWZe019-qM,8600
|
| 30 |
+
dill/tests/test_dataclasses.py,sha256=MPkDH5pfvYlYbwlGm-uFMN7svhPUImqKUYpROWO_Wak,890
|
| 31 |
+
dill/tests/test_detect.py,sha256=wjgVFO8UMDLaM7A1cu1bQNXgYQCOpLGYqMYxID5CNsk,4144
|
| 32 |
+
dill/tests/test_dictviews.py,sha256=qPAuqmp1yApPsARB6EKjmIWIAigK6P0L2Q68my02D-I,1337
|
| 33 |
+
dill/tests/test_diff.py,sha256=TXCpiSPVTwEVR9-cs_sr_MYP9T7R2Pw2N2PAvnOvWFI,2667
|
| 34 |
+
dill/tests/test_extendpickle.py,sha256=mcnPB0_YIqx6Ct678LUzWER1FRA1kVSRxE6PKhaN2oQ,1315
|
| 35 |
+
dill/tests/test_fglobals.py,sha256=tbdBdLzW_7rx5mR-BzeThzEhH0KbtDZfAazLBYC_h94,1676
|
| 36 |
+
dill/tests/test_file.py,sha256=XjW_EFSPsH7udEijchzef-OunBiJGJ-ieLRm8Vcai4Q,13578
|
| 37 |
+
dill/tests/test_functions.py,sha256=AVBbHCDffw4Jca3Kijjae4rQfX0ZFTgurUjPeP2qqoM,4267
|
| 38 |
+
dill/tests/test_functors.py,sha256=GSHR6UGBuiq0iUk4mk04Ly9ohSVdt9fBCrEYwByzSxk,930
|
| 39 |
+
dill/tests/test_logger.py,sha256=PiZHr3bGIh6rW_Rx3uAQlVl5oFl-n2BQjXkpPzbmjng,2385
|
| 40 |
+
dill/tests/test_mixins.py,sha256=GHO-GXeYvWzZeysJnsIDR8sTj_RADBs0-z23r_SfoNA,4007
|
| 41 |
+
dill/tests/test_module.py,sha256=BOJ1mU4qaK680gEERfFXAIElTTuOg1nY4Ne26UhcpvU,1943
|
| 42 |
+
dill/tests/test_moduledict.py,sha256=SlosX5tkmdfGLuhYweASu-WKIsrgphXrEm20veZmFJc,1182
|
| 43 |
+
dill/tests/test_nested.py,sha256=dLgI3ZKWWAyZxXfTF131EHtph8yTS-e9Wqb8Mj3HA6A,3146
|
| 44 |
+
dill/tests/test_objects.py,sha256=QyYSMiBZDyjhsGwDkdUeJH65rHODQMrLFajy-fUqzLI,1931
|
| 45 |
+
dill/tests/test_properties.py,sha256=PinOM_C2Fzfj_3FlQm8OdjAmJ8WQ38nHWKkfHI4rCeA,1346
|
| 46 |
+
dill/tests/test_pycapsule.py,sha256=XgT8VQZAAY3q7oV3N8726PnqU4GjuxSoT_FuFsoNsso,1417
|
| 47 |
+
dill/tests/test_recursive.py,sha256=Bq2Q--Ro3Mb4CppYm5K5v0__5mGcFvSEjzmZOC81C64,4182
|
| 48 |
+
dill/tests/test_registered.py,sha256=jgUBl_msBiqQTgvCC1bwKTGK-jmAz2FSE4pcJgEDRBw,1573
|
| 49 |
+
dill/tests/test_restricted.py,sha256=mk93WLtNAEQr03h4N-NZO7Wr9xPWvzgMPOPEdi4Aku4,783
|
| 50 |
+
dill/tests/test_selected.py,sha256=LjhZNntQ9KDJf9lhEYTHqzbFpsoeEFK2wobO1JT0FRg,3258
|
| 51 |
+
dill/tests/test_session.py,sha256=-xmj46QxC60MaRIljv2eVrqIQYZrIcL54PsPaTLpoXc,10161
|
| 52 |
+
dill/tests/test_source.py,sha256=EOTG_Fh0cWlICTRq04KH0Y5_G9BgGigkAOhr3CD1-dk,7059
|
| 53 |
+
dill/tests/test_sources.py,sha256=sQXVRsv_u9i1kTyJYRtX_Ykb3TgaPrQ3sI2az8bNxQQ,8672
|
| 54 |
+
dill/tests/test_temp.py,sha256=8NvuImVkYM1dw-j_nhaeDAb8Oan9whCOoehXktqurtw,2619
|
| 55 |
+
dill/tests/test_threads.py,sha256=PD8RVdtu_kCkM7AVV8a81ywVWRi1g3GyDRocx04ZRFg,1257
|
| 56 |
+
dill/tests/test_weakref.py,sha256=TfQHVMqgzeBywBiTjHj6ep8E6GF90y5SFzLgWbRbc2Q,1602
|
lib/python3.13/site-packages/dill-0.4.0.dist-info/REQUESTED
ADDED
|
File without changes
|
lib/python3.13/site-packages/dill-0.4.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.43.0)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
lib/python3.13/site-packages/dill-0.4.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
dill
|
lib/python3.13/site-packages/dns/__init__.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license

# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""dnspython DNS toolkit"""

# Names of the package's public submodules.  Nothing here imports them;
# presumably they are imported on demand by callers — confirm against the
# package layout before relying on this list being exhaustive.
__all__ = [
    "asyncbackend",
    "asyncquery",
    "asyncresolver",
    "btree",
    "btreezone",
    "dnssec",
    "dnssecalgs",
    "dnssectypes",
    "e164",
    "edns",
    "entropy",
    "exception",
    "flags",
    "immutable",
    "inet",
    "ipv4",
    "ipv6",
    "message",
    "name",
    "namedict",
    "node",
    "opcode",
    "query",
    "quic",
    "rcode",
    "rdata",
    "rdataclass",
    "rdataset",
    "rdatatype",
    "renderer",
    "resolver",
    "reversename",
    "rrset",
    "serial",
    "set",
    "tokenizer",
    "transaction",
    "tsig",
    "tsigkeyring",
    "ttl",
    "rdtypes",
    "update",
    "version",
    "versioned",
    "wire",
    "xfr",
    "zone",
    "zonetypes",
    "zonefile",
]

# Re-export the package version under the conventional dunder name.
from dns.version import version as __version__  # noqa
|
lib/python3.13/site-packages/dns/_asyncbackend.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
|
| 2 |
+
|
| 3 |
+
# This is a nullcontext for both sync and async. 3.7 has a nullcontext,
|
| 4 |
+
# but it is only for sync use.
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class NullContext:
    """A do-nothing context manager, usable with both ``with`` and
    ``async with``.

    On entry it yields the value supplied at construction time (default
    ``None``); on exit it does nothing and does not suppress exceptions.
    """

    def __init__(self, enter_result=None):
        self.enter_result = enter_result

    def __enter__(self):
        return self.enter_result

    def __exit__(self, exc_type, exc_value, traceback):
        return None

    async def __aenter__(self):
        return self.enter_result

    async def __aexit__(self, exc_type, exc_value, traceback):
        return None
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
# These are declared here so backends can import them without creating
|
| 25 |
+
# circular dependencies with dns.asyncbackend.
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class Socket:  # pragma: no cover
    """Abstract base for backend socket wrappers.

    Only records the address family and socket type; all I/O methods are
    async stubs that subclasses must override.
    """

    def __init__(self, family: int, type: int):
        self.family = family
        self.type = type

    async def close(self):
        """Close the socket (default: nothing to release)."""
        pass

    async def getpeername(self):
        raise NotImplementedError

    async def getsockname(self):
        raise NotImplementedError

    async def getpeercert(self, timeout):
        raise NotImplementedError

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        # Ensure the socket is closed when used as an async context manager.
        await self.close()
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class DatagramSocket(Socket):  # pragma: no cover
    """Abstract datagram socket; backends implement the two I/O methods."""

    async def sendto(self, what, destination, timeout):
        raise NotImplementedError

    async def recvfrom(self, size, timeout):
        raise NotImplementedError
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class StreamSocket(Socket):  # pragma: no cover
    """Abstract stream socket; backends implement the two I/O methods."""

    async def sendall(self, what, timeout):
        raise NotImplementedError

    async def recv(self, size, timeout):
        raise NotImplementedError
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class NullTransport:
    """Placeholder transport used when no HTTP backend is available;
    any attempt to connect fails."""

    async def connect_tcp(self, host, port, timeout, local_address):
        raise NotImplementedError
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class Backend:  # pragma: no cover
    """Abstract async-backend interface.

    Concrete backends override these methods; the defaults either return
    neutral values or raise ``NotImplementedError``.
    """

    def name(self) -> str:
        """Return a human-readable backend name."""
        return "unknown"

    async def make_socket(
        self,
        af,
        socktype,
        proto=0,
        source=None,
        destination=None,
        timeout=None,
        ssl_context=None,
        server_hostname=None,
    ):
        """Create and return a backend-specific socket wrapper."""
        raise NotImplementedError

    def datagram_connection_required(self):
        """Whether datagram sockets must be connected before use."""
        return False

    async def sleep(self, interval):
        """Sleep asynchronously for *interval* seconds."""
        raise NotImplementedError

    def get_transport_class(self):
        """Return the transport class to use for DNS-over-HTTPS."""
        raise NotImplementedError

    async def wait_for(self, awaitable, timeout):
        """Await *awaitable*, bounded by *timeout* when given."""
        raise NotImplementedError
|
lib/python3.13/site-packages/dns/_asyncio_backend.py
ADDED
|
@@ -0,0 +1,276 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
|
| 2 |
+
|
| 3 |
+
"""asyncio library query support"""
|
| 4 |
+
|
| 5 |
+
import asyncio
|
| 6 |
+
import socket
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
import dns._asyncbackend
|
| 10 |
+
import dns._features
|
| 11 |
+
import dns.exception
|
| 12 |
+
import dns.inet
|
| 13 |
+
|
| 14 |
+
_is_win32 = sys.platform == "win32"
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def _get_running_loop():
    """Return the current event loop.

    Prefers ``asyncio.get_running_loop()``; on very old Pythons where that
    attribute does not exist, falls back to ``asyncio.get_event_loop()``.
    """
    getter = getattr(asyncio, "get_running_loop", None)
    if getter is not None:
        return getter()
    return asyncio.get_event_loop()  # pragma: no cover
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class _DatagramProtocol:
    # Minimal asyncio datagram protocol bridging callback-style delivery
    # to a single pending Future (self.recvfrom), which the socket wrapper
    # installs before awaiting a read.
    def __init__(self):
        self.transport = None
        # Future awaited by the reader; None when no read is pending.
        self.recvfrom = None

    def connection_made(self, transport):
        self.transport = transport

    def datagram_received(self, data, addr):
        # Complete the pending read, if any; datagrams arriving while no
        # read is pending (or after completion) are dropped.
        if self.recvfrom and not self.recvfrom.done():
            self.recvfrom.set_result((data, addr))

    def error_received(self, exc):  # pragma: no cover
        if self.recvfrom and not self.recvfrom.done():
            self.recvfrom.set_exception(exc)

    def connection_lost(self, exc):
        if self.recvfrom and not self.recvfrom.done():
            if exc is None:
                # EOF we triggered.  Is there a better way to do this?
                # (raise/except so the EOFError carries a traceback)
                try:
                    raise EOFError("EOF")
                except EOFError as e:
                    self.recvfrom.set_exception(e)
            else:
                self.recvfrom.set_exception(exc)

    def close(self):
        if self.transport is not None:
            self.transport.close()
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
async def _maybe_wait_for(awaitable, timeout):
    """Await *awaitable*, bounded by *timeout* seconds when one is given.

    Converts ``asyncio.TimeoutError`` into ``dns.exception.Timeout`` so
    callers see dnspython's exception type.
    """
    if timeout is None:
        return await awaitable
    try:
        return await asyncio.wait_for(awaitable, timeout)
    except asyncio.TimeoutError:
        raise dns.exception.Timeout(timeout=timeout)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class DatagramSocket(dns._asyncbackend.DatagramSocket):
    """asyncio implementation of the abstract datagram socket."""

    def __init__(self, family, transport, protocol):
        super().__init__(family, socket.SOCK_DGRAM)
        self.transport = transport
        self.protocol = protocol

    async def sendto(self, what, destination, timeout):  # pragma: no cover
        # no timeout for asyncio sendto
        self.transport.sendto(what, destination)
        return len(what)

    async def recvfrom(self, size, timeout):
        # ignore size as there's no way I know to tell protocol about it
        # Install a fresh future on the protocol, await it (with optional
        # timeout), and always clear it afterwards so the next read can
        # install its own.
        done = _get_running_loop().create_future()
        try:
            # Only one read may be pending at a time.
            assert self.protocol.recvfrom is None
            self.protocol.recvfrom = done
            await _maybe_wait_for(done, timeout)
            return done.result()
        finally:
            self.protocol.recvfrom = None

    async def close(self):
        self.protocol.close()

    async def getpeername(self):
        return self.transport.get_extra_info("peername")

    async def getsockname(self):
        return self.transport.get_extra_info("sockname")

    async def getpeercert(self, timeout):
        # Datagram sockets have no TLS peer certificate here.
        raise NotImplementedError
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class StreamSocket(dns._asyncbackend.StreamSocket):
    """asyncio implementation of the abstract stream socket, wrapping an
    asyncio (reader, writer) pair."""

    def __init__(self, af, reader, writer):
        super().__init__(af, socket.SOCK_STREAM)
        self.reader = reader
        self.writer = writer

    async def sendall(self, what, timeout):
        # write() only buffers; the timeout applies to draining the buffer.
        self.writer.write(what)
        return await _maybe_wait_for(self.writer.drain(), timeout)

    async def recv(self, size, timeout):
        return await _maybe_wait_for(self.reader.read(size), timeout)

    async def close(self):
        self.writer.close()

    async def getpeername(self):
        return self.writer.get_extra_info("peername")

    async def getsockname(self):
        return self.writer.get_extra_info("sockname")

    async def getpeercert(self, timeout):
        # Available only when the connection was made with an SSL context.
        return self.writer.get_extra_info("peercert")
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
# DNS-over-HTTPS support is optional: the real transport is defined only
# when the "doh" feature's packages (httpx/httpcore/anyio) are available;
# otherwise _HTTPTransport degrades to a transport that cannot connect.
if dns._features.have("doh"):
    import anyio
    import httpcore
    import httpcore._backends.anyio
    import httpx

    _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend
    _CoreAnyIOStream = httpcore._backends.anyio.AnyIOStream  # pyright: ignore

    from dns.query import _compute_times, _expiration_for_this_attempt, _remaining

    class _NetworkBackend(_CoreAsyncNetworkBackend):
        # httpcore network backend that resolves hostnames with dnspython's
        # own resolver (or a fixed bootstrap address) instead of getaddrinfo.
        def __init__(self, resolver, local_port, bootstrap_address, family):
            super().__init__()
            self._local_port = local_port
            self._resolver = resolver
            self._bootstrap_address = bootstrap_address
            self._family = family
            if local_port != 0:
                raise NotImplementedError(
                    "the asyncio transport for HTTPX cannot set the local port"
                )

        async def connect_tcp(
            self, host, port, timeout=None, local_address=None, socket_options=None
        ):  # pylint: disable=signature-differs
            addresses = []
            _, expiration = _compute_times(timeout)
            if dns.inet.is_address(host):
                # Literal address: no resolution needed.
                addresses.append(host)
            elif self._bootstrap_address is not None:
                addresses.append(self._bootstrap_address)
            else:
                timeout = _remaining(expiration)
                family = self._family
                if local_address:
                    family = dns.inet.af_for_address(local_address)
                answers = await self._resolver.resolve_name(
                    host, family=family, lifetime=timeout
                )
                addresses = answers.addresses()
            # Try each address in turn; first successful connection wins.
            for address in addresses:
                try:
                    attempt_expiration = _expiration_for_this_attempt(2.0, expiration)
                    timeout = _remaining(attempt_expiration)
                    with anyio.fail_after(timeout):
                        stream = await anyio.connect_tcp(
                            remote_host=address,
                            remote_port=port,
                            local_host=local_address,
                        )
                    return _CoreAnyIOStream(stream)
                except Exception:
                    pass
            raise httpcore.ConnectError

        async def connect_unix_socket(
            self, path, timeout=None, socket_options=None
        ):  # pylint: disable=signature-differs
            raise NotImplementedError

        async def sleep(self, seconds):  # pylint: disable=signature-differs
            await anyio.sleep(seconds)

    class _HTTPTransport(httpx.AsyncHTTPTransport):
        # httpx transport wired to the resolver-aware network backend above.
        def __init__(
            self,
            *args,
            local_port=0,
            bootstrap_address=None,
            resolver=None,
            family=socket.AF_UNSPEC,
            **kwargs,
        ):
            if resolver is None and bootstrap_address is None:
                # pylint: disable=import-outside-toplevel,redefined-outer-name
                import dns.asyncresolver

                resolver = dns.asyncresolver.Resolver()
            super().__init__(*args, **kwargs)
            self._pool._network_backend = _NetworkBackend(
                resolver, local_port, bootstrap_address, family
            )

else:
    _HTTPTransport = dns._asyncbackend.NullTransport  # type: ignore
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
class Backend(dns._asyncbackend.Backend):
    """asyncio implementation of the abstract async backend."""

    def name(self):
        return "asyncio"

    async def make_socket(
        self,
        af,
        socktype,
        proto=0,
        source=None,
        destination=None,
        timeout=None,
        ssl_context=None,
        server_hostname=None,
    ):
        # Returns a DatagramSocket for SOCK_DGRAM or a StreamSocket for
        # SOCK_STREAM; other socket types are unsupported.
        loop = _get_running_loop()
        if socktype == socket.SOCK_DGRAM:
            if _is_win32 and source is None:
                # Win32 wants explicit binding before recvfrom().  This is the
                # proper fix for [#637].
                source = (dns.inet.any_for_af(af), 0)
            transport, protocol = await loop.create_datagram_endpoint(
                _DatagramProtocol,  # pyright: ignore
                source,
                family=af,
                proto=proto,
                remote_addr=destination,
            )
            return DatagramSocket(af, transport, protocol)
        elif socktype == socket.SOCK_STREAM:
            if destination is None:
                # This shouldn't happen, but we check to make code analysis software
                # happier.
                raise ValueError("destination required for stream sockets")
            (r, w) = await _maybe_wait_for(
                asyncio.open_connection(
                    destination[0],
                    destination[1],
                    ssl=ssl_context,
                    family=af,
                    proto=proto,
                    local_addr=source,
                    server_hostname=server_hostname,
                ),
                timeout,
            )
            return StreamSocket(af, r, w)
        raise NotImplementedError(
            "unsupported socket " + f"type {socktype}"
        )  # pragma: no cover

    async def sleep(self, interval):
        await asyncio.sleep(interval)

    def datagram_connection_required(self):
        return False

    def get_transport_class(self):
        return _HTTPTransport

    async def wait_for(self, awaitable, timeout):
        return await _maybe_wait_for(awaitable, timeout)
|
lib/python3.13/site-packages/dns/_ddr.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
|
| 2 |
+
#
|
| 3 |
+
# Support for Discovery of Designated Resolvers
|
| 4 |
+
|
| 5 |
+
import socket
|
| 6 |
+
import time
|
| 7 |
+
from urllib.parse import urlparse
|
| 8 |
+
|
| 9 |
+
import dns.asyncbackend
|
| 10 |
+
import dns.inet
|
| 11 |
+
import dns.name
|
| 12 |
+
import dns.nameserver
|
| 13 |
+
import dns.query
|
| 14 |
+
import dns.rdtypes.svcbbase
|
| 15 |
+
|
| 16 |
+
# The special name of the local resolver when using DDR
|
| 17 |
+
_local_resolver_name = dns.name.from_text("_dns.resolver.arpa")
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#
|
| 21 |
+
# Processing is split up into I/O independent and I/O dependent parts to
|
| 22 |
+
# make supporting sync and async versions easy.
|
| 23 |
+
#
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class _SVCBInfo:
    # One DDR candidate extracted from an SVCB record: the bootstrap
    # address to connect to, the advertised port/hostname, and the
    # nameserver objects to use if TLS validation succeeds.
    def __init__(self, bootstrap_address, port, hostname, nameservers):
        self.bootstrap_address = bootstrap_address
        self.port = port
        self.hostname = hostname
        self.nameservers = nameservers

    def ddr_check_certificate(self, cert):
        """Verify that the _SVCBInfo's address is in the cert's subjectAltName (SAN)"""
        for name, value in cert["subjectAltName"]:
            if name == "IP Address" and value == self.bootstrap_address:
                return True
        return False

    def make_tls_context(self):
        # Default-verifying context, TLS 1.2 minimum.
        ssl = dns.query.ssl
        ctx = ssl.create_default_context()
        ctx.minimum_version = ssl.TLSVersion.TLSv1_2
        return ctx

    def ddr_tls_check_sync(self, lifetime):
        # Connect to the bootstrap address, complete a TLS handshake with
        # the advertised hostname, and check the certificate covers the
        # bootstrap address.
        ctx = self.make_tls_context()
        expiration = time.time() + lifetime
        with socket.create_connection(
            (self.bootstrap_address, self.port), lifetime
        ) as s:
            with ctx.wrap_socket(s, server_hostname=self.hostname) as ts:
                ts.settimeout(dns.query._remaining(expiration))
                ts.do_handshake()
                cert = ts.getpeercert()
                return self.ddr_check_certificate(cert)

    async def ddr_tls_check_async(self, lifetime, backend=None):
        # Async analogue of ddr_tls_check_sync, using the async backend's
        # socket factory.
        if backend is None:
            backend = dns.asyncbackend.get_default_backend()
        ctx = self.make_tls_context()
        expiration = time.time() + lifetime
        async with await backend.make_socket(
            dns.inet.af_for_address(self.bootstrap_address),
            socket.SOCK_STREAM,
            0,
            None,
            (self.bootstrap_address, self.port),
            lifetime,
            ctx,
            self.hostname,
        ) as ts:
            cert = await ts.getpeercert(dns.query._remaining(expiration))
            return self.ddr_check_certificate(cert)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _extract_nameservers_from_svcb(answer):
    # Build _SVCBInfo candidates from an SVCB answer.  The answer's
    # nameserver must be a literal address; it becomes the bootstrap
    # address for every candidate.
    bootstrap_address = answer.nameserver
    if not dns.inet.is_address(bootstrap_address):
        return []
    infos = []
    for rr in answer.rrset.processing_order():
        nameservers = []
        param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.ALPN)
        if param is None:
            # No ALPN -> no usable protocols for this record.
            continue
        alpns = set(param.ids)
        host = rr.target.to_text(omit_final_dot=True)
        port = None
        param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.PORT)
        if param is not None:
            port = param.port
        # For now we ignore address hints and address resolution and always use the
        # bootstrap address
        if b"h2" in alpns:
            param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.DOHPATH)
            # NOTE: an h2 record with a missing/malformed dohpath skips the
            # whole record (continue), including any dot/doq ALPNs on it.
            if param is None or not param.value.endswith(b"{?dns}"):
                continue
            path = param.value[:-6].decode()
            if not path.startswith("/"):
                path = "/" + path
            if port is None:
                port = 443
            url = f"https://{host}:{port}{path}"
            # check the URL
            try:
                urlparse(url)
                nameservers.append(dns.nameserver.DoHNameserver(url, bootstrap_address))
            except Exception:
                # continue processing other ALPN types
                pass
        if b"dot" in alpns:
            if port is None:
                port = 853
            nameservers.append(
                dns.nameserver.DoTNameserver(bootstrap_address, port, host)
            )
        if b"doq" in alpns:
            if port is None:
                port = 853
            nameservers.append(
                dns.nameserver.DoQNameserver(bootstrap_address, port, True, host)
            )
        if len(nameservers) > 0:
            infos.append(_SVCBInfo(bootstrap_address, port, host, nameservers))
    return infos
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def _get_nameservers_sync(answer, lifetime):
    """Return a list of TLS-validated resolver nameservers extracted from an SVCB
    answer."""
    validated = []
    for candidate in _extract_nameservers_from_svcb(answer):
        # Best-effort: a candidate whose TLS check fails or errors is skipped.
        try:
            ok = candidate.ddr_tls_check_sync(lifetime)
        except Exception:
            continue
        if ok:
            validated.extend(candidate.nameservers)
    return validated
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
async def _get_nameservers_async(answer, lifetime):
    """Return a list of TLS-validated resolver nameservers extracted from an SVCB
    answer."""
    validated = []
    for candidate in _extract_nameservers_from_svcb(answer):
        # Best-effort: a candidate whose TLS check fails or errors is skipped.
        try:
            ok = await candidate.ddr_tls_check_async(lifetime)
        except Exception:
            continue
        if ok:
            validated.extend(candidate.nameservers)
    return validated
|
lib/python3.13/site-packages/dns/_features.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
|
| 2 |
+
|
| 3 |
+
import importlib.metadata
|
| 4 |
+
import itertools
|
| 5 |
+
import string
|
| 6 |
+
from typing import Dict, List, Tuple
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def _tuple_from_text(version: str) -> Tuple:
    """Convert a dotted version string into a tuple of its leading integers.

    Each dotted component contributes the integer value of its leading
    ASCII digits; conversion stops at the first component that has no
    leading digits, e.g. ``"1.2rc1"`` -> ``(1, 2)``.
    """
    parts = []
    for piece in version.split("."):
        digits = "".join(itertools.takewhile(string.digits.__contains__, piece))
        try:
            parts.append(int(digits))
        except Exception:
            # No leading digits (int("") raises): stop here.
            break
    return tuple(parts)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def _version_check(
    requirement: str,
) -> bool:
    """Is the requirement fulfilled?

    The requirement must be of the form

        package>=version
    """
    package, minimum = requirement.split(">=")
    try:
        installed = importlib.metadata.version(package)
    except Exception:
        return False
    # This shouldn't happen, but it apparently can.
    if installed is None:
        return False
    return _tuple_from_text(installed) >= _tuple_from_text(minimum)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
_cache: Dict[str, bool] = {}
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def have(feature: str) -> bool:
    """Report whether optional *feature* is usable.

    A feature is available when every optional package it needs (per
    ``_requirements``) is installed and recent enough.  Unknown
    features, missing packages, and unreadable metadata all yield
    ``False``.  Results are memoized in ``_cache``.
    """
    cached = _cache.get(feature)
    if cached is not None:
        return cached
    needed = _requirements.get(feature)
    if needed is None:
        # Cache unknown features too, for consistency rather than speed.
        _cache[feature] = False
        return False
    result = all(_version_check(req) for req in needed)
    _cache[feature] = result
    return result
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def force(feature: str, enabled: bool) -> None:
    """Override the availability of *feature*, pinning it to *enabled*.

    Useful for testing, or when importlib.metadata cannot be relied upon
    to describe the installed packages.
    """
    _cache[feature] = enabled
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
# Map of feature name -> list of "package>=minimum" requirement strings
# consumed by _version_check().  The section between the BEGIN/END markers
# is marked as generated — presumably regenerated by tooling, so avoid
# hand-editing those entries (TODO confirm the generator).
_requirements: Dict[str, List[str]] = {
    ### BEGIN generated requirements
    "dnssec": ["cryptography>=45"],
    "doh": ["httpcore>=1.0.0", "httpx>=0.28.0", "h2>=4.2.0"],
    "doq": ["aioquic>=1.2.0"],
    "idna": ["idna>=3.10"],
    "trio": ["trio>=0.30"],
    "wmi": ["wmi>=1.5.1"],
    ### END generated requirements
}
|
lib/python3.13/site-packages/dns/_immutable_ctx.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
|
| 2 |
+
|
| 3 |
+
# This implementation of the immutable decorator requires python >=
|
| 4 |
+
# 3.7, and is significantly more storage efficient when making classes
|
| 5 |
+
# with slots immutable. It's also faster.
|
| 6 |
+
|
| 7 |
+
import contextvars
|
| 8 |
+
import inspect
|
| 9 |
+
|
| 10 |
+
# Context variable recording the instance whose (wrapped) __init__ or
# __setstate__ is currently running; _Immutable permits attribute mutation
# only when the mutating object is exactly this instance.
_in__init__ = contextvars.ContextVar("_immutable_in__init__", default=False)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class _Immutable:
|
| 14 |
+
"""Immutable mixin class"""
|
| 15 |
+
|
| 16 |
+
# We set slots to the empty list to say "we don't have any attributes".
|
| 17 |
+
# We do this so that if we're mixed in with a class with __slots__, we
|
| 18 |
+
# don't cause a __dict__ to be added which would waste space.
|
| 19 |
+
|
| 20 |
+
__slots__ = ()
|
| 21 |
+
|
| 22 |
+
def __setattr__(self, name, value):
|
| 23 |
+
if _in__init__.get() is not self:
|
| 24 |
+
raise TypeError("object doesn't support attribute assignment")
|
| 25 |
+
else:
|
| 26 |
+
super().__setattr__(name, value)
|
| 27 |
+
|
| 28 |
+
def __delattr__(self, name):
|
| 29 |
+
if _in__init__.get() is not self:
|
| 30 |
+
raise TypeError("object doesn't support attribute assignment")
|
| 31 |
+
else:
|
| 32 |
+
super().__delattr__(name)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def _immutable_init(f):
|
| 36 |
+
def nf(*args, **kwargs):
|
| 37 |
+
previous = _in__init__.set(args[0])
|
| 38 |
+
try:
|
| 39 |
+
# call the actual __init__
|
| 40 |
+
f(*args, **kwargs)
|
| 41 |
+
finally:
|
| 42 |
+
_in__init__.reset(previous)
|
| 43 |
+
|
| 44 |
+
nf.__signature__ = inspect.signature(f) # pyright: ignore
|
| 45 |
+
return nf
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def immutable(cls):
    # Class decorator: make instances of *cls* immutable once construction
    # finishes (and after __setstate__, so unpickling still works).
    if _Immutable in cls.__mro__:
        # Some ancestor already has the mixin, so just make sure we keep
        # following the __init__ protocol.
        cls.__init__ = _immutable_init(cls.__init__)
        if hasattr(cls, "__setstate__"):
            cls.__setstate__ = _immutable_init(cls.__setstate__)
        ncls = cls
    else:
        # Mixin the Immutable class and follow the __init__ protocol.
        class ncls(_Immutable, cls):
            # We have to do the __slots__ declaration here too!
            __slots__ = ()

            @_immutable_init
            def __init__(self, *args, **kwargs):
                super().__init__(*args, **kwargs)

            # Only define a wrapped __setstate__ when the wrapped class
            # actually has one.
            if hasattr(cls, "__setstate__"):

                @_immutable_init
                def __setstate__(self, *args, **kwargs):
                    super().__setstate__(*args, **kwargs)

        # make ncls have the same name and module as cls
        ncls.__name__ = cls.__name__
        ncls.__qualname__ = cls.__qualname__
        ncls.__module__ = cls.__module__
    return ncls
|
lib/python3.13/site-packages/dns/_no_ssl.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import enum
|
| 2 |
+
from typing import Any
|
| 3 |
+
|
| 4 |
+
# Stand-in for ssl.CERT_NONE, for builds where the real ssl module is absent.
CERT_NONE = 0
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class TLSVersion(enum.IntEnum):
    # Minimal stand-in for ssl.TLSVersion; only the member this shim needs
    # (see SSLContext.minimum_version) is defined.
    TLSv1_2 = 12
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
# Name-compatible placeholders for the exception types TLS-aware callers
# expect to be able to catch; nothing in this module raises them.
class WantReadException(Exception):
    pass


class WantWriteException(Exception):
    pass


class SSLWantReadError(Exception):
    pass


class SSLWantWriteError(Exception):
    pass
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class SSLContext:
    """Minimal stand-in for ``ssl.SSLContext``; any attempt to actually
    perform TLS fails with "no ssl support"."""

    def __init__(self) -> None:
        # The attributes exist so callers can configure the context even
        # though no TLS operation can ever succeed.
        self.minimum_version: Any = TLSVersion.TLSv1_2
        self.check_hostname: bool = False
        self.verify_mode: int = CERT_NONE

    def wrap_socket(self, *args, **kwargs) -> "SSLSocket":  # type: ignore
        raise Exception("no ssl support")  # pylint: disable=broad-exception-raised

    def set_alpn_protocols(self, *args, **kwargs):  # type: ignore
        raise Exception("no ssl support")  # pylint: disable=broad-exception-raised
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class SSLSocket:
    """Minimal stand-in for ``ssl.SSLSocket``: every TLS operation fails,
    while the context-manager protocol and ``settimeout`` are harmless
    no-ops so generic cleanup code keeps working."""

    def pending(self) -> bool:
        raise Exception("no ssl support")  # pylint: disable=broad-exception-raised

    def do_handshake(self) -> None:
        raise Exception("no ssl support")  # pylint: disable=broad-exception-raised

    def settimeout(self, value: Any) -> None:
        """Accepted but ignored; there is no underlying socket."""

    def getpeercert(self) -> Any:
        raise Exception("no ssl support")  # pylint: disable=broad-exception-raised

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Never suppress exceptions.
        return False
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def create_default_context(*args, **kwargs) -> SSLContext:  # type: ignore
    """Stand-in for ``ssl.create_default_context``; always fails because
    this build has no TLS support."""
    raise Exception("no ssl support")  # pylint: disable=broad-exception-raised
|
lib/python3.13/site-packages/dns/_tls_util.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
from typing import Tuple
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def convert_verify_to_cafile_and_capath(
|
| 8 |
+
verify: bool | str,
|
| 9 |
+
) -> Tuple[str | None, str | None]:
|
| 10 |
+
cafile: str | None = None
|
| 11 |
+
capath: str | None = None
|
| 12 |
+
if isinstance(verify, str):
|
| 13 |
+
if os.path.isfile(verify):
|
| 14 |
+
cafile = verify
|
| 15 |
+
elif os.path.isdir(verify):
|
| 16 |
+
capath = verify
|
| 17 |
+
else:
|
| 18 |
+
raise ValueError("invalid verify string")
|
| 19 |
+
return cafile, capath
|
lib/python3.13/site-packages/dns/_trio_backend.py
ADDED
|
@@ -0,0 +1,255 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
|
| 2 |
+
|
| 3 |
+
"""trio async I/O library query support"""
|
| 4 |
+
|
| 5 |
+
import socket
|
| 6 |
+
|
| 7 |
+
import trio
|
| 8 |
+
import trio.socket # type: ignore
|
| 9 |
+
|
| 10 |
+
import dns._asyncbackend
|
| 11 |
+
import dns._features
|
| 12 |
+
import dns.exception
|
| 13 |
+
import dns.inet
|
| 14 |
+
|
| 15 |
+
# Refuse to import this backend at all when trio is absent or older than
# the minimum declared in dns._features.
if not dns._features.have("trio"):
    raise ImportError("trio not found or too old")
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def _maybe_timeout(timeout):
    """Return a context manager bounding work to *timeout* seconds, or a
    do-nothing context when *timeout* is ``None``."""
    if timeout is None:
        return dns._asyncbackend.NullContext()
    return trio.move_on_after(timeout)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
# for brevity
|
| 27 |
+
_lltuple = dns.inet.low_level_address_tuple
|
| 28 |
+
|
| 29 |
+
# pylint: disable=redefined-outer-name
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class DatagramSocket(dns._asyncbackend.DatagramSocket):
    """Trio implementation of dnspython's async datagram-socket interface."""

    def __init__(self, sock):
        # *sock* is an already-created trio.socket datagram socket.
        super().__init__(sock.family, socket.SOCK_DGRAM)
        self.socket = sock

    async def sendto(self, what, destination, timeout):
        with _maybe_timeout(timeout):
            if destination is None:
                # Connected socket: the destination was fixed at connect time.
                return await self.socket.send(what)
            else:
                return await self.socket.sendto(what, destination)
        # Reached only when the move_on_after scope expired before the send
        # completed.
        raise dns.exception.Timeout(
            timeout=timeout
        )  # pragma: no cover lgtm[py/unreachable-statement]

    async def recvfrom(self, size, timeout):
        with _maybe_timeout(timeout):
            return await self.socket.recvfrom(size)
        # Reached only when the timeout scope was cancelled.
        raise dns.exception.Timeout(timeout=timeout)  # lgtm[py/unreachable-statement]

    async def close(self):
        self.socket.close()

    async def getpeername(self):
        return self.socket.getpeername()

    async def getsockname(self):
        return self.socket.getsockname()

    async def getpeercert(self, timeout):
        # Plain UDP has no TLS peer certificate.
        raise NotImplementedError
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class StreamSocket(dns._asyncbackend.StreamSocket):
    """Trio implementation of dnspython's async stream-socket interface.

    Wraps either a plain ``trio.SocketStream`` or, when *tls* is true, a
    ``trio.SSLStream`` (whose raw socket then lives one level deeper, on
    ``transport_stream``).
    """

    def __init__(self, family, stream, tls=False):
        super().__init__(family, socket.SOCK_STREAM)
        self.stream = stream
        self.tls = tls

    async def sendall(self, what, timeout):
        with _maybe_timeout(timeout):
            return await self.stream.send_all(what)
        # Reached only when the timeout scope was cancelled.
        raise dns.exception.Timeout(timeout=timeout)  # lgtm[py/unreachable-statement]

    async def recv(self, size, timeout):
        with _maybe_timeout(timeout):
            return await self.stream.receive_some(size)
        raise dns.exception.Timeout(timeout=timeout)  # lgtm[py/unreachable-statement]

    async def close(self):
        await self.stream.aclose()

    async def getpeername(self):
        if self.tls:
            # SSLStream wraps the raw socket in a transport stream.
            return self.stream.transport_stream.socket.getpeername()
        else:
            return self.stream.socket.getpeername()

    async def getsockname(self):
        if self.tls:
            return self.stream.transport_stream.socket.getsockname()
        else:
            return self.stream.socket.getsockname()

    async def getpeercert(self, timeout):
        if self.tls:
            # Ensure the handshake has completed (bounded by *timeout*)
            # before asking for the certificate.
            with _maybe_timeout(timeout):
                await self.stream.do_handshake()
            return self.stream.getpeercert()
        else:
            raise NotImplementedError
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
# DNS-over-HTTPS support is optional: only wire up the httpx/httpcore
# plumbing when the "doh" feature's packages are installed; otherwise
# expose the null transport so callers get a clear failure.
if dns._features.have("doh"):
    import httpcore
    import httpcore._backends.trio
    import httpx

    _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend
    _CoreTrioStream = httpcore._backends.trio.TrioStream

    from dns.query import _compute_times, _expiration_for_this_attempt, _remaining

    class _NetworkBackend(_CoreAsyncNetworkBackend):
        # httpcore network backend that resolves host names with
        # dnspython's own resolver and connects using trio sockets.
        def __init__(self, resolver, local_port, bootstrap_address, family):
            super().__init__()
            self._local_port = local_port
            self._resolver = resolver
            self._bootstrap_address = bootstrap_address
            self._family = family

        async def connect_tcp(
            self, host, port, timeout=None, local_address=None, socket_options=None
        ):  # pylint: disable=signature-differs
            # Candidate addresses: a literal address as-is, a configured
            # bootstrap address, or whatever our resolver returns.
            addresses = []
            _, expiration = _compute_times(timeout)
            if dns.inet.is_address(host):
                addresses.append(host)
            elif self._bootstrap_address is not None:
                addresses.append(self._bootstrap_address)
            else:
                timeout = _remaining(expiration)
                family = self._family
                if local_address:
                    family = dns.inet.af_for_address(local_address)
                answers = await self._resolver.resolve_name(
                    host, family=family, lifetime=timeout
                )
                addresses = answers.addresses()
            # Try each candidate in turn; first successful connect wins.
            for address in addresses:
                try:
                    af = dns.inet.af_for_address(address)
                    if local_address is not None or self._local_port != 0:
                        source = (local_address, self._local_port)
                    else:
                        source = None
                    destination = (address, port)
                    attempt_expiration = _expiration_for_this_attempt(2.0, expiration)
                    timeout = _remaining(attempt_expiration)
                    sock = await Backend().make_socket(
                        af, socket.SOCK_STREAM, 0, source, destination, timeout
                    )
                    assert isinstance(sock, StreamSocket)
                    return _CoreTrioStream(sock.stream)
                except Exception:
                    continue
            raise httpcore.ConnectError

        async def connect_unix_socket(
            self, path, timeout=None, socket_options=None
        ):  # pylint: disable=signature-differs
            raise NotImplementedError

        async def sleep(self, seconds):  # pylint: disable=signature-differs
            await trio.sleep(seconds)

    class _HTTPTransport(httpx.AsyncHTTPTransport):
        # httpx transport that routes connection establishment through
        # _NetworkBackend above.
        def __init__(
            self,
            *args,
            local_port=0,
            bootstrap_address=None,
            resolver=None,
            family=socket.AF_UNSPEC,
            **kwargs,
        ):
            if resolver is None and bootstrap_address is None:
                # pylint: disable=import-outside-toplevel,redefined-outer-name
                import dns.asyncresolver

                resolver = dns.asyncresolver.Resolver()
            super().__init__(*args, **kwargs)
            self._pool._network_backend = _NetworkBackend(
                resolver, local_port, bootstrap_address, family
            )

else:
    _HTTPTransport = dns._asyncbackend.NullTransport  # type: ignore
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
class Backend(dns._asyncbackend.Backend):
    """The trio async I/O backend for dnspython."""

    def name(self):
        return "trio"

    async def make_socket(
        self,
        af,
        socktype,
        proto=0,
        source=None,
        destination=None,
        timeout=None,
        ssl_context=None,
        server_hostname=None,
    ):
        # Create the raw trio socket, bind/connect as requested, then wrap
        # it in the matching dnspython adapter (datagram or stream).
        s = trio.socket.socket(af, socktype, proto)
        stream = None
        try:
            if source:
                await s.bind(_lltuple(source, af))
            if socktype == socket.SOCK_STREAM or destination is not None:
                connected = False
                with _maybe_timeout(timeout):
                    assert destination is not None
                    await s.connect(_lltuple(destination, af))
                    connected = True
                if not connected:
                    # The move_on_after scope expired before connect finished.
                    raise dns.exception.Timeout(
                        timeout=timeout
                    )  # lgtm[py/unreachable-statement]
        except Exception:  # pragma: no cover
            # Don't leak the socket if bind/connect fails.
            s.close()
            raise
        if socktype == socket.SOCK_DGRAM:
            return DatagramSocket(s)
        elif socktype == socket.SOCK_STREAM:
            stream = trio.SocketStream(s)
            tls = False
            if ssl_context:
                tls = True
                try:
                    stream = trio.SSLStream(
                        stream, ssl_context, server_hostname=server_hostname
                    )
                except Exception:  # pragma: no cover
                    await stream.aclose()
                    raise
            return StreamSocket(af, stream, tls)
        raise NotImplementedError(
            "unsupported socket " + f"type {socktype}"
        )  # pragma: no cover

    async def sleep(self, interval):
        await trio.sleep(interval)

    def get_transport_class(self):
        return _HTTPTransport

    async def wait_for(self, awaitable, timeout):
        with _maybe_timeout(timeout):
            return await awaitable
        # Reached only when the timeout scope was cancelled.
        raise dns.exception.Timeout(
            timeout=timeout
        )  # pragma: no cover lgtm[py/unreachable-statement]
|
lib/python3.13/site-packages/dns/asyncbackend.py
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
|
| 2 |
+
|
| 3 |
+
from typing import Dict
|
| 4 |
+
|
| 5 |
+
import dns.exception
|
| 6 |
+
|
| 7 |
+
# pylint: disable=unused-import
|
| 8 |
+
from dns._asyncbackend import ( # noqa: F401 lgtm[py/unused-import]
|
| 9 |
+
Backend,
|
| 10 |
+
DatagramSocket,
|
| 11 |
+
Socket,
|
| 12 |
+
StreamSocket,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
# pylint: enable=unused-import
|
| 16 |
+
|
| 17 |
+
# The backend returned by get_default_backend(); set lazily by
# set_default_backend().
_default_backend = None
|
| 18 |
+
|
| 19 |
+
# Cache of instantiated backends, keyed by backend name ("trio", "asyncio").
_backends: Dict[str, Backend] = {}
|
| 20 |
+
|
| 21 |
+
# Allow sniffio import to be disabled for testing purposes
|
| 22 |
+
_no_sniffio = False
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class AsyncLibraryNotFoundError(dns.exception.DNSException):
    """Raised when the in-use asynchronous I/O library cannot be determined."""

    pass
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def get_backend(name: str) -> Backend:
    """Return the asynchronous backend called *name*, creating it on first use.

    *name*, a ``str``; currently "trio" and "asyncio" are recognized, and
    the constructed backend is cached so repeated lookups return the same
    instance.

    Raises ``NotImplementedError`` for any other name.
    """
    # pylint: disable=import-outside-toplevel,redefined-outer-name
    cached = _backends.get(name)
    if cached:
        return cached
    if name == "trio":
        import dns._trio_backend

        new_backend = dns._trio_backend.Backend()
    elif name == "asyncio":
        import dns._asyncio_backend

        new_backend = dns._asyncio_backend.Backend()
    else:
        raise NotImplementedError(f"unimplemented async backend {name}")
    _backends[name] = new_backend
    return new_backend
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def sniff() -> str:
    """Attempt to determine the in-use asynchronous I/O library by using
    the ``sniffio`` module if it is available.

    Returns the name of the library, or raises AsyncLibraryNotFoundError
    if the library cannot be determined.
    """
    # pylint: disable=import-outside-toplevel
    try:
        if _no_sniffio:
            # Testing hook: behave as though sniffio were not installed.
            raise ImportError
        import sniffio

        try:
            return sniffio.current_async_library()
        except sniffio.AsyncLibraryNotFoundError:
            raise AsyncLibraryNotFoundError("sniffio cannot determine async library")
    except ImportError:
        # No sniffio: fall back to asking asyncio whether a loop is running.
        import asyncio

        try:
            asyncio.get_running_loop()
            return "asyncio"
        except RuntimeError:
            raise AsyncLibraryNotFoundError("no async library detected")
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def get_default_backend() -> Backend:
    """Return the default backend, sniffing and installing one on first use."""
    backend = _default_backend
    if backend:
        return backend
    # No default yet: detect the running async library and make its
    # backend the default.
    return set_default_backend(sniff())
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def set_default_backend(name: str) -> Backend:
    """Make the backend called *name* the process-wide default and return it.

    Calling this explicitly is rarely necessary — ``get_default_backend()``
    normally figures the backend out on its own — but it is useful when
    ``sniffio`` is not installed, or in testing situations.
    """
    global _default_backend
    backend = get_backend(name)
    _default_backend = backend
    return backend
|