diff --git a/.gitattributes b/.gitattributes index dc72d1440ffeb0f38a03225417dd13ac6abc85cb..cdc8b1e7919d6fd6b3ad777d69f035f12241c371 100644 --- a/.gitattributes +++ b/.gitattributes @@ -539,3 +539,5 @@ mantis_evalkit/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_ mantis_evalkit/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_radius_neighbors_classmode.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text parrot/lib/python3.10/site-packages/kiwisolver/_cext.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text mantis_evalkit/lib/python3.10/site-packages/sklearn/preprocessing/__pycache__/_data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text +moondream/lib/python3.10/site-packages/torch/utils/hipify/__pycache__/cuda_to_hip_mappings.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text +mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/__init__.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0d1f7edf5dc6343538563a8caa7de6829752c4f9 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__init__.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8c16881cb3ae1ea6e729b62bb2e5d625faa3f5fe --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__init__.py @@ -0,0 +1,2937 @@ +#------------------------------------------------------------------- +# tarfile.py +#------------------------------------------------------------------- +# Copyright (C) 2002 Lars Gustaebel +# All rights reserved. +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +"""Read from and write to tar format archives. +""" + +version = "0.9.0" +__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" +__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." 
+ +#--------- +# Imports +#--------- +from builtins import open as bltn_open +import sys +import os +import io +import shutil +import stat +import time +import struct +import copy +import re + +from .compat.py38 import removesuffix + +try: + import pwd +except ImportError: + pwd = None +try: + import grp +except ImportError: + grp = None + +# os.symlink on Windows prior to 6.0 raises NotImplementedError +# OSError (winerror=1314) will be raised if the caller does not hold the +# SeCreateSymbolicLinkPrivilege privilege +symlink_exception = (AttributeError, NotImplementedError, OSError) + +# from tarfile import * +__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError", + "CompressionError", "StreamError", "ExtractError", "HeaderError", + "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT", + "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter", + "tar_filter", "FilterError", "AbsoluteLinkError", + "OutsideDestinationError", "SpecialFileError", "AbsolutePathError", + "LinkOutsideDestinationError"] + + +#--------------------------------------------------------- +# tar constants +#--------------------------------------------------------- +NUL = b"\0" # the null character +BLOCKSIZE = 512 # length of processing blocks +RECORDSIZE = BLOCKSIZE * 20 # length of records +GNU_MAGIC = b"ustar \0" # magic gnu tar string +POSIX_MAGIC = b"ustar\x0000" # magic posix tar string + +LENGTH_NAME = 100 # maximum length of a filename +LENGTH_LINK = 100 # maximum length of a linkname +LENGTH_PREFIX = 155 # maximum length of the prefix field + +REGTYPE = b"0" # regular file +AREGTYPE = b"\0" # regular file +LNKTYPE = b"1" # link (inside tarfile) +SYMTYPE = b"2" # symbolic link +CHRTYPE = b"3" # character special device +BLKTYPE = b"4" # block special device +DIRTYPE = b"5" # directory +FIFOTYPE = b"6" # fifo special device +CONTTYPE = b"7" # contiguous file + +GNUTYPE_LONGNAME = b"L" # GNU tar longname +GNUTYPE_LONGLINK = b"K" # GNU tar longlink +GNUTYPE_SPARSE = b"S" # GNU tar sparse file + +XHDTYPE = b"x" # POSIX.1-2001 extended header +XGLTYPE = b"g" # POSIX.1-2001 global header +SOLARIS_XHDTYPE = b"X" # Solaris extended header + +USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format +GNU_FORMAT = 1 # GNU tar format +PAX_FORMAT = 2 # POSIX.1-2001 (pax) format +DEFAULT_FORMAT = PAX_FORMAT + +#--------------------------------------------------------- +# tarfile constants +#--------------------------------------------------------- +# File types that tarfile supports: +SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, + SYMTYPE, DIRTYPE, FIFOTYPE, + CONTTYPE, CHRTYPE, BLKTYPE, + GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# File types that will be treated as a regular file. +REGULAR_TYPES = (REGTYPE, AREGTYPE, + CONTTYPE, GNUTYPE_SPARSE) + +# File types that are part of the GNU tar format. +GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# Fields from a pax header that override a TarInfo attribute. +PAX_FIELDS = ("path", "linkpath", "size", "mtime", + "uid", "gid", "uname", "gname") + +# Fields from a pax header that are affected by hdrcharset. +PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"} + +# Fields in a pax header that are numbers, all other fields +# are treated as strings. 
+PAX_NUMBER_FIELDS = { + "atime": float, + "ctime": float, + "mtime": float, + "uid": int, + "gid": int, + "size": int +} + +#--------------------------------------------------------- +# initialization +#--------------------------------------------------------- +if os.name == "nt": + ENCODING = "utf-8" +else: + ENCODING = sys.getfilesystemencoding() + +#--------------------------------------------------------- +# Some useful functions +#--------------------------------------------------------- + +def stn(s, length, encoding, errors): + """Convert a string to a null-terminated bytes object. + """ + if s is None: + raise ValueError("metadata cannot contain None") + s = s.encode(encoding, errors) + return s[:length] + (length - len(s)) * NUL + +def nts(s, encoding, errors): + """Convert a null-terminated bytes object to a string. + """ + p = s.find(b"\0") + if p != -1: + s = s[:p] + return s.decode(encoding, errors) + +def nti(s): + """Convert a number field to a python number. + """ + # There are two possible encodings for a number field, see + # itn() below. + if s[0] in (0o200, 0o377): + n = 0 + for i in range(len(s) - 1): + n <<= 8 + n += s[i + 1] + if s[0] == 0o377: + n = -(256 ** (len(s) - 1) - n) + else: + try: + s = nts(s, "ascii", "strict") + n = int(s.strip() or "0", 8) + except ValueError: + raise InvalidHeaderError("invalid header") + return n + +def itn(n, digits=8, format=DEFAULT_FORMAT): + """Convert a python number to a number field. + """ + # POSIX 1003.1-1988 requires numbers to be encoded as a string of + # octal digits followed by a null-byte, this allows values up to + # (8**(digits-1))-1. GNU tar allows storing numbers greater than + # that if necessary. A leading 0o200 or 0o377 byte indicate this + # particular encoding, the following digits-1 bytes are a big-endian + # base-256 representation. This allows values up to (256**(digits-1))-1. + # A 0o200 byte indicates a positive number, a 0o377 byte a negative + # number. + original_n = n + n = int(n) + if 0 <= n < 8 ** (digits - 1): + s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL + elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1): + if n >= 0: + s = bytearray([0o200]) + else: + s = bytearray([0o377]) + n = 256 ** digits + n + + for i in range(digits - 1): + s.insert(1, n & 0o377) + n >>= 8 + else: + raise ValueError("overflow in number field") + + return s + +def calc_chksums(buf): + """Calculate the checksum for a member's header by summing up all + characters except for the chksum field which is treated as if + it was filled with spaces. According to the GNU tar sources, + some tars (Sun and NeXT) calculate chksum with signed char, + which will be different if there are chars in the buffer with + the high bit set. So we calculate two checksums, unsigned and + signed. + """ + unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf)) + signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf)) + return unsigned_chksum, signed_chksum + +def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None): + """Copy length bytes from fileobj src to fileobj dst. + If length is None, copy the entire content. 
+ """ + bufsize = bufsize or 16 * 1024 + if length == 0: + return + if length is None: + shutil.copyfileobj(src, dst, bufsize) + return + + blocks, remainder = divmod(length, bufsize) + for b in range(blocks): + buf = src.read(bufsize) + if len(buf) < bufsize: + raise exception("unexpected end of data") + dst.write(buf) + + if remainder != 0: + buf = src.read(remainder) + if len(buf) < remainder: + raise exception("unexpected end of data") + dst.write(buf) + return + +def _safe_print(s): + encoding = getattr(sys.stdout, 'encoding', None) + if encoding is not None: + s = s.encode(encoding, 'backslashreplace').decode(encoding) + print(s, end=' ') + + +class TarError(Exception): + """Base exception.""" + pass +class ExtractError(TarError): + """General exception for extract errors.""" + pass +class ReadError(TarError): + """Exception for unreadable tar archives.""" + pass +class CompressionError(TarError): + """Exception for unavailable compression methods.""" + pass +class StreamError(TarError): + """Exception for unsupported operations on stream-like TarFiles.""" + pass +class HeaderError(TarError): + """Base exception for header errors.""" + pass +class EmptyHeaderError(HeaderError): + """Exception for empty headers.""" + pass +class TruncatedHeaderError(HeaderError): + """Exception for truncated headers.""" + pass +class EOFHeaderError(HeaderError): + """Exception for end of file headers.""" + pass +class InvalidHeaderError(HeaderError): + """Exception for invalid headers.""" + pass +class SubsequentHeaderError(HeaderError): + """Exception for missing and invalid extended headers.""" + pass + +#--------------------------- +# internal stream interface +#--------------------------- +class _LowLevelFile: + """Low-level file object. Supports reading and writing. + It is used instead of a regular file object for streaming + access. + """ + + def __init__(self, name, mode): + mode = { + "r": os.O_RDONLY, + "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + }[mode] + if hasattr(os, "O_BINARY"): + mode |= os.O_BINARY + self.fd = os.open(name, mode, 0o666) + + def close(self): + os.close(self.fd) + + def read(self, size): + return os.read(self.fd, size) + + def write(self, s): + os.write(self.fd, s) + +class _Stream: + """Class that serves as an adapter between TarFile and + a stream-like object. The stream-like object only + needs to have a read() or write() method that works with bytes, + and the method is accessed blockwise. + Use of gzip or bzip2 compression is possible. + A stream-like object could be for example: sys.stdin.buffer, + sys.stdout.buffer, a socket, a tape device etc. + + _Stream is intended to be used only internally. + """ + + def __init__(self, name, mode, comptype, fileobj, bufsize, + compresslevel): + """Construct a _Stream object. 
+ """ + self._extfileobj = True + if fileobj is None: + fileobj = _LowLevelFile(name, mode) + self._extfileobj = False + + if comptype == '*': + # Enable transparent compression detection for the + # stream interface + fileobj = _StreamProxy(fileobj) + comptype = fileobj.getcomptype() + + self.name = name or "" + self.mode = mode + self.comptype = comptype + self.fileobj = fileobj + self.bufsize = bufsize + self.buf = b"" + self.pos = 0 + self.closed = False + + try: + if comptype == "gz": + try: + import zlib + except ImportError: + raise CompressionError("zlib module is not available") from None + self.zlib = zlib + self.crc = zlib.crc32(b"") + if mode == "r": + self.exception = zlib.error + self._init_read_gz() + else: + self._init_write_gz(compresslevel) + + elif comptype == "bz2": + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") from None + if mode == "r": + self.dbuf = b"" + self.cmp = bz2.BZ2Decompressor() + self.exception = OSError + else: + self.cmp = bz2.BZ2Compressor(compresslevel) + + elif comptype == "xz": + try: + import lzma + except ImportError: + raise CompressionError("lzma module is not available") from None + if mode == "r": + self.dbuf = b"" + self.cmp = lzma.LZMADecompressor() + self.exception = lzma.LZMAError + else: + self.cmp = lzma.LZMACompressor() + + elif comptype != "tar": + raise CompressionError("unknown compression type %r" % comptype) + + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + def __del__(self): + if hasattr(self, "closed") and not self.closed: + self.close() + + def _init_write_gz(self, compresslevel): + """Initialize for writing with gzip compression. + """ + self.cmp = self.zlib.compressobj(compresslevel, + self.zlib.DEFLATED, + -self.zlib.MAX_WBITS, + self.zlib.DEF_MEM_LEVEL, + 0) + timestamp = struct.pack(" self.bufsize: + self.fileobj.write(self.buf[:self.bufsize]) + self.buf = self.buf[self.bufsize:] + + def close(self): + """Close the _Stream object. No operation should be + done on it afterwards. + """ + if self.closed: + return + + self.closed = True + try: + if self.mode == "w" and self.comptype != "tar": + self.buf += self.cmp.flush() + + if self.mode == "w" and self.buf: + self.fileobj.write(self.buf) + self.buf = b"" + if self.comptype == "gz": + self.fileobj.write(struct.pack("= 0: + blocks, remainder = divmod(pos - self.pos, self.bufsize) + for i in range(blocks): + self.read(self.bufsize) + self.read(remainder) + else: + raise StreamError("seeking backwards is not allowed") + return self.pos + + def read(self, size): + """Return the next size number of bytes from the stream.""" + assert size is not None + buf = self._read(size) + self.pos += len(buf) + return buf + + def _read(self, size): + """Return size bytes from the stream. + """ + if self.comptype == "tar": + return self.__read(size) + + c = len(self.dbuf) + t = [self.dbuf] + while c < size: + # Skip underlying buffer to avoid unaligned double buffering. + if self.buf: + buf = self.buf + self.buf = b"" + else: + buf = self.fileobj.read(self.bufsize) + if not buf: + break + try: + buf = self.cmp.decompress(buf) + except self.exception as e: + raise ReadError("invalid compressed data") from e + t.append(buf) + c += len(buf) + t = b"".join(t) + self.dbuf = t[size:] + return t[:size] + + def __read(self, size): + """Return size bytes from stream. If internal buffer is empty, + read another block from the stream. 
+ """ + c = len(self.buf) + t = [self.buf] + while c < size: + buf = self.fileobj.read(self.bufsize) + if not buf: + break + t.append(buf) + c += len(buf) + t = b"".join(t) + self.buf = t[size:] + return t[:size] +# class _Stream + +class _StreamProxy(object): + """Small proxy class that enables transparent compression + detection for the Stream interface (mode 'r|*'). + """ + + def __init__(self, fileobj): + self.fileobj = fileobj + self.buf = self.fileobj.read(BLOCKSIZE) + + def read(self, size): + self.read = self.fileobj.read + return self.buf + + def getcomptype(self): + if self.buf.startswith(b"\x1f\x8b\x08"): + return "gz" + elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY": + return "bz2" + elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")): + return "xz" + else: + return "tar" + + def close(self): + self.fileobj.close() +# class StreamProxy + +#------------------------ +# Extraction file object +#------------------------ +class _FileInFile(object): + """A thin wrapper around an existing file object that + provides a part of its data as an individual file + object. + """ + + def __init__(self, fileobj, offset, size, name, blockinfo=None): + self.fileobj = fileobj + self.offset = offset + self.size = size + self.position = 0 + self.name = name + self.closed = False + + if blockinfo is None: + blockinfo = [(0, size)] + + # Construct a map with data and zero blocks. + self.map_index = 0 + self.map = [] + lastpos = 0 + realpos = self.offset + for offset, size in blockinfo: + if offset > lastpos: + self.map.append((False, lastpos, offset, None)) + self.map.append((True, offset, offset + size, realpos)) + realpos += size + lastpos = offset + size + if lastpos < self.size: + self.map.append((False, lastpos, self.size, None)) + + def flush(self): + pass + + @property + def mode(self): + return 'rb' + + def readable(self): + return True + + def writable(self): + return False + + def seekable(self): + return self.fileobj.seekable() + + def tell(self): + """Return the current file position. + """ + return self.position + + def seek(self, position, whence=io.SEEK_SET): + """Seek to a position in the file. + """ + if whence == io.SEEK_SET: + self.position = min(max(position, 0), self.size) + elif whence == io.SEEK_CUR: + if position < 0: + self.position = max(self.position + position, 0) + else: + self.position = min(self.position + position, self.size) + elif whence == io.SEEK_END: + self.position = max(min(self.size + position, self.size), 0) + else: + raise ValueError("Invalid argument") + return self.position + + def read(self, size=None): + """Read data from the file. 
+ """ + if size is None: + size = self.size - self.position + else: + size = min(size, self.size - self.position) + + buf = b"" + while size > 0: + while True: + data, start, stop, offset = self.map[self.map_index] + if start <= self.position < stop: + break + else: + self.map_index += 1 + if self.map_index == len(self.map): + self.map_index = 0 + length = min(size, stop - self.position) + if data: + self.fileobj.seek(offset + (self.position - start)) + b = self.fileobj.read(length) + if len(b) != length: + raise ReadError("unexpected end of data") + buf += b + else: + buf += NUL * length + size -= length + self.position += length + return buf + + def readinto(self, b): + buf = self.read(len(b)) + b[:len(buf)] = buf + return len(buf) + + def close(self): + self.closed = True +#class _FileInFile + +class ExFileObject(io.BufferedReader): + + def __init__(self, tarfile, tarinfo): + fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data, + tarinfo.size, tarinfo.name, tarinfo.sparse) + super().__init__(fileobj) +#class ExFileObject + + +#----------------------------- +# extraction filters (PEP 706) +#----------------------------- + +class FilterError(TarError): + pass + +class AbsolutePathError(FilterError): + def __init__(self, tarinfo): + self.tarinfo = tarinfo + super().__init__(f'member {tarinfo.name!r} has an absolute path') + +class OutsideDestinationError(FilterError): + def __init__(self, tarinfo, path): + self.tarinfo = tarinfo + self._path = path + super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, ' + + 'which is outside the destination') + +class SpecialFileError(FilterError): + def __init__(self, tarinfo): + self.tarinfo = tarinfo + super().__init__(f'{tarinfo.name!r} is a special file') + +class AbsoluteLinkError(FilterError): + def __init__(self, tarinfo): + self.tarinfo = tarinfo + super().__init__(f'{tarinfo.name!r} is a link to an absolute path') + +class LinkOutsideDestinationError(FilterError): + def __init__(self, tarinfo, path): + self.tarinfo = tarinfo + self._path = path + super().__init__(f'{tarinfo.name!r} would link to {path!r}, ' + + 'which is outside the destination') + +def _get_filtered_attrs(member, dest_path, for_data=True): + new_attrs = {} + name = member.name + dest_path = os.path.realpath(dest_path) + # Strip leading / (tar's directory separator) from filenames. + # Include os.sep (target OS directory separator) as well. + if name.startswith(('/', os.sep)): + name = new_attrs['name'] = member.path.lstrip('/' + os.sep) + if os.path.isabs(name): + # Path is absolute even after stripping. + # For example, 'C:/foo' on Windows. 
+ raise AbsolutePathError(member) + # Ensure we stay in the destination + target_path = os.path.realpath(os.path.join(dest_path, name)) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise OutsideDestinationError(member, target_path) + # Limit permissions (no high bits, and go-w) + mode = member.mode + if mode is not None: + # Strip high bits & group/other write bits + mode = mode & 0o755 + if for_data: + # For data, handle permissions & file types + if member.isreg() or member.islnk(): + if not mode & 0o100: + # Clear executable bits if not executable by user + mode &= ~0o111 + # Ensure owner can read & write + mode |= 0o600 + elif member.isdir() or member.issym(): + # Ignore mode for directories & symlinks + mode = None + else: + # Reject special files + raise SpecialFileError(member) + if mode != member.mode: + new_attrs['mode'] = mode + if for_data: + # Ignore ownership for 'data' + if member.uid is not None: + new_attrs['uid'] = None + if member.gid is not None: + new_attrs['gid'] = None + if member.uname is not None: + new_attrs['uname'] = None + if member.gname is not None: + new_attrs['gname'] = None + # Check link destination for 'data' + if member.islnk() or member.issym(): + if os.path.isabs(member.linkname): + raise AbsoluteLinkError(member) + if member.issym(): + target_path = os.path.join(dest_path, + os.path.dirname(name), + member.linkname) + else: + target_path = os.path.join(dest_path, + member.linkname) + target_path = os.path.realpath(target_path) + if os.path.commonpath([target_path, dest_path]) != dest_path: + raise LinkOutsideDestinationError(member, target_path) + return new_attrs + +def fully_trusted_filter(member, dest_path): + return member + +def tar_filter(member, dest_path): + new_attrs = _get_filtered_attrs(member, dest_path, False) + if new_attrs: + return member.replace(**new_attrs, deep=False) + return member + +def data_filter(member, dest_path): + new_attrs = _get_filtered_attrs(member, dest_path, True) + if new_attrs: + return member.replace(**new_attrs, deep=False) + return member + +_NAMED_FILTERS = { + "fully_trusted": fully_trusted_filter, + "tar": tar_filter, + "data": data_filter, +} + +#------------------ +# Exported Classes +#------------------ + +# Sentinel for replace() defaults, meaning "don't change the attribute" +_KEEP = object() + +class TarInfo(object): + """Informational class which holds the details about an + archive member given by a tar header block. + TarInfo objects are returned by TarFile.getmember(), + TarFile.getmembers() and TarFile.gettarinfo() and are + usually created internally. + """ + + __slots__ = dict( + name = 'Name of the archive member.', + mode = 'Permission bits.', + uid = 'User ID of the user who originally stored this member.', + gid = 'Group ID of the user who originally stored this member.', + size = 'Size in bytes.', + mtime = 'Time of last modification.', + chksum = 'Header checksum.', + type = ('File type. 
type is usually one of these constants: ' + 'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, ' + 'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'), + linkname = ('Name of the target file name, which is only present ' + 'in TarInfo objects of type LNKTYPE and SYMTYPE.'), + uname = 'User name.', + gname = 'Group name.', + devmajor = 'Device major number.', + devminor = 'Device minor number.', + offset = 'The tar header starts here.', + offset_data = "The file's data starts here.", + pax_headers = ('A dictionary containing key-value pairs of an ' + 'associated pax extended header.'), + sparse = 'Sparse member information.', + _tarfile = None, + _sparse_structs = None, + _link_target = None, + ) + + def __init__(self, name=""): + """Construct a TarInfo object. name is the optional name + of the member. + """ + self.name = name # member name + self.mode = 0o644 # file permissions + self.uid = 0 # user id + self.gid = 0 # group id + self.size = 0 # file size + self.mtime = 0 # modification time + self.chksum = 0 # header checksum + self.type = REGTYPE # member type + self.linkname = "" # link name + self.uname = "" # user name + self.gname = "" # group name + self.devmajor = 0 # device major number + self.devminor = 0 # device minor number + + self.offset = 0 # the tar header starts here + self.offset_data = 0 # the file's data starts here + + self.sparse = None # sparse member information + self.pax_headers = {} # pax header information + + @property + def tarfile(self): + import warnings + warnings.warn( + 'The undocumented "tarfile" attribute of TarInfo objects ' + + 'is deprecated and will be removed in Python 3.16', + DeprecationWarning, stacklevel=2) + return self._tarfile + + @tarfile.setter + def tarfile(self, tarfile): + import warnings + warnings.warn( + 'The undocumented "tarfile" attribute of TarInfo objects ' + + 'is deprecated and will be removed in Python 3.16', + DeprecationWarning, stacklevel=2) + self._tarfile = tarfile + + @property + def path(self): + 'In pax headers, "name" is called "path".' + return self.name + + @path.setter + def path(self, name): + self.name = name + + @property + def linkpath(self): + 'In pax headers, "linkname" is called "linkpath".' + return self.linkname + + @linkpath.setter + def linkpath(self, linkname): + self.linkname = linkname + + def __repr__(self): + return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + + def replace(self, *, + name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP, + uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP, + deep=True, _KEEP=_KEEP): + """Return a deep copy of self with the given attributes replaced. + """ + if deep: + result = copy.deepcopy(self) + else: + result = copy.copy(self) + if name is not _KEEP: + result.name = name + if mtime is not _KEEP: + result.mtime = mtime + if mode is not _KEEP: + result.mode = mode + if linkname is not _KEEP: + result.linkname = linkname + if uid is not _KEEP: + result.uid = uid + if gid is not _KEEP: + result.gid = gid + if uname is not _KEEP: + result.uname = uname + if gname is not _KEEP: + result.gname = gname + return result + + def get_info(self): + """Return the TarInfo's attributes as a dictionary. 
+ """ + if self.mode is None: + mode = None + else: + mode = self.mode & 0o7777 + info = { + "name": self.name, + "mode": mode, + "uid": self.uid, + "gid": self.gid, + "size": self.size, + "mtime": self.mtime, + "chksum": self.chksum, + "type": self.type, + "linkname": self.linkname, + "uname": self.uname, + "gname": self.gname, + "devmajor": self.devmajor, + "devminor": self.devminor + } + + if info["type"] == DIRTYPE and not info["name"].endswith("/"): + info["name"] += "/" + + return info + + def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): + """Return a tar header as a string of 512 byte blocks. + """ + info = self.get_info() + for name, value in info.items(): + if value is None: + raise ValueError("%s may not be None" % name) + + if format == USTAR_FORMAT: + return self.create_ustar_header(info, encoding, errors) + elif format == GNU_FORMAT: + return self.create_gnu_header(info, encoding, errors) + elif format == PAX_FORMAT: + return self.create_pax_header(info, encoding) + else: + raise ValueError("invalid format") + + def create_ustar_header(self, info, encoding, errors): + """Return the object as a ustar header block. + """ + info["magic"] = POSIX_MAGIC + + if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK: + raise ValueError("linkname is too long") + + if len(info["name"].encode(encoding, errors)) > LENGTH_NAME: + info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors) + + return self._create_header(info, USTAR_FORMAT, encoding, errors) + + def create_gnu_header(self, info, encoding, errors): + """Return the object as a GNU header block sequence. + """ + info["magic"] = GNU_MAGIC + + buf = b"" + if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK: + buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) + + if len(info["name"].encode(encoding, errors)) > LENGTH_NAME: + buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) + + return buf + self._create_header(info, GNU_FORMAT, encoding, errors) + + def create_pax_header(self, info, encoding): + """Return the object as a ustar header block. If it cannot be + represented this way, prepend a pax extended header sequence + with supplement information. + """ + info["magic"] = POSIX_MAGIC + pax_headers = self.pax_headers.copy() + + # Test string fields for values that exceed the field length or cannot + # be represented in ASCII encoding. + for name, hname, length in ( + ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), + ("uname", "uname", 32), ("gname", "gname", 32)): + + if hname in pax_headers: + # The pax header has priority. + continue + + # Try to encode the string as ASCII. + try: + info[name].encode("ascii", "strict") + except UnicodeEncodeError: + pax_headers[hname] = info[name] + continue + + if len(info[name]) > length: + pax_headers[hname] = info[name] + + # Test number fields for values that exceed the field limit or values + # that like to be stored as float. + for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): + needs_pax = False + + val = info[name] + val_is_float = isinstance(val, float) + val_int = round(val) if val_is_float else val + if not 0 <= val_int < 8 ** (digits - 1): + # Avoid overflow. + info[name] = 0 + needs_pax = True + elif val_is_float: + # Put rounded value in ustar header, and full + # precision value in pax header. + info[name] = val_int + needs_pax = True + + # The existing pax header has priority. 
+ if needs_pax and name not in pax_headers: + pax_headers[name] = str(val) + + # Create a pax extended header if necessary. + if pax_headers: + buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) + else: + buf = b"" + + return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") + + @classmethod + def create_pax_global_header(cls, pax_headers): + """Return the object as a pax global header block sequence. + """ + return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8") + + def _posix_split_name(self, name, encoding, errors): + """Split a name longer than 100 chars into a prefix + and a name part. + """ + components = name.split("/") + for i in range(1, len(components)): + prefix = "/".join(components[:i]) + name = "/".join(components[i:]) + if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \ + len(name.encode(encoding, errors)) <= LENGTH_NAME: + break + else: + raise ValueError("name is too long") + + return prefix, name + + @staticmethod + def _create_header(info, format, encoding, errors): + """Return a header block. info is a dictionary with file + information, format must be one of the *_FORMAT constants. + """ + has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE) + if has_device_fields: + devmajor = itn(info.get("devmajor", 0), 8, format) + devminor = itn(info.get("devminor", 0), 8, format) + else: + devmajor = stn("", 8, encoding, errors) + devminor = stn("", 8, encoding, errors) + + # None values in metadata should cause ValueError. + # itn()/stn() do this for all fields except type. + filetype = info.get("type", REGTYPE) + if filetype is None: + raise ValueError("TarInfo.type must not be None") + + parts = [ + stn(info.get("name", ""), 100, encoding, errors), + itn(info.get("mode", 0) & 0o7777, 8, format), + itn(info.get("uid", 0), 8, format), + itn(info.get("gid", 0), 8, format), + itn(info.get("size", 0), 12, format), + itn(info.get("mtime", 0), 12, format), + b" ", # checksum field + filetype, + stn(info.get("linkname", ""), 100, encoding, errors), + info.get("magic", POSIX_MAGIC), + stn(info.get("uname", ""), 32, encoding, errors), + stn(info.get("gname", ""), 32, encoding, errors), + devmajor, + devminor, + stn(info.get("prefix", ""), 155, encoding, errors) + ] + + buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) + chksum = calc_chksums(buf[-BLOCKSIZE:])[0] + buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:] + return buf + + @staticmethod + def _create_payload(payload): + """Return the string payload filled with zero bytes + up to the next 512 byte border. + """ + blocks, remainder = divmod(len(payload), BLOCKSIZE) + if remainder > 0: + payload += (BLOCKSIZE - remainder) * NUL + return payload + + @classmethod + def _create_gnu_long_header(cls, name, type, encoding, errors): + """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence + for name. + """ + name = name.encode(encoding, errors) + NUL + + info = {} + info["name"] = "././@LongLink" + info["type"] = type + info["size"] = len(name) + info["magic"] = GNU_MAGIC + + # create extended header + name blocks. + return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ + cls._create_payload(name) + + @classmethod + def _create_pax_generic_header(cls, pax_headers, type, encoding): + """Return a POSIX.1-2008 extended or global header sequence + that contains a list of keyword, value pairs. The values + must be strings. 
+ """ + # Check if one of the fields contains surrogate characters and thereby + # forces hdrcharset=BINARY, see _proc_pax() for more information. + binary = False + for keyword, value in pax_headers.items(): + try: + value.encode("utf-8", "strict") + except UnicodeEncodeError: + binary = True + break + + records = b"" + if binary: + # Put the hdrcharset field at the beginning of the header. + records += b"21 hdrcharset=BINARY\n" + + for keyword, value in pax_headers.items(): + keyword = keyword.encode("utf-8") + if binary: + # Try to restore the original byte representation of 'value'. + # Needless to say, that the encoding must match the string. + value = value.encode(encoding, "surrogateescape") + else: + value = value.encode("utf-8") + + l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' + n = p = 0 + while True: + n = l + len(str(p)) + if n == p: + break + p = n + records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" + + # We use a hardcoded "././@PaxHeader" name like star does + # instead of the one that POSIX recommends. + info = {} + info["name"] = "././@PaxHeader" + info["type"] = type + info["size"] = len(records) + info["magic"] = POSIX_MAGIC + + # Create pax header + record blocks. + return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ + cls._create_payload(records) + + @classmethod + def frombuf(cls, buf, encoding, errors): + """Construct a TarInfo object from a 512 byte bytes object. + """ + if len(buf) == 0: + raise EmptyHeaderError("empty header") + if len(buf) != BLOCKSIZE: + raise TruncatedHeaderError("truncated header") + if buf.count(NUL) == BLOCKSIZE: + raise EOFHeaderError("end of file header") + + chksum = nti(buf[148:156]) + if chksum not in calc_chksums(buf): + raise InvalidHeaderError("bad checksum") + + obj = cls() + obj.name = nts(buf[0:100], encoding, errors) + obj.mode = nti(buf[100:108]) + obj.uid = nti(buf[108:116]) + obj.gid = nti(buf[116:124]) + obj.size = nti(buf[124:136]) + obj.mtime = nti(buf[136:148]) + obj.chksum = chksum + obj.type = buf[156:157] + obj.linkname = nts(buf[157:257], encoding, errors) + obj.uname = nts(buf[265:297], encoding, errors) + obj.gname = nts(buf[297:329], encoding, errors) + obj.devmajor = nti(buf[329:337]) + obj.devminor = nti(buf[337:345]) + prefix = nts(buf[345:500], encoding, errors) + + # Old V7 tar format represents a directory as a regular + # file with a trailing slash. + if obj.type == AREGTYPE and obj.name.endswith("/"): + obj.type = DIRTYPE + + # The old GNU sparse format occupies some of the unused + # space in the buffer for up to 4 sparse structures. + # Save them for later processing in _proc_sparse(). + if obj.type == GNUTYPE_SPARSE: + pos = 386 + structs = [] + for i in range(4): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[482]) + origsize = nti(buf[483:495]) + obj._sparse_structs = (structs, isextended, origsize) + + # Remove redundant slashes from directories. + if obj.isdir(): + obj.name = obj.name.rstrip("/") + + # Reconstruct a ustar longname. + if prefix and obj.type not in GNU_TYPES: + obj.name = prefix + "/" + obj.name + return obj + + @classmethod + def fromtarfile(cls, tarfile): + """Return the next TarInfo object from TarFile object + tarfile. 
+ """ + buf = tarfile.fileobj.read(BLOCKSIZE) + obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) + obj.offset = tarfile.fileobj.tell() - BLOCKSIZE + return obj._proc_member(tarfile) + + #-------------------------------------------------------------------------- + # The following are methods that are called depending on the type of a + # member. The entry point is _proc_member() which can be overridden in a + # subclass to add custom _proc_*() methods. A _proc_*() method MUST + # implement the following + # operations: + # 1. Set self.offset_data to the position where the data blocks begin, + # if there is data that follows. + # 2. Set tarfile.offset to the position where the next member's header will + # begin. + # 3. Return self or another valid TarInfo object. + def _proc_member(self, tarfile): + """Choose the right processing method depending on + the type and call it. + """ + if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): + return self._proc_gnulong(tarfile) + elif self.type == GNUTYPE_SPARSE: + return self._proc_sparse(tarfile) + elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE): + return self._proc_pax(tarfile) + else: + return self._proc_builtin(tarfile) + + def _proc_builtin(self, tarfile): + """Process a builtin type or an unknown type which + will be treated as a regular file. + """ + self.offset_data = tarfile.fileobj.tell() + offset = self.offset_data + if self.isreg() or self.type not in SUPPORTED_TYPES: + # Skip the following data blocks. + offset += self._block(self.size) + tarfile.offset = offset + + # Patch the TarInfo object with saved global + # header information. + self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) + + # Remove redundant slashes from directories. This is to be consistent + # with frombuf(). + if self.isdir(): + self.name = self.name.rstrip("/") + + return self + + def _proc_gnulong(self, tarfile): + """Process the blocks that hold a GNU longname + or longlink member. + """ + buf = tarfile.fileobj.read(self._block(self.size)) + + # Fetch the next header and process it. + try: + next = self.fromtarfile(tarfile) + except HeaderError as e: + raise SubsequentHeaderError(str(e)) from None + + # Patch the TarInfo object from the next header with + # the longname information. + next.offset = self.offset + if self.type == GNUTYPE_LONGNAME: + next.name = nts(buf, tarfile.encoding, tarfile.errors) + elif self.type == GNUTYPE_LONGLINK: + next.linkname = nts(buf, tarfile.encoding, tarfile.errors) + + # Remove redundant slashes from directories. This is to be consistent + # with frombuf(). + if next.isdir(): + next.name = removesuffix(next.name, "/") + + return next + + def _proc_sparse(self, tarfile): + """Process a GNU sparse header plus extra headers. + """ + # We already collected some sparse structures in frombuf(). + structs, isextended, origsize = self._sparse_structs + del self._sparse_structs + + # Collect sparse structures from extended header blocks. 
+ while isextended: + buf = tarfile.fileobj.read(BLOCKSIZE) + pos = 0 + for i in range(21): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + if offset and numbytes: + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[504]) + self.sparse = structs + + self.offset_data = tarfile.fileobj.tell() + tarfile.offset = self.offset_data + self._block(self.size) + self.size = origsize + return self + + def _proc_pax(self, tarfile): + """Process an extended or global header as described in + POSIX.1-2008. + """ + # Read the header information. + buf = tarfile.fileobj.read(self._block(self.size)) + + # A pax header stores supplemental information for either + # the following file (extended) or all following files + # (global). + if self.type == XGLTYPE: + pax_headers = tarfile.pax_headers + else: + pax_headers = tarfile.pax_headers.copy() + + # Check if the pax header contains a hdrcharset field. This tells us + # the encoding of the path, linkpath, uname and gname fields. Normally, + # these fields are UTF-8 encoded but since POSIX.1-2008 tar + # implementations are allowed to store them as raw binary strings if + # the translation to UTF-8 fails. + match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) + if match is not None: + pax_headers["hdrcharset"] = match.group(1).decode("utf-8") + + # For the time being, we don't care about anything other than "BINARY". + # The only other value that is currently allowed by the standard is + # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. + hdrcharset = pax_headers.get("hdrcharset") + if hdrcharset == "BINARY": + encoding = tarfile.encoding + else: + encoding = "utf-8" + + # Parse pax header information. A record looks like that: + # "%d %s=%s\n" % (length, keyword, value). length is the size + # of the complete record including the length field itself and + # the newline. keyword and value are both UTF-8 encoded strings. + regex = re.compile(br"(\d+) ([^=]+)=") + pos = 0 + while match := regex.match(buf, pos): + length, keyword = match.groups() + length = int(length) + if length == 0: + raise InvalidHeaderError("invalid header") + value = buf[match.end(2) + 1:match.start(1) + length - 1] + + # Normally, we could just use "utf-8" as the encoding and "strict" + # as the error handler, but we better not take the risk. For + # example, GNU tar <= 1.23 is known to store filenames it cannot + # translate to UTF-8 as raw strings (unfortunately without a + # hdrcharset=BINARY header). + # We first try the strict standard encoding, and if that fails we + # fall back on the user's encoding and error handler. + keyword = self._decode_pax_field(keyword, "utf-8", "utf-8", + tarfile.errors) + if keyword in PAX_NAME_FIELDS: + value = self._decode_pax_field(value, encoding, tarfile.encoding, + tarfile.errors) + else: + value = self._decode_pax_field(value, "utf-8", "utf-8", + tarfile.errors) + + pax_headers[keyword] = value + pos += length + + # Fetch the next header. + try: + next = self.fromtarfile(tarfile) + except HeaderError as e: + raise SubsequentHeaderError(str(e)) from None + + # Process GNU sparse information. + if "GNU.sparse.map" in pax_headers: + # GNU extended sparse format version 0.1. + self._proc_gnusparse_01(next, pax_headers) + + elif "GNU.sparse.size" in pax_headers: + # GNU extended sparse format version 0.0. 
+ self._proc_gnusparse_00(next, pax_headers, buf) + + elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": + # GNU extended sparse format version 1.0. + self._proc_gnusparse_10(next, pax_headers, tarfile) + + if self.type in (XHDTYPE, SOLARIS_XHDTYPE): + # Patch the TarInfo object with the extended header info. + next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) + next.offset = self.offset + + if "size" in pax_headers: + # If the extended header replaces the size field, + # we need to recalculate the offset where the next + # header starts. + offset = next.offset_data + if next.isreg() or next.type not in SUPPORTED_TYPES: + offset += next._block(next.size) + tarfile.offset = offset + + return next + + def _proc_gnusparse_00(self, next, pax_headers, buf): + """Process a GNU tar extended sparse header, version 0.0. + """ + offsets = [] + for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): + offsets.append(int(match.group(1))) + numbytes = [] + for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): + numbytes.append(int(match.group(1))) + next.sparse = list(zip(offsets, numbytes)) + + def _proc_gnusparse_01(self, next, pax_headers): + """Process a GNU tar extended sparse header, version 0.1. + """ + sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _proc_gnusparse_10(self, next, pax_headers, tarfile): + """Process a GNU tar extended sparse header, version 1.0. + """ + fields = None + sparse = [] + buf = tarfile.fileobj.read(BLOCKSIZE) + fields, buf = buf.split(b"\n", 1) + fields = int(fields) + while len(sparse) < fields * 2: + if b"\n" not in buf: + buf += tarfile.fileobj.read(BLOCKSIZE) + number, buf = buf.split(b"\n", 1) + sparse.append(int(number)) + next.offset_data = tarfile.fileobj.tell() + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _apply_pax_info(self, pax_headers, encoding, errors): + """Replace fields with supplemental information from a previous + pax extended or global header. + """ + for keyword, value in pax_headers.items(): + if keyword == "GNU.sparse.name": + setattr(self, "path", value) + elif keyword == "GNU.sparse.size": + setattr(self, "size", int(value)) + elif keyword == "GNU.sparse.realsize": + setattr(self, "size", int(value)) + elif keyword in PAX_FIELDS: + if keyword in PAX_NUMBER_FIELDS: + try: + value = PAX_NUMBER_FIELDS[keyword](value) + except ValueError: + value = 0 + if keyword == "path": + value = value.rstrip("/") + setattr(self, keyword, value) + + self.pax_headers = pax_headers.copy() + + def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): + """Decode a single field from a pax record. + """ + try: + return value.decode(encoding, "strict") + except UnicodeDecodeError: + return value.decode(fallback_encoding, fallback_errors) + + def _block(self, count): + """Round up a byte count by BLOCKSIZE and return it, + e.g. _block(834) => 1024. + """ + blocks, remainder = divmod(count, BLOCKSIZE) + if remainder: + blocks += 1 + return blocks * BLOCKSIZE + + def isreg(self): + 'Return True if the Tarinfo object is a regular file.' + return self.type in REGULAR_TYPES + + def isfile(self): + 'Return True if the Tarinfo object is a regular file.' + return self.isreg() + + def isdir(self): + 'Return True if it is a directory.' + return self.type == DIRTYPE + + def issym(self): + 'Return True if it is a symbolic link.' 
+ return self.type == SYMTYPE + + def islnk(self): + 'Return True if it is a hard link.' + return self.type == LNKTYPE + + def ischr(self): + 'Return True if it is a character device.' + return self.type == CHRTYPE + + def isblk(self): + 'Return True if it is a block device.' + return self.type == BLKTYPE + + def isfifo(self): + 'Return True if it is a FIFO.' + return self.type == FIFOTYPE + + def issparse(self): + return self.sparse is not None + + def isdev(self): + 'Return True if it is one of character device, block device or FIFO.' + return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) +# class TarInfo + +class TarFile(object): + """The TarFile Class provides an interface to tar archives. + """ + + debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) + + dereference = False # If true, add content of linked file to the + # tar file, else the link. + + ignore_zeros = False # If true, skips empty or invalid blocks and + # continues processing. + + errorlevel = 1 # If 0, fatal errors only appear in debug + # messages (if debug >= 0). If > 0, errors + # are passed to the caller as exceptions. + + format = DEFAULT_FORMAT # The format to use when creating an archive. + + encoding = ENCODING # Encoding for 8-bit character strings. + + errors = None # Error handler for unicode conversion. + + tarinfo = TarInfo # The default TarInfo class to use. + + fileobject = ExFileObject # The file-object for extractfile(). + + extraction_filter = None # The default filter for extraction. + + def __init__(self, name=None, mode="r", fileobj=None, format=None, + tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, + errors="surrogateescape", pax_headers=None, debug=None, + errorlevel=None, copybufsize=None, stream=False): + """Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. 'mode' + defaults to 'r'. + If 'fileobj' is given, it is used for reading or writing data. If it + can be determined, 'mode' is overridden by 'fileobj's mode. + 'fileobj' is not closed, when TarFile is closed. + """ + modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"} + if mode not in modes: + raise ValueError("mode must be 'r', 'a', 'w' or 'x'") + self.mode = mode + self._mode = modes[mode] + + if not fileobj: + if self.mode == "a" and not os.path.exists(name): + # Create nonexistent files in append mode. + self.mode = "w" + self._mode = "wb" + fileobj = bltn_open(name, self._mode) + self._extfileobj = False + else: + if (name is None and hasattr(fileobj, "name") and + isinstance(fileobj.name, (str, bytes))): + name = fileobj.name + if hasattr(fileobj, "mode"): + self._mode = fileobj.mode + self._extfileobj = True + self.name = os.path.abspath(name) if name else None + self.fileobj = fileobj + + self.stream = stream + + # Init attributes. + if format is not None: + self.format = format + if tarinfo is not None: + self.tarinfo = tarinfo + if dereference is not None: + self.dereference = dereference + if ignore_zeros is not None: + self.ignore_zeros = ignore_zeros + if encoding is not None: + self.encoding = encoding + self.errors = errors + + if pax_headers is not None and self.format == PAX_FORMAT: + self.pax_headers = pax_headers + else: + self.pax_headers = {} + + if debug is not None: + self.debug = debug + if errorlevel is not None: + self.errorlevel = errorlevel + + # Init datastructures. 
+ self.copybufsize = copybufsize + self.closed = False + self.members = [] # list of members as TarInfo objects + self._loaded = False # flag if all members have been read + self.offset = self.fileobj.tell() + # current position in the archive file + self.inodes = {} # dictionary caching the inodes of + # archive members already added + + try: + if self.mode == "r": + self.firstmember = None + self.firstmember = self.next() + + if self.mode == "a": + # Move to the end of the archive, + # before the first empty block. + while True: + self.fileobj.seek(self.offset) + try: + tarinfo = self.tarinfo.fromtarfile(self) + self.members.append(tarinfo) + except EOFHeaderError: + self.fileobj.seek(self.offset) + break + except HeaderError as e: + raise ReadError(str(e)) from None + + if self.mode in ("a", "w", "x"): + self._loaded = True + + if self.pax_headers: + buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) + self.fileobj.write(buf) + self.offset += len(buf) + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + #-------------------------------------------------------------------------- + # Below are the classmethods which act as alternate constructors to the + # TarFile class. The open() method is the only one that is needed for + # public use; it is the "super"-constructor and is able to select an + # adequate "sub"-constructor for a particular compression using the mapping + # from OPEN_METH. + # + # This concept allows one to subclass TarFile without losing the comfort of + # the super-constructor. A sub-constructor is registered and made available + # by adding it to the mapping in OPEN_METH. + + @classmethod + def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): + r"""Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. + + mode: + 'r' or 'r:\*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + + 'r|\*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + """ + + if not name and not fileobj: + raise ValueError("nothing to open") + + if mode in ("r", "r:*"): + # Find out which *open() is appropriate for opening the file. 
+ def not_compressed(comptype): + return cls.OPEN_METH[comptype] == 'taropen' + error_msgs = [] + for comptype in sorted(cls.OPEN_METH, key=not_compressed): + func = getattr(cls, cls.OPEN_METH[comptype]) + if fileobj is not None: + saved_pos = fileobj.tell() + try: + return func(name, "r", fileobj, **kwargs) + except (ReadError, CompressionError) as e: + error_msgs.append(f'- method {comptype}: {e!r}') + if fileobj is not None: + fileobj.seek(saved_pos) + continue + error_msgs_summary = '\n'.join(error_msgs) + raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}") + + elif ":" in mode: + filemode, comptype = mode.split(":", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + # Select the *open() function according to + # given compression. + if comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + else: + raise CompressionError("unknown compression type %r" % comptype) + return func(name, filemode, fileobj, **kwargs) + + elif "|" in mode: + filemode, comptype = mode.split("|", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + if filemode not in ("r", "w"): + raise ValueError("mode must be 'r' or 'w'") + + compresslevel = kwargs.pop("compresslevel", 9) + stream = _Stream(name, filemode, comptype, fileobj, bufsize, + compresslevel) + try: + t = cls(name, filemode, stream, **kwargs) + except: + stream.close() + raise + t._extfileobj = False + return t + + elif mode in ("a", "w", "x"): + return cls.taropen(name, mode, fileobj, **kwargs) + + raise ValueError("undiscernible mode") + + @classmethod + def taropen(cls, name, mode="r", fileobj=None, **kwargs): + """Open uncompressed tar archive name for reading or writing. + """ + if mode not in ("r", "a", "w", "x"): + raise ValueError("mode must be 'r', 'a', 'w' or 'x'") + return cls(name, mode, fileobj, **kwargs) + + @classmethod + def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open gzip compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if mode not in ("r", "w", "x"): + raise ValueError("mode must be 'r', 'w' or 'x'") + + try: + from gzip import GzipFile + except ImportError: + raise CompressionError("gzip module is not available") from None + + try: + fileobj = GzipFile(name, mode + "b", compresslevel, fileobj) + except OSError as e: + if fileobj is not None and mode == 'r': + raise ReadError("not a gzip file") from e + raise + + try: + t = cls.taropen(name, mode, fileobj, **kwargs) + except OSError as e: + fileobj.close() + if mode == 'r': + raise ReadError("not a gzip file") from e + raise + except: + fileobj.close() + raise + t._extfileobj = False + return t + + @classmethod + def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open bzip2 compressed tar archive name for reading or writing. + Appending is not allowed. 
+ """ + if mode not in ("r", "w", "x"): + raise ValueError("mode must be 'r', 'w' or 'x'") + + try: + from bz2 import BZ2File + except ImportError: + raise CompressionError("bz2 module is not available") from None + + fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel) + + try: + t = cls.taropen(name, mode, fileobj, **kwargs) + except (OSError, EOFError) as e: + fileobj.close() + if mode == 'r': + raise ReadError("not a bzip2 file") from e + raise + except: + fileobj.close() + raise + t._extfileobj = False + return t + + @classmethod + def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs): + """Open lzma compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if mode not in ("r", "w", "x"): + raise ValueError("mode must be 'r', 'w' or 'x'") + + try: + from lzma import LZMAFile, LZMAError + except ImportError: + raise CompressionError("lzma module is not available") from None + + fileobj = LZMAFile(fileobj or name, mode, preset=preset) + + try: + t = cls.taropen(name, mode, fileobj, **kwargs) + except (LZMAError, EOFError) as e: + fileobj.close() + if mode == 'r': + raise ReadError("not an lzma file") from e + raise + except: + fileobj.close() + raise + t._extfileobj = False + return t + + # All *open() methods are registered here. + OPEN_METH = { + "tar": "taropen", # uncompressed tar + "gz": "gzopen", # gzip compressed tar + "bz2": "bz2open", # bzip2 compressed tar + "xz": "xzopen" # lzma compressed tar + } + + #-------------------------------------------------------------------------- + # The public methods which TarFile provides: + + def close(self): + """Close the TarFile. In write-mode, two finishing zero blocks are + appended to the archive. + """ + if self.closed: + return + + self.closed = True + try: + if self.mode in ("a", "w", "x"): + self.fileobj.write(NUL * (BLOCKSIZE * 2)) + self.offset += (BLOCKSIZE * 2) + # fill up the end with zero-blocks + # (like option -b20 for tar does) + blocks, remainder = divmod(self.offset, RECORDSIZE) + if remainder > 0: + self.fileobj.write(NUL * (RECORDSIZE - remainder)) + finally: + if not self._extfileobj: + self.fileobj.close() + + def getmember(self, name): + """Return a TarInfo object for member 'name'. If 'name' can not be + found in the archive, KeyError is raised. If a member occurs more + than once in the archive, its last occurrence is assumed to be the + most up-to-date version. + """ + tarinfo = self._getmember(name.rstrip('/')) + if tarinfo is None: + raise KeyError("filename %r not found" % name) + return tarinfo + + def getmembers(self): + """Return the members of the archive as a list of TarInfo objects. The + list has the same order as the members in the archive. + """ + self._check() + if not self._loaded: # if we want to obtain a list of + self._load() # all members, we first have to + # scan the whole archive. + return self.members + + def getnames(self): + """Return the members of the archive as a list of their names. It has + the same order as the list returned by getmembers(). + """ + return [tarinfo.name for tarinfo in self.getmembers()] + + def gettarinfo(self, name=None, arcname=None, fileobj=None): + """Create a TarInfo object from the result of os.stat or equivalent + on an existing file. The file is either named by 'name', or + specified as a file object 'fileobj' with a file descriptor. 
If + given, 'arcname' specifies an alternative name for the file in the + archive, otherwise, the name is taken from the 'name' attribute of + 'fileobj', or the 'name' argument. The name should be a text + string. + """ + self._check("awx") + + # When fileobj is given, replace name by + # fileobj's real name. + if fileobj is not None: + name = fileobj.name + + # Building the name of the member in the archive. + # Backward slashes are converted to forward slashes, + # Absolute paths are turned to relative paths. + if arcname is None: + arcname = name + drv, arcname = os.path.splitdrive(arcname) + arcname = arcname.replace(os.sep, "/") + arcname = arcname.lstrip("/") + + # Now, fill the TarInfo object with + # information specific for the file. + tarinfo = self.tarinfo() + tarinfo._tarfile = self # To be removed in 3.16. + + # Use os.stat or os.lstat, depending on if symlinks shall be resolved. + if fileobj is None: + if not self.dereference: + statres = os.lstat(name) + else: + statres = os.stat(name) + else: + statres = os.fstat(fileobj.fileno()) + linkname = "" + + stmd = statres.st_mode + if stat.S_ISREG(stmd): + inode = (statres.st_ino, statres.st_dev) + if not self.dereference and statres.st_nlink > 1 and \ + inode in self.inodes and arcname != self.inodes[inode]: + # Is it a hardlink to an already + # archived file? + type = LNKTYPE + linkname = self.inodes[inode] + else: + # The inode is added only if its valid. + # For win32 it is always 0. + type = REGTYPE + if inode[0]: + self.inodes[inode] = arcname + elif stat.S_ISDIR(stmd): + type = DIRTYPE + elif stat.S_ISFIFO(stmd): + type = FIFOTYPE + elif stat.S_ISLNK(stmd): + type = SYMTYPE + linkname = os.readlink(name) + elif stat.S_ISCHR(stmd): + type = CHRTYPE + elif stat.S_ISBLK(stmd): + type = BLKTYPE + else: + return None + + # Fill the TarInfo object with all + # information we can get. + tarinfo.name = arcname + tarinfo.mode = stmd + tarinfo.uid = statres.st_uid + tarinfo.gid = statres.st_gid + if type == REGTYPE: + tarinfo.size = statres.st_size + else: + tarinfo.size = 0 + tarinfo.mtime = statres.st_mtime + tarinfo.type = type + tarinfo.linkname = linkname + if pwd: + try: + tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] + except KeyError: + pass + if grp: + try: + tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] + except KeyError: + pass + + if type in (CHRTYPE, BLKTYPE): + if hasattr(os, "major") and hasattr(os, "minor"): + tarinfo.devmajor = os.major(statres.st_rdev) + tarinfo.devminor = os.minor(statres.st_rdev) + return tarinfo + + def list(self, verbose=True, *, members=None): + """Print a table of contents to sys.stdout. If 'verbose' is False, only + the names of the members are printed. If it is True, an 'ls -l'-like + output is produced. 'members' is optional and must be a subset of the + list returned by getmembers(). + """ + # Convert tarinfo type to stat type. 
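+ # stat.filemode() expects an st_mode value, so the tar type byte is
+ # mapped to the matching S_IF* constant to render the leading
+ # 'd'/'l'/'c'/'b' character of the 'ls -l'-style line; types with no
+ # mapping fall back to 0 via type2mode.get().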
+ type2mode = {REGTYPE: stat.S_IFREG, SYMTYPE: stat.S_IFLNK, + FIFOTYPE: stat.S_IFIFO, CHRTYPE: stat.S_IFCHR, + DIRTYPE: stat.S_IFDIR, BLKTYPE: stat.S_IFBLK} + self._check() + + if members is None: + members = self + for tarinfo in members: + if verbose: + if tarinfo.mode is None: + _safe_print("??????????") + else: + modetype = type2mode.get(tarinfo.type, 0) + _safe_print(stat.filemode(modetype | tarinfo.mode)) + _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid, + tarinfo.gname or tarinfo.gid)) + if tarinfo.ischr() or tarinfo.isblk(): + _safe_print("%10s" % + ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor))) + else: + _safe_print("%10d" % tarinfo.size) + if tarinfo.mtime is None: + _safe_print("????-??-?? ??:??:??") + else: + _safe_print("%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6]) + + _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else "")) + + if verbose: + if tarinfo.issym(): + _safe_print("-> " + tarinfo.linkname) + if tarinfo.islnk(): + _safe_print("link to " + tarinfo.linkname) + print() + + def add(self, name, arcname=None, recursive=True, *, filter=None): + """Add the file 'name' to the archive. 'name' may be any type of file + (directory, fifo, symbolic link, etc.). If given, 'arcname' + specifies an alternative name for the file in the archive. + Directories are added recursively by default. This can be avoided by + setting 'recursive' to False. 'filter' is a function + that expects a TarInfo object argument and returns the changed + TarInfo object, if it returns None the TarInfo object will be + excluded from the archive. + """ + self._check("awx") + + if arcname is None: + arcname = name + + # Skip if somebody tries to archive the archive... + if self.name is not None and os.path.abspath(name) == self.name: + self._dbg(2, "tarfile: Skipped %r" % name) + return + + self._dbg(1, name) + + # Create a TarInfo object from the file. + tarinfo = self.gettarinfo(name, arcname) + + if tarinfo is None: + self._dbg(1, "tarfile: Unsupported type %r" % name) + return + + # Change or exclude the TarInfo object. + if filter is not None: + tarinfo = filter(tarinfo) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Append the tar header and data to the archive. + if tarinfo.isreg(): + with bltn_open(name, "rb") as f: + self.addfile(tarinfo, f) + + elif tarinfo.isdir(): + self.addfile(tarinfo) + if recursive: + for f in sorted(os.listdir(name)): + self.add(os.path.join(name, f), os.path.join(arcname, f), + recursive, filter=filter) + + else: + self.addfile(tarinfo) + + def addfile(self, tarinfo, fileobj=None): + """Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents + a non zero-size regular file, the 'fileobj' argument should be a binary file, + and tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects directly, or by using gettarinfo(). + """ + self._check("awx") + + if fileobj is None and tarinfo.isreg() and tarinfo.size != 0: + raise ValueError("fileobj not provided for non zero-size regular file") + + tarinfo = copy.copy(tarinfo) + + buf = tarinfo.tobuf(self.format, self.encoding, self.errors) + self.fileobj.write(buf) + self.offset += len(buf) + bufsize=self.copybufsize + # If there's data to follow, append it. 
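+ # Member data is written in whole 512-byte blocks: after copying
+ # tarinfo.size bytes, the final partial block is padded with NULs so
+ # self.offset stays block-aligned for the next header.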
+ if fileobj is not None: + copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize) + blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) + if remainder > 0: + self.fileobj.write(NUL * (BLOCKSIZE - remainder)) + blocks += 1 + self.offset += blocks * BLOCKSIZE + + self.members.append(tarinfo) + + def _get_filter_function(self, filter): + if filter is None: + filter = self.extraction_filter + if filter is None: + import warnings + warnings.warn( + 'Python 3.14 will, by default, filter extracted tar ' + + 'archives and reject files or modify their metadata. ' + + 'Use the filter argument to control this behavior.', + DeprecationWarning, stacklevel=3) + return fully_trusted_filter + if isinstance(filter, str): + raise TypeError( + 'String names are not supported for ' + + 'TarFile.extraction_filter. Use a function such as ' + + 'tarfile.data_filter directly.') + return filter + if callable(filter): + return filter + try: + return _NAMED_FILTERS[filter] + except KeyError: + raise ValueError(f"filter {filter!r} not found") from None + + def extractall(self, path=".", members=None, *, numeric_owner=False, + filter=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. 'path' specifies a different directory + to extract to. 'members' is optional and must be a subset of the + list returned by getmembers(). If 'numeric_owner' is True, only + the numbers for user/group names are used and not the names. + + The 'filter' function will be called on each member just + before extraction. + It can return a changed TarInfo or None to skip the member. + String names of common filters are accepted. + """ + directories = [] + + filter_function = self._get_filter_function(filter) + if members is None: + members = self + + for member in members: + tarinfo = self._get_extract_tarinfo(member, filter_function, path) + if tarinfo is None: + continue + if tarinfo.isdir(): + # For directories, delay setting attributes until later, + # since permissions can interfere with extraction and + # extracting contents can reset mtime. + directories.append(tarinfo) + self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(), + numeric_owner=numeric_owner) + + # Reverse sort directories. + directories.sort(key=lambda a: a.name, reverse=True) + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath, numeric_owner=numeric_owner) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError as e: + self._handle_nonfatal_error(e) + + def extract(self, member, path="", set_attrs=True, *, numeric_owner=False, + filter=None): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. 'member' may be a filename or a TarInfo object. You can + specify a different directory using 'path'. File attributes (owner, + mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner' + is True, only the numbers for user/group names are used and not + the names. + + The 'filter' function will be called before extraction. + It can return a changed TarInfo or None to skip the member. + String names of common filters are accepted. 
+ """ + filter_function = self._get_filter_function(filter) + tarinfo = self._get_extract_tarinfo(member, filter_function, path) + if tarinfo is not None: + self._extract_one(tarinfo, path, set_attrs, numeric_owner) + + def _get_extract_tarinfo(self, member, filter_function, path): + """Get filtered TarInfo (or None) from member, which might be a str""" + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + unfiltered = tarinfo + try: + tarinfo = filter_function(tarinfo, path) + except (OSError, FilterError) as e: + self._handle_fatal_error(e) + except ExtractError as e: + self._handle_nonfatal_error(e) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % unfiltered.name) + return None + # Prepare the link target for makelink(). + if tarinfo.islnk(): + tarinfo = copy.copy(tarinfo) + tarinfo._link_target = os.path.join(path, tarinfo.linkname) + return tarinfo + + def _extract_one(self, tarinfo, path, set_attrs, numeric_owner): + """Extract from filtered tarinfo to disk""" + self._check("r") + + try: + self._extract_member(tarinfo, os.path.join(path, tarinfo.name), + set_attrs=set_attrs, + numeric_owner=numeric_owner) + except OSError as e: + self._handle_fatal_error(e) + except ExtractError as e: + self._handle_nonfatal_error(e) + + def _handle_nonfatal_error(self, e): + """Handle non-fatal error (ExtractError) according to errorlevel""" + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def _handle_fatal_error(self, e): + """Handle "fatal" error according to self.errorlevel""" + if self.errorlevel > 0: + raise + elif isinstance(e, OSError): + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) + else: + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + else: + self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e)) + + def extractfile(self, member): + """Extract a member from the archive as a file object. 'member' may be + a filename or a TarInfo object. If 'member' is a regular file or + a link, an io.BufferedReader object is returned. For all other + existing members, None is returned. If 'member' does not appear + in the archive, KeyError is raised. + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES: + # Members with unknown types are treated as regular files. + return self.fileobject(self, tarinfo) + + elif tarinfo.islnk() or tarinfo.issym(): + if isinstance(self.fileobj, _Stream): + # A small but ugly workaround for the case that someone tries + # to extract a (sym)link as a file-object from a non-seekable + # stream of tar blocks. + raise StreamError("cannot extract (sym)link as file object") + else: + # A (sym)link's file object is its target's file object. + return self.extractfile(self._find_link_target(tarinfo)) + else: + # If there's no data associated with the member (directory, chrdev, + # blkdev, etc.), return None instead of a file object. + return None + + def _extract_member(self, tarinfo, targetpath, set_attrs=True, + numeric_owner=False): + """Extract the TarInfo object tarinfo to a physical + file called targetpath. + """ + # Fetch the TarInfo object for the given name + # and build the destination pathname, replacing + # forward slashes to platform specific separators. + targetpath = targetpath.rstrip("/") + targetpath = targetpath.replace("/", os.sep) + + # Create all upper directories. 
+ upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + # Create directories that are not part of the archive with + # default permissions. + os.makedirs(upperdirs, exist_ok=True) + + if tarinfo.islnk() or tarinfo.issym(): + self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) + else: + self._dbg(1, tarinfo.name) + + if tarinfo.isreg(): + self.makefile(tarinfo, targetpath) + elif tarinfo.isdir(): + self.makedir(tarinfo, targetpath) + elif tarinfo.isfifo(): + self.makefifo(tarinfo, targetpath) + elif tarinfo.ischr() or tarinfo.isblk(): + self.makedev(tarinfo, targetpath) + elif tarinfo.islnk() or tarinfo.issym(): + self.makelink(tarinfo, targetpath) + elif tarinfo.type not in SUPPORTED_TYPES: + self.makeunknown(tarinfo, targetpath) + else: + self.makefile(tarinfo, targetpath) + + if set_attrs: + self.chown(tarinfo, targetpath, numeric_owner) + if not tarinfo.issym(): + self.chmod(tarinfo, targetpath) + self.utime(tarinfo, targetpath) + + #-------------------------------------------------------------------------- + # Below are the different file methods. They are called via + # _extract_member() when extract() is called. They can be replaced in a + # subclass to implement other functionality. + + def makedir(self, tarinfo, targetpath): + """Make a directory called targetpath. + """ + try: + if tarinfo.mode is None: + # Use the system's default mode + os.mkdir(targetpath) + else: + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) + except FileExistsError: + if not os.path.isdir(targetpath): + raise + + def makefile(self, tarinfo, targetpath): + """Make a file called targetpath. + """ + source = self.fileobj + source.seek(tarinfo.offset_data) + bufsize = self.copybufsize + with bltn_open(targetpath, "wb") as target: + if tarinfo.sparse is not None: + for offset, size in tarinfo.sparse: + target.seek(offset) + copyfileobj(source, target, size, ReadError, bufsize) + target.seek(tarinfo.size) + target.truncate() + else: + copyfileobj(source, target, tarinfo.size, ReadError, bufsize) + + def makeunknown(self, tarinfo, targetpath): + """Make a file from a TarInfo object with an unknown type + at targetpath. + """ + self.makefile(tarinfo, targetpath) + self._dbg(1, "tarfile: Unknown file type %r, " \ + "extracted as regular file." % tarinfo.type) + + def makefifo(self, tarinfo, targetpath): + """Make a fifo called targetpath. + """ + if hasattr(os, "mkfifo"): + os.mkfifo(targetpath) + else: + raise ExtractError("fifo not supported by system") + + def makedev(self, tarinfo, targetpath): + """Make a character or block device called targetpath. + """ + if not hasattr(os, "mknod") or not hasattr(os, "makedev"): + raise ExtractError("special devices not supported by system") + + mode = tarinfo.mode + if mode is None: + # Use mknod's default + mode = 0o600 + if tarinfo.isblk(): + mode |= stat.S_IFBLK + else: + mode |= stat.S_IFCHR + + os.mknod(targetpath, mode, + os.makedev(tarinfo.devmajor, tarinfo.devminor)) + + def makelink(self, tarinfo, targetpath): + """Make a (symbolic) link called targetpath. If it cannot be created + (platform limitation), we try to make a copy of the referenced file + instead of a link. + """ + try: + # For systems that support symbolic and hard links. + if tarinfo.issym(): + if os.path.lexists(targetpath): + # Avoid FileExistsError on following os.symlink. 
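+ # (os.symlink does not overwrite, so a stale entry is removed first.
+ # If the platform cannot create links at all, the symlink_exception
+ # handler below extracts a copy of the link's target instead.)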
+ os.unlink(targetpath) + os.symlink(tarinfo.linkname, targetpath) + else: + if os.path.exists(tarinfo._link_target): + os.link(tarinfo._link_target, targetpath) + else: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except symlink_exception: + try: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except KeyError: + raise ExtractError("unable to resolve link inside archive") from None + + def chown(self, tarinfo, targetpath, numeric_owner): + """Set owner of targetpath according to tarinfo. If numeric_owner + is True, use .gid/.uid instead of .gname/.uname. If numeric_owner + is False, fall back to .gid/.uid when the search based on name + fails. + """ + if hasattr(os, "geteuid") and os.geteuid() == 0: + # We have to be root to do so. + g = tarinfo.gid + u = tarinfo.uid + if not numeric_owner: + try: + if grp and tarinfo.gname: + g = grp.getgrnam(tarinfo.gname)[2] + except KeyError: + pass + try: + if pwd and tarinfo.uname: + u = pwd.getpwnam(tarinfo.uname)[2] + except KeyError: + pass + if g is None: + g = -1 + if u is None: + u = -1 + try: + if tarinfo.issym() and hasattr(os, "lchown"): + os.lchown(targetpath, u, g) + else: + os.chown(targetpath, u, g) + except (OSError, OverflowError) as e: + # OverflowError can be raised if an ID doesn't fit in 'id_t' + raise ExtractError("could not change owner") from e + + def chmod(self, tarinfo, targetpath): + """Set file permissions of targetpath according to tarinfo. + """ + if tarinfo.mode is None: + return + try: + os.chmod(targetpath, tarinfo.mode) + except OSError as e: + raise ExtractError("could not change mode") from e + + def utime(self, tarinfo, targetpath): + """Set modification time of targetpath according to tarinfo. + """ + mtime = tarinfo.mtime + if mtime is None: + return + if not hasattr(os, 'utime'): + return + try: + os.utime(targetpath, (mtime, mtime)) + except OSError as e: + raise ExtractError("could not change modification time") from e + + #-------------------------------------------------------------------------- + def next(self): + """Return the next member of the archive as a TarInfo object, when + TarFile is opened for reading. Return None if there is no more + available. + """ + self._check("ra") + if self.firstmember is not None: + m = self.firstmember + self.firstmember = None + return m + + # Advance the file pointer. + if self.offset != self.fileobj.tell(): + if self.offset == 0: + return None + self.fileobj.seek(self.offset - 1) + if not self.fileobj.read(1): + raise ReadError("unexpected end of data") + + # Read the next block. 
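+ # Header errors are classified below: zero and invalid blocks may be
+ # skipped one BLOCKSIZE at a time when ignore_zeros is set, while an
+ # error at offset 0 means the file itself is unreadable and is
+ # reported as a ReadError.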
+ tarinfo = None + while True: + try: + tarinfo = self.tarinfo.fromtarfile(self) + except EOFHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + except InvalidHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + elif self.offset == 0: + raise ReadError(str(e)) from None + except EmptyHeaderError: + if self.offset == 0: + raise ReadError("empty file") from None + except TruncatedHeaderError as e: + if self.offset == 0: + raise ReadError(str(e)) from None + except SubsequentHeaderError as e: + raise ReadError(str(e)) from None + except Exception as e: + try: + import zlib + if isinstance(e, zlib.error): + raise ReadError(f'zlib error: {e}') from None + else: + raise e + except ImportError: + raise e + break + + if tarinfo is not None: + # if streaming the file we do not want to cache the tarinfo + if not self.stream: + self.members.append(tarinfo) + else: + self._loaded = True + + return tarinfo + + #-------------------------------------------------------------------------- + # Little helper methods: + + def _getmember(self, name, tarinfo=None, normalize=False): + """Find an archive member by name from bottom to top. + If tarinfo is given, it is used as the starting point. + """ + # Ensure that all members have been loaded. + members = self.getmembers() + + # Limit the member search list up to tarinfo. + skipping = False + if tarinfo is not None: + try: + index = members.index(tarinfo) + except ValueError: + # The given starting point might be a (modified) copy. + # We'll later skip members until we find an equivalent. + skipping = True + else: + # Happy fast path + members = members[:index] + + if normalize: + name = os.path.normpath(name) + + for member in reversed(members): + if skipping: + if tarinfo.offset == member.offset: + skipping = False + continue + if normalize: + member_name = os.path.normpath(member.name) + else: + member_name = member.name + + if name == member_name: + return member + + if skipping: + # Starting point was not found + raise ValueError(tarinfo) + + def _load(self): + """Read through the entire archive file and look for readable + members. This should not run if the file is set to stream. + """ + if not self.stream: + while self.next() is not None: + pass + self._loaded = True + + def _check(self, mode=None): + """Check if TarFile is still open, and if the operation's mode + corresponds to TarFile's mode. + """ + if self.closed: + raise OSError("%s is closed" % self.__class__.__name__) + if mode is not None and self.mode not in mode: + raise OSError("bad operation for mode %r" % self.mode) + + def _find_link_target(self, tarinfo): + """Find the target member of a symlink or hardlink member in the + archive. + """ + if tarinfo.issym(): + # Always search the entire archive. + linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname))) + limit = None + else: + # Search the archive before the link, because a hard link is + # just a reference to an already archived file. + linkname = tarinfo.linkname + limit = tarinfo + + member = self._getmember(linkname, tarinfo=limit, normalize=True) + if member is None: + raise KeyError("linkname %r not found" % linkname) + return member + + def __iter__(self): + """Provide an iterator object. + """ + if self._loaded: + yield from self.members + return + + # Yield items using TarFile's next() method. 
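+ # self.members doubles as a cache: entries returned by next() are
+ # appended there (unless streaming), so a second iteration replays
+ # the cached list instead of re-reading headers from the file.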
+ # When all members have been read, set TarFile as _loaded. + index = 0 + # Fix for SF #1100429: Under rare circumstances it can + # happen that getmembers() is called during iteration, + # which will have already exhausted the next() method. + if self.firstmember is not None: + tarinfo = self.next() + index += 1 + yield tarinfo + + while True: + if index < len(self.members): + tarinfo = self.members[index] + elif not self._loaded: + tarinfo = self.next() + if not tarinfo: + self._loaded = True + return + else: + return + index += 1 + yield tarinfo + + def _dbg(self, level, msg): + """Write debugging output to sys.stderr. + """ + if level <= self.debug: + print(msg, file=sys.stderr) + + def __enter__(self): + self._check() + return self + + def __exit__(self, type, value, traceback): + if type is None: + self.close() + else: + # An exception occurred. We must not call close() because + # it would try to write end-of-archive blocks and padding. + if not self._extfileobj: + self.fileobj.close() + self.closed = True + +#-------------------- +# exported functions +#-------------------- + +def is_tarfile(name): + """Return True if name points to a tar archive that we + are able to handle, else return False. + + 'name' should be a string, file, or file-like object. + """ + try: + if hasattr(name, "read"): + pos = name.tell() + t = open(fileobj=name) + name.seek(pos) + else: + t = open(name) + t.close() + return True + except TarError: + return False + +open = TarFile.open + + +def main(): + import argparse + + description = 'A simple command-line interface for tarfile module.' + parser = argparse.ArgumentParser(description=description) + parser.add_argument('-v', '--verbose', action='store_true', default=False, + help='Verbose output') + parser.add_argument('--filter', metavar='', + choices=_NAMED_FILTERS, + help='Filter for extraction') + + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument('-l', '--list', metavar='', + help='Show listing of a tarfile') + group.add_argument('-e', '--extract', nargs='+', + metavar=('', ''), + help='Extract tarfile into target dir') + group.add_argument('-c', '--create', nargs='+', + metavar=('', ''), + help='Create tarfile from sources') + group.add_argument('-t', '--test', metavar='', + help='Test if a tarfile is valid') + + args = parser.parse_args() + + if args.filter and args.extract is None: + parser.exit(1, '--filter is only valid for extraction\n') + + if args.test is not None: + src = args.test + if is_tarfile(src): + with open(src, 'r') as tar: + tar.getmembers() + print(tar.getmembers(), file=sys.stderr) + if args.verbose: + print('{!r} is a tar archive.'.format(src)) + else: + parser.exit(1, '{!r} is not a tar archive.\n'.format(src)) + + elif args.list is not None: + src = args.list + if is_tarfile(src): + with TarFile.open(src, 'r:*') as tf: + tf.list(verbose=args.verbose) + else: + parser.exit(1, '{!r} is not a tar archive.\n'.format(src)) + + elif args.extract is not None: + if len(args.extract) == 1: + src = args.extract[0] + curdir = os.curdir + elif len(args.extract) == 2: + src, curdir = args.extract + else: + parser.exit(1, parser.format_help()) + + if is_tarfile(src): + with TarFile.open(src, 'r:*') as tf: + tf.extractall(path=curdir, filter=args.filter) + if args.verbose: + if curdir == '.': + msg = '{!r} file is extracted.'.format(src) + else: + msg = ('{!r} file is extracted ' + 'into {!r} directory.').format(src, curdir) + print(msg) + else: + parser.exit(1, '{!r} is not a tar archive.\n'.format(src)) + + 
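+ # The create branch below infers the compression from the output file's
+ # extension (e.g. '.tgz' -> 'w:gz'); extensions not in the mapping fall
+ # back to an uncompressed 'w' archive.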
elif args.create is not None: + tar_name = args.create.pop(0) + _, ext = os.path.splitext(tar_name) + compressions = { + # gz + '.gz': 'gz', + '.tgz': 'gz', + # xz + '.xz': 'xz', + '.txz': 'xz', + # bz2 + '.bz2': 'bz2', + '.tbz': 'bz2', + '.tbz2': 'bz2', + '.tb2': 'bz2', + } + tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w' + tar_files = args.create + + with TarFile.open(tar_name, tar_mode) as tf: + for file_name in tar_files: + tf.add(file_name) + + if args.verbose: + print('{!r} file created.'.format(tar_name)) + +if __name__ == '__main__': + main() diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__main__.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..daf55090862ae43283b6552073549ac3ab2ad50a --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__main__.py @@ -0,0 +1,5 @@ +from . import main + + +if __name__ == '__main__': + main() diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__pycache__/__init__.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ce0547301ca41cfd82ff9341985136c2d384c07 Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__pycache__/__init__.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/__init__.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/__pycache__/py38.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/__pycache__/py38.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c178f02d7099e740efa2eb4bad11aa2ee6bfcc5 Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/__pycache__/py38.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/py38.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/py38.py new file mode 100644 index 0000000000000000000000000000000000000000..20fbbfc1c095baf9f8c72902b24296b50ad3ab9d --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/py38.py @@ -0,0 +1,24 @@ +import sys + + +if sys.version_info < (3, 9): + + def removesuffix(self, suffix): + # suffix='' should not call self[:-0]. 
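+ # (self[:-0] would return an empty string rather than the whole
+ # string, hence the explicit truthiness check on suffix.)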
+ if suffix and self.endswith(suffix): + return self[: -len(suffix)] + else: + return self[:] + + def removeprefix(self, prefix): + if self.startswith(prefix): + return self[len(prefix) :] + else: + return self[:] +else: + + def removesuffix(self, suffix): + return self.removesuffix(suffix) + + def removeprefix(self, prefix): + return self.removeprefix(prefix) diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..0a523bece3e50519653c4d7a38399baa487fefa1 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012 Erik Rose + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..924f9b60ff53d1984520684b3f3800ccb1369c67 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a29978deee94c64619445fc042926f0fe515aafecb26d5e071b5df8adfcaa49d +size 138269 diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..60b24bd036095df3dea7a79f63291d106a96bfd7 Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/recipes.pyi b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/recipes.pyi new file mode 100644 index 0000000000000000000000000000000000000000..739acec05fb3fdd5778aa27db82face3442c3c3f --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/recipes.pyi @@ -0,0 +1,136 @@ +"""Stubs for more_itertools.recipes""" + +from __future__ import annotations + +from typing import ( + Any, + Callable, + Iterable, + Iterator, + overload, + Sequence, + Type, + TypeVar, +) + +# Type and type variable definitions +_T = TypeVar('_T') +_T1 = TypeVar('_T1') +_T2 = TypeVar('_T2') +_U = TypeVar('_U') + +def take(n: int, iterable: Iterable[_T]) -> list[_T]: ... +def tabulate( + function: Callable[[int], _T], start: int = ... +) -> Iterator[_T]: ... +def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ... +def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ... +@overload +def nth(iterable: Iterable[_T], n: int) -> _T | None: ... +@overload +def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ... +def all_equal( + iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... +) -> bool: ... +def quantify( + iterable: Iterable[_T], pred: Callable[[_T], bool] = ... +) -> int: ... +def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ... +def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ... +def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ... +def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ... +def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ... +def repeatfunc( + func: Callable[..., _U], times: int | None = ..., *args: Any +) -> Iterator[_U]: ... +def pairwise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T]]: ... +def grouper( + iterable: Iterable[_T], + n: int, + incomplete: str = ..., + fillvalue: _U = ..., +) -> Iterator[tuple[_T | _U, ...]]: ... +def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ... +def partition( + pred: Callable[[_T], object] | None, iterable: Iterable[_T] +) -> tuple[Iterator[_T], Iterator[_T]]: ... +def powerset(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ... +def unique_everseen( + iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... +) -> Iterator[_T]: ... 
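+# Overloaded stubs such as nth() above encode that passing a 'default'
+# argument widens the return type from _T | None to _T | _U; @overload
+# is the standard .pyi idiom for such default-dependent signatures.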
+def unique_justseen( + iterable: Iterable[_T], key: Callable[[_T], object] | None = ... +) -> Iterator[_T]: ... +def unique( + iterable: Iterable[_T], + key: Callable[[_T], object] | None = ..., + reverse: bool = False, +) -> Iterator[_T]: ... +@overload +def iter_except( + func: Callable[[], _T], + exception: Type[BaseException] | tuple[Type[BaseException], ...], + first: None = ..., +) -> Iterator[_T]: ... +@overload +def iter_except( + func: Callable[[], _T], + exception: Type[BaseException] | tuple[Type[BaseException], ...], + first: Callable[[], _U], +) -> Iterator[_T | _U]: ... +@overload +def first_true( + iterable: Iterable[_T], *, pred: Callable[[_T], object] | None = ... +) -> _T | None: ... +@overload +def first_true( + iterable: Iterable[_T], + default: _U, + pred: Callable[[_T], object] | None = ..., +) -> _T | _U: ... +def random_product( + *args: Iterable[_T], repeat: int = ... +) -> tuple[_T, ...]: ... +def random_permutation( + iterable: Iterable[_T], r: int | None = ... +) -> tuple[_T, ...]: ... +def random_combination(iterable: Iterable[_T], r: int) -> tuple[_T, ...]: ... +def random_combination_with_replacement( + iterable: Iterable[_T], r: int +) -> tuple[_T, ...]: ... +def nth_combination( + iterable: Iterable[_T], r: int, index: int +) -> tuple[_T, ...]: ... +def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[_T | _U]: ... +def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ... +def before_and_after( + predicate: Callable[[_T], bool], it: Iterable[_T] +) -> tuple[Iterator[_T], Iterator[_T]]: ... +def triplewise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T, _T]]: ... +def sliding_window( + iterable: Iterable[_T], n: int +) -> Iterator[tuple[_T, ...]]: ... +def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ... +def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ... +def iter_index( + iterable: Iterable[_T], + value: Any, + start: int | None = ..., + stop: int | None = ..., +) -> Iterator[int]: ... +def sieve(n: int) -> Iterator[int]: ... +def batched( + iterable: Iterable[_T], n: int, *, strict: bool = False +) -> Iterator[tuple[_T]]: ... +def transpose( + it: Iterable[Iterable[_T]], +) -> Iterator[tuple[_T, ...]]: ... +def reshape( + matrix: Iterable[Iterable[_T]], cols: int +) -> Iterator[tuple[_T, ...]]: ... +def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ... +def factor(n: int) -> Iterator[int]: ... +def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ... +def sum_of_squares(it: Iterable[_T]) -> _T: ... +def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ... +def totient(n: int) -> int: ... diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/_setuptools_logging.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/_setuptools_logging.py new file mode 100644 index 0000000000000000000000000000000000000000..006c0985238cb7ec93a15cb544f549611b53bd38 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/_setuptools_logging.py @@ -0,0 +1,26 @@ +# copied from setuptools.logging, omitting monkeypatching +from __future__ import annotations + +import logging +import sys + + +def _not_warning(record): + return record.levelno < logging.WARNING + + +def configure(): + """ + Configure logging to emit warning and above to stderr + and everything else to stdout. This behavior is provided + for compatibility with distutils.log but may change in + the future. 
+ """ + err_handler = logging.StreamHandler() + err_handler.setLevel(logging.WARNING) + out_handler = logging.StreamHandler(sys.stdout) + out_handler.addFilter(_not_warning) + handlers = err_handler, out_handler + logging.basicConfig( + format="{message}", style="{", handlers=handlers, level=logging.DEBUG + ) diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/bdist_wheel.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/bdist_wheel.py new file mode 100644 index 0000000000000000000000000000000000000000..6b811ee3dfeb4d082260350172f7f4f3221b3192 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/bdist_wheel.py @@ -0,0 +1,595 @@ +""" +Create a wheel (.whl) distribution. + +A wheel is a built archive format. +""" + +from __future__ import annotations + +import os +import re +import shutil +import stat +import struct +import sys +import sysconfig +import warnings +from email.generator import BytesGenerator, Generator +from email.policy import EmailPolicy +from glob import iglob +from shutil import rmtree +from zipfile import ZIP_DEFLATED, ZIP_STORED + +import setuptools +from setuptools import Command + +from . import __version__ as wheel_version +from .macosx_libfile import calculate_macosx_platform_tag +from .metadata import pkginfo_to_metadata +from .util import log +from .vendored.packaging import tags +from .vendored.packaging import version as _packaging_version +from .wheelfile import WheelFile + + +def safe_name(name): + """Convert an arbitrary string to a standard distribution name + Any runs of non-alphanumeric/. characters are replaced with a single '-'. + """ + return re.sub("[^A-Za-z0-9.]+", "-", name) + + +def safe_version(version): + """ + Convert an arbitrary string to a standard version string + """ + try: + # normalize the version + return str(_packaging_version.Version(version)) + except _packaging_version.InvalidVersion: + version = version.replace(" ", ".") + return re.sub("[^A-Za-z0-9.]+", "-", version) + + +setuptools_major_version = int(setuptools.__version__.split(".")[0]) + +PY_LIMITED_API_PATTERN = r"cp3\d" + + +def _is_32bit_interpreter(): + return struct.calcsize("P") == 4 + + +def python_tag(): + return f"py{sys.version_info[0]}" + + +def get_platform(archive_root): + """Return our platform name 'win32', 'linux_x86_64'""" + result = sysconfig.get_platform() + if result.startswith("macosx") and archive_root is not None: + result = calculate_macosx_platform_tag(archive_root, result) + elif _is_32bit_interpreter(): + if result == "linux-x86_64": + # pip pull request #3497 + result = "linux-i686" + elif result == "linux-aarch64": + # packaging pull request #234 + # TODO armv8l, packaging pull request #690 => this did not land + # in pip/packaging yet + result = "linux-armv7l" + + return result.replace("-", "_") + + +def get_flag(var, fallback, expected=True, warn=True): + """Use a fallback value for determining SOABI flags if the needed config + var is unset or unavailable.""" + val = sysconfig.get_config_var(var) + if val is None: + if warn: + warnings.warn( + f"Config variable '{var}' is unset, Python ABI tag may " "be incorrect", + RuntimeWarning, + stacklevel=2, + ) + return fallback + return val == expected + + +def get_abi_tag(): + """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2).""" + soabi = sysconfig.get_config_var("SOABI") + impl = tags.interpreter_name() + if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"): + d = "" + m = 
"" + u = "" + if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")): + d = "d" + + if get_flag( + "WITH_PYMALLOC", + impl == "cp", + warn=(impl == "cp" and sys.version_info < (3, 8)), + ) and sys.version_info < (3, 8): + m = "m" + + abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}" + elif soabi and impl == "cp" and soabi.startswith("cpython"): + # non-Windows + abi = "cp" + soabi.split("-")[1] + elif soabi and impl == "cp" and soabi.startswith("cp"): + # Windows + abi = soabi.split("-")[0] + elif soabi and impl == "pp": + # we want something like pypy36-pp73 + abi = "-".join(soabi.split("-")[:2]) + abi = abi.replace(".", "_").replace("-", "_") + elif soabi and impl == "graalpy": + abi = "-".join(soabi.split("-")[:3]) + abi = abi.replace(".", "_").replace("-", "_") + elif soabi: + abi = soabi.replace(".", "_").replace("-", "_") + else: + abi = None + + return abi + + +def safer_name(name): + return safe_name(name).replace("-", "_") + + +def safer_version(version): + return safe_version(version).replace("-", "_") + + +def remove_readonly(func, path, excinfo): + remove_readonly_exc(func, path, excinfo[1]) + + +def remove_readonly_exc(func, path, exc): + os.chmod(path, stat.S_IWRITE) + func(path) + + +class bdist_wheel(Command): + description = "create a wheel distribution" + + supported_compressions = { + "stored": ZIP_STORED, + "deflated": ZIP_DEFLATED, + } + + user_options = [ + ("bdist-dir=", "b", "temporary directory for creating the distribution"), + ( + "plat-name=", + "p", + "platform name to embed in generated filenames " + "(default: %s)" % get_platform(None), + ), + ( + "keep-temp", + "k", + "keep the pseudo-installation tree around after " + "creating the distribution archive", + ), + ("dist-dir=", "d", "directory to put final built distributions in"), + ("skip-build", None, "skip rebuilding everything (for testing/debugging)"), + ( + "relative", + None, + "build the archive using relative paths " "(default: false)", + ), + ( + "owner=", + "u", + "Owner name used when creating a tar file" " [default: current user]", + ), + ( + "group=", + "g", + "Group name used when creating a tar file" " [default: current group]", + ), + ("universal", None, "make a universal wheel" " (default: false)"), + ( + "compression=", + None, + "zipfile compression (one of: {})" " (default: 'deflated')".format( + ", ".join(supported_compressions) + ), + ), + ( + "python-tag=", + None, + "Python implementation compatibility tag" + " (default: '%s')" % (python_tag()), + ), + ( + "build-number=", + None, + "Build number for this particular version. " + "As specified in PEP-0427, this must start with a digit. 
" + "[default: None]", + ), + ( + "py-limited-api=", + None, + "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" " (default: false)", + ), + ] + + boolean_options = ["keep-temp", "skip-build", "relative", "universal"] + + def initialize_options(self): + self.bdist_dir = None + self.data_dir = None + self.plat_name = None + self.plat_tag = None + self.format = "zip" + self.keep_temp = False + self.dist_dir = None + self.egginfo_dir = None + self.root_is_pure = None + self.skip_build = None + self.relative = False + self.owner = None + self.group = None + self.universal = False + self.compression = "deflated" + self.python_tag = python_tag() + self.build_number = None + self.py_limited_api = False + self.plat_name_supplied = False + + def finalize_options(self): + if self.bdist_dir is None: + bdist_base = self.get_finalized_command("bdist").bdist_base + self.bdist_dir = os.path.join(bdist_base, "wheel") + + egg_info = self.distribution.get_command_obj("egg_info") + egg_info.ensure_finalized() # needed for correct `wheel_dist_name` + + self.data_dir = self.wheel_dist_name + ".data" + self.plat_name_supplied = self.plat_name is not None + + try: + self.compression = self.supported_compressions[self.compression] + except KeyError: + raise ValueError(f"Unsupported compression: {self.compression}") from None + + need_options = ("dist_dir", "plat_name", "skip_build") + + self.set_undefined_options("bdist", *zip(need_options, need_options)) + + self.root_is_pure = not ( + self.distribution.has_ext_modules() or self.distribution.has_c_libraries() + ) + + if self.py_limited_api and not re.match( + PY_LIMITED_API_PATTERN, self.py_limited_api + ): + raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN) + + # Support legacy [wheel] section for setting universal + wheel = self.distribution.get_option_dict("wheel") + if "universal" in wheel: + # please don't define this in your global configs + log.warning( + "The [wheel] section is deprecated. Use [bdist_wheel] instead.", + ) + val = wheel["universal"][1].strip() + if val.lower() in ("1", "true", "yes"): + self.universal = True + + if self.build_number is not None and not self.build_number[:1].isdigit(): + raise ValueError("Build tag (build-number) must start with a digit.") + + @property + def wheel_dist_name(self): + """Return distribution full name with - replaced with _""" + components = ( + safer_name(self.distribution.get_name()), + safer_version(self.distribution.get_version()), + ) + if self.build_number: + components += (self.build_number,) + return "-".join(components) + + def get_tag(self): + # bdist sets self.plat_name if unset, we should only use it for purepy + # wheels if the user supplied it. + if self.plat_name_supplied: + plat_name = self.plat_name + elif self.root_is_pure: + plat_name = "any" + else: + # macosx contains system version in platform name so need special handle + if self.plat_name and not self.plat_name.startswith("macosx"): + plat_name = self.plat_name + else: + # on macosx always limit the platform name to comply with any + # c-extension modules in bdist_dir, since the user can specify + # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake + + # on other platforms, and on macosx if there are no c-extension + # modules, use the default platform name. 
+ plat_name = get_platform(self.bdist_dir) + + if _is_32bit_interpreter(): + if plat_name in ("linux-x86_64", "linux_x86_64"): + plat_name = "linux_i686" + if plat_name in ("linux-aarch64", "linux_aarch64"): + # TODO armv8l, packaging pull request #690 => this did not land + # in pip/packaging yet + plat_name = "linux_armv7l" + + plat_name = ( + plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_") + ) + + if self.root_is_pure: + if self.universal: + impl = "py2.py3" + else: + impl = self.python_tag + tag = (impl, "none", plat_name) + else: + impl_name = tags.interpreter_name() + impl_ver = tags.interpreter_version() + impl = impl_name + impl_ver + # We don't work on CPython 3.1, 3.0. + if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"): + impl = self.py_limited_api + abi_tag = "abi3" + else: + abi_tag = str(get_abi_tag()).lower() + tag = (impl, abi_tag, plat_name) + # issue gh-374: allow overriding plat_name + supported_tags = [ + (t.interpreter, t.abi, plat_name) for t in tags.sys_tags() + ] + assert ( + tag in supported_tags + ), f"would build wheel with unsupported tag {tag}" + return tag + + def run(self): + build_scripts = self.reinitialize_command("build_scripts") + build_scripts.executable = "python" + build_scripts.force = True + + build_ext = self.reinitialize_command("build_ext") + build_ext.inplace = False + + if not self.skip_build: + self.run_command("build") + + install = self.reinitialize_command("install", reinit_subcommands=True) + install.root = self.bdist_dir + install.compile = False + install.skip_build = self.skip_build + install.warn_dir = False + + # A wheel without setuptools scripts is more cross-platform. + # Use the (undocumented) `no_ep` option to setuptools' + # install_scripts command to avoid creating entry point scripts. + install_scripts = self.reinitialize_command("install_scripts") + install_scripts.no_ep = True + + # Use a custom scheme for the archive, because we have to decide + # at installation time which scheme to use. + for key in ("headers", "scripts", "data", "purelib", "platlib"): + setattr(install, "install_" + key, os.path.join(self.data_dir, key)) + + basedir_observed = "" + + if os.name == "nt": + # win32 barfs if any of these are ''; could be '.'? 
+ # (distutils.command.install:change_roots bug) + basedir_observed = os.path.normpath(os.path.join(self.data_dir, "..")) + self.install_libbase = self.install_lib = basedir_observed + + setattr( + install, + "install_purelib" if self.root_is_pure else "install_platlib", + basedir_observed, + ) + + log.info(f"installing to {self.bdist_dir}") + + self.run_command("install") + + impl_tag, abi_tag, plat_tag = self.get_tag() + archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}" + if not self.relative: + archive_root = self.bdist_dir + else: + archive_root = os.path.join( + self.bdist_dir, self._ensure_relative(install.install_base) + ) + + self.set_undefined_options("install_egg_info", ("target", "egginfo_dir")) + distinfo_dirname = ( + f"{safer_name(self.distribution.get_name())}-" + f"{safer_version(self.distribution.get_version())}.dist-info" + ) + distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname) + self.egg2dist(self.egginfo_dir, distinfo_dir) + + self.write_wheelfile(distinfo_dir) + + # Make the archive + if not os.path.exists(self.dist_dir): + os.makedirs(self.dist_dir) + + wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl") + with WheelFile(wheel_path, "w", self.compression) as wf: + wf.write_files(archive_root) + + # Add to 'Distribution.dist_files' so that the "upload" command works + getattr(self.distribution, "dist_files", []).append( + ( + "bdist_wheel", + "{}.{}".format(*sys.version_info[:2]), # like 3.7 + wheel_path, + ) + ) + + if not self.keep_temp: + log.info(f"removing {self.bdist_dir}") + if not self.dry_run: + if sys.version_info < (3, 12): + rmtree(self.bdist_dir, onerror=remove_readonly) + else: + rmtree(self.bdist_dir, onexc=remove_readonly_exc) + + def write_wheelfile( + self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")" + ): + from email.message import Message + + msg = Message() + msg["Wheel-Version"] = "1.0" # of the spec + msg["Generator"] = generator + msg["Root-Is-Purelib"] = str(self.root_is_pure).lower() + if self.build_number is not None: + msg["Build"] = self.build_number + + # Doesn't work for bdist_wininst + impl_tag, abi_tag, plat_tag = self.get_tag() + for impl in impl_tag.split("."): + for abi in abi_tag.split("."): + for plat in plat_tag.split("."): + msg["Tag"] = "-".join((impl, abi, plat)) + + wheelfile_path = os.path.join(wheelfile_base, "WHEEL") + log.info(f"creating {wheelfile_path}") + with open(wheelfile_path, "wb") as f: + BytesGenerator(f, maxheaderlen=0).flatten(msg) + + def _ensure_relative(self, path): + # copied from dir_util, deleted + drive, path = os.path.splitdrive(path) + if path[0:1] == os.sep: + path = drive + path[1:] + return path + + @property + def license_paths(self): + if setuptools_major_version >= 57: + # Setuptools has resolved any patterns to actual file names + return self.distribution.metadata.license_files or () + + files = set() + metadata = self.distribution.get_option_dict("metadata") + if setuptools_major_version >= 42: + # Setuptools recognizes the license_files option but does not do globbing + patterns = self.distribution.metadata.license_files + else: + # Prior to those, wheel is entirely responsible for handling license files + if "license_files" in metadata: + patterns = metadata["license_files"][1].split() + else: + patterns = () + + if "license_file" in metadata: + warnings.warn( + 'The "license_file" option is deprecated. 
Use "license_files" instead.', + DeprecationWarning, + stacklevel=2, + ) + files.add(metadata["license_file"][1]) + + if not files and not patterns and not isinstance(patterns, list): + patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*") + + for pattern in patterns: + for path in iglob(pattern): + if path.endswith("~"): + log.debug( + f'ignoring license file "{path}" as it looks like a backup' + ) + continue + + if path not in files and os.path.isfile(path): + log.info( + f'adding license file "{path}" (matched pattern "{pattern}")' + ) + files.add(path) + + return files + + def egg2dist(self, egginfo_path, distinfo_path): + """Convert an .egg-info directory into a .dist-info directory""" + + def adios(p): + """Appropriately delete directory, file or link.""" + if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p): + shutil.rmtree(p) + elif os.path.exists(p): + os.unlink(p) + + adios(distinfo_path) + + if not os.path.exists(egginfo_path): + # There is no egg-info. This is probably because the egg-info + # file/directory is not named matching the distribution name used + # to name the archive file. Check for this case and report + # accordingly. + import glob + + pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info") + possible = glob.glob(pat) + err = f"Egg metadata expected at {egginfo_path} but not found" + if possible: + alt = os.path.basename(possible[0]) + err += f" ({alt} found - possible misnamed archive file?)" + + raise ValueError(err) + + if os.path.isfile(egginfo_path): + # .egg-info is a single file + pkginfo_path = egginfo_path + pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path) + os.mkdir(distinfo_path) + else: + # .egg-info is a directory + pkginfo_path = os.path.join(egginfo_path, "PKG-INFO") + pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path) + + # ignore common egg metadata that is useless to wheel + shutil.copytree( + egginfo_path, + distinfo_path, + ignore=lambda x, y: { + "PKG-INFO", + "requires.txt", + "SOURCES.txt", + "not-zip-safe", + }, + ) + + # delete dependency_links if it is only whitespace + dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt") + with open(dependency_links_path, encoding="utf-8") as dependency_links_file: + dependency_links = dependency_links_file.read().strip() + if not dependency_links: + adios(dependency_links_path) + + pkg_info_path = os.path.join(distinfo_path, "METADATA") + serialization_policy = EmailPolicy( + utf8=True, + mangle_from_=False, + max_line_length=0, + ) + with open(pkg_info_path, "w", encoding="utf-8") as out: + Generator(out, policy=serialization_policy).flatten(pkg_info) + + for license_path in self.license_paths: + filename = os.path.basename(license_path) + shutil.copy(license_path, os.path.join(distinfo_path, filename)) + + adios(egginfo_path) diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/macosx_libfile.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/macosx_libfile.py new file mode 100644 index 0000000000000000000000000000000000000000..8953c3f8051909dca648b9afef210f5763aa55f9 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/macosx_libfile.py @@ -0,0 +1,469 @@ +""" +This module contains function to analyse dynamic library +headers to extract system information + +Currently only for MacOSX + +Library file on macosx system starts with Mach-O or Fat field. +This can be distinguish by first 32 bites and it is called magic number. 
+
+Magic values with the _MAGIC suffix are stored in native byte order; the
+_CIGAM variants are the same values with the byte order reversed.
+Both header kinds come in 32-bit and 64-bit variants.
+
+A fat header means that the file bundles several builds of the library
+(typically one per architecture) and records where each embedded Mach-O
+header starts.
+
+Each section introduced by a Mach-O header contains one build of the
+library (so a file that starts directly with a Mach-O header contains
+exactly one build).
+
+A Mach-O header is followed by load commands, each of which starts with
+two fields:
+cmd - the magic number identifying the command
+cmdsize - the total size occupied by the command.
+
+Only the LC_VERSION_MIN_MACOSX command (macOS 10.13 and earlier) and the
+LC_BUILD_VERSION command (macOS 10.14 and newer) are of interest here,
+because they carry the minimum supported system version.
+
+Important remarks:
+- For fat files this implementation looks for the maximum version number.
+  It does not check whether an entry is 32-bit or 64-bit and does not
+  compare it with the package currently being built, so it may report a
+  higher version than is strictly needed.
+- All structure signatures are taken from macOS header files.
+- The binary format should be more stable than `otool` output; if Apple
+  introduces changes, both approaches would need updating anyway.
+- The system compiler sets the deployment target no lower than 11.0 for
+  arm64 builds. For "Universal 2" builds, use the x86_64 deployment
+  target when the arm64 target is 11.0.
+"""
+
+from __future__ import annotations
+
+import ctypes
+import os
+import sys
+
+# The constants and structures below are taken from the Mach-O header files.
+
+FAT_MAGIC = 0xCAFEBABE
+FAT_CIGAM = 0xBEBAFECA
+FAT_MAGIC_64 = 0xCAFEBABF
+FAT_CIGAM_64 = 0xBFBAFECA
+MH_MAGIC = 0xFEEDFACE
+MH_CIGAM = 0xCEFAEDFE
+MH_MAGIC_64 = 0xFEEDFACF
+MH_CIGAM_64 = 0xCFFAEDFE
+
+LC_VERSION_MIN_MACOSX = 0x24
+LC_BUILD_VERSION = 0x32
+
+CPU_TYPE_ARM64 = 0x0100000C
+
+mach_header_fields = [
+    ("magic", ctypes.c_uint32),
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("filetype", ctypes.c_uint32),
+    ("ncmds", ctypes.c_uint32),
+    ("sizeofcmds", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+]
+"""
+struct mach_header {
+    uint32_t magic;              /* mach magic number identifier */
+    cpu_type_t cputype;          /* cpu specifier */
+    cpu_subtype_t cpusubtype;    /* machine specifier */
+    uint32_t filetype;           /* type of file */
+    uint32_t ncmds;              /* number of load commands */
+    uint32_t sizeofcmds;         /* the size of all the load commands */
+    uint32_t flags;              /* flags */
+};
+typedef integer_t cpu_type_t;
+typedef integer_t cpu_subtype_t;
+"""
+
+mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
+"""
+struct mach_header_64 {
+    uint32_t magic;              /* mach magic number identifier */
+    cpu_type_t cputype;          /* cpu specifier */
+    cpu_subtype_t cpusubtype;    /* machine specifier */
+    uint32_t filetype;           /* type of file */
+    uint32_t ncmds;              /* number of load commands */
+    uint32_t sizeofcmds;         /* the size of all the load commands */
+    uint32_t flags;              /* flags */
+    uint32_t reserved;           /* reserved */
+};
+"""
+
+fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
+"""
+struct fat_header {
+    uint32_t magic;          /* FAT_MAGIC or FAT_MAGIC_64 */
+    uint32_t nfat_arch;      /* number of structs that follow */
+};
+"""
+
+fat_arch_fields = [
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("offset", ctypes.c_uint32),
+    ("size", ctypes.c_uint32),
+    ("align", ctypes.c_uint32),
+]
+"""
+struct fat_arch {
+    cpu_type_t cputype;      /* cpu
specifier (int) */ + cpu_subtype_t cpusubtype; /* machine specifier (int) */ + uint32_t offset; /* file offset to this object file */ + uint32_t size; /* size of this object file */ + uint32_t align; /* alignment as a power of 2 */ +}; +""" + +fat_arch_64_fields = [ + ("cputype", ctypes.c_int), + ("cpusubtype", ctypes.c_int), + ("offset", ctypes.c_uint64), + ("size", ctypes.c_uint64), + ("align", ctypes.c_uint32), + ("reserved", ctypes.c_uint32), +] +""" +struct fat_arch_64 { + cpu_type_t cputype; /* cpu specifier (int) */ + cpu_subtype_t cpusubtype; /* machine specifier (int) */ + uint64_t offset; /* file offset to this object file */ + uint64_t size; /* size of this object file */ + uint32_t align; /* alignment as a power of 2 */ + uint32_t reserved; /* reserved */ +}; +""" + +segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)] +"""base for reading segment info""" + +segment_command_fields = [ + ("cmd", ctypes.c_uint32), + ("cmdsize", ctypes.c_uint32), + ("segname", ctypes.c_char * 16), + ("vmaddr", ctypes.c_uint32), + ("vmsize", ctypes.c_uint32), + ("fileoff", ctypes.c_uint32), + ("filesize", ctypes.c_uint32), + ("maxprot", ctypes.c_int), + ("initprot", ctypes.c_int), + ("nsects", ctypes.c_uint32), + ("flags", ctypes.c_uint32), +] +""" +struct segment_command { /* for 32-bit architectures */ + uint32_t cmd; /* LC_SEGMENT */ + uint32_t cmdsize; /* includes sizeof section structs */ + char segname[16]; /* segment name */ + uint32_t vmaddr; /* memory address of this segment */ + uint32_t vmsize; /* memory size of this segment */ + uint32_t fileoff; /* file offset of this segment */ + uint32_t filesize; /* amount to map from the file */ + vm_prot_t maxprot; /* maximum VM protection */ + vm_prot_t initprot; /* initial VM protection */ + uint32_t nsects; /* number of sections in segment */ + uint32_t flags; /* flags */ +}; +typedef int vm_prot_t; +""" + +segment_command_fields_64 = [ + ("cmd", ctypes.c_uint32), + ("cmdsize", ctypes.c_uint32), + ("segname", ctypes.c_char * 16), + ("vmaddr", ctypes.c_uint64), + ("vmsize", ctypes.c_uint64), + ("fileoff", ctypes.c_uint64), + ("filesize", ctypes.c_uint64), + ("maxprot", ctypes.c_int), + ("initprot", ctypes.c_int), + ("nsects", ctypes.c_uint32), + ("flags", ctypes.c_uint32), +] +""" +struct segment_command_64 { /* for 64-bit architectures */ + uint32_t cmd; /* LC_SEGMENT_64 */ + uint32_t cmdsize; /* includes sizeof section_64 structs */ + char segname[16]; /* segment name */ + uint64_t vmaddr; /* memory address of this segment */ + uint64_t vmsize; /* memory size of this segment */ + uint64_t fileoff; /* file offset of this segment */ + uint64_t filesize; /* amount to map from the file */ + vm_prot_t maxprot; /* maximum VM protection */ + vm_prot_t initprot; /* initial VM protection */ + uint32_t nsects; /* number of sections in segment */ + uint32_t flags; /* flags */ +}; +""" + +version_min_command_fields = segment_base_fields + [ + ("version", ctypes.c_uint32), + ("sdk", ctypes.c_uint32), +] +""" +struct version_min_command { + uint32_t cmd; /* LC_VERSION_MIN_MACOSX or + LC_VERSION_MIN_IPHONEOS or + LC_VERSION_MIN_WATCHOS or + LC_VERSION_MIN_TVOS */ + uint32_t cmdsize; /* sizeof(struct min_version_command) */ + uint32_t version; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */ + uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */ +}; +""" + +build_version_command_fields = segment_base_fields + [ + ("platform", ctypes.c_uint32), + ("minos", ctypes.c_uint32), + ("sdk", ctypes.c_uint32), + ("ntools", ctypes.c_uint32), 
+]
+"""
+struct build_version_command {
+    uint32_t    cmd;        /* LC_BUILD_VERSION */
+    uint32_t    cmdsize;    /* sizeof(struct build_version_command) plus */
+                            /* ntools * sizeof(struct build_tool_version) */
+    uint32_t    platform;   /* platform */
+    uint32_t    minos;      /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t    sdk;        /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t    ntools;     /* number of tool entries following this */
+};
+"""
+
+
+def swap32(x):
+    return (
+        ((x << 24) & 0xFF000000)
+        | ((x << 8) & 0x00FF0000)
+        | ((x >> 8) & 0x0000FF00)
+        | ((x >> 24) & 0x000000FF)
+    )
+
+
+def get_base_class_and_magic_number(lib_file, seek=None):
+    if seek is None:
+        seek = lib_file.tell()
+    else:
+        lib_file.seek(seek)
+    magic_number = ctypes.c_uint32.from_buffer_copy(
+        lib_file.read(ctypes.sizeof(ctypes.c_uint32))
+    ).value
+
+    # Handle wrong byte order
+    if magic_number in [FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64]:
+        if sys.byteorder == "little":
+            BaseClass = ctypes.BigEndianStructure
+        else:
+            BaseClass = ctypes.LittleEndianStructure
+
+        magic_number = swap32(magic_number)
+    else:
+        BaseClass = ctypes.Structure
+
+    lib_file.seek(seek)
+    return BaseClass, magic_number
+
+
+def read_data(struct_class, lib_file):
+    return struct_class.from_buffer_copy(lib_file.read(ctypes.sizeof(struct_class)))
+
+
+def extract_macosx_min_system_version(path_to_lib):
+    with open(path_to_lib, "rb") as lib_file:
+        BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
+        if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
+            return
+
+        # get_base_class_and_magic_number() already normalizes reversed
+        # (_CIGAM) magics, so compare against the 64-bit _MAGIC value here;
+        # the original check against FAT_CIGAM_64 could never match, which
+        # silently skipped 64-bit fat headers.
+        if magic_number in [FAT_MAGIC, FAT_MAGIC_64]:
+
+            class FatHeader(BaseClass):
+                _fields_ = fat_header_fields
+
+            fat_header = read_data(FatHeader, lib_file)
+            if magic_number == FAT_MAGIC:
+
+                class FatArch(BaseClass):
+                    _fields_ = fat_arch_fields
+
+            else:
+
+                class FatArch(BaseClass):
+                    _fields_ = fat_arch_64_fields
+
+            fat_arch_list = [
+                read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
+            ]
+
+            versions_list = []
+            for el in fat_arch_list:
+                try:
+                    version = read_mach_header(lib_file, el.offset)
+                    if version is not None:
+                        if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1:
+                            # Xcode will not set the deployment target below 11.0.0
+                            # for the arm64 architecture. Ignore the arm64 deployment
+                            # in fat binaries when the target is 11.0.0, that way
+                            # the other architectures can select a lower deployment
+                            # target.
+                            # This is safe because there is no arm64 variant for
+                            # macOS 10.15 or earlier.
+                            if version == (11, 0, 0):
+                                continue
+                        versions_list.append(version)
+                except ValueError:
+                    pass
+
+            if len(versions_list) > 0:
+                return max(versions_list)
+            else:
+                return None
+
+        else:
+            try:
+                return read_mach_header(lib_file, 0)
+            except ValueError:
+                # the library file could not be parsed; treat the version
+                # as unknown
+                return None
+
+
+def read_mach_header(lib_file, seek=None):
+    """
+    This function parses a Mach-O header and extracts
+    information about the minimal macOS version.
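+
+    The version is returned as an (x, y, z) tuple; parse_version() below
+    decodes the xxxx.yy.zz nibble layout, so for example the raw 32-bit
+    value 0x000A0E00 decodes to (10, 14, 0).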
+
+    :param lib_file: reference to the opened library file, with the file
+        pointer at the header to parse
+    """
+    base_class, magic_number = get_base_class_and_magic_number(lib_file, seek)
+    arch = "32" if magic_number == MH_MAGIC else "64"
+
+    class SegmentBase(base_class):
+        _fields_ = segment_base_fields
+
+    if arch == "32":
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields
+
+    else:
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields_64
+
+    mach_header = read_data(MachHeader, lib_file)
+    for _i in range(mach_header.ncmds):
+        pos = lib_file.tell()
+        segment_base = read_data(SegmentBase, lib_file)
+        lib_file.seek(pos)
+        if segment_base.cmd == LC_VERSION_MIN_MACOSX:
+
+            class VersionMinCommand(base_class):
+                _fields_ = version_min_command_fields
+
+            version_info = read_data(VersionMinCommand, lib_file)
+            return parse_version(version_info.version)
+        elif segment_base.cmd == LC_BUILD_VERSION:
+
+            class VersionBuild(base_class):
+                _fields_ = build_version_command_fields
+
+            version_info = read_data(VersionBuild, lib_file)
+            return parse_version(version_info.minos)
+        else:
+            lib_file.seek(pos + segment_base.cmdsize)
+            continue
+
+
+def parse_version(version):
+    x = (version & 0xFFFF0000) >> 16
+    y = (version & 0x0000FF00) >> 8
+    z = version & 0x000000FF
+    return x, y, z
+
+
+def calculate_macosx_platform_tag(archive_root, platform_tag):
+    """
+    Calculate the proper macosx platform tag based on the files included
+    in the wheel
+
+    Example platform tag `macosx-10.14-x86_64`
+    """
+    prefix, base_version, suffix = platform_tag.split("-")
+    base_version = tuple(int(x) for x in base_version.split("."))
+    base_version = base_version[:2]
+    if base_version[0] > 10:
+        base_version = (base_version[0], 0)
+    assert len(base_version) == 2
+    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
+        deploy_target = tuple(
+            int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
+        )
+        deploy_target = deploy_target[:2]
+        if deploy_target[0] > 10:
+            deploy_target = (deploy_target[0], 0)
+        if deploy_target < base_version:
+            sys.stderr.write(
+                "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
+                "the version on which the Python interpreter was compiled ({}), and "
+                "will be ignored.\n".format(
+                    ".".join(str(x) for x in deploy_target),
+                    ".".join(str(x) for x in base_version),
+                )
+            )
+        else:
+            base_version = deploy_target
+
+    assert len(base_version) == 2
+    start_version = base_version
+    versions_dict = {}
+    for dirpath, _dirnames, filenames in os.walk(archive_root):
+        for filename in filenames:
+            if filename.endswith(".dylib") or filename.endswith(".so"):
+                lib_path = os.path.join(dirpath, filename)
+                min_ver = extract_macosx_min_system_version(lib_path)
+                if min_ver is not None:
+                    min_ver = min_ver[0:2]
+                    if min_ver[0] > 10:
+                        min_ver = (min_ver[0], 0)
+                    versions_dict[lib_path] = min_ver
+
+    if len(versions_dict) > 0:
+        base_version = max(base_version, max(versions_dict.values()))
+
+    # the macosx platform tag does not encode minor/bugfix releases
+    fin_base_version = "_".join([str(x) for x in base_version])
+    if start_version < base_version:
+        problematic_files = [k for k, v in versions_dict.items() if v > start_version]
+        # count the offending files before joining them into a single string
+        # (the original checked len() of the joined string, i.e. the character
+        # count, not the number of files)
+        if len(problematic_files) == 1:
+            files_form = "this file"
+        else:
+            files_form = "these files"
+        problematic_files = "\n".join(problematic_files)
+        error_message = (
+            "[WARNING] This wheel needs a higher macOS version than {} "
+            "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least "
+            + fin_base_version
+            + " or recreate "
+            + files_form
+            + " with lower "
+
"MACOSX_DEPLOYMENT_TARGET: \n" + problematic_files + ) + + if "MACOSX_DEPLOYMENT_TARGET" in os.environ: + error_message = error_message.format( + "is set in MACOSX_DEPLOYMENT_TARGET variable." + ) + else: + error_message = error_message.format( + "the version your Python interpreter is compiled against." + ) + + sys.stderr.write(error_message) + + platform_tag = prefix + "_" + fin_base_version + "_" + suffix + return platform_tag diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/util.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/util.py new file mode 100644 index 0000000000000000000000000000000000000000..d98d98cb52bef152098bd4277dabcd99b0f528bc --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/util.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +import base64 +import logging + +log = logging.getLogger("wheel") + +# ensure Python logging is configured +try: + __import__("setuptools.logging") +except ImportError: + # setuptools < ?? + from . import _setuptools_logging + + _setuptools_logging.configure() + + +def urlsafe_b64encode(data: bytes) -> bytes: + """urlsafe_b64encode without padding""" + return base64.urlsafe_b64encode(data).rstrip(b"=") + + +def urlsafe_b64decode(data: bytes) -> bytes: + """urlsafe_b64decode without padding""" + pad = b"=" * (4 - (len(data) & 3)) + return base64.urlsafe_b64decode(data + pad) diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/__init__.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__init__.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/__init__.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..244d183a5e384aba37958aeff02329be655be228 Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/__init__.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_musllinux.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_musllinux.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2355c5140f93a2c23142fa9e287a7140b78ae664 Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_musllinux.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_parser.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e4d0a3a52c5a45b4257c0eda30f351481c35ed2 Binary files /dev/null and 
b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_parser.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_structures.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_structures.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5cf695d70b8bbab390236858c77f4c7894c6d289 Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_structures.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_tokenizer.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_tokenizer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f433c4a19d04c134936a6f804488793aea7c7fa7 Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_tokenizer.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/markers.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/markers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4f6e792061e550c69cbbf6db385a6fdfc374478c Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/markers.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/requirements.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/requirements.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ca950506d062a01920449224e012d6cfc8780de8 Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/requirements.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/tags.cpython-310.pyc b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/tags.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ff3d3326bb3a051d42e1eba7e0519c318628672 Binary files /dev/null and b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/tags.cpython-310.pyc differ diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_elffile.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_elffile.py new file mode 100644 index 0000000000000000000000000000000000000000..6fb19b30bb53c18f38a9ef02dd7c4478670fb962 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_elffile.py @@ -0,0 +1,108 @@ +""" +ELF file parser. + +This provides a class ``ELFFile`` that parses an ELF executable in a similar +interface to ``ZipFile``. Only the read interface is implemented. 
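+
+A minimal usage sketch (assuming ``f`` is a binary file object open on an
+ELF executable; ``ELFInvalid`` is raised otherwise):
+
+    elf = ELFFile(f)
+    print(elf.machine, elf.capacity, elf.encoding, elf.interpreter)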
+
+Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+import enum
+import os
+import struct
+from typing import IO, Optional, Tuple
+
+
+class ELFInvalid(ValueError):
+    pass
+
+
+class EIClass(enum.IntEnum):
+    C32 = 1
+    C64 = 2
+
+
+class EIData(enum.IntEnum):
+    Lsb = 1
+    Msb = 2
+
+
+class EMachine(enum.IntEnum):
+    I386 = 3
+    S390 = 22
+    Arm = 40
+    X8664 = 62
+    AArc64 = 183
+
+
+class ELFFile:
+    """
+    Representation of an ELF executable.
+    """
+
+    def __init__(self, f: IO[bytes]) -> None:
+        self._f = f
+
+        try:
+            ident = self._read("16B")
+        except struct.error:
+            raise ELFInvalid("unable to parse identification")
+        magic = bytes(ident[:4])
+        if magic != b"\x7fELF":
+            raise ELFInvalid(f"invalid magic: {magic!r}")
+
+        self.capacity = ident[4]  # Format for program header (bitness).
+        self.encoding = ident[5]  # Data structure encoding (endianness).
+
+        try:
+            # e_fmt: Format for program header.
+            # p_fmt: Format for section header.
+            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+            e_fmt, self._p_fmt, self._p_idx = {
+                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
+                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
+                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
+                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
+            }[(self.capacity, self.encoding)]
+        except KeyError:
+            raise ELFInvalid(
+                f"unrecognized capacity ({self.capacity}) or "
+                f"encoding ({self.encoding})"
+            )
+
+        try:
+            (
+                _,
+                self.machine,  # Architecture type.
+                _,
+                _,
+                self._e_phoff,  # Offset of program header.
+                _,
+                self.flags,  # Processor-specific flags.
+                _,
+                self._e_phentsize,  # Size of section.
+                self._e_phnum,  # Number of sections.
+            ) = self._read(e_fmt)
+        except struct.error as e:
+            raise ELFInvalid("unable to parse machine and section information") from e
+
+    def _read(self, fmt: str) -> Tuple[int, ...]:
+        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
+
+    @property
+    def interpreter(self) -> Optional[str]:
+        """
+        The path recorded in the ``PT_INTERP`` section header.
+        """
+        for index in range(self._e_phnum):
+            self._f.seek(self._e_phoff + self._e_phentsize * index)
+            try:
+                data = self._read(self._p_fmt)
+            except struct.error:
+                continue
+            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
+                continue
+            self._f.seek(data[self._p_idx[1]])
+            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
+        return None
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f5f4ab3e514d1846d3bd189bf081fdb1528ba08
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py
@@ -0,0 +1,260 @@
+import collections
+import contextlib
+import functools
+import os
+import re
+import sys
+import warnings
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
+
+from ._elffile import EIClass, EIData, ELFFile, EMachine
+
+EF_ARM_ABIMASK = 0xFF000000
+EF_ARM_ABI_VER5 = 0x05000000
+EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+
+# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
+# as the type for `path` until then.
+@contextlib.contextmanager +def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]: + try: + with open(path, "rb") as f: + yield ELFFile(f) + except (OSError, TypeError, ValueError): + yield None + + +def _is_linux_armhf(executable: str) -> bool: + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.Arm + and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 + and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD + ) + + +def _is_linux_i686(executable: str) -> bool: + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.I386 + ) + + +def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: + if "armv7l" in archs: + return _is_linux_armhf(executable) + if "i686" in archs: + return _is_linux_i686(executable) + allowed_archs = { + "x86_64", + "aarch64", + "ppc64", + "ppc64le", + "s390x", + "loongarch64", + "riscv64", + } + return any(arch in allowed_archs for arch in archs) + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): + major: int + minor: int + + +def _glibc_version_string_confstr() -> Optional[str]: + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 + try: + # Should be a string like "glibc 2.17". + version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION") + assert version_string is not None + _, version = version_string.rsplit() + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes() -> Optional[str]: + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # We must also handle the special case where the executable is not a + # dynamically linked executable. This can occur when using musl libc, + # for example. In this situation, dlopen() will error, leading to an + # OSError. Interestingly, at least in the case of musl, there is no + # errno set on the OSError. The single string argument used to construct + # OSError comes from libc itself and is therefore not portable to + # hard code here. In any case, failure to call dlopen() means we + # can proceed, so we bail on our attempt. 
+    try:
+        process_namespace = ctypes.CDLL(None)
+    except OSError:
+        return None
+
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str: str = gnu_get_libc_version()
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+def _glibc_version_string() -> Optional[str]:
+    """Returns glibc version string, or None if not using glibc."""
+    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+    """Parse glibc version.
+
+    We use a regexp instead of str.split because we want to discard any
+    random junk that might come after the minor version -- this might happen
+    in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    uses version strings like "2.20-2014.11"). See gh-3588.
+    """
+    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            f"Expected glibc version with 2 components major.minor,"
+            f" got: {version_str}",
+            RuntimeWarning,
+        )
+        return -1, -1
+    return int(m.group("major")), int(m.group("minor"))
+
+
+@functools.lru_cache
+def _get_glibc_version() -> Tuple[int, int]:
+    version_str = _glibc_version_string()
+    if version_str is None:
+        return (-1, -1)
+    return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
+    sys_glibc = _get_glibc_version()
+    if sys_glibc < version:
+        return False
+    # Check for presence of _manylinux module.
+    try:
+        import _manylinux
+    except ImportError:
+        return True
+    if hasattr(_manylinux, "manylinux_compatible"):
+        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+        if result is not None:
+            return bool(result)
+        return True
+    if version == _GLibCVersion(2, 5):
+        if hasattr(_manylinux, "manylinux1_compatible"):
+            return bool(_manylinux.manylinux1_compatible)
+    if version == _GLibCVersion(2, 12):
+        if hasattr(_manylinux, "manylinux2010_compatible"):
+            return bool(_manylinux.manylinux2010_compatible)
+    if version == _GLibCVersion(2, 17):
+        if hasattr(_manylinux, "manylinux2014_compatible"):
+            return bool(_manylinux.manylinux2014_compatible)
+    return True
+
+
+_LEGACY_MANYLINUX_MAP = {
+    # CentOS 7 w/ glibc 2.17 (PEP 599)
+    (2, 17): "manylinux2014",
+    # CentOS 6 w/ glibc 2.12 (PEP 571)
+    (2, 12): "manylinux2010",
+    # CentOS 5 w/ glibc 2.5 (PEP 513)
+    (2, 5): "manylinux1",
+}
+
+
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+    """Generate manylinux tags compatible to the current platform.
+
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be manylinux-compatible.
+
+    :returns: An iterator of compatible manylinux tags.
+    """
+    if not _have_compatible_abi(sys.executable, archs):
+        return
+    # Oldest glibc to be supported regardless of architecture is (2, 17).
+    too_old_glibc2 = _GLibCVersion(2, 16)
+    if set(archs) & {"x86_64", "i686"}:
+        # On x86/i686 also oldest glibc to be supported is (2, 5).
+ too_old_glibc2 = _GLibCVersion(2, 4) + current_glibc = _GLibCVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. + # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_minor = _LAST_GLIBC_MINOR[glibc_major] + glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) + for arch in archs: + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) + tag = "manylinux_{}_{}".format(*glibc_version) + if _is_compatible(arch, glibc_version): + yield f"{tag}_{arch}" + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if glibc_version in _LEGACY_MANYLINUX_MAP: + legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] + if _is_compatible(arch, glibc_version): + yield f"{legacy_tag}_{arch}" diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py new file mode 100644 index 0000000000000000000000000000000000000000..eb4251b5c1e82772b2b0ea539943da5141fd55ec --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py @@ -0,0 +1,83 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. +""" + +import functools +import re +import subprocess +import sys +from typing import Iterator, NamedTuple, Optional, Sequence + +from ._elffile import ELFFile + + +class _MuslVersion(NamedTuple): + major: int + minor: int + + +def _parse_musl_version(output: str) -> Optional[_MuslVersion]: + lines = [n for n in (n.strip() for n in output.splitlines()) if n] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + m = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if not m: + return None + return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache +def _get_musl_version(executable: str) -> Optional[_MuslVersion]: + """Detect currently-running musl runtime version. + + This is done by checking the specified executable's dynamic linking + information, and invoking the loader to parse its output for a version + string. If the loader is musl, the output would be something like:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + try: + with open(executable, "rb") as f: + ld = ELFFile(f).interpreter + except (OSError, TypeError, ValueError): + return None + if ld is None or "musl" not in ld: + return None + proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) + return _parse_musl_version(proc.stderr) + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate musllinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. 
+ The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be musllinux-compatible. + + :returns: An iterator of compatible musllinux tags. + """ + sys_musl = _get_musl_version(sys.executable) + if sys_musl is None: # Python not dynamically linked against musl. + return + for arch in archs: + for minor in range(sys_musl.minor, -1, -1): + yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__": # pragma: no cover + import sysconfig + + plat = sysconfig.get_platform() + assert plat.startswith("linux-"), "not linux" + + print("plat:", plat) + print("musl:", _get_musl_version(sys.executable)) + print("tags:", end=" ") + for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): + print(t, end="\n ") diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_parser.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..513686a2190f9911c08ca1ca263f37b799f44702 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_parser.py @@ -0,0 +1,356 @@ +"""Handwritten parser of dependency specifiers. + +The docstring for each __parse_* function contains EBNF-inspired grammar representing +the implementation. +""" + +import ast +from typing import Any, List, NamedTuple, Optional, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + def serialize(self) -> str: + return str(self) + + +class Value(Node): + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + def serialize(self) -> str: + return str(self) + + +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] +# MarkerList = List[Union["MarkerList", MarkerAtom, str]] +# mypy does not support recursive type definition +# https://github.com/python/mypy/issues/731 +MarkerAtom = Any +MarkerList = List[Any] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: List[str] + specifier: str + marker: Optional[MarkerList] + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? 
requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> Tuple[str, str, Optional[MarkerList]]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? + """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, span_start=url_start, after="URL and whitespace" + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + after=( + "version specifier" + if specifier + else "name and no valid version specifier" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, after: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? + """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected end or semicolon (after {after})", + span_start=span_start, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> List[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? + """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens( + "LEFT_BRACKET", + "RIGHT_BRACKET", + around="extras", + ): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: List[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? 
+ """ + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="version specifier", + ): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? + """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + span_start = tokenizer.position + parsed_specifiers += tokenizer.read().text + if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): + tokenizer.raise_syntax_error( + ".* suffix can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position + 1, + ) + if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): + tokenizer.raise_syntax_error( + "Local version label can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position, + ) + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: + retval = _parse_marker(tokenizer) + tokenizer.expect("END", expected="end of marker expression") + return retval + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? + """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="marker expression", + ): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? 
+ """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if env_var in ("platform_python_implementation", "python_implementation"): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of " + "<=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_structures.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_structures.py new file mode 100644 index 0000000000000000000000000000000000000000..90a6465f9682c886363eea5327dac64bf623a6ff --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_structures.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
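+
+# Editorial note: InfinityType and NegativeInfinityType below are ordering
+# sentinels -- Infinity compares greater than everything and NegativeInfinity
+# less than everything -- used by the version-comparison logic to pad missing
+# release/pre-release segments. A rough sketch of the idea (not part of the
+# vendored file):
+#
+#     sorted([1, Infinity, NegativeInfinity])
+#     # -> [NegativeInfinity, 1, Infinity]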
+ + +class InfinityType: + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "NegativeInfinityType": + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType: + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..dd0d648d49a7c1a62d25ce5c9107aa448a8a22d1 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py @@ -0,0 +1,192 @@ +import contextlib +import re +from dataclasses import dataclass +from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union + +from .specifiers import Specifier + + +@dataclass +class Token: + name: str + text: str + position: int + + +class ParserSyntaxError(Exception): + """The provided source text could not be parsed correctly.""" + + def __init__( + self, + message: str, + *, + source: str, + span: Tuple[int, int], + ) -> None: + self.span = span + self.message = message + self.source = source + + super().__init__() + + def __str__(self) -> str: + marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" + return "\n ".join([self.message, self.source, marker]) + + +DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { + "LEFT_PARENTHESIS": r"\(", + "RIGHT_PARENTHESIS": r"\)", + "LEFT_BRACKET": r"\[", + "RIGHT_BRACKET": r"\]", + "SEMICOLON": r";", + "COMMA": r",", + "QUOTED_STRING": re.compile( + r""" + ( + ('[^']*') + | + ("[^"]*") + ) + """, + re.VERBOSE, + ), + "OP": r"(===|==|~=|!=|<=|>=|<|>)", + "BOOLOP": r"\b(or|and)\b", + "IN": r"\bin\b", + "NOT": r"\bnot\b", + "VARIABLE": re.compile( + r""" + \b( + python_version + |python_full_version + |os[._]name + |sys[._]platform + |platform_(release|system) + |platform[._](version|machine|python_implementation) + |python_implementation + |implementation_(name|version) + |extra + )\b + """, + re.VERBOSE, + ), + "SPECIFIER": re.compile( + Specifier._operator_regex_str + Specifier._version_regex_str, + re.VERBOSE | re.IGNORECASE, + ), + "AT": r"\@", + "URL": r"[^ \t]+", + "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", + "VERSION_PREFIX_TRAIL": r"\.\*", + "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*", + "WS": r"[ \t]+", + "END": r"$", +} + + +class Tokenizer: + """Context-sensitive token parsing. + + Provides methods to examine the input stream to check whether the next token + matches. 
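+
+    A rough usage sketch (with DEFAULT_RULES as defined above):
+
+        tokenizer = Tokenizer("foo >= 1.0", rules=DEFAULT_RULES)
+        name = tokenizer.expect("IDENTIFIER", expected="package name")
+        # name.text == "foo"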
+ """ + + def __init__( + self, + source: str, + *, + rules: "Dict[str, Union[str, re.Pattern[str]]]", + ) -> None: + self.source = source + self.rules: Dict[str, re.Pattern[str]] = { + name: re.compile(pattern) for name, pattern in rules.items() + } + self.next_token: Optional[Token] = None + self.position = 0 + + def consume(self, name: str) -> None: + """Move beyond provided token name, if at current position.""" + if self.check(name): + self.read() + + def check(self, name: str, *, peek: bool = False) -> bool: + """Check whether the next token has the provided name. + + By default, if the check succeeds, the token *must* be read before + another check. If `peek` is set to `True`, the token is not loaded and + would need to be checked again. + """ + assert ( + self.next_token is None + ), f"Cannot check for {name!r}, already have {self.next_token!r}" + assert name in self.rules, f"Unknown token name: {name!r}" + + expression = self.rules[name] + + match = expression.match(self.source, self.position) + if match is None: + return False + if not peek: + self.next_token = Token(name, match[0], self.position) + return True + + def expect(self, name: str, *, expected: str) -> Token: + """Expect a certain token name next, failing with a syntax error otherwise. + + The token is *not* read. + """ + if not self.check(name): + raise self.raise_syntax_error(f"Expected {expected}") + return self.read() + + def read(self) -> Token: + """Consume the next token and return it.""" + token = self.next_token + assert token is not None + + self.position += len(token.text) + self.next_token = None + + return token + + def raise_syntax_error( + self, + message: str, + *, + span_start: Optional[int] = None, + span_end: Optional[int] = None, + ) -> NoReturn: + """Raise ParserSyntaxError at the given position.""" + span = ( + self.position if span_start is None else span_start, + self.position if span_end is None else span_end, + ) + raise ParserSyntaxError( + message, + source=self.source, + span=span, + ) + + @contextlib.contextmanager + def enclosing_tokens( + self, open_token: str, close_token: str, *, around: str + ) -> Iterator[None]: + if self.check(open_token): + open_position = self.position + self.read() + else: + open_position = None + + yield + + if open_position is None: + return + + if not self.check(close_token): + self.raise_syntax_error( + f"Expected matching {close_token} for {open_token}, after {around}", + span_start=open_position, + ) + + self.read() diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/markers.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/markers.py new file mode 100644 index 0000000000000000000000000000000000000000..c96d22a5a445e7353cd2454dec4255d3785c07b3 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/markers.py @@ -0,0 +1,253 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
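+
+# Editorial note: a hedged usage sketch for the Marker class defined below
+# (not part of the vendored file):
+#
+#     m = Marker('python_version >= "3.8" and os_name == "posix"')
+#     m.evaluate()                    # against the running interpreter
+#     m.evaluate({"os_name": "nt"})   # override part of the environment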
+
+import operator
+import os
+import platform
+import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+from ._parser import (
+    MarkerAtom,
+    MarkerList,
+    Op,
+    Value,
+    Variable,
+)
+from ._parser import (
+    parse_marker as _parse_marker,
+)
+from ._tokenizer import ParserSyntaxError
+from .specifiers import InvalidSpecifier, Specifier
+from .utils import canonicalize_name
+
+__all__ = [
+    "InvalidMarker",
+    "UndefinedComparison",
+    "UndefinedEnvironmentName",
+    "Marker",
+    "default_environment",
+]
+
+Operator = Callable[[str, str], bool]
+
+
+class InvalidMarker(ValueError):
+    """
+    An invalid marker was found, users should refer to PEP 508.
+    """
+
+
+class UndefinedComparison(ValueError):
+    """
+    An invalid operation was attempted on a value that doesn't support it.
+    """
+
+
+class UndefinedEnvironmentName(ValueError):
+    """
+    A name was attempted to be used that does not exist inside of the
+    environment.
+    """
+
+
+def _normalize_extra_values(results: Any) -> Any:
+    """
+    Normalize extra values.
+    """
+    if isinstance(results[0], tuple):
+        lhs, op, rhs = results[0]
+        if isinstance(lhs, Variable) and lhs.value == "extra":
+            normalized_extra = canonicalize_name(rhs.value)
+            rhs = Value(normalized_extra)
+        elif isinstance(rhs, Variable) and rhs.value == "extra":
+            normalized_extra = canonicalize_name(lhs.value)
+            lhs = Value(normalized_extra)
+        results[0] = lhs, op, rhs
+    return results
+
+
+def _format_marker(
+    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
+) -> str:
+    assert isinstance(marker, (list, tuple, str))
+
+    # Sometimes we have a structure like [[...]] which is a single item list
+    # where the single item is itself its own list. In that case we want to
+    # skip the rest of this function so that we don't get extraneous () on
+    # the outside.
+    if (
+        isinstance(marker, list)
+        and len(marker) == 1
+        and isinstance(marker[0], (list, tuple))
+    ):
+        return _format_marker(marker[0])
+
+    if isinstance(marker, list):
+        inner = (_format_marker(m, first=False) for m in marker)
+        if first:
+            return " ".join(inner)
+        else:
+            return "(" + " ".join(inner) + ")"
+    elif isinstance(marker, tuple):
+        return " ".join([m.serialize() for m in marker])
+    else:
+        return marker
+
+
+_operators: Dict[str, Operator] = {
+    "in": lambda lhs, rhs: lhs in rhs,
+    "not in": lambda lhs, rhs: lhs not in rhs,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
+    "!=": operator.ne,
+    ">=": operator.ge,
+    ">": operator.gt,
+}
+
+
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
+    try:
+        spec = Specifier("".join([op.serialize(), rhs]))
+    except InvalidSpecifier:
+        pass
+    else:
+        return spec.contains(lhs, prereleases=True)
+
+    oper: Optional[Operator] = _operators.get(op.serialize())
+    if oper is None:
+        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+
+    return oper(lhs, rhs)
+
+
+def _normalize(*values: str, key: str) -> Tuple[str, ...]:
+    # PEP 685 – Comparison of extra names for optional distribution dependencies
+    # https://peps.python.org/pep-0685/
+    # > When comparing extra names, tools MUST normalize the names being
+    # > compared using the semantics outlined in PEP 503 for names
+    if key == "extra":
+        return tuple(canonicalize_name(v) for v in values)
+
+    # other environment markers don't have such standards
+    return values
+
+
+def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
+    groups: List[List[bool]] = [[]]
+
+    for marker in markers:
+        assert isinstance(marker, (list, tuple, str))
+
+        if isinstance(marker, list):
+            groups[-1].append(_evaluate_markers(marker, environment))
+        elif isinstance(marker, tuple):
+            lhs, op, rhs = marker
+
+            if isinstance(lhs, Variable):
+                environment_key = lhs.value
+                lhs_value = environment[environment_key]
+                rhs_value = rhs.value
+            else:
+                lhs_value = lhs.value
+                environment_key = rhs.value
+                rhs_value = environment[environment_key]
+
+            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
+            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+        else:
+            assert marker in ["and", "or"]
+            if marker == "or":
+                groups.append([])
+
+    return any(all(item) for item in groups)
+
+
+def format_full_version(info: "sys._version_info") -> str:
+    version = "{0.major}.{0.minor}.{0.micro}".format(info)
+    kind = info.releaselevel
+    if kind != "final":
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment() -> Dict[str, str]:
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker:
+    def __init__(self, marker: str) -> None:
+        # Note: We create a Marker object without calling this constructor in
+        #       packaging.requirements.Requirement. If any additional logic is
+        #       added here, make sure to mirror/adapt Requirement.
+        try:
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f"<Marker('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = default_environment()
+        current_environment["extra"] = ""
+        if environment is not None:
+            current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""
+
+        return _evaluate_markers(self._markers, current_environment)
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/requirements.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/requirements.py
new file mode 100644
index 0000000000000000000000000000000000000000..bdc43a7e98d87dba0c2069bfb4554f71d228cad4
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/requirements.py
@@ -0,0 +1,90 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from typing import Any, Iterator, Optional, Set
+
+from ._parser import parse_requirement as _parse_requirement
+from ._tokenizer import ParserSyntaxError
+from .markers import Marker, _normalize_extra_values
+from .specifiers import SpecifierSet
+from .utils import canonicalize_name
+
+
+class InvalidRequirement(ValueError):
+    """
+    An invalid requirement was found, users should refer to PEP 508.
+    """
+
+
+class Requirement:
+    """Parse a requirement.
+
+    Parse a given requirement string into its parts, such as name, specifier,
+    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+    string.
+    """
+
+    # TODO: Can we test whether something is contained within a requirement?
+    #       If so how do we do that? Do we need to test against the _name_ of
+    #       the thing as well as the version? What about the markers?
+    # TODO: Can we normalize the name and extra name?
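+    # Illustrative sketch: Requirement('name[extra]>=1.0,!=1.5; python_version < "3.11"')
+    # parses into name='name', extras={'extra'}, specifier=SpecifierSet('!=1.5,>=1.0'),
+    # url=None, and a Marker for the trailing environment condition.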
+
+    def __init__(self, requirement_string: str) -> None:
+        try:
+            parsed = _parse_requirement(requirement_string)
+        except ParserSyntaxError as e:
+            raise InvalidRequirement(str(e)) from e
+
+        self.name: str = parsed.name
+        self.url: Optional[str] = parsed.url or None
+        self.extras: Set[str] = set(parsed.extras or [])
+        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
+        self.marker: Optional[Marker] = None
+        if parsed.marker is not None:
+            self.marker = Marker.__new__(Marker)
+            self.marker._markers = _normalize_extra_values(parsed.marker)
+
+    def _iter_parts(self, name: str) -> Iterator[str]:
+        yield name
+
+        if self.extras:
+            formatted_extras = ",".join(sorted(self.extras))
+            yield f"[{formatted_extras}]"
+
+        if self.specifier:
+            yield str(self.specifier)
+
+        if self.url:
+            yield f"@ {self.url}"
+            if self.marker:
+                yield " "
+
+        if self.marker:
+            yield f"; {self.marker}"
+
+    def __str__(self) -> str:
+        return "".join(self._iter_parts(self.name))
+
+    def __repr__(self) -> str:
+        return f"<Requirement('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.__class__.__name__,
+                *self._iter_parts(canonicalize_name(self.name)),
+            )
+        )
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Requirement):
+            return NotImplemented
+
+        return (
+            canonicalize_name(self.name) == canonicalize_name(other.name)
+            and self.extras == other.extras
+            and self.specifier == other.specifier
+            and self.url == other.url
+            and self.marker == other.marker
+        )
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/specifiers.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/specifiers.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d4066ae2770a3112f37d0e30d9a98fe59c4861f
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/specifiers.py
@@ -0,0 +1,1011 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
+    from packaging.version import Version
+"""
+
+import abc
+import itertools
+import re
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
+
+from .utils import canonicalize_version
+from .version import Version
+
+UnparsedVersion = Union[Version, str]
+UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
+CallableOperator = Callable[[Version, str], bool]
+
+
+def _coerce_version(version: UnparsedVersion) -> Version:
+    if not isinstance(version, Version):
+        version = Version(version)
+    return version
+
+
+class InvalidSpecifier(ValueError):
+    """
+    Raised when attempting to create a :class:`Specifier` with a specifier
+    string that is invalid.
+
+    >>> Specifier("lolwat")
+    Traceback (most recent call last):
+        ...
+    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
+    """
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def __str__(self) -> str:
+        """
+        Returns the str representation of this Specifier-like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self) -> int:
+        """
+        Returns a hash value for this Specifier-like object.
+ """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Returns a boolean representing whether or not the two Specifier-like + objects are equal. + + :param other: The other object to check against. + """ + + @property + @abc.abstractmethod + def prereleases(self) -> Optional[bool]: + """Whether or not pre-releases as a whole are allowed. + + This can be set to either ``True`` or ``False`` to explicitly enable or disable + prereleases or it can be set to ``None`` (the default) to use default semantics. + """ + + @prereleases.setter + def prereleases(self, value: bool) -> None: + """Setter for :attr:`prereleases`. + + :param value: The value to set. + """ + + @abc.abstractmethod + def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + ) -> Iterator[UnparsedVersionVar]: + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class Specifier(BaseSpecifier): + """This class abstracts handling of version specifiers. + + .. tip:: + + It is generally not required to instantiate this manually. You should instead + prefer to work with :class:`SpecifierSet` instead, which can parse + comma-separated version specifiers (which is what package metadata contains). + """ + + _operator_regex_str = r""" + (?P(~=|==|!=|<=|>=|<|>|===)) + """ + _version_regex_str = r""" + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s;)]* # The arbitrary version can be just about anything, + # we match everything except for whitespace, a + # semi-colon for marker support, and a closing paren + # since versions can be enclosed in them. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + + # You cannot use a wild card and a pre-release, post-release, a dev or + # local version together so group them with a | and make them optional. + (?: + \.\* # Wild card syntax of .* + | + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. 
+                # Specifically they do not allow local versions to be
+                # specified nor do they allow the prefix matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        """
+
+    _regex = re.compile(
+        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+        """Initialize a Specifier instance.
+
+        :param spec:
+            The string representation of a specifier which will be parsed and
+            normalized before use.
+        :param prereleases:
+            This tells the specifier if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+        :raises InvalidSpecifier:
+            If the given specifier is invalid (i.e. bad syntax).
+        """
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
+
+        self._spec: Tuple[str, str] = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
+    @property  # type: ignore[override]
+    def prereleases(self) -> bool:
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are if they are including an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*, if it does we
+            # want to remove before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if Version(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    @property
+    def operator(self) -> str:
+        """The operator of this specifier.
+
+        >>> Specifier("==1.2.3").operator
+        '=='
+        """
+        return self._spec[0]
+
+    @property
+    def version(self) -> str:
+        """The version of this specifier.
+
+        >>> Specifier("==1.2.3").version
+        '1.2.3'
+        """
+        return self._spec[1]
+
+    def __repr__(self) -> str:
+        """A representation of the Specifier that shows all internal state.
+
+        >>> Specifier('>=1.0.0')
+        <Specifier('>=1.0.0')>
+        >>> Specifier('>=1.0.0', prereleases=False)
+        <Specifier('>=1.0.0', prereleases=False)>
+        >>> Specifier('>=1.0.0', prereleases=True)
+        <Specifier('>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the Specifier that can be round-tripped.
+
+        >>> str(Specifier('>=1.0.0'))
+        '>=1.0.0'
+        >>> str(Specifier('>=1.0.0', prereleases=False))
+        '>=1.0.0'
+        """
+        return "{}{}".format(*self._spec)
+
+    @property
+    def _canonical_spec(self) -> Tuple[str, str]:
+        canonical_version = canonicalize_version(
+            self._spec[1],
+            strip_trailing_zero=(self._spec[0] != "~="),
+        )
+        return self._spec[0], canonical_version
+
+    def __hash__(self) -> int:
+        return hash(self._canonical_spec)
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two Specifier-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+ + >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") + True + >>> (Specifier("==1.2.3", prereleases=False) == + ... Specifier("==1.2.3", prereleases=True)) + True + >>> Specifier("==1.2.3") == "==1.2.3" + True + >>> Specifier("==1.2.3") == Specifier("==1.2.4") + False + >>> Specifier("==1.2.3") == Specifier("~=1.2.3") + False + """ + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _compare_compatible(self, prospective: Version, spec: str) -> bool: + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore suffix segments. + prefix = _version_join( + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + def _compare_equal(self, prospective: Version, spec: str) -> bool: + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + normalized_prospective = canonicalize_version( + prospective.public, strip_trailing_zero=False + ) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) + # Split the spec out by bangs and dots, and pretend that there is + # an implicit dot in between a release segment and a pre-release segment. + split_spec = _version_split(normalized_spec) + + # Split the prospective version out by bangs and dots, and pretend + # that there is an implicit dot in between a release segment and + # a pre-release segment. + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = padded_prospective[: len(split_spec)] + + return shortened_prospective == split_spec + else: + # Convert our spec string into a Version + spec_version = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. 
+            if not spec_version.local:
+                prospective = Version(prospective.public)
+
+            return prospective == spec_version
+
+    def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
+        return not self._compare_equal(prospective, spec)
+
+    def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) <= Version(spec)
+
+    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) >= Version(spec)
+
+    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+        # not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept
+        # post-release versions for the version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # greater than the spec version *and* it's not a pre-release of the
+        # same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+        return str(prospective).lower() == str(spec).lower()
+
+    def __contains__(self, item: Union[str, Version]) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in Specifier(">=1.2.3")
+        True
+        >>> Version("1.2.3") in Specifier(">=1.2.3")
+        True
+        >>> "1.0.0" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self, item: UnparsedVersion, prereleases: Optional[bool] = None
+    ) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this Specifier. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> Specifier(">=1.2.3").contains("1.2.3")
+        True
+        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
+        True
+        >>> Specifier(">=1.2.3").contains("1.0.0")
+        False
+        >>> Specifier(">=1.2.3").contains("1.3.0a1")
+        False
+        >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
+        True
+        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
+        True
+        """
+
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version, this allows us to have a shortcut for
+        # "2.0" in Specifier(">=2")
+        normalized_item = _coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not, if we do not support prereleases then we can short circuit
+        # the logic if this version is a prerelease.
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable: CallableOperator = self._get_operator(self.operator)
+        return operator_callable(normalized_item, self.version)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifier.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(Specifier().contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
+        ['1.2.3', '1.3', <Version('1.4')>]
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
+        ['1.5a1']
+        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        """
+
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = _coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version: str) -> List[str]:
+    """Split version into components.
+
+    The split components are intended for version comparison. The logic does
+    not attempt to retain the original version string, so joining the
+    components back with :func:`_version_join` may not produce the original
+    version string.
+    """
+    result: List[str] = []
+
+    epoch, _, rest = version.rpartition("!")
+    result.append(epoch or "0")
+
+    for item in rest.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def _version_join(components: List[str]) -> str:
+    """Join split version components into a version string.
+
+    This function assumes the input came from :func:`_version_split`, where the
+    first component must be the epoch (either empty or numeric), and all other
+    components numeric.
+    """
+    epoch, *rest = components
+    return f"{epoch}!{'.'.join(rest)}"
+
+
+def _is_not_suffix(segment: str) -> bool:
+    return not any(
+        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+    )
+
+
+def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]) :])
+    right_split.append(right[len(right_split[0]) :])
+
+    # Insert our padding
+    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+    return (
+        list(itertools.chain.from_iterable(left_split)),
+        list(itertools.chain.from_iterable(right_split)),
+    )
+
+
+class SpecifierSet(BaseSpecifier):
+    """This class abstracts handling of a set of version specifiers.
+
+    It can be passed a single specifier (``>=3.0``), a comma-separated list of
+    specifiers (``>=3.0,!=3.1``), or no specifier at all.
+    """
+
+    def __init__(
+        self, specifiers: str = "", prereleases: Optional[bool] = None
+    ) -> None:
+        """Initialize a SpecifierSet instance.
+
+        :param specifiers:
+            The string representation of a specifier or a comma-separated list of
+            specifiers which will be parsed and normalized before use.
+        :param prereleases:
+            This tells the SpecifierSet if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+
+        :raises InvalidSpecifier:
+            If the given ``specifiers`` are not parseable, then this exception will
+            be raised.
+        """
+
+        # Split on `,` to break each individual specifier into its own item, and
+        # strip each item to remove leading/trailing whitespace.
+        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+        # Make each individual specifier a Specifier and save in a frozen set for later.
+        self._specs = frozenset(map(Specifier, split_specifiers))
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    @property
+    def prereleases(self) -> Optional[bool]:
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    def __repr__(self) -> str:
+        """A representation of the specifier set that shows all internal state.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> SpecifierSet('>=1.0.0,!=2.0.0')
+        <SpecifierSet('!=2.0.0,>=1.0.0')>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<SpecifierSet({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the specifier set that can be round-tripped.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
+        '!=1.0.1,>=1.0.0'
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
+        '!=1.0.1,>=1.0.0'
+        """
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self) -> int:
+        return hash(self._specs)
+
+    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+        """Return a SpecifierSet which is a combination of the two sets.
+
+        :param other: The other object to combine with.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        """
+        if isinstance(other, str):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+            )
+
+        return specifier
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two SpecifierSet-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
+        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
+        False
+        """
+        if isinstance(other, (str, Specifier)):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __len__(self) -> int:
+        """Returns the number of specifiers in this specifier set."""
+        return len(self._specs)
+
+    def __iter__(self) -> Iterator[Specifier]:
+        """
+        Returns an iterator over all the underlying :class:`Specifier` instances
+        in this specifier set.
+
+        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
+        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
+        """
+        return iter(self._specs)
+
+    def __contains__(self, item: UnparsedVersion) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self,
+        item: UnparsedVersion,
+        prereleases: Optional[bool] = None,
+        installed: Optional[bool] = None,
+    ) -> bool:
+        """Return whether or not the item is contained in this SpecifierSet.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this SpecifierSet. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
+        True
+        """
+        # Ensure that our item is a Version instance.
+        if not isinstance(item, Version):
+            item = Version(item)
+
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        # like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        if installed and item.is_prerelease:
+            item = Version(item.base_version)
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        #       will always return True, this is an explicit design decision.
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifiers in this set.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
+        ['1.3', <Version('1.4')>]
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
+        []
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+
+        An "empty" SpecifierSet will filter items based on the presence of prerelease
+        versions in the set.
+ + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet("").filter(["1.5a1"])) + ['1.5a1'] + >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + """ + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iter(iterable) + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases. + else: + filtered: List[UnparsedVersionVar] = [] + found_prereleases: List[UnparsedVersionVar] = [] + + for item in iterable: + parsed_version = _coerce_version(item) + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return iter(found_prereleases) + + return iter(filtered) diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/tags.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/tags.py new file mode 100644 index 0000000000000000000000000000000000000000..89f1926137dd2d2a6bd63616bf5b9f722fc8d584 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/tags.py @@ -0,0 +1,571 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import logging +import platform +import re +import struct +import subprocess +import sys +import sysconfig +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +from . import _manylinux, _musllinux + +logger = logging.getLogger(__name__) + +PythonVersion = Sequence[int] +MacVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: Dict[str, str] = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} + + +_32_BIT_INTERPRETER = struct.calcsize("P") == 4 + + +class Tag: + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. 
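+
+    For example, ``Tag("py3", "none", "any")`` is the tag triple carried by
+    pure-Python wheels; its ``str()`` form is ``"py3-none-any"``.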
+ """ + + __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] + + def __init__(self, interpreter: str, abi: str, platform: str) -> None: + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.disjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self) -> str: + return self._interpreter + + @property + def abi(self) -> str: + return self._abi + + @property + def platform(self) -> str: + return self._platform + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self._hash == other._hash) # Short-circuit ASAP for perf reasons. + and (self._platform == other._platform) + and (self._abi == other._abi) + and (self._interpreter == other._interpreter) + ) + + def __hash__(self) -> int: + return self._hash + + def __str__(self) -> str: + return f"{self._interpreter}-{self._abi}-{self._platform}" + + def __repr__(self) -> str: + return f"<{self} @ {id(self)}>" + + +def parse_tag(tag: str) -> FrozenSet[Tag]: + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: + value: Union[int, str, None] = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string: str) -> str: + return string.replace(".", "_").replace("-", "_").replace(" ", "_") + + +def _is_threaded_cpython(abis: List[str]) -> bool: + """ + Determine if the ABI corresponds to a threaded (`--disable-gil`) build. + + The threaded builds are indicated by a "t" in the abiflags. + """ + if len(abis) == 0: + return False + # expect e.g., cp313 + m = re.match(r"cp\d+(.*)", abis[0]) + if not m: + return False + abiflags = m.group(1) + return "t" in abiflags + + +def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`) + builds do not support abi3. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading + + +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + threading = debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. 
+    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+    if with_debug or (with_debug is None and (has_refcount or has_ext)):
+        debug = "d"
+    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
+        threading = "t"
+    if py_version < (3, 8):
+        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+        if with_pymalloc or with_pymalloc is None:
+            pymalloc = "m"
+        if py_version < (3, 3):
+            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+            if unicode_size == 4 or (
+                unicode_size is None and sys.maxunicode == 0x10FFFF
+            ):
+                ucs4 = "u"
+    elif debug:
+        # Debug builds can also load "normal" extension modules.
+        # We can also assume no UCS-4 or pymalloc requirement.
+        abis.append(f"cp{version}{threading}")
+    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
+    return abis
+
+
+def cpython_tags(
+    python_version: Optional[PythonVersion] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a CPython interpreter.
+
+    The tags consist of:
+    - cp<python_version>-<abi>-<platform>
+    - cp<python_version>-abi3-<platform>
+    - cp<python_version>-none-<platform>
+    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
+
+    If python_version only specifies a major version then user-provided ABIs and
+    the 'none' ABI tag will be used.
+
+    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+    their normal position and not at the beginning.
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+
+    interpreter = f"cp{_version_nodot(python_version[:2])}"
+
+    if abis is None:
+        if len(python_version) > 1:
+            abis = _cpython_abis(python_version, warn)
+        else:
+            abis = []
+    abis = list(abis)
+    # 'abi3' and 'none' are explicitly handled later.
+    for explicit_abi in ("abi3", "none"):
+        try:
+            abis.remove(explicit_abi)
+        except ValueError:
+            pass
+
+    platforms = list(platforms or platform_tags())
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+    threading = _is_threaded_cpython(abis)
+    use_abi3 = _abi3_applies(python_version, threading)
+    if use_abi3:
+        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
+    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
+
+    if use_abi3:
+        for minor_version in range(python_version[1] - 1, 1, -1):
+            for platform_ in platforms:
+                interpreter = "cp{version}".format(
+                    version=_version_nodot((python_version[0], minor_version))
+                )
+                yield Tag(interpreter, "abi3", platform_)
+
+
+def _generic_abi() -> List[str]:
+    """
+    Return the ABI tag based on EXT_SUFFIX.
+    """
+    # The following are examples of `EXT_SUFFIX`.
+    # We want to keep the parts which are related to the ABI and remove the
+    # parts which are related to the platform:
+    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
+    # - mac:     '.cpython-310-darwin.so'           => cp310
+    # - win:     '.cp310-win_amd64.pyd'             => cp310
+    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
+    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
+    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
+    #            => graalpy_38_native
+
+    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
+    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
+        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
+    parts = ext_suffix.split(".")
+    if len(parts) < 3:
+        # CPython3.7 and earlier uses ".pyd" on Windows.
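+        # (For example, EXT_SUFFIX == '.pyd' splits into ['', 'pyd'], which
+        # has fewer than three parts, so we fall back to _cpython_abis() here.)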
+        return _cpython_abis(sys.version_info[:2])
+    soabi = parts[1]
+    if soabi.startswith("cpython"):
+        # non-windows
+        abi = "cp" + soabi.split("-")[1]
+    elif soabi.startswith("cp"):
+        # windows
+        abi = soabi.split("-")[0]
+    elif soabi.startswith("pypy"):
+        abi = "-".join(soabi.split("-")[:2])
+    elif soabi.startswith("graalpy"):
+        abi = "-".join(soabi.split("-")[:3])
+    elif soabi:
+        # pyston, ironpython, others?
+        abi = soabi
+    else:
+        return []
+    return [_normalize_string(abi)]
+
+
+def generic_tags(
+    interpreter: Optional[str] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a generic interpreter.
+
+    The tags consist of:
+    - <interpreter>-<abi>-<platform>
+
+    The "none" ABI will be added if it was not explicitly provided.
+    """
+    if not interpreter:
+        interp_name = interpreter_name()
+        interp_version = interpreter_version(warn=warn)
+        interpreter = "".join([interp_name, interp_version])
+    if abis is None:
+        abis = _generic_abi()
+    else:
+        abis = list(abis)
+    platforms = list(platforms or platform_tags())
+    if "none" not in abis:
+        abis.append("none")
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+    """
+    Yields Python versions in descending order.
+
+    After the latest version, the major-only version will be yielded, and then
+    all previous versions of that major version.
+    """
+    if len(py_version) > 1:
+        yield f"py{_version_nodot(py_version[:2])}"
+    yield f"py{py_version[0]}"
+    if len(py_version) > 1:
+        for minor in range(py_version[1] - 1, -1, -1):
+            yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+    python_version: Optional[PythonVersion] = None,
+    interpreter: Optional[str] = None,
+    platforms: Optional[Iterable[str]] = None,
+) -> Iterator[Tag]:
+    """
+    Yields the sequence of tags that are compatible with a specific version of Python.
+
+    The tags consist of:
+    - py*-none-<platform>
+    - <interpreter>-none-any  # ... if `interpreter` is provided.
+    - py*-none-any
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+    platforms = list(platforms or platform_tags())
+    for version in _py_interpreter_range(python_version):
+        for platform_ in platforms:
+            yield Tag(version, "none", platform_)
+    if interpreter:
+        yield Tag(interpreter, "none", "any")
+    for version in _py_interpreter_range(python_version):
+        yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+ if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + if cpu_arch in {"arm64", "x86_64"}: + formats.append("universal2") + + if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: + formats.append("universal") + + return formats + + +def mac_platforms( + version: Optional[MacVersion] = None, arch: Optional[str] = None +) -> Iterator[str]: + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. + """ + version_str, _, cpu_arch = platform.mac_ver() + if version is None: + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + if version == (10, 16): + # When built against an older macOS SDK, Python will report macOS 10.16 + # instead of the real version. + version_str = subprocess.run( + [ + sys.executable, + "-sS", + "-c", + "import platform; print(platform.mac_ver()[0])", + ], + check=True, + env={"SYSTEM_VERSION_COMPAT": "0"}, + stdout=subprocess.PIPE, + text=True, + ).stdout + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + else: + version = version + if arch is None: + arch = _mac_arch(cpu_arch) + else: + arch = arch + + if (10, 0) <= version and version < (11, 0): + # Prior to Mac OS 11, each yearly release of Mac OS bumped the + # "minor" version number. The major version was always 10. + for minor_version in range(version[1], -1, -1): + compat_version = 10, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=10, minor=minor_version, binary_format=binary_format + ) + + if version >= (11, 0): + # Starting with Mac OS 11, each yearly release bumps the major version + # number. The minor versions are now the midyear updates. + for major_version in range(version[0], 10, -1): + compat_version = major_version, 0 + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=major_version, minor=0, binary_format=binary_format + ) + + if version >= (11, 0): + # Mac OS 11 on x86_64 is compatible with binaries from previous releases. + # Arm64 support was introduced in 11.0, so no Arm binaries from previous + # releases exist. + # + # However, the "universal2" binary format can have a + # macOS version earlier than 11.0 when the x86_64 part of the binary supports + # that version of macOS. 
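+        # For example, an arm64 interpreter on macOS 12 also accepts
+        # universal2 wheels tagged as far back as macosx_10_4_universal2,
+        # since their x86_64 half may target those older releases.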
+ if arch == "x86_64": + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + else: + for minor_version in range(16, 3, -1): + compat_version = 10, minor_version + binary_format = "universal2" + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + + +def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: + linux = _normalize_string(sysconfig.get_platform()) + if not linux.startswith("linux_"): + # we should never be here, just yield the sysconfig one and return + yield linux + return + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv8l" + _, arch = linux.split("_", 1) + archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch]) + yield from _manylinux.platform_tags(archs) + yield from _musllinux.platform_tags(archs) + for arch in archs: + yield f"linux_{arch}" + + +def _generic_platforms() -> Iterator[str]: + yield _normalize_string(sysconfig.get_platform()) + + +def platform_tags() -> Iterator[str]: + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name() -> str: + """ + Returns the name of the running interpreter. + + Some implementations have a reserved, two-letter abbreviation which will + be returned when appropriate. + """ + name = sys.implementation.name + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(*, warn: bool = False) -> str: + """ + Returns the version of the running interpreter. + """ + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = _version_nodot(sys.version_info[:2]) + return version + + +def _version_nodot(version: PythonVersion) -> str: + return "".join(map(str, version)) + + +def sys_tags(*, warn: bool = False) -> Iterator[Tag]: + """ + Returns the sequence of tag triples for the running interpreter. + + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ + + interp_name = interpreter_name() + if interp_name == "cp": + yield from cpython_tags(warn=warn) + else: + yield from generic_tags() + + if interp_name == "pp": + interp = "pp3" + elif interp_name == "cp": + interp = "cp" + interpreter_version(warn=warn) + else: + interp = None + yield from compatible_tags(interpreter=interp) diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/utils.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c2c2f75aa806282d322c76c2117c0f0fdfb09d25 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/utils.py @@ -0,0 +1,172 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
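+
+# For example, canonicalize_name("Foo.Bar_baz") and canonicalize_name("foo-bar-BAZ")
+# both return "foo-bar-baz": runs of '-', '_' and '.' collapse to a single '-'
+# and the result is lowercased (PEP 503).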
+ +import re +from typing import FrozenSet, NewType, Tuple, Union, cast + +from .tags import Tag, parse_tag +from .version import InvalidVersion, Version + +BuildTag = Union[Tuple[()], Tuple[int, str]] +NormalizedName = NewType("NormalizedName", str) + + +class InvalidName(ValueError): + """ + An invalid distribution name; users should refer to the packaging user guide. + """ + + +class InvalidWheelFilename(ValueError): + """ + An invalid wheel filename was found, users should refer to PEP 427. + """ + + +class InvalidSdistFilename(ValueError): + """ + An invalid sdist filename was found, users should refer to the packaging user guide. + """ + + +# Core metadata spec for `Name` +_validate_regex = re.compile( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE +) +_canonicalize_regex = re.compile(r"[-_.]+") +_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$") +# PEP 427: The build number must start with a digit. +_build_tag_regex = re.compile(r"(\d+)(.*)") + + +def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName: + if validate and not _validate_regex.match(name): + raise InvalidName(f"name is invalid: {name!r}") + # This is taken from PEP 503. + value = _canonicalize_regex.sub("-", name).lower() + return cast(NormalizedName, value) + + +def is_normalized_name(name: str) -> bool: + return _normalized_regex.match(name) is not None + + +def canonicalize_version( + version: Union[Version, str], *, strip_trailing_zero: bool = True +) -> str: + """ + This is very similar to Version.__str__, but has one subtle difference + with the way it handles the release segment. + """ + if isinstance(version, str): + try: + parsed = Version(version) + except InvalidVersion: + # Legacy versions cannot be normalized + return version + else: + parsed = version + + parts = [] + + # Epoch + if parsed.epoch != 0: + parts.append(f"{parsed.epoch}!") + + # Release segment + release_segment = ".".join(str(x) for x in parsed.release) + if strip_trailing_zero: + # NB: This strips trailing '.0's to normalize + release_segment = re.sub(r"(\.0)+$", "", release_segment) + parts.append(release_segment) + + # Pre-release + if parsed.pre is not None: + parts.append("".join(str(x) for x in parsed.pre)) + + # Post-release + if parsed.post is not None: + parts.append(f".post{parsed.post}") + + # Development release + if parsed.dev is not None: + parts.append(f".dev{parsed.dev}") + + # Local version segment + if parsed.local is not None: + parts.append(f"+{parsed.local}") + + return "".join(parts) + + +def parse_wheel_filename( + filename: str, +) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: + if not filename.endswith(".whl"): + raise InvalidWheelFilename( + f"Invalid wheel filename (extension must be '.whl'): {filename}" + ) + + filename = filename[:-4] + dashes = filename.count("-") + if dashes not in (4, 5): + raise InvalidWheelFilename( + f"Invalid wheel filename (wrong number of parts): {filename}" + ) + + parts = filename.split("-", dashes - 2) + name_part = parts[0] + # See PEP 427 for the rules on escaping the project name. 
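+    # For example, 'pip-24.0-py3-none-any.whl' has 4 dashes, so it splits into
+    # name 'pip', version 24.0, an empty build tag, and the tag set
+    # {py3-none-any}. Dashes inside a project name must already be escaped
+    # to underscores per the wheel spec.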
+ if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: + raise InvalidWheelFilename(f"Invalid project name: {filename}") + name = canonicalize_name(name_part) + + try: + version = Version(parts[1]) + except InvalidVersion as e: + raise InvalidWheelFilename( + f"Invalid wheel filename (invalid version): {filename}" + ) from e + + if dashes == 5: + build_part = parts[2] + build_match = _build_tag_regex.match(build_part) + if build_match is None: + raise InvalidWheelFilename( + f"Invalid build number: {build_part} in '{filename}'" + ) + build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) + else: + build = () + tags = parse_tag(parts[-1]) + return (name, version, build, tags) + + +def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: + if filename.endswith(".tar.gz"): + file_stem = filename[: -len(".tar.gz")] + elif filename.endswith(".zip"): + file_stem = filename[: -len(".zip")] + else: + raise InvalidSdistFilename( + f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" + f" {filename}" + ) + + # We are requiring a PEP 440 version, which cannot contain dashes, + # so we split on the last dash. + name_part, sep, version_part = file_stem.rpartition("-") + if not sep: + raise InvalidSdistFilename(f"Invalid sdist filename: {filename}") + + name = canonicalize_name(name_part) + + try: + version = Version(version_part) + except InvalidVersion as e: + raise InvalidSdistFilename( + f"Invalid sdist filename (invalid version): {filename}" + ) from e + + return (name, version) diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/version.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/version.py new file mode 100644 index 0000000000000000000000000000000000000000..cda8e99935c8d92010b84437ae83d75031245d61 --- /dev/null +++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/version.py @@ -0,0 +1,561 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +.. testsetup:: + + from packaging.version import parse, Version +""" + +import itertools +import re +from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union + +from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType + +__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"] + +LocalType = Tuple[Union[int, str], ...] + +CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]] +CmpLocalType = Union[ + NegativeInfinityType, + Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...], +] +CmpKey = Tuple[ + int, + Tuple[int, ...], + CmpPrePostDevType, + CmpPrePostDevType, + CmpPrePostDevType, + CmpLocalType, +] +VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool] + + +class _Version(NamedTuple): + epoch: int + release: Tuple[int, ...] + dev: Optional[Tuple[str, int]] + pre: Optional[Tuple[str, int]] + post: Optional[Tuple[str, int]] + local: Optional[LocalType] + + +def parse(version: str) -> "Version": + """Parse the given version string. + + >>> parse('1.0.dev1') + + + :param version: The version string to parse. + :raises InvalidVersion: When the version string is not a valid version. 
+ """ + return Version(version) + + +class InvalidVersion(ValueError): + """Raised when a version string is not a valid version. + + >>> Version("invalid") + Traceback (most recent call last): + ... + packaging.version.InvalidVersion: Invalid version: 'invalid' + """ + + +class _BaseVersion: + _key: Tuple[Any, ...] + + def __hash__(self) -> int: + return hash(self._key) + + # Please keep the duplicated `isinstance` check + # in the six comparisons hereunder + # unless you find a way to avoid adding overhead function calls. + def __lt__(self, other: "_BaseVersion") -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key < other._key + + def __le__(self, other: "_BaseVersion") -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key <= other._key + + def __eq__(self, other: object) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key == other._key + + def __ge__(self, other: "_BaseVersion") -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key >= other._key + + def __gt__(self, other: "_BaseVersion") -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key > other._key + + def __ne__(self, other: object) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key != other._key + + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +_VERSION_PATTERN = r""" + v? + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
                                          # pre-release
+            [-_\.]?
+            (?Palpha|a|beta|b|preview|pre|c|rc)
+            [-_\.]?
+            (?P[0-9]+)?
+        )?
+        (?P                                         # post release
+            (?:-(?P[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?Ppost|rev|r)
+                [-_\.]?
+                (?P[0-9]+)?
+            )
+        )?
+        (?P                                          # dev release
+            [-_\.]?
+            (?Pdev)
+            [-_\.]?
+            (?P[0-9]+)?
+        )?
+    )
+    (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
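+# A minimal embedding sketch (illustrative only, not part of the vendored
+# module; the name ``_EXAMPLE_FILE_RE`` and the ``demo-...`` filename are
+# hypothetical). As the note above says, the pattern is unanchored, so it can
+# be dropped into a larger expression, e.g. to pull a version out of a file
+# name:
+#
+#     _EXAMPLE_FILE_RE = re.compile(
+#         r"^demo-(?P<version>" + VERSION_PATTERN + r")\.tar\.gz$",
+#         re.VERBOSE | re.IGNORECASE,
+#     )
+#     _EXAMPLE_FILE_RE.match("demo-1.0rc1.tar.gz").group("version")  # '1.0rc1'
+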
+
+class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _key: CmpKey
+
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: '{version}'")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f""
+
+    def __str__(self) -> str:
+        """A string representation of the version that can be rounded-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        return self._version.epoch
+
+    @property
+    def release(self) -> Tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        return self._version.release
+
+    @property
+    def pre(self) -> Optional[Tuple[str, int]]:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        return self._version.pre
+
+    @property
+    def post(self) -> Optional[int]:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> Optional[int]:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> Optional[str]:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1.2.3+abc.dev1").public
+        '1.2.3'
+        """
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3+abc.dev1").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any pre or post
+        release markers.
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
+) -> Optional[Tuple[str, int]]:
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume that if we are given a number but not a letter,
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
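+# Illustrative behavior of _parse_letter_version (not part of the vendored
+# module; the values follow the normalization rules commented above):
+#
+#     _parse_letter_version("alpha", None)  # -> ("a", 0)    spelling + implicit 0
+#     _parse_letter_version("rev", "2")     # -> ("post", 2) alternate spelling
+#     _parse_letter_version(None, "1")      # -> ("post", 1) implicit post (1.0-1)
+#     _parse_letter_version(None, None)     # -> None
+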
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: Tuple[int, ...],
+    pre: Optional[Tuple[str, int]],
+    post: Optional[Tuple[str, int]],
+    dev: Optional[Tuple[str, int]],
+    local: Optional[LocalType],
+) -> CmpKey:
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all of the now-
+    # leading zeros until we come to something non-zero, then re-reverse the
+    # rest back into the correct order, make it a tuple, and use that for our
+    # sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: CmpPrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: CmpPrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: CmpPrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: CmpLocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
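+
+
+# Illustrative consequence of the key above (not part of the vendored module):
+# dev releases sort before pre-releases of the same version, which sort before
+# the final release and its post-releases.
+#
+#     vs = ["1.0.post1", "1.0", "1.0rc1", "1.0.dev0"]
+#     [str(v) for v in sorted(Version(v) for v in vs)]
+#     # -> ['1.0.dev0', '1.0rc1', '1.0', '1.0.post1']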
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/vendor.txt b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/vendor.txt
new file mode 100644
index 0000000000000000000000000000000000000000..14666103a82ea7fced3d8cffb8a9b2a9e03fb492
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/vendor.txt
@@ -0,0 +1 @@
+packaging==24.0
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/wheelfile.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/wheelfile.py
new file mode 100644
index 0000000000000000000000000000000000000000..6440e90adeb13376e10177428b00edd3b0eb04b5
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/wheelfile.py
@@ -0,0 +1,196 @@
+from __future__ import annotations
+
+import csv
+import hashlib
+import os.path
+import re
+import stat
+import time
+from io import StringIO, TextIOWrapper
+from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
+
+from wheel.cli import WheelError
+from wheel.util import log, urlsafe_b64decode, urlsafe_b64encode
+
+# Non-greedy matching of an optional build number may be too clever (more
+# invalid wheel filenames will match). Separate regex for .dist-info?
+WHEEL_INFO_RE = re.compile(
+    r"""^(?P(?P[^\s-]+?)-(?P[^\s-]+?))(-(?P\d[^\s-]*))?
+     -(?P[^\s-]+?)-(?P[^\s-]+?)-(?P\S+)\.whl$""",
+    re.VERBOSE,
+)
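+# Illustrative decomposition (not part of the vendored file; "demo" is a
+# made-up project): WHEEL_INFO_RE splits "demo-1.0-1-py3-none-any.whl" into
+# name="demo", ver="1.0", build="1", pyver="py3", abi="none", plat="any".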
+MINIMUM_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC
+
+
+def get_zipinfo_datetime(timestamp=None):
+    # Some applications need reproducible .whl files, but they can't do this without
+    # forcing the timestamp of the individual ZipInfo objects. See issue #143.
+    timestamp = int(os.environ.get("SOURCE_DATE_EPOCH", timestamp or time.time()))
+    timestamp = max(timestamp, MINIMUM_TIMESTAMP)
+    return time.gmtime(timestamp)[0:6]
+
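+# Illustrative effect (not part of the vendored file): pinning the build time
+# makes archives reproducible irrespective of the wall clock.
+#
+#     os.environ["SOURCE_DATE_EPOCH"] = "315532800"
+#     get_zipinfo_datetime()  # -> (1980, 1, 1, 0, 0, 0)
+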
+
+class WheelFile(ZipFile):
+    """A ZipFile derivative class that also reads SHA-256 hashes from
+    .dist-info/RECORD and checks any read files against those.
+    """
+
+    _default_algorithm = hashlib.sha256
+
+    def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
+        basename = os.path.basename(file)
+        self.parsed_filename = WHEEL_INFO_RE.match(basename)
+        if not basename.endswith(".whl") or self.parsed_filename is None:
+            raise WheelError(f"Bad wheel filename {basename!r}")
+
+        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)
+
+        self.dist_info_path = "{}.dist-info".format(
+            self.parsed_filename.group("namever")
+        )
+        self.record_path = self.dist_info_path + "/RECORD"
+        self._file_hashes = {}
+        self._file_sizes = {}
+        if mode == "r":
+            # Ignore RECORD and any embedded wheel signatures
+            self._file_hashes[self.record_path] = None, None
+            self._file_hashes[self.record_path + ".jws"] = None, None
+            self._file_hashes[self.record_path + ".p7s"] = None, None
+
+            # Fill in the expected hashes by reading them from RECORD
+            try:
+                record = self.open(self.record_path)
+            except KeyError:
+                raise WheelError(f"Missing {self.record_path} file") from None
+
+            with record:
+                for line in csv.reader(
+                    TextIOWrapper(record, newline="", encoding="utf-8")
+                ):
+                    path, hash_sum, size = line
+                    if not hash_sum:
+                        continue
+
+                    algorithm, hash_sum = hash_sum.split("=")
+                    try:
+                        hashlib.new(algorithm)
+                    except ValueError:
+                        raise WheelError(
+                            f"Unsupported hash algorithm: {algorithm}"
+                        ) from None
+
+                    if algorithm.lower() in {"md5", "sha1"}:
+                        raise WheelError(
+                            f"Weak hash algorithm ({algorithm}) is not permitted by "
+                            f"PEP 427"
+                        )
+
+                    self._file_hashes[path] = (
+                        algorithm,
+                        urlsafe_b64decode(hash_sum.encode("ascii")),
+                    )
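+
+    # Illustrative RECORD row shape (not part of the vendored file; the path
+    # and size are made up): the loop above consumes CSV rows such as
+    #     demo/__init__.py,sha256=<urlsafe-b64(sha256) digest, no padding>,24
+    # and stores {"demo/__init__.py": ("sha256", <decoded digest bytes>)}.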
+
+    def open(self, name_or_info, mode="r", pwd=None):
+        def _update_crc(newdata):
+            eof = ef._eof
+            update_crc_orig(newdata)
+            running_hash.update(newdata)
+            if eof and running_hash.digest() != expected_hash:
+                raise WheelError(f"Hash mismatch for file '{ef_name}'")
+
+        ef_name = (
+            name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
+        )
+        if (
+            mode == "r"
+            and not ef_name.endswith("/")
+            and ef_name not in self._file_hashes
+        ):
+            raise WheelError(f"No hash found for file '{ef_name}'")
+
+        ef = ZipFile.open(self, name_or_info, mode, pwd)
+        if mode == "r" and not ef_name.endswith("/"):
+            algorithm, expected_hash = self._file_hashes[ef_name]
+            if expected_hash is not None:
+                # Monkey patch the _update_crc method to also check for the hash from
+                # RECORD
+                running_hash = hashlib.new(algorithm)
+                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc
+
+        return ef
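+
+    # Illustrative use (not part of the vendored file; the filename is made
+    # up): reading a member streams it through the RECORD hash check above,
+    # so a tampered file surfaces as WheelError instead of silently succeeding.
+    #
+    #     with WheelFile("demo-1.0-py3-none-any.whl") as wf:
+    #         payload = wf.read("demo/__init__.py")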
+
+    def write_files(self, base_dir):
+        log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
+        deferred = []
+        for root, dirnames, filenames in os.walk(base_dir):
+            # Sort the directory names so that `os.walk` will walk them in a
+            # defined order on the next iteration.
+            dirnames.sort()
+            for name in sorted(filenames):
+                path = os.path.normpath(os.path.join(root, name))
+                if os.path.isfile(path):
+                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
+                    if arcname == self.record_path:
+                        pass
+                    elif root.endswith(".dist-info"):
+                        deferred.append((path, arcname))
+                    else:
+                        self.write(path, arcname)
+
+        deferred.sort()
+        for path, arcname in deferred:
+            self.write(path, arcname)
+
+    def write(self, filename, arcname=None, compress_type=None):
+        with open(filename, "rb") as f:
+            st = os.fstat(f.fileno())
+            data = f.read()
+
+        zinfo = ZipInfo(
+            arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
+        )
+        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
+        zinfo.compress_type = compress_type or self.compression
+        self.writestr(zinfo, data, compress_type)
+
+    def writestr(self, zinfo_or_arcname, data, compress_type=None):
+        if isinstance(zinfo_or_arcname, str):
+            zinfo_or_arcname = ZipInfo(
+                zinfo_or_arcname, date_time=get_zipinfo_datetime()
+            )
+            zinfo_or_arcname.compress_type = self.compression
+            zinfo_or_arcname.external_attr = (0o664 | stat.S_IFREG) << 16
+
+        if isinstance(data, str):
+            data = data.encode("utf-8")
+
+        ZipFile.writestr(self, zinfo_or_arcname, data, compress_type)
+        fname = (
+            zinfo_or_arcname.filename
+            if isinstance(zinfo_or_arcname, ZipInfo)
+            else zinfo_or_arcname
+        )
+        log.info(f"adding '{fname}'")
+        if fname != self.record_path:
+            hash_ = self._default_algorithm(data)
+            self._file_hashes[fname] = (
+                hash_.name,
+                urlsafe_b64encode(hash_.digest()).decode("ascii"),
+            )
+            self._file_sizes[fname] = len(data)
+
+    def close(self):
+        # Write RECORD
+        if self.fp is not None and self.mode == "w" and self._file_hashes:
+            data = StringIO()
+            writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
+            writer.writerows(
+                (
+                    (fname, algorithm + "=" + hash_, self._file_sizes[fname])
+                    for fname, (algorithm, hash_) in self._file_hashes.items()
+                )
+            )
+            writer.writerow((format(self.record_path), "", ""))
+            self.writestr(self.record_path, data.getvalue())
+
+        ZipFile.close(self)
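+
+
+# Illustrative write-side use (not part of the vendored file; the paths are
+# made up): RECORD is assembled from the hashes collected by write()/writestr()
+# and emitted automatically on close().
+#
+#     with WheelFile("dist/demo-1.0-py3-none-any.whl", "w") as wf:
+#         wf.write_files("build/demo-1.0")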
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/fixtures.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/fixtures.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5472984b5690572285a84575f6c2d598f06dd11
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/fixtures.py
@@ -0,0 +1,157 @@
+import contextlib
+import os
+import subprocess
+import sys
+from pathlib import Path
+
+import path
+import pytest
+
+from . import contexts, environment
+
+
+@pytest.fixture
+def user_override(monkeypatch):
+    """
+    Override site.USER_BASE and site.USER_SITE with temporary directories in
+    a context.
+    """
+    with contexts.tempdir() as user_base:
+        monkeypatch.setattr('site.USER_BASE', user_base)
+        with contexts.tempdir() as user_site:
+            monkeypatch.setattr('site.USER_SITE', user_site)
+            with contexts.save_user_site_setting():
+                yield
+
+
+@pytest.fixture
+def tmpdir_cwd(tmpdir):
+    with tmpdir.as_cwd() as orig:
+        yield orig
+
+
+@pytest.fixture(autouse=True, scope="session")
+def workaround_xdist_376(request):
+    """
+    Workaround pytest-dev/pytest-xdist#376
+
+    ``pytest-xdist`` tends to inject '' into ``sys.path``,
+    which may break certain isolation expectations.
+    Remove the entry so the import
+    machinery behaves the same irrespective of xdist.
+    """
+    if not request.config.pluginmanager.has_plugin('xdist'):
+        return
+
+    with contextlib.suppress(ValueError):
+        sys.path.remove('')
+
+
+@pytest.fixture
+def sample_project(tmp_path):
+    """
+    Clone the 'sampleproject' and return a path to it.
+    """
+    cmd = ['git', 'clone', 'https://github.com/pypa/sampleproject']
+    try:
+        subprocess.check_call(cmd, cwd=str(tmp_path))
+    except Exception:
+        pytest.skip("Unable to clone sampleproject")
+    return tmp_path / 'sampleproject'
+
+
+# sdist and wheel artifacts should be stable across a round of tests
+# so we can build them once per session and use the files as "readonly"
+
+# In the case of setuptools, building the wheel without sdist may cause
+# it to contain the `build` directory, and therefore create situations with
+# `setuptools/build/lib/build/lib/...`. To avoid that, build both artifacts at once.
+
+
+def _build_distributions(tmp_path_factory, request):
+    with contexts.session_locked_tmp_dir(
+        request, tmp_path_factory, "dist_build"
+    ) as tmp:  # pragma: no cover
+        sdist = next(tmp.glob("*.tar.gz"), None)
+        wheel = next(tmp.glob("*.whl"), None)
+        if sdist and wheel:
+            return (sdist, wheel)
+
+        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
+        assert not Path(request.config.rootdir, "build/lib/build").exists()
+
+        subprocess.check_output([
+            sys.executable,
+            "-m",
+            "build",
+            "--outdir",
+            str(tmp),
+            str(request.config.rootdir),
+        ])
+
+        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
+        assert not Path(request.config.rootdir, "build/lib/build").exists()
+
+        return next(tmp.glob("*.tar.gz")), next(tmp.glob("*.whl"))
+
+
+@pytest.fixture(scope="session")
+def setuptools_sdist(tmp_path_factory, request):
+    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")
+    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
+        return Path(prebuilt).resolve()
+
+    sdist, _ = _build_distributions(tmp_path_factory, request)
+    return sdist
+
+
+@pytest.fixture(scope="session")
+def setuptools_wheel(tmp_path_factory, request):
+    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")
+    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
+        return Path(prebuilt).resolve()
+
+    _, wheel = _build_distributions(tmp_path_factory, request)
+    return wheel
+
+
+@pytest.fixture
+def venv(tmp_path, setuptools_wheel):
+    """Virtual env with the version of setuptools under test installed"""
+    env = environment.VirtualEnv()
+    env.root = path.Path(tmp_path / 'venv')
+    env.create_opts = ['--no-setuptools', '--wheel=bundle']
+    # TODO: Use `--no-wheel` when setuptools implements its own bdist_wheel
+    env.req = str(setuptools_wheel)
+    # In some environments (eg. downstream distro packaging),
+    # where tox isn't used to run tests and PYTHONPATH is set to point to
+    # a specific setuptools codebase, PYTHONPATH will leak into the spawned
+    # processes.
+    # env.create() should install the just created setuptools
+    # wheel, but it doesn't if it finds another existing matching setuptools
+    # installation present on PYTHONPATH:
+    # `setuptools is already installed with the same version as the provided
+    # wheel. Use --force-reinstall to force an installation of the wheel.`
+    # This prevents leaking PYTHONPATH to the created environment.
+    with contexts.environment(PYTHONPATH=None):
+        return env.create()
+
+
+@pytest.fixture
+def venv_without_setuptools(tmp_path):
+    """Virtual env without any version of setuptools installed"""
+    env = environment.VirtualEnv()
+    env.root = path.Path(tmp_path / 'venv_without_setuptools')
+    env.create_opts = ['--no-setuptools', '--no-wheel']
+    env.ensure_env()
+    return env
+
+
+@pytest.fixture
+def bare_venv(tmp_path):
+    """Virtual env without any common packages installed"""
+    env = environment.VirtualEnv()
+    env.root = path.Path(tmp_path / 'bare_venv')
+    env.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed']
+    env.ensure_env()
+    return env
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py
new file mode 100644
index 0000000000000000000000000000000000000000..c074d263c45bcaebe32fdba328d975c73d1ad5ca
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py
@@ -0,0 +1 @@
+result = 'passed'
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py
new file mode 100644
index 0000000000000000000000000000000000000000..036167dd951e70ad543775529d5ce3f6d6544c71
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py
@@ -0,0 +1,73 @@
+"""develop tests"""
+
+import os
+import re
+import zipfile
+
+import pytest
+
+from setuptools.dist import Distribution
+
+from . import contexts
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(py_modules=['hi'])
+"""
+
+
+@pytest.fixture
+def setup_context(tmpdir):
+    with (tmpdir / 'setup.py').open('w') as f:
+        f.write(SETUP_PY)
+    with (tmpdir / 'hi.py').open('w') as f:
+        f.write('1\n')
+    with tmpdir.as_cwd():
+        yield tmpdir
+
+
+class Test:
+    @pytest.mark.usefixtures("user_override")
+    @pytest.mark.usefixtures("setup_context")
+    def test_bdist_egg(self):
+        dist = Distribution(
+            dict(
+                script_name='setup.py',
+                script_args=['bdist_egg'],
+                name='foo',
+                py_modules=['hi'],
+            )
+        )
+        os.makedirs(os.path.join('build', 'src'))
+        with contexts.quiet():
+            dist.parse_command_line()
+            dist.run_commands()
+
+        # let's see if we got our egg at the right place
+        [content] = os.listdir('dist')
+        assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content)
+
+    @pytest.mark.xfail(
+        os.environ.get('PYTHONDONTWRITEBYTECODE', False),
+        reason="Byte code disabled",
+    )
+    @pytest.mark.usefixtures("user_override")
+    @pytest.mark.usefixtures("setup_context")
+    def test_exclude_source_files(self):
+        dist = Distribution(
+            dict(
+                script_name='setup.py',
+                script_args=['bdist_egg', '--exclude-source-files'],
+                py_modules=['hi'],
+            )
+        )
+        with contexts.quiet():
+            dist.parse_command_line()
+            dist.run_commands()
+        [dist_name] = os.listdir('dist')
+        dist_filename = os.path.join('dist', dist_name)
+        zip = zipfile.ZipFile(dist_filename)
+        names = list(zi.filename for zi in zip.filelist)
+        assert 'hi.pyc' in names
+        assert 'hi.py' not in names
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py
new file mode 100644
index 0000000000000000000000000000000000000000..121f409057b2ef84f388db3afa93ef613bef9a37
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py
@@ -0,0 +1,970 @@
+import contextlib
+import importlib
+import os
+import re
+import shutil
+import signal
+import sys
+import tarfile
+from concurrent import futures
+from pathlib import Path
+from typing import Any, Callable
+from zipfile import ZipFile
+
+import pytest
+from jaraco import path
+from packaging.requirements import Requirement
+
+from .textwrap import DALS
+
+SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
+
+
+TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180"))  # in seconds
+IS_PYPY = '__pypy__' in sys.builtin_module_names
+
+
+pytestmark = pytest.mark.skipif(
+    sys.platform == "win32" and IS_PYPY,
+    reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
+    "is flaky and problematic",
+)
+
+
+class BuildBackendBase:
+    def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'):
+        self.cwd = cwd
+        self.env = env or {}
+        self.backend_name = backend_name
+
+
+class BuildBackend(BuildBackendBase):
+    """PEP 517 Build Backend"""
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.pool = futures.ProcessPoolExecutor(max_workers=1)
+
+    def __getattr__(self, name: str) -> Callable[..., Any]:
+        """Handles arbitrary function invocations on the build backend."""
+
+        def method(*args, **kw):
+            root = os.path.abspath(self.cwd)
+            caller = BuildBackendCaller(root, self.env, self.backend_name)
+            pid = None
+            try:
+                pid = self.pool.submit(os.getpid).result(TIMEOUT)
+                return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT)
+            except futures.TimeoutError:
+                self.pool.shutdown(wait=False)  # doesn't stop already running processes
+                self._kill(pid)
+                pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
+            except (futures.process.BrokenProcessPool, MemoryError, OSError):
+                if IS_PYPY:
+                    pytest.xfail("PyPy frequently fails tests with ProcessPoolExector")
+                raise
+
+        return method
+
+    def _kill(self, pid):
+        if pid is None:
+            return
+        with contextlib.suppress(ProcessLookupError, OSError):
+            os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL)
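+
+    # Illustrative use (not part of the vendored tests; the path is made up):
+    # each hook call is proxied into the single worker process, mimicking a
+    # PEP 517 frontend driving the backend out of process.
+    #
+    #     backend = BuildBackend(cwd="path/to/project")
+    #     wheel_name = backend.build_wheel("dist")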
+
+
+class BuildBackendCaller(BuildBackendBase):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+        (self.backend_name, _, self.backend_obj) = self.backend_name.partition(':')
+
+    def __call__(self, name, *args, **kw):
+        """Handles arbitrary function invocations on the build backend."""
+        os.chdir(self.cwd)
+        os.environ.update(self.env)
+        mod = importlib.import_module(self.backend_name)
+
+        if self.backend_obj:
+            backend = getattr(mod, self.backend_obj)
+        else:
+            backend = mod
+
+        return getattr(backend, name)(*args, **kw)
+
+
+defns = [
+    {  # simple setup.py script
+        'setup.py': DALS(
+            """
+            __import__('setuptools').setup(
+                name='foo',
+                version='0.0.0',
+                py_modules=['hello'],
+                setup_requires=['six'],
+            )
+            """
+        ),
+        'hello.py': DALS(
+            """
+            def run():
+                print('hello')
+            """
+        ),
+    },
+    {  # setup.py that relies on __name__
+        'setup.py': DALS(
+            """
+            assert __name__ == '__main__'
+            __import__('setuptools').setup(
+                name='foo',
+                version='0.0.0',
+                py_modules=['hello'],
+                setup_requires=['six'],
+            )
+            """
+        ),
+        'hello.py': DALS(
+            """
+            def run():
+                print('hello')
+            """
+        ),
+    },
+    {  # setup.py script that runs arbitrary code
+        'setup.py': DALS(
+            """
+            variable = True
+            def function():
+                return variable
+            assert variable
+            __import__('setuptools').setup(
+                name='foo',
+                version='0.0.0',
+                py_modules=['hello'],
+                setup_requires=['six'],
+            )
+            """
+        ),
+        'hello.py': DALS(
+            """
+            def run():
+                print('hello')
+            """
+        ),
+    },
+    {  # setup.py script that constructs temp files to be included in the distribution
+        'setup.py': DALS(
+            """
+            # Some packages construct files on the fly, include them in the package,
+            # and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
+            # Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
+            # to obtain a distribution object first, and then run the distutils
+            # commands later, because these files will be removed in the meantime.
+
+            with open('world.py', 'w', encoding="utf-8") as f:
+                f.write('x = 42')
+
+            try:
+                __import__('setuptools').setup(
+                    name='foo',
+                    version='0.0.0',
+                    py_modules=['world'],
+                    setup_requires=['six'],
+                )
+            finally:
+                # Some packages will clean temporary files
+                __import__('os').unlink('world.py')
+            """
+        ),
+    },
+    {  # setup.cfg only
+        'setup.cfg': DALS(
+            """
+        [metadata]
+        name = foo
+        version = 0.0.0
+
+        [options]
+        py_modules=hello
+        setup_requires=six
+        """
+        ),
+        'hello.py': DALS(
+            """
+        def run():
+            print('hello')
+        """
+        ),
+    },
+    {  # setup.cfg and setup.py
+        'setup.cfg': DALS(
+            """
+        [metadata]
+        name = foo
+        version = 0.0.0
+
+        [options]
+        py_modules=hello
+        setup_requires=six
+        """
+        ),
+        'setup.py': "__import__('setuptools').setup()",
+        'hello.py': DALS(
+            """
+        def run():
+            print('hello')
+        """
+        ),
+    },
+]
+
+
+class TestBuildMetaBackend:
+    backend_name = 'setuptools.build_meta'
+
+    def get_build_backend(self):
+        return BuildBackend(backend_name=self.backend_name)
+
+    @pytest.fixture(params=defns)
+    def build_backend(self, tmpdir, request):
+        path.build(request.param, prefix=str(tmpdir))
+        with tmpdir.as_cwd():
+            yield self.get_build_backend()
+
+    def test_get_requires_for_build_wheel(self, build_backend):
+        actual = build_backend.get_requires_for_build_wheel()
+        expected = ['six']
+        assert sorted(actual) == sorted(expected)
+
+    def test_get_requires_for_build_sdist(self, build_backend):
+        actual = build_backend.get_requires_for_build_sdist()
+        expected = ['six']
+        assert sorted(actual) == sorted(expected)
+
+    def test_build_wheel(self, build_backend):
+        dist_dir = os.path.abspath('pip-wheel')
+        os.makedirs(dist_dir)
+        wheel_name = build_backend.build_wheel(dist_dir)
+
+        wheel_file = os.path.join(dist_dir, wheel_name)
+        assert os.path.isfile(wheel_file)
+
+        # Temporary files should be removed
+        assert not os.path.isfile('world.py')
+
+        with ZipFile(wheel_file) as zipfile:
+            wheel_contents = set(zipfile.namelist())
+
+        # Each one of the examples has a single module
+        # that should be included in the distribution
+        python_scripts = (f for f in wheel_contents if f.endswith('.py'))
+        modules = [f for f in python_scripts if not f.endswith('setup.py')]
+        assert len(modules) == 1
+
+    @pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
+    def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
+        # Building an sdist/wheel should still succeed if there's
+        # already an sdist/wheel in the destination directory.
+        files = {
+            'setup.py': "from setuptools import setup\nsetup()",
+            'VERSION': "0.0.1",
+            'setup.cfg': DALS(
+                """
+                [metadata]
+                name = foo
+                version = file: VERSION
+                """
+            ),
+            'pyproject.toml': DALS(
+                """
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+                """
+            ),
+        }
+
+        path.build(files)
+
+        dist_dir = os.path.abspath('preexisting-' + build_type)
+
+        build_backend = self.get_build_backend()
+        build_method = getattr(build_backend, 'build_' + build_type)
+
+        # Build a first sdist/wheel.
+        # Note: this also checks that the destination directory is
+        # successfully created if it does not exist already.
+        first_result = build_method(dist_dir)
+
+        # Change version.
+        with open("VERSION", "wt", encoding="utf-8") as version_file:
+            version_file.write("0.0.2")
+
+        # Build a *second* sdist/wheel.
+        second_result = build_method(dist_dir)
+
+        assert os.path.isfile(os.path.join(dist_dir, first_result))
+        assert first_result != second_result
+
+        # And if rebuilding the exact same sdist/wheel?
+        open(os.path.join(dist_dir, second_result), 'wb').close()
+        third_result = build_method(dist_dir)
+        assert third_result == second_result
+        assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
+
+    @pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
+    def test_build_with_pyproject_config(self, tmpdir, setup_script):
+        files = {
+            'pyproject.toml': DALS(
+                """
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "foo"
+                license = {text = "MIT"}
+                description = "This is a Python package"
+                dynamic = ["version", "readme"]
+                classifiers = [
+                    "Development Status :: 5 - Production/Stable",
+                    "Intended Audience :: Developers"
+                ]
+                urls = {Homepage = "http://github.com"}
+                dependencies = [
+                    "appdirs",
+                ]
+
+                [project.optional-dependencies]
+                all = [
+                    "tomli>=1",
+                    "pyscaffold>=4,<5",
+                    'importlib; python_version == "2.6"',
+                ]
+
+                [project.scripts]
+                foo = "foo.cli:main"
+
+                [tool.setuptools]
+                zip-safe = false
+                package-dir = {"" = "src"}
+                packages = {find = {where = ["src"]}}
+                license-files = ["LICENSE*"]
+
+                [tool.setuptools.dynamic]
+                version = {attr = "foo.__version__"}
+                readme = {file = "README.rst"}
+
+                [tool.distutils.sdist]
+                formats = "gztar"
+                """
+            ),
+            "MANIFEST.in": DALS(
+                """
+                global-include *.py *.txt
+                global-exclude *.py[cod]
+                """
+            ),
+            "README.rst": "This is a ``README``",
+            "LICENSE.txt": "---- placeholder MIT license ----",
+            "src": {
+                "foo": {
+                    "__init__.py": "__version__ = '0.1'",
+                    "__init__.pyi": "__version__: str",
+                    "cli.py": "def main(): print('hello world')",
+                    "data.txt": "def main(): print('hello world')",
+                    "py.typed": "",
+                }
+            },
+        }
+        if setup_script:
+            files["setup.py"] = setup_script
+
+        build_backend = self.get_build_backend()
+        with tmpdir.as_cwd():
+            path.build(files)
+            sdist_path = build_backend.build_sdist("temp")
+            wheel_file = build_backend.build_wheel("temp")
+
+        with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
+            sdist_contents = set(tar.getnames())
+
+        with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
+            wheel_contents = set(zipfile.namelist())
+            metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
+            license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
+            epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
+
+        assert sdist_contents - {"foo-0.1/setup.py"} == {
+            'foo-0.1',
+            'foo-0.1/LICENSE.txt',
+            'foo-0.1/MANIFEST.in',
+            'foo-0.1/PKG-INFO',
+            'foo-0.1/README.rst',
+            'foo-0.1/pyproject.toml',
+            'foo-0.1/setup.cfg',
+            'foo-0.1/src',
+            'foo-0.1/src/foo',
+            'foo-0.1/src/foo/__init__.py',
+            'foo-0.1/src/foo/__init__.pyi',
+            'foo-0.1/src/foo/cli.py',
+            'foo-0.1/src/foo/data.txt',
+            'foo-0.1/src/foo/py.typed',
+            'foo-0.1/src/foo.egg-info',
+            'foo-0.1/src/foo.egg-info/PKG-INFO',
+            'foo-0.1/src/foo.egg-info/SOURCES.txt',
+            'foo-0.1/src/foo.egg-info/dependency_links.txt',
+            'foo-0.1/src/foo.egg-info/entry_points.txt',
+            'foo-0.1/src/foo.egg-info/requires.txt',
+            'foo-0.1/src/foo.egg-info/top_level.txt',
+            'foo-0.1/src/foo.egg-info/not-zip-safe',
+        }
+        assert wheel_contents == {
+            "foo/__init__.py",
+            "foo/__init__.pyi",  # include type information by default
+            "foo/cli.py",
+            "foo/data.txt",  # include_package_data defaults to True
+            "foo/py.typed",  # include type information by default
+            "foo-0.1.dist-info/LICENSE.txt",
+            "foo-0.1.dist-info/METADATA",
+            "foo-0.1.dist-info/WHEEL",
+            "foo-0.1.dist-info/entry_points.txt",
+            "foo-0.1.dist-info/top_level.txt",
+            "foo-0.1.dist-info/RECORD",
+        }
+        assert license == "---- placeholder MIT license ----"
+
+        for line in (
+            "Summary: This is a Python package",
+            "License: MIT",
+            "Classifier: Intended Audience :: Developers",
+            "Requires-Dist: appdirs",
+            "Requires-Dist: " + str(Requirement('tomli>=1 ; extra == "all"')),
+            "Requires-Dist: "
+            + str(Requirement('importlib; python_version=="2.6" and extra =="all"')),
+        ):
+            assert line in metadata, (line, metadata)
+
+        assert metadata.strip().endswith("This is a ``README``")
+        assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
+
+    def test_static_metadata_in_pyproject_config(self, tmpdir):
+        # Make sure static metadata in pyproject.toml is not overwritten by setup.py
+        # as required by PEP 621
+        files = {
+            'pyproject.toml': DALS(
+                """
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "foo"
+                description = "This is a Python package"
+                version = "42"
+                dependencies = ["six"]
+                """
+            ),
+            'hello.py': DALS(
+                """
+                def run():
+                    print('hello')
+                """
+            ),
+            'setup.py': DALS(
+                """
+                __import__('setuptools').setup(
+                    name='bar',
+                    version='13',
+                )
+                """
+            ),
+        }
+        build_backend = self.get_build_backend()
+        with tmpdir.as_cwd():
+            path.build(files)
+            sdist_path = build_backend.build_sdist("temp")
+            wheel_file = build_backend.build_wheel("temp")
+
+        assert (tmpdir / "temp/foo-42.tar.gz").exists()
+        assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
+        assert not (tmpdir / "temp/bar-13.tar.gz").exists()
+        assert not (tmpdir / "temp/bar-42.tar.gz").exists()
+        assert not (tmpdir / "temp/foo-13.tar.gz").exists()
+        assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
+        assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
+        assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
+
+        with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
+            pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
+            members = tar.getnames()
+            assert "bar-13/PKG-INFO" not in members
+
+        with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
+            metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
+            members = zipfile.namelist()
+            assert "bar-13.dist-info/METADATA" not in members
+
+        for file in pkg_info, metadata:
+            for line in ("Name: foo", "Version: 42"):
+                assert line in file
+            for line in ("Name: bar", "Version: 13"):
+                assert line not in file
+
+    def test_build_sdist(self, build_backend):
+        dist_dir = os.path.abspath('pip-sdist')
+        os.makedirs(dist_dir)
+        sdist_name = build_backend.build_sdist(dist_dir)
+
+        assert os.path.isfile(os.path.join(dist_dir, sdist_name))
+
+    def test_prepare_metadata_for_build_wheel(self, build_backend):
+        dist_dir = os.path.abspath('pip-dist-info')
+        os.makedirs(dist_dir)
+
+        dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
+
+        assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
+
+    def test_prepare_metadata_inplace(self, build_backend):
+        """
+        Some users might pass metadata_directory pre-populated with `.tox` or `.venv`.
+        See issue #3523.
+        """
+        for pre_existing in [
+            ".tox/python/lib/python3.10/site-packages/attrs-22.1.0.dist-info",
+            ".tox/python/lib/python3.10/site-packages/autocommand-2.2.1.dist-info",
+            ".nox/python/lib/python3.10/site-packages/build-0.8.0.dist-info",
+            ".venv/python3.10/site-packages/click-8.1.3.dist-info",
+            "venv/python3.10/site-packages/distlib-0.3.5.dist-info",
+            "env/python3.10/site-packages/docutils-0.19.dist-info",
+        ]:
+            os.makedirs(pre_existing, exist_ok=True)
+        dist_info = build_backend.prepare_metadata_for_build_wheel(".")
+        assert os.path.isfile(os.path.join(dist_info, 'METADATA'))
+
+    def test_build_sdist_explicit_dist(self, build_backend):
+        # explicitly specifying the dist folder should work
+        # the folder sdist_directory and the ``--dist-dir`` can be the same
+        dist_dir = os.path.abspath('dist')
+        sdist_name = build_backend.build_sdist(dist_dir)
+        assert os.path.isfile(os.path.join(dist_dir, sdist_name))
+
+    def test_build_sdist_version_change(self, build_backend):
+        sdist_into_directory = os.path.abspath("out_sdist")
+        os.makedirs(sdist_into_directory)
+
+        sdist_name = build_backend.build_sdist(sdist_into_directory)
+        assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name))
+
+        # If setup.py changes, a subsequent call of the build meta
+        # backend should still succeed, given that the
+        # sdist_directory the frontend specifies is empty
+        setup_loc = os.path.abspath("setup.py")
+        if not os.path.exists(setup_loc):
+            setup_loc = os.path.abspath("setup.cfg")
+
+        with open(setup_loc, 'rt', encoding="utf-8") as file_handler:
+            content = file_handler.read()
+        with open(setup_loc, 'wt', encoding="utf-8") as file_handler:
+            file_handler.write(content.replace("version='0.0.0'", "version='0.0.1'"))
+
+        shutil.rmtree(sdist_into_directory)
+        os.makedirs(sdist_into_directory)
+
+        sdist_name = build_backend.build_sdist("out_sdist")
+        assert os.path.isfile(os.path.join(os.path.abspath("out_sdist"), sdist_name))
+
+    def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
+        files = {
+            'setup.py': DALS(
+                """
+                __import__('setuptools').setup(
+                    name='foo',
+                    version='0.0.0',
+                    py_modules=['hello']
+                )"""
+            ),
+            'hello.py': '',
+            'pyproject.toml': DALS(
+                """
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+                """
+            ),
+        }
+        path.build(files)
+        build_backend = self.get_build_backend()
+        targz_path = build_backend.build_sdist("temp")
+        with tarfile.open(os.path.join("temp", targz_path)) as tar:
+            assert any('pyproject.toml' in name for name in tar.getnames())
+
+    def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
+        # If build_sdist is called from a script other than setup.py,
+        # ensure setup.py is included
+        path.build(defns[0])
+
+        build_backend = self.get_build_backend()
+        targz_path = build_backend.build_sdist("temp")
+        with tarfile.open(os.path.join("temp", targz_path)) as tar:
+            assert any('setup.py' in name for name in tar.getnames())
+
+    def test_build_sdist_setup_py_manifest_excluded(self, tmpdir_cwd):
+        # Ensure that MANIFEST.in can exclude setup.py
+        files = {
+            'setup.py': DALS(
+                """
+                __import__('setuptools').setup(
+                    name='foo',
+                    version='0.0.0',
+                    py_modules=['hello']
+                )"""
+            ),
+            'hello.py': '',
+            'MANIFEST.in': DALS(
+                """
+                exclude setup.py
+                """
+            ),
+        }
+
+        path.build(files)
+
+        build_backend = self.get_build_backend()
+        targz_path = build_backend.build_sdist("temp")
+        with tarfile.open(os.path.join("temp", targz_path)) as tar:
+            assert not any('setup.py' in name for name in tar.getnames())
+
+    def test_build_sdist_builds_targz_even_if_zip_indicated(self, tmpdir_cwd):
+        files = {
+            'setup.py': DALS(
+                """
+                __import__('setuptools').setup(
+                    name='foo',
+                    version='0.0.0',
+                    py_modules=['hello']
+                )"""
+            ),
+            'hello.py': '',
+            'setup.cfg': DALS(
+                """
+                [sdist]
+                formats=zip
+                """
+            ),
+        }
+
+        path.build(files)
+
+        build_backend = self.get_build_backend()
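+        # build_meta requests a gzipped tar sdist even when setup.cfg asks for
+        # ``formats=zip``; completing without error is the check here.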
+        build_backend.build_sdist("temp")
+
+    _relative_path_import_files = {
+        'setup.py': DALS(
+            """
+            __import__('setuptools').setup(
+                name='foo',
+                version=__import__('hello').__version__,
+                py_modules=['hello']
+            )"""
+        ),
+        'hello.py': '__version__ = "0.0.0"',
+        'setup.cfg': DALS(
+            """
+            [sdist]
+            formats=zip
+            """
+        ),
+    }
+
+    def test_build_sdist_relative_path_import(self, tmpdir_cwd):
+        path.build(self._relative_path_import_files)
+        build_backend = self.get_build_backend()
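+        # build_meta does not put the project directory on sys.path, so the
+        # ``import hello`` in setup.py is expected to fail (the legacy backend
+        # keeps the script directory importable; see the subclass below).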
+        with pytest.raises(ImportError, match="^No module named 'hello'$"):
+            build_backend.build_sdist("temp")
+
+    _simple_pyproject_example = {
+        "pyproject.toml": DALS(
+            """
+            [project]
+            name = "proj"
+            version = "42"
+            """
+        ),
+        "src": {"proj": {"__init__.py": ""}},
+    }
+
+    def _assert_link_tree(self, parent_dir):
+        """All files in the directory should be either links or hard links"""
+        files = list(Path(parent_dir).glob("**/*"))
+        assert files  # Should not be empty
+        for file in files:
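+            # st_nlink > 1 means the inode is also referenced from the original
+            # tree (a hard link); st_nlink == 1 would indicate a plain copy.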
+            assert file.is_symlink() or os.stat(file).st_nlink > 1
+
+    def test_editable_without_config_settings(self, tmpdir_cwd):
+        """
+        Sanity check to ensure tests with ``editable-mode=strict`` behave
+        differently from the ones without that config setting.
+
+        Strict mode should create a local ``build`` directory with a package tree.
+        The directory should not get created otherwise.
+        """
+        path.build(self._simple_pyproject_example)
+        build_backend = self.get_build_backend()
+        assert not Path("build").exists()
+        build_backend.build_editable("temp")
+        assert not Path("build").exists()
+
+    def test_build_wheel_inplace(self, tmpdir_cwd):
+        config_settings = {"--build-option": ["build_ext", "--inplace"]}
+        path.build(self._simple_pyproject_example)
+        build_backend = self.get_build_backend()
+        assert not Path("build").exists()
+        Path("build").mkdir()
+        build_backend.prepare_metadata_for_build_wheel("build", config_settings)
+        build_backend.build_wheel("build", config_settings)
+        assert Path("build/proj-42-py3-none-any.whl").exists()
+
+    @pytest.mark.parametrize("config_settings", [{"editable-mode": "strict"}])
+    def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
+        path.build({**self._simple_pyproject_example, '_meta': {}})
+        assert not Path("build").exists()
+        build_backend = self.get_build_backend()
+        build_backend.prepare_metadata_for_build_editable("_meta", config_settings)
+        build_backend.build_editable("temp", config_settings, "_meta")
+        self._assert_link_tree(next(Path("build").glob("__editable__.*")))
+
+    @pytest.mark.parametrize(
+        ("setup_literal", "requirements"),
+        [
+            ("'foo'", ['foo']),
+            ("['foo']", ['foo']),
+            (r"'foo\n'", ['foo']),
+            (r"'foo\n\n'", ['foo']),
+            ("['foo', 'bar']", ['foo', 'bar']),
+            (r"'# Has a comment line\nfoo'", ['foo']),
+            (r"'foo # Has an inline comment'", ['foo']),
+            (r"'foo \\\n >=3.0'", ['foo>=3.0']),
+            (r"'foo\nbar'", ['foo', 'bar']),
+            (r"'foo\nbar\n'", ['foo', 'bar']),
+            (r"['foo\n', 'bar\n']", ['foo', 'bar']),
+        ],
+    )
+    @pytest.mark.parametrize('use_wheel', [True, False])
+    def test_setup_requires(self, setup_literal, requirements, use_wheel, tmpdir_cwd):
+        files = {
+            'setup.py': DALS(
+                """
+                from setuptools import setup
+
+                setup(
+                    name="qux",
+                    version="0.0.0",
+                    py_modules=["hello"],
+                    setup_requires={setup_literal},
+                )
+            """
+            ).format(setup_literal=setup_literal),
+            'hello.py': DALS(
+                """
+            def run():
+                print('hello')
+            """
+            ),
+        }
+
+        path.build(files)
+
+        build_backend = self.get_build_backend()
+
+        if use_wheel:
+            get_requires = build_backend.get_requires_for_build_wheel
+        else:
+            get_requires = build_backend.get_requires_for_build_sdist
+
+        # Ensure that the build requirements are properly parsed
+        expected = sorted(requirements)
+        actual = get_requires()
+
+        assert expected == sorted(actual)
+
+    def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
+        # Make sure patches introduced to retrieve setup_requires don't accidentally
+        # activate auto-discovery and cause problems due to the incomplete set of
+        # attributes passed to MinimalDistribution
+        files = {
+            'pyproject.toml': DALS(
+                """
+                [project]
+                name = "proj"
+                version = "42"
+            """
+            ),
+            "setup.py": DALS(
+                """
+                __import__('setuptools').setup(
+                    setup_requires=["foo"],
+                    py_modules = ["hello", "world"]
+                )
+            """
+            ),
+            'hello.py': "'hello'",
+            'world.py': "'world'",
+        }
+        path.build(files)
+        build_backend = self.get_build_backend()
+        setup_requires = build_backend.get_requires_for_build_wheel()
+        assert setup_requires == ["foo"]
+
+    def test_dont_install_setup_requires(self, tmpdir_cwd):
+        files = {
+            'setup.py': DALS(
+                """
+                from setuptools import setup
+
+                setup(
+                    name="qux",
+                    version="0.0.0",
+                    py_modules=["hello"],
+                    setup_requires=["does-not-exist >99"],
+                )
+                """
+            ),
+            'hello.py': DALS(
+                """
+                def run():
+                    print('hello')
+                """
+            ),
+        }
+
+        path.build(files)
+
+        build_backend = self.get_build_backend()
+
+        dist_dir = os.path.abspath('pip-dist-info')
+        os.makedirs(dist_dir)
+
+        # does-not-exist can't be satisfied, so if it attempts to install
+        # setup_requires, it will fail.
+        build_backend.prepare_metadata_for_build_wheel(dist_dir)
+
+    _sys_argv_0_passthrough = {
+        'setup.py': DALS(
+            """
+            import os
+            import sys
+
+            __import__('setuptools').setup(
+                name='foo',
+                version='0.0.0',
+            )
+
+            sys_argv = os.path.abspath(sys.argv[0])
+            file_path = os.path.abspath('setup.py')
+            assert sys_argv == file_path
+            """
+        )
+    }
+
+    def test_sys_argv_passthrough(self, tmpdir_cwd):
+        path.build(self._sys_argv_0_passthrough)
+        build_backend = self.get_build_backend()
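+        # build_meta runs setup.py in-process without rewriting sys.argv[0], so
+        # the assertion embedded in the setup script is expected to fail (the
+        # legacy backend rewrites argv[0] and the same script passes below).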
+        with pytest.raises(AssertionError):
+            build_backend.build_sdist("temp")
+
+    _setup_py_file_abspath = {
+        'setup.py': DALS(
+            """
+            import os
+            assert os.path.isabs(__file__)
+            __import__('setuptools').setup(
+                name='foo',
+                version='0.0.0',
+                py_modules=['hello'],
+                setup_requires=['six'],
+            )
+            """
+        )
+    }
+
+    def test_setup_py_file_abspath(self, tmpdir_cwd):
+        path.build(self._setup_py_file_abspath)
+        build_backend = self.get_build_backend()
+        build_backend.build_sdist("temp")
+
+    @pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel'))
+    def test_build_with_empty_setuppy(self, build_backend, build_hook):
+        files = {'setup.py': ''}
+        path.build(files)
+
+        msg = re.escape('No distribution was found.')
+        with pytest.raises(ValueError, match=msg):
+            getattr(build_backend, build_hook)("temp")
+
+
+class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
+    backend_name = 'setuptools.build_meta:__legacy__'
+
+    # build_meta_legacy-specific tests
+    def test_build_sdist_relative_path_import(self, tmpdir_cwd):
+        # This must fail in build_meta, but must pass in build_meta_legacy
+        path.build(self._relative_path_import_files)
+
+        build_backend = self.get_build_backend()
+        build_backend.build_sdist("temp")
+
+    def test_sys_argv_passthrough(self, tmpdir_cwd):
+        path.build(self._sys_argv_0_passthrough)
+
+        build_backend = self.get_build_backend()
+        build_backend.build_sdist("temp")
+
+
+def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
+    pyproject = """
+    [build-system]
+    requires = ["setuptools"]
+    build-backend = "setuptools.build_meta"
+    [project]
+    name = "myproj"
+    version = "42"
+    """
+    path.build({"pyproject.toml": DALS(pyproject), "mymod.py": ""})
+
+    # First: sanity check
+    cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
+    output = venv.run(cmd, cwd=tmpdir).lower()
+    assert "running setup.py develop for myproj" not in output
+    assert "created wheel for myproj" in output
+
+    # Then: real test
+    env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
+    cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
+    output = venv.run(cmd, cwd=tmpdir, env=env).lower()
+    assert "running setup.py develop for myproj" in output
+
+
+@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
+def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path):
+    """Setuptools should be resilient to setup.py with ``sys.exit(0)`` (#3973)."""
+    monkeypatch.chdir(tmp_path)
+    setuppy = """
+        import sys, setuptools
+        setuptools.setup(name='foo', version='0.0.0')
+        sys.exit(0)
+        """
+    (tmp_path / "setup.py").write_text(DALS(setuppy), encoding="utf-8")
+    backend = BuildBackend(backend_name="setuptools.build_meta")
+    assert backend.get_requires_for_build_wheel() == []
+
+
+def test_system_exit_in_setuppy(monkeypatch, tmp_path):
+    monkeypatch.chdir(tmp_path)
+    setuppy = "import sys; sys.exit('some error')"
+    (tmp_path / "setup.py").write_text(setuppy, encoding="utf-8")
+    with pytest.raises(SystemExit, match="some error"):
+        backend = BuildBackend(backend_name="setuptools.build_meta")
+        backend.get_requires_for_build_wheel()
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_py.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_py.py
new file mode 100644
index 0000000000000000000000000000000000000000..e64cfa2e4bee38371c0e9194c4dc41457d492f7b
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_py.py
@@ -0,0 +1,480 @@
+import os
+import shutil
+import stat
+import warnings
+from pathlib import Path
+from unittest.mock import Mock
+
+import jaraco.path
+import pytest
+
+from setuptools import SetuptoolsDeprecationWarning
+from setuptools.dist import Distribution
+
+from .textwrap import DALS
+
+
+def test_directories_in_package_data_glob(tmpdir_cwd):
+    """
+    Directories matching the glob in package_data should
+    not be included in the package data.
+
+    Regression test for #261.
+    """
+    dist = Distribution(
+        dict(
+            script_name='setup.py',
+            script_args=['build_py'],
+            packages=[''],
+            package_data={'': ['path/*']},
+        )
+    )
+    os.makedirs('path/subpath')
+    dist.parse_command_line()
+    dist.run_commands()
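+    # Completing the commands without raising is the assertion here: a
+    # directory matched by the glob previously broke the copy step (#261).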
+
+
+def test_recursive_in_package_data_glob(tmpdir_cwd):
+    """
+    Files matching recursive globs (**) in package_data should
+    be included in the package data.
+
+    #1806
+    """
+    dist = Distribution(
+        dict(
+            script_name='setup.py',
+            script_args=['build_py'],
+            packages=[''],
+            package_data={'': ['path/**/data']},
+        )
+    )
+    os.makedirs('path/subpath/subsubpath')
+    open('path/subpath/subsubpath/data', 'wb').close()
+
+    dist.parse_command_line()
+    dist.run_commands()
+
+    assert stat.S_ISREG(os.stat('build/lib/path/subpath/subsubpath/data').st_mode), (
+        "File is not included"
+    )
+
+
+def test_read_only(tmpdir_cwd):
+    """
+    Ensure read-only flag is not preserved in copy
+    for package modules and package data, as that
+    causes problems with deleting read-only files on
+    Windows.
+
+    #1451
+    """
+    dist = Distribution(
+        dict(
+            script_name='setup.py',
+            script_args=['build_py'],
+            packages=['pkg'],
+            package_data={'pkg': ['data.dat']},
+        )
+    )
+    os.makedirs('pkg')
+    open('pkg/__init__.py', 'wb').close()
+    open('pkg/data.dat', 'wb').close()
+    os.chmod('pkg/__init__.py', stat.S_IREAD)
+    os.chmod('pkg/data.dat', stat.S_IREAD)
+    dist.parse_command_line()
+    dist.run_commands()
+    shutil.rmtree('build')
+
+
+@pytest.mark.xfail(
+    'platform.system() == "Windows"',
+    reason="On Windows, files do not have executable bits",
+    raises=AssertionError,
+    strict=True,
+)
+def test_executable_data(tmpdir_cwd):
+    """
+    Ensure executable bit is preserved in copy for
+    package data, as users rely on it for scripts.
+
+    #2041
+    """
+    dist = Distribution(
+        dict(
+            script_name='setup.py',
+            script_args=['build_py'],
+            packages=['pkg'],
+            package_data={'pkg': ['run-me']},
+        )
+    )
+    os.makedirs('pkg')
+    open('pkg/__init__.py', 'wb').close()
+    open('pkg/run-me', 'wb').close()
+    os.chmod('pkg/run-me', 0o700)
+
+    dist.parse_command_line()
+    dist.run_commands()
+
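+    # stat.S_IEXEC is the owner-execute bit; a non-zero bitwise AND means the
+    # copy preserved the executable permission.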
+    assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, (
+        "Script is not executable"
+    )
+
+
+EXAMPLE_WITH_MANIFEST = {
+    "setup.cfg": DALS(
+        """
+        [metadata]
+        name = mypkg
+        version = 42
+
+        [options]
+        include_package_data = True
+        packages = find:
+
+        [options.packages.find]
+        exclude = *.tests*
+        """
+    ),
+    "mypkg": {
+        "__init__.py": "",
+        "resource_file.txt": "",
+        "tests": {
+            "__init__.py": "",
+            "test_mypkg.py": "",
+            "test_file.txt": "",
+        },
+    },
+    "MANIFEST.in": DALS(
+        """
+        global-include *.py *.txt
+        global-exclude *.py[cod]
+        prune dist
+        prune build
+        prune *.egg-info
+        """
+    ),
+}
+
+
+def test_excluded_subpackages(tmpdir_cwd):
+    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
+    dist = Distribution({"script_name": "%PEP 517%"})
+    dist.parse_config_files()
+
+    build_py = dist.get_command_obj("build_py")
+
+    msg = r"Python recognizes 'mypkg\.tests' as an importable package"
+    with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+        # TODO: To fix #3260 we need some transition period to deprecate the
+        # existing behavior of `include_package_data`. After the transition, we
+        # should remove the warning and fix the behaviour.
+
+        if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib":
+            # pytest.warns reset the warning filter temporarily
+            # https://github.com/pytest-dev/pytest/issues/4011#issuecomment-423494810
+            warnings.filterwarnings(
+                "ignore",
+                "'encoding' argument not specified",
+                module="distutils.text_file",
+                # This warning is already fixed in pypa/distutils but not in stdlib
+            )
+
+        build_py.finalize_options()
+        build_py.run()
+
+    build_dir = Path(build_py.build_lib)
+    assert (build_dir / "mypkg/__init__.py").exists()
+    assert (build_dir / "mypkg/resource_file.txt").exists()
+
+    # Setuptools is configured to ignore `mypkg.tests`, therefore the following
+    # files/dirs should not be included in the distribution.
+    for f in [
+        "mypkg/tests/__init__.py",
+        "mypkg/tests/test_mypkg.py",
+        "mypkg/tests/test_file.txt",
+        "mypkg/tests",
+    ]:
+        with pytest.raises(AssertionError):
+            # TODO: Enforce the following assertion once #3260 is fixed
+            # (remove context manager and the following xfail).
+            assert not (build_dir / f).exists()
+
+    pytest.xfail("#3260")
+
+
+@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
+def test_existing_egg_info(tmpdir_cwd, monkeypatch):
+    """When provided with the ``existing_egg_info_dir`` attribute, build_py should not
+    attempt to run egg_info again.
+    """
+    # == Pre-condition ==
+    # Generate an egg-info dir
+    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
+    dist = Distribution({"script_name": "%PEP 517%"})
+    dist.parse_config_files()
+    assert dist.include_package_data
+
+    egg_info = dist.get_command_obj("egg_info")
+    dist.run_command("egg_info")
+    egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info"))
+    assert egg_info_dir.is_dir()
+
+    # == Setup ==
+    build_py = dist.get_command_obj("build_py")
+    build_py.finalize_options()
+    egg_info = dist.get_command_obj("egg_info")
+    egg_info_run = Mock(side_effect=egg_info.run)
+    monkeypatch.setattr(egg_info, "run", egg_info_run)
+
+    # == Remove caches ==
+    # egg_info is called when build_py looks for data_files, which gets cached.
+    # We need to ensure it is not cached yet, otherwise it may impact the tests.
+    build_py.__dict__.pop('data_files', None)
+    dist.reinitialize_command(egg_info)
+
+    # == Sanity check ==
+    # Ensure that if existing_egg_info is not given, build_py attempts to run egg_info
+    build_py.existing_egg_info_dir = None
+    build_py.run()
+    egg_info_run.assert_called()
+
+    # == Remove caches ==
+    egg_info_run.reset_mock()
+    build_py.__dict__.pop('data_files', None)
+    dist.reinitialize_command(egg_info)
+
+    # == Actual test ==
+    # Ensure that if existing_egg_info_dir is given, egg_info doesn't run
+    build_py.existing_egg_info_dir = egg_info_dir
+    build_py.run()
+    egg_info_run.assert_not_called()
+    assert build_py.data_files
+
+    # Make sure the list of outputs is actually OK
+    outputs = [x.replace(os.sep, "/") for x in build_py.get_outputs()]
+    assert outputs  # the list of outputs should not be empty
+    example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/")
+    assert example in outputs
+
+
+EXAMPLE_ARBITRARY_MAPPING = {
+    "pyproject.toml": DALS(
+        """
+        [project]
+        name = "mypkg"
+        version = "42"
+
+        [tool.setuptools]
+        packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"]
+
+        [tool.setuptools.package-dir]
+        "" = "src"
+        "mypkg.sub2" = "src/mypkg/_sub2"
+        "mypkg.sub2.nested" = "other"
+        """
+    ),
+    "src": {
+        "mypkg": {
+            "__init__.py": "",
+            "resource_file.txt": "",
+            "sub1": {
+                "__init__.py": "",
+                "mod1.py": "",
+            },
+            "_sub2": {
+                "mod2.py": "",
+            },
+        },
+    },
+    "other": {
+        "__init__.py": "",
+        "mod3.py": "",
+    },
+    "MANIFEST.in": DALS(
+        """
+        global-include *.py *.txt
+        global-exclude *.py[cod]
+        """
+    ),
+}
+
+
+def test_get_outputs(tmpdir_cwd):
+    jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING)
+    dist = Distribution({"script_name": "%test%"})
+    dist.parse_config_files()
+
+    build_py = dist.get_command_obj("build_py")
+    build_py.editable_mode = True
+    build_py.ensure_finalized()
+    build_lib = build_py.build_lib.replace(os.sep, "/")
+    outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
+    assert outputs == {
+        f"{build_lib}/mypkg/__init__.py",
+        f"{build_lib}/mypkg/resource_file.txt",
+        f"{build_lib}/mypkg/sub1/__init__.py",
+        f"{build_lib}/mypkg/sub1/mod1.py",
+        f"{build_lib}/mypkg/sub2/mod2.py",
+        f"{build_lib}/mypkg/sub2/nested/__init__.py",
+        f"{build_lib}/mypkg/sub2/nested/mod3.py",
+    }
+    mapping = {
+        k.replace(os.sep, "/"): v.replace(os.sep, "/")
+        for k, v in build_py.get_output_mapping().items()
+    }
+    assert mapping == {
+        f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py",
+        f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt",
+        f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py",
+        f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py",
+        f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py",
+        f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py",
+        f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py",
+    }
+
+
+class TestTypeInfoFiles:
+    PYPROJECTS = {
+        "default_pyproject": DALS(
+            """
+            [project]
+            name = "foo"
+            version = "1"
+            """
+        ),
+        "dont_include_package_data": DALS(
+            """
+            [project]
+            name = "foo"
+            version = "1"
+
+            [tool.setuptools]
+            include-package-data = false
+            """
+        ),
+        "exclude_type_info": DALS(
+            """
+            [project]
+            name = "foo"
+            version = "1"
+
+            [tool.setuptools]
+            include-package-data = false
+
+            [tool.setuptools.exclude-package-data]
+            "*" = ["py.typed", "*.pyi"]
+            """
+        ),
+    }
+
+    EXAMPLES = {
+        "simple_namespace": {
+            "directory_structure": {
+                "foo": {
+                    "bar.pyi": "",
+                    "py.typed": "",
+                    "__init__.py": "",
+                }
+            },
+            "expected_type_files": {"foo/bar.pyi", "foo/py.typed"},
+        },
+        "nested_inside_namespace": {
+            "directory_structure": {
+                "foo": {
+                    "bar": {
+                        "py.typed": "",
+                        "mod.pyi": "",
+                    }
+                }
+            },
+            "expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"},
+        },
+        "namespace_nested_inside_regular": {
+            "directory_structure": {
+                "foo": {
+                    "namespace": {
+                        "foo.pyi": "",
+                    },
+                    "__init__.pyi": "",
+                    "py.typed": "",
+                }
+            },
+            "expected_type_files": {
+                "foo/namespace/foo.pyi",
+                "foo/__init__.pyi",
+                "foo/py.typed",
+            },
+        },
+    }
+
+    @pytest.mark.parametrize(
+        "pyproject",
+        [
+            "default_pyproject",
+            pytest.param(
+                "dont_include_package_data",
+                marks=pytest.mark.xfail(reason="pypa/setuptools#4350"),
+            ),
+        ],
+    )
+    @pytest.mark.parametrize("example", EXAMPLES.keys())
+    def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
+        structure = {
+            **self.EXAMPLES[example]["directory_structure"],
+            "pyproject.toml": self.PYPROJECTS[pyproject],
+        }
+        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
+        jaraco.path.build(structure)
+
+        build_py = get_finalized_build_py()
+        outputs = get_outputs(build_py)
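+        # set ``<=`` is a subset check: every expected type file must appear
+        # among the build outputs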
+        assert expected_type_files <= outputs
+
+    @pytest.mark.parametrize("pyproject", ["exclude_type_info"])
+    @pytest.mark.parametrize("example", EXAMPLES.keys())
+    def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example):
+        structure = {
+            **self.EXAMPLES[example]["directory_structure"],
+            "pyproject.toml": self.PYPROJECTS[pyproject],
+        }
+        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
+        jaraco.path.build(structure)
+
+        build_py = get_finalized_build_py()
+        outputs = get_outputs(build_py)
+        assert expected_type_files.isdisjoint(outputs)
+
+    def test_stub_only_package(self, tmpdir_cwd):
+        structure = {
+            "pyproject.toml": DALS(
+                """
+                [project]
+                name = "foo-stubs"
+                version = "1"
+                """
+            ),
+            "foo-stubs": {"__init__.pyi": "", "bar.pyi": ""},
+        }
+        expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"}
+        jaraco.path.build(structure)
+
+        build_py = get_finalized_build_py()
+        outputs = get_outputs(build_py)
+        assert expected_type_files <= outputs
+
+
+def get_finalized_build_py(script_name="%build_py-test%"):
+    dist = Distribution({"script_name": script_name})
+    dist.parse_config_files()
+    build_py = dist.get_command_obj("build_py")
+    build_py.finalize_options()
+    return build_py
+
+
+def get_outputs(build_py):
+    build_dir = Path(build_py.build_lib)
+    return {
+        os.path.relpath(x, build_dir).replace(os.sep, "/")
+        for x in build_py.get_outputs()
+    }
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_develop.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_develop.py
new file mode 100644
index 0000000000000000000000000000000000000000..929fa9c285eb4d11e646dab2864be6d5fa023e2b
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_develop.py
@@ -0,0 +1,175 @@
+"""develop tests"""
+
+import os
+import pathlib
+import platform
+import subprocess
+import sys
+
+import pytest
+
+from setuptools._path import paths_on_pythonpath
+from setuptools.command.develop import develop
+from setuptools.dist import Distribution
+
+from . import contexts, namespaces
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo',
+    packages=['foo'],
+)
+"""
+
+INIT_PY = """print "foo"
+"""
+
+
+@pytest.fixture
+def temp_user(monkeypatch):
+    with contexts.tempdir() as user_base:
+        with contexts.tempdir() as user_site:
+            monkeypatch.setattr('site.USER_BASE', user_base)
+            monkeypatch.setattr('site.USER_SITE', user_site)
+            yield
+
+
+@pytest.fixture
+def test_env(tmpdir, temp_user):
+    target = tmpdir
+    foo = target.mkdir('foo')
+    setup = target / 'setup.py'
+    if setup.isfile():
+        raise ValueError(dir(target))
+    with setup.open('w') as f:
+        f.write(SETUP_PY)
+    init = foo / '__init__.py'
+    with init.open('w') as f:
+        f.write(INIT_PY)
+    with target.as_cwd():
+        yield target
+
+
+class TestDevelop:
+    in_virtualenv = hasattr(sys, 'real_prefix')
+    in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix
+
+    def test_console_scripts(self, tmpdir):
+        """
+        Test that console scripts are installed and that they reference
+        only the project by name and not the current version.
+        """
+        pytest.skip(
+            "TODO: needs a fixture to cause 'develop' "
+            "to be invoked without mutating environment."
+        )
+        settings = dict(
+            name='foo',
+            packages=['foo'],
+            version='0.0',
+            entry_points={
+                'console_scripts': [
+                    'foocmd = foo:foo',
+                ],
+            },
+        )
+        dist = Distribution(settings)
+        dist.script_name = 'setup.py'
+        cmd = develop(dist)
+        cmd.ensure_finalized()
+        cmd.install_dir = tmpdir
+        cmd.run()
+        # assert '0.0' not in foocmd_text
+
+    @pytest.mark.xfail(reason="legacy behavior retained for compatibility #4167")
+    def test_egg_link_filename(self):
+        settings = dict(
+            name='Foo $$$ Bar_baz-bing',
+        )
+        dist = Distribution(settings)
+        cmd = develop(dist)
+        cmd.ensure_finalized()
+        link = pathlib.Path(cmd.egg_link)
+        assert link.suffix == '.egg-link'
+        assert link.stem == 'Foo_Bar_baz_bing'
+
+
+class TestResolver:
+    """
+    TODO: These tests were written with a minimal understanding
+    of what _resolve_setup_path is intending to do. Come up with
+    more meaningful cases that look like real-world scenarios.
+    """
+
+    def test_resolve_setup_path_cwd(self):
+        assert develop._resolve_setup_path('.', '.', '.') == '.'
+
+    def test_resolve_setup_path_one_dir(self):
+        assert develop._resolve_setup_path('pkgs', '.', 'pkgs') == '../'
+
+    def test_resolve_setup_path_one_dir_trailing_slash(self):
+        assert develop._resolve_setup_path('pkgs/', '.', 'pkgs') == '../'
+
+
+class TestNamespaces:
+    @staticmethod
+    def install_develop(src_dir, target):
+        develop_cmd = [
+            sys.executable,
+            'setup.py',
+            'develop',
+            '--install-dir',
+            str(target),
+        ]
+        with src_dir.as_cwd():
+            with paths_on_pythonpath([str(target)]):
+                subprocess.check_call(develop_cmd)
+
+    @pytest.mark.skipif(
+        bool(os.environ.get("APPVEYOR")),
+        reason="https://github.com/pypa/setuptools/issues/851",
+    )
+    @pytest.mark.skipif(
+        platform.python_implementation() == 'PyPy',
+        reason="https://github.com/pypa/setuptools/issues/1202",
+    )
+    def test_namespace_package_importable(self, tmpdir):
+        """
+        Installing two packages sharing the same namespace, one installed
+        naturally using pip or `--single-version-externally-managed`
+        and the other installed using `develop` should leave the namespace
+        intact and both packages reachable by import.
+        """
+        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
+        target = tmpdir / 'packages'
+        # use pip to install to the target directory
+        install_cmd = [
+            sys.executable,
+            '-m',
+            'pip',
+            'install',
+            str(pkg_A),
+            '-t',
+            str(target),
+        ]
+        subprocess.check_call(install_cmd)
+        self.install_develop(pkg_B, target)
+        namespaces.make_site_dir(target)
+        try_import = [
+            sys.executable,
+            '-c',
+            'import myns.pkgA; import myns.pkgB',
+        ]
+        with paths_on_pythonpath([str(target)]):
+            subprocess.check_call(try_import)
+
+        # additionally ensure that pkg_resources import works
+        pkg_resources_imp = [
+            sys.executable,
+            '-c',
+            'import pkg_resources',
+        ]
+        with paths_on_pythonpath([str(target)]):
+            subprocess.check_call(pkg_resources_imp)
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_editable_install.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_editable_install.py
new file mode 100644
index 0000000000000000000000000000000000000000..038dcadf934af20dbe11dcb13ba324ceb30bf90d
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_editable_install.py
@@ -0,0 +1,1289 @@
+from __future__ import annotations
+
+import os
+import platform
+import stat
+import subprocess
+import sys
+from copy import deepcopy
+from importlib import import_module
+from importlib.machinery import EXTENSION_SUFFIXES
+from pathlib import Path
+from textwrap import dedent
+from typing import Any
+from unittest.mock import Mock
+from uuid import uuid4
+
+import jaraco.envs
+import jaraco.path
+import pytest
+from path import Path as _Path
+
+from setuptools._importlib import resources as importlib_resources
+from setuptools.command.editable_wheel import (
+    _DebuggingTips,
+    _encode_pth,
+    _find_namespaces,
+    _find_package_roots,
+    _find_virtual_namespaces,
+    _finder_template,
+    _LinkTree,
+    _TopLevelFinder,
+    editable_wheel,
+)
+from setuptools.dist import Distribution
+from setuptools.extension import Extension
+from setuptools.warnings import SetuptoolsDeprecationWarning
+
+from . import contexts, namespaces
+
+from distutils.core import run_setup
+
+
+@pytest.fixture(params=["strict", "lenient"])
+def editable_opts(request):
+    if request.param == "strict":
+        return ["--config-settings", "editable-mode=strict"]
+    return []
+
+
+EXAMPLE = {
+    'pyproject.toml': dedent(
+        """\
+        [build-system]
+        requires = ["setuptools"]
+        build-backend = "setuptools.build_meta"
+
+        [project]
+        name = "mypkg"
+        version = "3.14159"
+        license = {text = "MIT"}
+        description = "This is a Python package"
+        dynamic = ["readme"]
+        classifiers = [
+            "Development Status :: 5 - Production/Stable",
+            "Intended Audience :: Developers"
+        ]
+        urls = {Homepage = "https://github.com"}
+
+        [tool.setuptools]
+        package-dir = {"" = "src"}
+        packages = {find = {where = ["src"]}}
+        license-files = ["LICENSE*"]
+
+        [tool.setuptools.dynamic]
+        readme = {file = "README.rst"}
+
+        [tool.distutils.egg_info]
+        tag-build = ".post0"
+        """
+    ),
+    "MANIFEST.in": dedent(
+        """\
+        global-include *.py *.txt
+        global-exclude *.py[cod]
+        prune dist
+        prune build
+        """
+    ).strip(),
+    "README.rst": "This is a ``README``",
+    "LICENSE.txt": "---- placeholder MIT license ----",
+    "src": {
+        "mypkg": {
+            "__init__.py": dedent(
+                """\
+                import sys
+                from importlib.metadata import PackageNotFoundError, version
+
+                try:
+                    __version__ = version(__name__)
+                except PackageNotFoundError:
+                    __version__ = "unknown"
+                """
+            ),
+            "__main__.py": dedent(
+                """\
+                from importlib.resources import read_text
+                from . import __version__, __name__ as parent
+                from .mod import x
+
+                data = read_text(parent, "data.txt")
+                print(__version__, data, x)
+                """
+            ),
+            "mod.py": "x = ''",
+            "data.txt": "Hello World",
+        }
+    },
+}
+
+
+SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
+
+
+@pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
+@pytest.mark.parametrize(
+    "files",
+    [
+        {**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB},
+        EXAMPLE,  # No setup.py script
+    ],
+)
+def test_editable_with_pyproject(tmp_path, venv, files, editable_opts):
+    project = tmp_path / "mypkg"
+    project.mkdir()
+    jaraco.path.build(files, prefix=project)
+
+    cmd = [
+        "python",
+        "-m",
+        "pip",
+        "install",
+        "--no-build-isolation",  # required to force current version of setuptools
+        "-e",
+        str(project),
+        *editable_opts,
+    ]
+    print(venv.run(cmd))
+
+    cmd = ["python", "-m", "mypkg"]
+    assert venv.run(cmd).strip() == "3.14159.post0 Hello World"
+
+    (project / "src/mypkg/data.txt").write_text("foobar", encoding="utf-8")
+    (project / "src/mypkg/mod.py").write_text("x = 42", encoding="utf-8")
+    assert venv.run(cmd).strip() == "3.14159.post0 foobar 42"
+
+
+def test_editable_with_flat_layout(tmp_path, venv, editable_opts):
+    files = {
+        "mypkg": {
+            "pyproject.toml": dedent(
+                """\
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "mypkg"
+                version = "3.14159"
+
+                [tool.setuptools]
+                packages = ["pkg"]
+                py-modules = ["mod"]
+                """
+            ),
+            "pkg": {"__init__.py": "a = 4"},
+            "mod.py": "b = 2",
+        },
+    }
+    jaraco.path.build(files, prefix=tmp_path)
+    project = tmp_path / "mypkg"
+
+    cmd = [
+        "python",
+        "-m",
+        "pip",
+        "install",
+        "--no-build-isolation",  # required to force current version of setuptools
+        "-e",
+        str(project),
+        *editable_opts,
+    ]
+    print(venv.run(cmd))
+    cmd = ["python", "-c", "import pkg, mod; print(pkg.a, mod.b)"]
+    assert venv.run(cmd).strip() == "4 2"
+
+
+def test_editable_with_single_module(tmp_path, venv, editable_opts):
+    files = {
+        "mypkg": {
+            "pyproject.toml": dedent(
+                """\
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "mod"
+                version = "3.14159"
+
+                [tool.setuptools]
+                py-modules = ["mod"]
+                """
+            ),
+            "mod.py": "b = 2",
+        },
+    }
+    jaraco.path.build(files, prefix=tmp_path)
+    project = tmp_path / "mypkg"
+
+    cmd = [
+        "python",
+        "-m",
+        "pip",
+        "install",
+        "--no-build-isolation",  # required to force current version of setuptools
+        "-e",
+        str(project),
+        *editable_opts,
+    ]
+    print(venv.run(cmd))
+    cmd = ["python", "-c", "import mod; print(mod.b)"]
+    assert venv.run(cmd).strip() == "2"
+
+
+class TestLegacyNamespaces:
+    # legacy => pkg_resources.declare_namespace(...) + setup(namespace_packages=...)
+
+    def test_nspkg_file_is_unique(self, tmp_path, monkeypatch):
+        deprecation = pytest.warns(
+            SetuptoolsDeprecationWarning, match=".*namespace_packages parameter.*"
+        )
+        installation_dir = tmp_path / ".installation_dir"
+        installation_dir.mkdir()
+        examples = (
+            "myns.pkgA",
+            "myns.pkgB",
+            "myns.n.pkgA",
+            "myns.n.pkgB",
+        )
+
+        for name in examples:
+            pkg = namespaces.build_namespace_package(tmp_path, name, version="42")
+            with deprecation, monkeypatch.context() as ctx:
+                ctx.chdir(pkg)
+                dist = run_setup("setup.py", stop_after="config")
+                cmd = editable_wheel(dist)
+                cmd.finalize_options()
+                editable_name = cmd.get_finalized_command("dist_info").name
+                cmd._install_namespaces(installation_dir, editable_name)
+
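+        # each distribution should produce its own uniquely named *-nspkg.pth
+        # shim; a shared name would let later installs overwrite earlier ones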
+        files = list(installation_dir.glob("*-nspkg.pth"))
+        assert len(files) == len(examples)
+
+    @pytest.mark.parametrize(
+        "impl",
+        (
+            "pkg_resources",
+            #  "pkgutil",  => does not work
+        ),
+    )
+    @pytest.mark.parametrize("ns", ("myns.n",))
+    def test_namespace_package_importable(
+        self, venv, tmp_path, ns, impl, editable_opts
+    ):
+        """
+        Installing two packages sharing the same namespace, one installed
+        naturally using pip or `--single-version-externally-managed`
+        and the other installed in editable mode should leave the namespace
+        intact and both packages reachable by import.
+        (Ported from test_develop).
+        """
+        build_system = """\
+        [build-system]
+        requires = ["setuptools"]
+        build-backend = "setuptools.build_meta"
+        """
+        pkg_A = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgA", impl=impl)
+        pkg_B = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgB", impl=impl)
+        (pkg_A / "pyproject.toml").write_text(build_system, encoding="utf-8")
+        (pkg_B / "pyproject.toml").write_text(build_system, encoding="utf-8")
+        # use pip to install to the target directory
+        opts = editable_opts[:]
+        opts.append("--no-build-isolation")  # force current version of setuptools
+        venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
+        venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
+        venv.run(["python", "-c", f"import {ns}.pkgA; import {ns}.pkgB"])
+        # additionally ensure that pkg_resources import works
+        venv.run(["python", "-c", "import pkg_resources"])
+
+
+class TestPep420Namespaces:
+    def test_namespace_package_importable(self, venv, tmp_path, editable_opts):
+        """
+        Installing two packages sharing the same namespace, one installed
+        normally using pip and the other installed in editable mode
+        should allow importing both packages.
+        """
+        pkg_A = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgA')
+        pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
+        # use pip to install to the target directory
+        opts = editable_opts[:]
+        opts.append("--no-build-isolation")  # force current version of setuptools
+        venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
+        venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
+        venv.run(["python", "-c", "import myns.n.pkgA; import myns.n.pkgB"])
+
+    def test_namespace_created_via_package_dir(self, venv, tmp_path, editable_opts):
+        """Currently users can create a namespace by tweaking `package_dir`"""
+        files = {
+            "pkgA": {
+                "pyproject.toml": dedent(
+                    """\
+                    [build-system]
+                    requires = ["setuptools", "wheel"]
+                    build-backend = "setuptools.build_meta"
+
+                    [project]
+                    name = "pkgA"
+                    version = "3.14159"
+
+                    [tool.setuptools]
+                    package-dir = {"myns.n.pkgA" = "src"}
+                    """
+                ),
+                "src": {"__init__.py": "a = 1"},
+            },
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+        pkg_A = tmp_path / "pkgA"
+        pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
+        pkg_C = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgC')
+
+        # use pip to install to the target directory
+        opts = editable_opts[:]
+        opts.append("--no-build-isolation")  # force current version of setuptools
+        venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
+        venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
+        venv.run(["python", "-m", "pip", "install", "-e", str(pkg_C), *opts])
+        venv.run(["python", "-c", "from myns.n import pkgA, pkgB, pkgC"])
+
+    def test_namespace_accidental_config_in_lenient_mode(self, venv, tmp_path):
+        """Sometimes users might specify an ``include`` pattern that ignores parent
+        packages. In a normal installation this would ignore all modules inside the
+        parent packages, and make them namespaces (reported in issue #3504),
+        so the editable mode should preserve this behaviour.
+        """
+        files = {
+            "pkgA": {
+                "pyproject.toml": dedent(
+                    """\
+                    [build-system]
+                    requires = ["setuptools", "wheel"]
+                    build-backend = "setuptools.build_meta"
+
+                    [project]
+                    name = "pkgA"
+                    version = "3.14159"
+
+                    [tool.setuptools]
+                    packages.find.include = ["mypkg.*"]
+                    """
+                ),
+                "mypkg": {
+                    "__init__.py": "",
+                    "other.py": "b = 1",
+                    "n": {
+                        "__init__.py": "",
+                        "pkgA.py": "a = 1",
+                    },
+                },
+                "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+            },
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+        pkg_A = tmp_path / "pkgA"
+
+        # use pip to install to the target directory
+        opts = ["--no-build-isolation"]  # force current version of setuptools
+        venv.run(["python", "-m", "pip", "-v", "install", "-e", str(pkg_A), *opts])
+        out = venv.run(["python", "-c", "from mypkg.n import pkgA; print(pkgA.a)"])
+        assert out.strip() == "1"
+        cmd = """\
+        try:
+            import mypkg.other
+        except ImportError:
+            print("mypkg.other not defined")
+        """
+        out = venv.run(["python", "-c", dedent(cmd)])
+        assert "mypkg.other not defined" in out
+
+
+def test_editable_with_prefix(tmp_path, sample_project, editable_opts):
+    """
+    Editable install to a prefix should be discoverable.
+    """
+    prefix = tmp_path / 'prefix'
+
+    # figure out where pip will likely install the package
+    site_packages_all = [
+        prefix / Path(path).relative_to(sys.prefix)
+        for path in sys.path
+        if 'site-packages' in path and path.startswith(sys.prefix)
+    ]
+
+    for sp in site_packages_all:
+        sp.mkdir(parents=True)
+
+    # work around the prefix site dirs not being enabled automatically
+    # (handled via _addsitedirs)
+    _addsitedirs(site_packages_all)
+
+    env = dict(os.environ, PYTHONPATH=os.pathsep.join(map(str, site_packages_all)))
+    cmd = [
+        sys.executable,
+        '-m',
+        'pip',
+        'install',
+        '--editable',
+        str(sample_project),
+        '--prefix',
+        str(prefix),
+        '--no-build-isolation',
+        *editable_opts,
+    ]
+    subprocess.check_call(cmd, env=env)
+
+    # now run 'sample' with the prefix on the PYTHONPATH
+    bin = 'Scripts' if platform.system() == 'Windows' else 'bin'
+    exe = prefix / bin / 'sample'
+    subprocess.check_call([exe], env=env)
+
+
+class TestFinderTemplate:
+    """This test focus in getting a particular implementation detail right.
+    If at some point in time the implementation is changed for something different,
+    this test can be modified or even excluded.
+    """
+
+    def install_finder(self, finder):
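+        # The finder template is a self-contained Python source string;
+        # exec-ing it and calling its install() hook registers the editable
+        # import finder (on sys.meta_path) for the duration of the test.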
+        loc = {}
+        exec(finder, loc, loc)
+        loc["install"]()
+
+    def test_packages(self, tmp_path):
+        files = {
+            "src1": {
+                "pkg1": {
+                    "__init__.py": "",
+                    "subpkg": {"mod1.py": "a = 42"},
+                },
+            },
+            "src2": {"mod2.py": "a = 43"},
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+
+        mapping = {
+            "pkg1": str(tmp_path / "src1/pkg1"),
+            "mod2": str(tmp_path / "src2/mod2"),
+        }
+        template = _finder_template(str(uuid4()), mapping, {})
+
+        with contexts.save_paths(), contexts.save_sys_modules():
+            for mod in ("pkg1", "pkg1.subpkg", "pkg1.subpkg.mod1", "mod2"):
+                sys.modules.pop(mod, None)
+
+            self.install_finder(template)
+            mod1 = import_module("pkg1.subpkg.mod1")
+            mod2 = import_module("mod2")
+            subpkg = import_module("pkg1.subpkg")
+
+            assert mod1.a == 42
+            assert mod2.a == 43
+            expected = str((tmp_path / "src1/pkg1/subpkg").resolve())
+            assert_path(subpkg, expected)
+
+    def test_namespace(self, tmp_path):
+        files = {"pkg": {"__init__.py": "a = 13", "text.txt": "abc"}}
+        jaraco.path.build(files, prefix=tmp_path)
+
+        mapping = {"ns.othername": str(tmp_path / "pkg")}
+        namespaces = {"ns": []}
+
+        template = _finder_template(str(uuid4()), mapping, namespaces)
+        with contexts.save_paths(), contexts.save_sys_modules():
+            for mod in ("ns", "ns.othername"):
+                sys.modules.pop(mod, None)
+
+            self.install_finder(template)
+            pkg = import_module("ns.othername")
+            text = importlib_resources.files(pkg) / "text.txt"
+
+            expected = str((tmp_path / "pkg").resolve())
+            assert_path(pkg, expected)
+            assert pkg.a == 13
+
+            # Make sure resources can also be found
+            assert text.read_text(encoding="utf-8") == "abc"
+
+    def test_combine_namespaces(self, tmp_path):
+        files = {
+            "src1": {"ns": {"pkg1": {"__init__.py": "a = 13"}}},
+            "src2": {"ns": {"mod2.py": "b = 37"}},
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+
+        mapping = {
+            "ns.pkgA": str(tmp_path / "src1/ns/pkg1"),
+            "ns": str(tmp_path / "src2/ns"),
+        }
+        namespaces_ = {"ns": [str(tmp_path / "src1"), str(tmp_path / "src2")]}
+        template = _finder_template(str(uuid4()), mapping, namespaces_)
+
+        with contexts.save_paths(), contexts.save_sys_modules():
+            for mod in ("ns", "ns.pkgA", "ns.mod2"):
+                sys.modules.pop(mod, None)
+
+            self.install_finder(template)
+            pkgA = import_module("ns.pkgA")
+            mod2 = import_module("ns.mod2")
+
+            expected = str((tmp_path / "src1/ns/pkg1").resolve())
+            assert_path(pkgA, expected)
+            assert pkgA.a == 13
+            assert mod2.b == 37
+
+    def test_combine_namespaces_nested(self, tmp_path):
+        """
+        Users may attempt to combine namespace packages in a nested way via
+        ``package_dir`` as shown in pypa/setuptools#4248.
+        """
+
+        files = {
+            "src": {"my_package": {"my_module.py": "a = 13"}},
+            "src2": {"my_package2": {"my_module2.py": "b = 37"}},
+        }
+
+        stack = jaraco.path.DirectoryStack()
+        with stack.context(tmp_path):
+            jaraco.path.build(files)
+            attrs = {
+                "script_name": "%PEP 517%",
+                "package_dir": {
+                    "different_name": "src/my_package",
+                    "different_name.subpkg": "src2/my_package2",
+                },
+                "packages": ["different_name", "different_name.subpkg"],
+            }
+            dist = Distribution(attrs)
+            finder = _TopLevelFinder(dist, str(uuid4()))
+            code = next(v for k, v in finder.get_implementation() if k.endswith(".py"))
+
+        with contexts.save_paths(), contexts.save_sys_modules():
+            for mod in attrs["packages"]:
+                sys.modules.pop(mod, None)
+
+            self.install_finder(code)
+            mod1 = import_module("different_name.my_module")
+            mod2 = import_module("different_name.subpkg.my_module2")
+
+            expected = str((tmp_path / "src/my_package/my_module.py").resolve())
+            assert str(Path(mod1.__file__).resolve()) == expected
+
+            expected = str((tmp_path / "src2/my_package2/my_module2.py").resolve())
+            assert str(Path(mod2.__file__).resolve()) == expected
+
+            assert mod1.a == 13
+            assert mod2.b == 37
+
+    def test_dynamic_path_computation(self, tmp_path):
+        # Follows the example in PEP 420
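+        # Namespace portions recompute their __path__ dynamically, so entries
+        # appended to sys.path after the first import become visible as well.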
+        files = {
+            "project1": {"parent": {"child": {"one.py": "x = 1"}}},
+            "project2": {"parent": {"child": {"two.py": "x = 2"}}},
+            "project3": {"parent": {"child": {"three.py": "x = 3"}}},
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+        mapping = {}
+        namespaces_ = {"parent": [str(tmp_path / "project1/parent")]}
+        template = _finder_template(str(uuid4()), mapping, namespaces_)
+
+        mods = (f"parent.child.{name}" for name in ("one", "two", "three"))
+        with contexts.save_paths(), contexts.save_sys_modules():
+            for mod in ("parent", "parent.child", "parent.child", *mods):
+                sys.modules.pop(mod, None)
+
+            self.install_finder(template)
+
+            one = import_module("parent.child.one")
+            assert one.x == 1
+
+            with pytest.raises(ImportError):
+                import_module("parent.child.two")
+
+            sys.path.append(str(tmp_path / "project2"))
+            two = import_module("parent.child.two")
+            assert two.x == 2
+
+            with pytest.raises(ImportError):
+                import_module("parent.child.three")
+
+            sys.path.append(str(tmp_path / "project3"))
+            three = import_module("parent.child.three")
+            assert three.x == 3
+
+    def test_no_recursion(self, tmp_path):
+        # See issue #3550
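+        # The self-referencing import must fail with a plain ImportError
+        # instead of sending the finder into infinite recursion.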
+        files = {
+            "pkg": {
+                "__init__.py": "from . import pkg",
+            },
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+
+        mapping = {
+            "pkg": str(tmp_path / "pkg"),
+        }
+        template = _finder_template(str(uuid4()), mapping, {})
+
+        with contexts.save_paths(), contexts.save_sys_modules():
+            sys.modules.pop("pkg", None)
+
+            self.install_finder(template)
+            with pytest.raises(ImportError, match="pkg"):
+                import_module("pkg")
+
+    def test_similar_name(self, tmp_path):
+        files = {
+            "foo": {
+                "__init__.py": "",
+                "bar": {
+                    "__init__.py": "",
+                },
+            },
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+
+        mapping = {
+            "foo": str(tmp_path / "foo"),
+        }
+        template = _finder_template(str(uuid4()), mapping, {})
+
+        with contexts.save_paths(), contexts.save_sys_modules():
+            sys.modules.pop("foo", None)
+            sys.modules.pop("foo.bar", None)
+
+            self.install_finder(template)
+            with pytest.raises(ImportError, match="foobar"):
+                import_module("foobar")
+
+    def test_case_sensitivity(self, tmp_path):
+        files = {
+            "foo": {
+                "__init__.py": "",
+                "lowercase.py": "x = 1",
+                "bar": {
+                    "__init__.py": "",
+                    "lowercase.py": "x = 2",
+                },
+            },
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+        mapping = {
+            "foo": str(tmp_path / "foo"),
+        }
+        template = _finder_template(str(uuid4()), mapping, {})
+        with contexts.save_paths(), contexts.save_sys_modules():
+            sys.modules.pop("foo", None)
+
+            self.install_finder(template)
+            with pytest.raises(ImportError, match="'FOO'"):
+                import_module("FOO")
+
+            with pytest.raises(ImportError, match="'foo\\.LOWERCASE'"):
+                import_module("foo.LOWERCASE")
+
+            with pytest.raises(ImportError, match="'foo\\.bar\\.Lowercase'"):
+                import_module("foo.bar.Lowercase")
+
+            with pytest.raises(ImportError, match="'foo\\.BAR'"):
+                import_module("foo.BAR.lowercase")
+
+            with pytest.raises(ImportError, match="'FOO'"):
+                import_module("FOO.bar.lowercase")
+
+            mod = import_module("foo.lowercase")
+            assert mod.x == 1
+
+            mod = import_module("foo.bar.lowercase")
+            assert mod.x == 2
+
+    def test_namespace_case_sensitivity(self, tmp_path):
+        files = {
+            "pkg": {
+                "__init__.py": "a = 13",
+                "foo": {
+                    "__init__.py": "b = 37",
+                    "bar.py": "c = 42",
+                },
+            },
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+
+        mapping = {"ns.othername": str(tmp_path / "pkg")}
+        namespaces = {"ns": []}
+
+        template = _finder_template(str(uuid4()), mapping, namespaces)
+        with contexts.save_paths(), contexts.save_sys_modules():
+            for mod in ("ns", "ns.othername"):
+                sys.modules.pop(mod, None)
+
+            self.install_finder(template)
+            pkg = import_module("ns.othername")
+            expected = str((tmp_path / "pkg").resolve())
+            assert_path(pkg, expected)
+            assert pkg.a == 13
+
+            foo = import_module("ns.othername.foo")
+            assert foo.b == 37
+
+            bar = import_module("ns.othername.foo.bar")
+            assert bar.c == 42
+
+            with pytest.raises(ImportError, match="'NS'"):
+                import_module("NS.othername.foo")
+
+            with pytest.raises(ImportError, match="'ns\\.othername\\.FOO\\'"):
+                import_module("ns.othername.FOO")
+
+            with pytest.raises(ImportError, match="'ns\\.othername\\.foo\\.BAR\\'"):
+                import_module("ns.othername.foo.BAR")
+
+    def test_intermediate_packages(self, tmp_path):
+        """
+        The finder should not import ``fullname`` if the intermediate segments
+        don't exist (see pypa/setuptools#4019).
+        """
+        files = {
+            "src": {
+                "mypkg": {
+                    "__init__.py": "",
+                    "config.py": "a = 13",
+                    "helloworld.py": "b = 13",
+                    "components": {
+                        "config.py": "a = 37",
+                    },
+                },
+            }
+        }
+        jaraco.path.build(files, prefix=tmp_path)
+
+        mapping = {"mypkg": str(tmp_path / "src/mypkg")}
+        template = _finder_template(str(uuid4()), mapping, {})
+
+        with contexts.save_paths(), contexts.save_sys_modules():
+            for mod in (
+                "mypkg",
+                "mypkg.config",
+                "mypkg.helloworld",
+                "mypkg.components",
+                "mypkg.components.config",
+                "mypkg.components.helloworld",
+            ):
+                sys.modules.pop(mod, None)
+
+            self.install_finder(template)
+
+            config = import_module("mypkg.components.config")
+            assert config.a == 37
+
+            helloworld = import_module("mypkg.helloworld")
+            assert helloworld.b == 13
+
+            with pytest.raises(ImportError):
+                import_module("mypkg.components.helloworld")
+
+
+def test_pkg_roots(tmp_path):
+    """This test focus in getting a particular implementation detail right.
+    If at some point in time the implementation is changed for something different,
+    this test can be modified or even excluded.
+    """
+    files = {
+        "a": {"b": {"__init__.py": "ab = 1"}, "__init__.py": "a = 1"},
+        "d": {"__init__.py": "d = 1", "e": {"__init__.py": "de = 1"}},
+        "f": {"g": {"h": {"__init__.py": "fgh = 1"}}},
+        "other": {"__init__.py": "abc = 1"},
+        "another": {"__init__.py": "abcxyz = 1"},
+        "yet_another": {"__init__.py": "mnopq = 1"},
+    }
+    jaraco.path.build(files, prefix=tmp_path)
+    package_dir = {
+        "a.b.c": "other",
+        "a.b.c.x.y.z": "another",
+        "m.n.o.p.q": "yet_another",
+    }
+    packages = [
+        "a",
+        "a.b",
+        "a.b.c",
+        "a.b.c.x.y",
+        "a.b.c.x.y.z",
+        "d",
+        "d.e",
+        "f",
+        "f.g",
+        "f.g.h",
+        "m.n.o.p.q",
+    ]
+    roots = _find_package_roots(packages, package_dir, tmp_path)
+    assert roots == {
+        "a": str(tmp_path / "a"),
+        "a.b.c": str(tmp_path / "other"),
+        "a.b.c.x.y.z": str(tmp_path / "another"),
+        "d": str(tmp_path / "d"),
+        "f": str(tmp_path / "f"),
+        "m.n.o.p.q": str(tmp_path / "yet_another"),
+    }
+
+    ns = set(dict(_find_namespaces(packages, roots)))
+    assert ns == {"f", "f.g"}
+
+    ns = set(_find_virtual_namespaces(roots))
+    assert ns == {"a.b", "a.b.c.x", "a.b.c.x.y", "m", "m.n", "m.n.o", "m.n.o.p"}
+
+
+class TestOverallBehaviour:
+    PYPROJECT = """\
+        [build-system]
+        requires = ["setuptools"]
+        build-backend = "setuptools.build_meta"
+
+        [project]
+        name = "mypkg"
+        version = "3.14159"
+        """
+
+    # Any: Would need a TypedDict. Keep it simple for tests
+    FLAT_LAYOUT: dict[str, Any] = {
+        "pyproject.toml": dedent(PYPROJECT),
+        "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+        "otherfile.py": "",
+        "mypkg": {
+            "__init__.py": "",
+            "mod1.py": "var = 42",
+            "subpackage": {
+                "__init__.py": "",
+                "mod2.py": "var = 13",
+                "resource_file.txt": "resource 39",
+            },
+        },
+    }
+
+    EXAMPLES = {
+        "flat-layout": FLAT_LAYOUT,
+        "src-layout": {
+            "pyproject.toml": dedent(PYPROJECT),
+            "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+            "otherfile.py": "",
+            "src": {"mypkg": FLAT_LAYOUT["mypkg"]},
+        },
+        "custom-layout": {
+            "pyproject.toml": dedent(PYPROJECT)
+            + dedent(
+                """\
+                [tool.setuptools]
+                packages = ["mypkg", "mypkg.subpackage"]
+
+                [tool.setuptools.package-dir]
+                "mypkg.subpackage" = "other"
+                """
+            ),
+            "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+            "otherfile.py": "",
+            "mypkg": {
+                "__init__.py": "",
+                "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
+            },
+            "other": FLAT_LAYOUT["mypkg"]["subpackage"],
+        },
+        "namespace": {
+            "pyproject.toml": dedent(PYPROJECT),
+            "MANIFEST.in": EXAMPLE["MANIFEST.in"],
+            "otherfile.py": "",
+            "src": {
+                "mypkg": {
+                    "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
+                    "subpackage": FLAT_LAYOUT["mypkg"]["subpackage"],
+                },
+            },
+        },
+    }
+
+    @pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
+    @pytest.mark.parametrize("layout", EXAMPLES.keys())
+    def test_editable_install(self, tmp_path, venv, layout, editable_opts):
+        project, _ = install_project(
+            "mypkg", venv, tmp_path, self.EXAMPLES[layout], *editable_opts
+        )
+
+        # Ensure stray files are not importable
+        cmd_import_error = """\
+        try:
+            import otherfile
+        except ImportError as ex:
+            print(ex)
+        """
+        out = venv.run(["python", "-c", dedent(cmd_import_error)])
+        assert "No module named 'otherfile'" in out
+
+        # Ensure the modules are importable
+        cmd_get_vars = """\
+        import mypkg, mypkg.mod1, mypkg.subpackage.mod2
+        print(mypkg.mod1.var, mypkg.subpackage.mod2.var)
+        """
+        out = venv.run(["python", "-c", dedent(cmd_get_vars)])
+        assert "42 13" in out
+
+        # Ensure resources are reachable
+        cmd_get_resource = """\
+        import mypkg.subpackage
+        from setuptools._importlib import resources as importlib_resources
+        text = importlib_resources.files(mypkg.subpackage) / "resource_file.txt"
+        print(text.read_text(encoding="utf-8"))
+        """
+        out = venv.run(["python", "-c", dedent(cmd_get_resource)])
+        assert "resource 39" in out
+
+        # Ensure files are editable
+        mod1 = next(project.glob("**/mod1.py"))
+        mod2 = next(project.glob("**/mod2.py"))
+        resource_file = next(project.glob("**/resource_file.txt"))
+
+        mod1.write_text("var = 17", encoding="utf-8")
+        mod2.write_text("var = 781", encoding="utf-8")
+        resource_file.write_text("resource 374", encoding="utf-8")
+
+        out = venv.run(["python", "-c", dedent(cmd_get_vars)])
+        assert "42 13" not in out
+        assert "17 781" in out
+
+        out = venv.run(["python", "-c", dedent(cmd_get_resource)])
+        assert "resource 39" not in out
+        assert "resource 374" in out
+
+
+class TestLinkTree:
+    FILES = deepcopy(TestOverallBehaviour.EXAMPLES["src-layout"])
+    FILES["pyproject.toml"] += dedent(
+        """\
+        [tool.setuptools]
+        # Temporary workaround: both `include-package-data` and `package-data` configs
+        # can be removed after #3260 is fixed.
+        include-package-data = false
+        package-data = {"*" = ["*.txt"]}
+
+        [tool.setuptools.packages.find]
+        where = ["src"]
+        exclude = ["*.subpackage*"]
+        """
+    )
+    FILES["src"]["mypkg"]["resource.not_in_manifest"] = "abc"
+
+    def test_generated_tree(self, tmp_path):
+        jaraco.path.build(self.FILES, prefix=tmp_path)
+
+        with _Path(tmp_path):
+            name = "mypkg-3.14159"
+            dist = Distribution({"script_name": "%PEP 517%"})
+            dist.parse_config_files()
+
+            wheel = Mock()
+            aux = tmp_path / ".aux"
+            build = tmp_path / ".build"
+            aux.mkdir()
+            build.mkdir()
+
+            build_py = dist.get_command_obj("build_py")
+            build_py.editable_mode = True
+            build_py.build_lib = str(build)
+            build_py.ensure_finalized()
+            outputs = build_py.get_outputs()
+            output_mapping = build_py.get_output_mapping()
+
+            make_tree = _LinkTree(dist, name, aux, build)
+            make_tree(wheel, outputs, output_mapping)
+
+            mod1 = next(aux.glob("**/mod1.py"))
+            expected = tmp_path / "src/mypkg/mod1.py"
+            assert_link_to(mod1, expected)
+
+            assert next(aux.glob("**/subpackage"), None) is None
+            assert next(aux.glob("**/mod2.py"), None) is None
+            assert next(aux.glob("**/resource_file.txt"), None) is None
+
+            assert next(aux.glob("**/resource.not_in_manifest"), None) is None
+
+    def test_strict_install(self, tmp_path, venv):
+        opts = ["--config-settings", "editable-mode=strict"]
+        install_project("mypkg", venv, tmp_path, self.FILES, *opts)
+
+        out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
+        assert "42" in out
+
+        # Ensure packages excluded from distribution are not importable
+        cmd_import_error = """\
+        try:
+            from mypkg import subpackage
+        except ImportError as ex:
+            print(ex)
+        """
+        out = venv.run(["python", "-c", dedent(cmd_import_error)])
+        assert "cannot import name 'subpackage'" in out
+
+        # Ensure resource files excluded from distribution are not reachable
+        cmd_get_resource = """\
+        import mypkg
+        from setuptools._importlib import resources as importlib_resources
+        try:
+            text = importlib_resources.files(mypkg) / "resource.not_in_manifest"
+            print(text.read_text(encoding="utf-8"))
+        except FileNotFoundError as ex:
+            print(ex)
+        """
+        out = venv.run(["python", "-c", dedent(cmd_get_resource)])
+        assert "No such file or directory" in out
+        assert "resource.not_in_manifest" in out
+
+
+@pytest.mark.filterwarnings("ignore:.*compat.*:setuptools.SetuptoolsDeprecationWarning")
+def test_compat_install(tmp_path, venv):
+    # TODO: Remove `compat` after Dec/2022.
+    opts = ["--config-settings", "editable-mode=compat"]
+    files = TestOverallBehaviour.EXAMPLES["custom-layout"]
+    install_project("mypkg", venv, tmp_path, files, *opts)
+
+    out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
+    assert "42" in out
+
+    expected_path = comparable_path(str(tmp_path))
+
+    # Compatible behaviour will make spurious modules and excluded
+    # files importable directly from the original path
+    for cmd in (
+        "import otherfile; print(otherfile)",
+        "import other; print(other)",
+        "import mypkg; print(mypkg)",
+    ):
+        out = comparable_path(venv.run(["python", "-c", cmd]))
+        assert expected_path in out
+
+    # Compatible behaviour will not consider custom mappings
+    cmd = """\
+    try:
+        from mypkg import subpackage
+    except ImportError as ex:
+        print(ex)
+    """
+    out = venv.run(["python", "-c", dedent(cmd)])
+    assert "cannot import name 'subpackage'" in out
+
+
+def test_pbr_integration(tmp_path, venv, editable_opts):
+    """Ensure editable installs work with pbr, issue #3500"""
+    files = {
+        "pyproject.toml": dedent(
+            """\
+            [build-system]
+            requires = ["setuptools"]
+            build-backend = "setuptools.build_meta"
+            """
+        ),
+        "setup.py": dedent(
+            """\
+            __import__('setuptools').setup(
+                pbr=True,
+                setup_requires=["pbr"],
+            )
+            """
+        ),
+        "setup.cfg": dedent(
+            """\
+            [metadata]
+            name = mypkg
+
+            [files]
+            packages =
+                mypkg
+            """
+        ),
+        "mypkg": {
+            "__init__.py": "",
+            "hello.py": "print('Hello world!')",
+        },
+        "other": {"test.txt": "Another file in here."},
+    }
+    venv.run(["python", "-m", "pip", "install", "pbr"])
+
+    with contexts.environment(PBR_VERSION="0.42"):
+        install_project("mypkg", venv, tmp_path, files, *editable_opts)
+
+    out = venv.run(["python", "-c", "import mypkg.hello"])
+    assert "Hello world!" in out
+
+
+class TestCustomBuildPy:
+    """
+    Issue #3501 indicates that some plugins/customizations might rely on:
+
+    1. ``build_py`` not running
+    2. ``build_py`` always copying files to ``build_lib``
+
+    During the transition period setuptools should prevent potential errors from
+    happening due to those assumptions.
+    """
+
+    # TODO: Remove tests after _run_build_steps is removed.
+
+    FILES = {
+        **TestOverallBehaviour.EXAMPLES["flat-layout"],
+        "setup.py": dedent(
+            """\
+            import pathlib
+            from setuptools import setup
+            from setuptools.command.build_py import build_py as orig
+
+            class my_build_py(orig):
+                def run(self):
+                    super().run()
+                    raise ValueError("TEST_RAISE")
+
+            setup(cmdclass={"build_py": my_build_py})
+            """
+        ),
+    }
+
+    def test_safeguarded_from_errors(self, tmp_path, venv):
+        """Ensure that errors in custom build_py are reported as warnings"""
+        # Warnings should show up
+        _, out = install_project("mypkg", venv, tmp_path, self.FILES)
+        assert "SetuptoolsDeprecationWarning" in out
+        assert "ValueError: TEST_RAISE" in out
+        # but installation should be successful
+        out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
+        assert "42" in out
+
+
+class TestCustomBuildWheel:
+    def install_custom_build_wheel(self, dist):
+        bdist_wheel_cls = dist.get_command_class("bdist_wheel")
+
+        class MyBdistWheel(bdist_wheel_cls):
+            def get_tag(self):
+                # In issue #3513, we can see that some extensions may try to access
+                # the `plat_name` property in bdist_wheel
+                if self.plat_name.startswith("macosx-"):
+                    _ = "macOS platform"
+                return super().get_tag()
+
+        dist.cmdclass["bdist_wheel"] = MyBdistWheel
+
+    def test_access_plat_name(self, tmpdir_cwd):
+        # Even when a custom bdist_wheel tries to access plat_name,
+        # the build should be successful
+        jaraco.path.build({"module.py": "x = 42"})
+        dist = Distribution()
+        dist.script_name = "setup.py"
+        dist.set_defaults()
+        self.install_custom_build_wheel(dist)
+        cmd = editable_wheel(dist)
+        cmd.ensure_finalized()
+        cmd.run()
+        wheel_file = str(next(Path().glob('dist/*.whl')))
+        assert "editable" in wheel_file
+
+
+class TestCustomBuildExt:
+    def install_custom_build_ext_distutils(self, dist):
+        from distutils.command.build_ext import build_ext as build_ext_cls
+
+        class MyBuildExt(build_ext_cls):
+            pass
+
+        dist.cmdclass["build_ext"] = MyBuildExt
+
+    @pytest.mark.skipif(
+        sys.platform != "linux", reason="compilers may fail without correct setup"
+    )
+    def test_distutils_leave_inplace_files(self, tmpdir_cwd):
+        jaraco.path.build({"module.c": ""})
+        attrs = {
+            "ext_modules": [Extension("module", ["module.c"])],
+        }
+        dist = Distribution(attrs)
+        dist.script_name = "setup.py"
+        dist.set_defaults()
+        self.install_custom_build_ext_distutils(dist)
+        cmd = editable_wheel(dist)
+        cmd.ensure_finalized()
+        cmd.run()
+        wheel_file = str(next(Path().glob('dist/*.whl')))
+        assert "editable" in wheel_file
+        files = [p for p in Path().glob("module.*") if p.suffix != ".c"]
+        assert len(files) == 1
+        name = files[0].name
+        assert any(name.endswith(ext) for ext in EXTENSION_SUFFIXES)
+
+
+def test_debugging_tips(tmpdir_cwd, monkeypatch):
+    """Make sure to display useful debugging tips to the user."""
+    jaraco.path.build({"module.py": "x = 42"})
+    dist = Distribution()
+    dist.script_name = "setup.py"
+    dist.set_defaults()
+    cmd = editable_wheel(dist)
+    cmd.ensure_finalized()
+
+    SimulatedErr = type("SimulatedErr", (Exception,), {})
+    simulated_failure = Mock(side_effect=SimulatedErr())
+    monkeypatch.setattr(cmd, "get_finalized_command", simulated_failure)
+
+    expected_msg = "following steps are recommended to help debug"
+    with pytest.raises(SimulatedErr), pytest.warns(_DebuggingTips, match=expected_msg):
+        cmd.run()
+
+
+@pytest.mark.filterwarnings("error")
+def test_encode_pth():
+    """Ensure _encode_pth function does not produce encoding warnings"""
+    content = _encode_pth("tkmilan_ç_utf8")  # no warnings (would be turned into errors)
+    assert isinstance(content, bytes)
+
+
+def install_project(name, venv, tmp_path, files, *opts):
+    project = tmp_path / name
+    project.mkdir()
+    jaraco.path.build(files, prefix=project)
+    opts = [*opts, "--no-build-isolation"]  # force current version of setuptools
+    out = venv.run(
+        ["python", "-m", "pip", "-v", "install", "-e", str(project), *opts],
+        stderr=subprocess.STDOUT,
+    )
+    return project, out
+
+
+def _addsitedirs(new_dirs):
+    """To use this function, it is necessary to insert new_dir in front of sys.path.
+    The Python process will try to import a ``sitecustomize`` module on startup.
+    If we manipulate sys.path/PYTHONPATH, we can force it to run our code,
+    which invokes ``addsitedir`` and ensure ``.pth`` files are loaded.
+    """
+    content = '\n'.join(
+        ("import site",)
+        + tuple(f"site.addsitedir({os.fspath(new_dir)!r})" for new_dir in new_dirs)
+    )
+    (new_dirs[0] / "sitecustomize.py").write_text(content, encoding="utf-8")
+
+
+# ---- Assertion Helpers ----
+
+
+def assert_path(pkg, expected):
+    # __path__ is not guaranteed to exist, so we have to account for that
+    if pkg.__path__:
+        path = next(iter(pkg.__path__), None)
+        if path:
+            assert str(Path(path).resolve()) == expected
+
+
+def assert_link_to(file: Path, other: Path) -> None:
+    if file.is_symlink():
+        assert str(file.resolve()) == str(other.resolve())
+    else:
+        file_stat = file.stat()
+        other_stat = other.stat()
+        assert file_stat[stat.ST_INO] == other_stat[stat.ST_INO]
+        assert file_stat[stat.ST_DEV] == other_stat[stat.ST_DEV]
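+        # A hard link shares inode and device numbers with its target,
+        # which is exactly what the two assertions above verify.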
+
+
+def comparable_path(str_with_path: str) -> str:
+    return str_with_path.lower().replace(os.sep, "/").replace("//", "/")
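+
+
+# Illustration: on a POSIX system, comparable_path("/Tmp//X") == "/tmp/x";
+# lower-casing and separator normalization make the comparison OS-independent.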
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_glob.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_glob.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d225a44610163c7d56d65b07c06f0f598ccfe84
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_glob.py
@@ -0,0 +1,45 @@
+import pytest
+from jaraco import path
+
+from setuptools.glob import glob
+
+
+@pytest.mark.parametrize(
+    ('tree', 'pattern', 'matches'),
+    (
+        ('', b'', []),
+        ('', '', []),
+        (
+            """
+     appveyor.yml
+     CHANGES.rst
+     LICENSE
+     MANIFEST.in
+     pyproject.toml
+     README.rst
+     setup.cfg
+     setup.py
+     """,
+            '*.rst',
+            ('CHANGES.rst', 'README.rst'),
+        ),
+        (
+            """
+     appveyor.yml
+     CHANGES.rst
+     LICENSE
+     MANIFEST.in
+     pyproject.toml
+     README.rst
+     setup.cfg
+     setup.py
+     """,
+            b'*.rst',
+            (b'CHANGES.rst', b'README.rst'),
+        ),
+    ),
+)
+def test_glob(monkeypatch, tmpdir, tree, pattern, matches):
+    monkeypatch.chdir(tmpdir)
+    path.build({name: '' for name in tree.split()})
+    assert sorted(glob(pattern)) == sorted(matches)
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_namespaces.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_namespaces.py
new file mode 100644
index 0000000000000000000000000000000000000000..a0f4120bf7900b2118cc066034e036ab7af1798b
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_namespaces.py
@@ -0,0 +1,138 @@
+import subprocess
+import sys
+
+from setuptools._path import paths_on_pythonpath
+
+from . import namespaces
+
+
+class TestNamespaces:
+    def test_mixed_site_and_non_site(self, tmpdir):
+        """
+        Installing two packages sharing the same namespace, one installed
+        to a site dir and the other installed just to a path on PYTHONPATH
+        should leave the namespace intact and both packages reachable by
+        import.
+        """
+        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
+        site_packages = tmpdir / 'site-packages'
+        path_packages = tmpdir / 'path-packages'
+        targets = site_packages, path_packages
+        # use pip to install to the target directory
+        install_cmd = [
+            sys.executable,
+            '-m',
+            'pip.__main__',
+            'install',
+            str(pkg_A),
+            '-t',
+            str(site_packages),
+        ]
+        subprocess.check_call(install_cmd)
+        namespaces.make_site_dir(site_packages)
+        install_cmd = [
+            sys.executable,
+            '-m',
+            'pip.__main__',
+            'install',
+            str(pkg_B),
+            '-t',
+            str(path_packages),
+        ]
+        subprocess.check_call(install_cmd)
+        try_import = [
+            sys.executable,
+            '-c',
+            'import myns.pkgA; import myns.pkgB',
+        ]
+        with paths_on_pythonpath(map(str, targets)):
+            subprocess.check_call(try_import)
+
+    def test_pkg_resources_import(self, tmpdir):
+        """
+        Ensure that a namespace package doesn't break on import
+        of pkg_resources.
+        """
+        pkg = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+        target = tmpdir / 'packages'
+        target.mkdir()
+        install_cmd = [
+            sys.executable,
+            '-m',
+            'pip',
+            'install',
+            '-t',
+            str(target),
+            str(pkg),
+        ]
+        with paths_on_pythonpath([str(target)]):
+            subprocess.check_call(install_cmd)
+        namespaces.make_site_dir(target)
+        try_import = [
+            sys.executable,
+            '-c',
+            'import pkg_resources',
+        ]
+        with paths_on_pythonpath([str(target)]):
+            subprocess.check_call(try_import)
+
+    def test_namespace_package_installed_and_cwd(self, tmpdir):
+        """
+        Installing a namespace package while also having it in the current
+        working directory, only one version should take precedence.
+        """
+        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+        target = tmpdir / 'packages'
+        # use pip to install to the target directory
+        install_cmd = [
+            sys.executable,
+            '-m',
+            'pip.__main__',
+            'install',
+            str(pkg_A),
+            '-t',
+            str(target),
+        ]
+        subprocess.check_call(install_cmd)
+        namespaces.make_site_dir(target)
+
+        # ensure that package imports and pkg_resources imports
+        pkg_resources_imp = [
+            sys.executable,
+            '-c',
+            'import pkg_resources; import myns.pkgA',
+        ]
+        with paths_on_pythonpath([str(target)]):
+            subprocess.check_call(pkg_resources_imp, cwd=str(pkg_A))
+
+    def test_packages_in_the_same_namespace_installed_and_cwd(self, tmpdir):
+        """
+        Installing one namespace package while also having another in the same
+        namespace in the current working directory, both of them must be
+        importable.
+        """
+        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
+        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
+        target = tmpdir / 'packages'
+        # use pip to install to the target directory
+        install_cmd = [
+            sys.executable,
+            '-m',
+            'pip.__main__',
+            'install',
+            str(pkg_A),
+            '-t',
+            str(target),
+        ]
+        subprocess.check_call(install_cmd)
+        namespaces.make_site_dir(target)
+
+        # ensure that all packages import and pkg_resources imports
+        pkg_resources_imp = [
+            sys.executable,
+            '-c',
+            'import pkg_resources; import myns.pkgA; import myns.pkgB',
+        ]
+        with paths_on_pythonpath([str(target)]):
+            subprocess.check_call(pkg_resources_imp, cwd=str(pkg_B))
diff --git a/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/textwrap.py b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/textwrap.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e39618dca4ad6c3f0d4c8cb20af59ab85fb0eba
--- /dev/null
+++ b/mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/textwrap.py
@@ -0,0 +1,6 @@
+import textwrap
+
+
+def DALS(s):
+    "dedent and left-strip"
+    return textwrap.dedent(s).lstrip()
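+
+
+# Illustration: DALS("\n    a\n    b\n") == "a\nb\n"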
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_assert_async_native.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_assert_async_native.h
new file mode 100644
index 0000000000000000000000000000000000000000..b657735ee5f585de0c00a2291775fda4568a8ef6
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_assert_async_native.h
@@ -0,0 +1,24 @@
+#pragma once
+
+// @generated by torchgen/gen.py from NativeFunction.h
+
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+#include <c10/core/QScheme.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <tuple>
+#include <vector>
+
+
+namespace at {
+namespace native {
+TORCH_API void _assert_async_cpu(const at::Tensor & self);
+TORCH_API void _assert_async_cuda(const at::Tensor & self);
+TORCH_API void _assert_async_msg_cpu(const at::Tensor & self, c10::string_view assert_msg);
+TORCH_API void _assert_async_msg_cuda(const at::Tensor & self, c10::string_view assert_msg);
+} // namespace native
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_clamp_min_native.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_clamp_min_native.h
new file mode 100644
index 0000000000000000000000000000000000000000..02e3e40f993af792de8684d256f391160ec134f4
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_clamp_min_native.h
@@ -0,0 +1,35 @@
+#pragma once
+
+// @generated by torchgen/gen.py from NativeFunction.h
+
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+#include <c10/core/QScheme.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <tuple>
+#include <vector>
+
+
+namespace at {
+namespace native {
+TORCH_API void _foreach_clamp_min_Scalar_out(at::TensorList self, const at::Scalar & scalar, at::TensorList out);
+TORCH_API ::std::vector<at::Tensor> foreach_tensor_clamp_min_scalar_kernel_slow(at::TensorList self, const at::Scalar & scalar);
+TORCH_API void foreach_tensor_clamp_min_scalar_kernel_slow_(at::TensorList self, const at::Scalar & scalar);
+TORCH_API ::std::vector<at::Tensor> foreach_tensor_clamp_min_scalar_kernel_cuda(at::TensorList self, const at::Scalar & scalar);
+TORCH_API void foreach_tensor_clamp_min_scalar_kernel_cuda_(at::TensorList self, const at::Scalar & scalar);
+TORCH_API void _foreach_clamp_min_List_out(at::TensorList self, at::TensorList other, at::TensorList out);
+TORCH_API ::std::vector<at::Tensor> foreach_tensor_clamp_min_list_kernel_slow(at::TensorList self, at::TensorList other);
+TORCH_API void foreach_tensor_clamp_min_list_kernel_slow_(at::TensorList self, at::TensorList other);
+TORCH_API ::std::vector<at::Tensor> foreach_tensor_clamp_min_list_kernel_cuda(at::TensorList self, at::TensorList other);
+TORCH_API void foreach_tensor_clamp_min_list_kernel_cuda_(at::TensorList self, at::TensorList other);
+TORCH_API void _foreach_clamp_min_ScalarList_out(at::TensorList self, at::ArrayRef<at::Scalar> scalars, at::TensorList out);
+TORCH_API ::std::vector<at::Tensor> foreach_tensor_clamp_min_scalarlist_kernel_slow(at::TensorList self, at::ArrayRef<at::Scalar> scalars);
+TORCH_API void foreach_tensor_clamp_min_scalarlist_kernel_slow_(at::TensorList self, at::ArrayRef<at::Scalar> scalars);
+TORCH_API ::std::vector<at::Tensor> foreach_tensor_clamp_min_scalarlist_kernel_cuda(at::TensorList self, at::ArrayRef<at::Scalar> scalars);
+TORCH_API void foreach_tensor_clamp_min_scalarlist_kernel_cuda_(at::TensorList self, at::ArrayRef<at::Scalar> scalars);
+} // namespace native
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_zero_cuda_dispatch.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_zero_cuda_dispatch.h
new file mode 100644
index 0000000000000000000000000000000000000000..242aa56705a2fd2da0051f9a5d2c06e2f2c0f425
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_zero_cuda_dispatch.h
@@ -0,0 +1,23 @@
+#pragma once
+// @generated by torchgen/gen.py from DispatchKeyFunction.h
+
+// NB: The implementing C++ file is RegisterDispatchKey.cpp
+
+// The only #includes we need are for custom classes that have defaults in the C++ API
+#include <c10/core/MemoryFormat.h>
+#include <c10/core/Scalar.h>
+#include <ATen/core/Reduction.h>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+
+namespace cuda {
+
+TORCH_API void _foreach_zero_(at::TensorList self);
+
+} // namespace cuda
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_compositeexplicitautograd_dispatch.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_compositeexplicitautograd_dispatch.h
new file mode 100644
index 0000000000000000000000000000000000000000..c0aa6efd611de34e123d9c35c584939772a1c39a
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_grid_sampler_2d_cpu_fallback_compositeexplicitautograd_dispatch.h
@@ -0,0 +1,25 @@
+#pragma once
+// @generated by torchgen/gen.py from DispatchKeyFunction.h
+
+// NB: The implementing C++ file is RegisterDispatchKey.cpp
+
+// The only #includes we need are for custom classes that have defaults in the C++ API
+#include <c10/core/MemoryFormat.h>
+#include <c10/core/Scalar.h>
+#include <ATen/core/Reduction.h>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+
+namespace compositeexplicitautograd {
+
+TORCH_API at::Tensor _grid_sampler_2d_cpu_fallback(const at::Tensor & input, const at::Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners);
+TORCH_API at::Tensor & _grid_sampler_2d_cpu_fallback_out(at::Tensor & out, const at::Tensor & input, const at::Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners);
+TORCH_API at::Tensor & _grid_sampler_2d_cpu_fallback_outf(const at::Tensor & input, const at::Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners, at::Tensor & out);
+
+} // namespace compositeexplicitautograd
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_neg_view_copy.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_neg_view_copy.h
new file mode 100644
index 0000000000000000000000000000000000000000..d09e8a602ac74a0eeb74fc962efedb95b89b7304
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_neg_view_copy.h
@@ -0,0 +1,39 @@
+#pragma once
+
+// @generated by torchgen/gen.py from Function.h
+
+#include <ATen/Context.h>
+#include <ATen/DeviceGuard.h>
+#include <ATen/TensorUtils.h>
+#include <ATen/TracerMode.h>
+#include <ATen/core/Generator.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+
+
+
+#include <ATen/ops/_neg_view_copy_ops.h>
+
+namespace at {
+
+
+// aten::_neg_view_copy(Tensor self) -> Tensor
+inline at::Tensor _neg_view_copy(const at::Tensor & self) {
+    return at::_ops::_neg_view_copy::call(self);
+}
+
+// aten::_neg_view_copy.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
+inline at::Tensor & _neg_view_copy_out(at::Tensor & out, const at::Tensor & self) {
+    return at::_ops::_neg_view_copy_out::call(self, out);
+}
+// aten::_neg_view_copy.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
+inline at::Tensor & _neg_view_copy_outf(const at::Tensor & self, at::Tensor & out) {
+    return at::_ops::_neg_view_copy_out::call(self, out);
+}
+
+}
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_cudnn_attention.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_cudnn_attention.h
new file mode 100644
index 0000000000000000000000000000000000000000..1369b432972f75e8339116380aa540143e0b05a5
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_cudnn_attention.h
@@ -0,0 +1,30 @@
+#pragma once
+
+// @generated by torchgen/gen.py from Function.h
+
+#include <ATen/Context.h>
+#include <ATen/DeviceGuard.h>
+#include <ATen/TensorUtils.h>
+#include <ATen/TracerMode.h>
+#include <ATen/core/Generator.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+
+
+
+#include <ATen/ops/_scaled_dot_product_cudnn_attention_ops.h>
+
+namespace at {
+
+
+// aten::_scaled_dot_product_cudnn_attention(Tensor query, Tensor key, Tensor value, float dropout_p=0.0, bool is_causal=False, bool return_debug_mask=False, *, float? scale=None) -> (Tensor output, Tensor logsumexp, Tensor philox_seed, Tensor philox_offset)
+inline ::std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor> _scaled_dot_product_cudnn_attention(const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, double dropout_p=0.0, bool is_causal=false, bool return_debug_mask=false, c10::optional<double> scale=c10::nullopt) {
+    return at::_ops::_scaled_dot_product_cudnn_attention::call(query, key, value, dropout_p, is_causal, return_debug_mask, scale);
+}
+
+}
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_validate_sparse_bsc_tensor_args.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_validate_sparse_bsc_tensor_args.h
new file mode 100644
index 0000000000000000000000000000000000000000..78d3453ca6e6d1700fdd1b77fd2fca9102794674
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_validate_sparse_bsc_tensor_args.h
@@ -0,0 +1,30 @@
+#pragma once
+
+// @generated by torchgen/gen.py from Function.h
+
+#include <ATen/Context.h>
+#include <ATen/DeviceGuard.h>
+#include <ATen/TensorUtils.h>
+#include <ATen/TracerMode.h>
+#include <ATen/core/Generator.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+
+
+
+#include <ATen/ops/_validate_sparse_bsc_tensor_args_ops.h>
+
+namespace at {
+
+
+// aten::_validate_sparse_bsc_tensor_args(Tensor ccol_indices, Tensor row_indices, Tensor values, int[] size) -> ()
+inline void _validate_sparse_bsc_tensor_args(const at::Tensor & ccol_indices, const at::Tensor & row_indices, const at::Tensor & values, at::IntArrayRef size) {
+    return at::_ops::_validate_sparse_bsc_tensor_args::call(ccol_indices, row_indices, values, size);
+}
+
+}
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_validate_sparse_compressed_tensor_args_compositeimplicitautograd_dispatch.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_validate_sparse_compressed_tensor_args_compositeimplicitautograd_dispatch.h
new file mode 100644
index 0000000000000000000000000000000000000000..75c35a9462fcba4d794410aeaa33419b5d3b344b
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/_validate_sparse_compressed_tensor_args_compositeimplicitautograd_dispatch.h
@@ -0,0 +1,23 @@
+#pragma once
+// @generated by torchgen/gen.py from DispatchKeyFunction.h
+
+// NB: The implementing C++ file is RegisterDispatchKey.cpp
+
+// The only #includes we need are for custom classes that have defaults in the C++ API
+#include <c10/core/MemoryFormat.h>
+#include <c10/core/Scalar.h>
+#include <ATen/core/Reduction.h>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+
+namespace compositeimplicitautograd {
+
+TORCH_API void _validate_sparse_compressed_tensor_args(const at::Tensor & compressed_indices, const at::Tensor & plain_indices, const at::Tensor & values, at::IntArrayRef size, at::Layout layout);
+
+} // namespace compositeimplicitautograd
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/angle.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/angle.h
new file mode 100644
index 0000000000000000000000000000000000000000..cdabb6835b60e569db680903e41df05286861cbf
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/angle.h
@@ -0,0 +1,39 @@
+#pragma once
+
+// @generated by torchgen/gen.py from Function.h
+
+#include <ATen/Context.h>
+#include <ATen/DeviceGuard.h>
+#include <ATen/TensorUtils.h>
+#include <ATen/TracerMode.h>
+#include <ATen/core/Generator.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+
+
+
+#include <ATen/ops/angle_ops.h>
+
+namespace at {
+
+
+// aten::angle(Tensor self) -> Tensor
+inline at::Tensor angle(const at::Tensor & self) {
+    return at::_ops::angle::call(self);
+}
+
+// aten::angle.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
+inline at::Tensor & angle_out(at::Tensor & out, const at::Tensor & self) {
+    return at::_ops::angle_out::call(self, out);
+}
+// aten::angle.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
+inline at::Tensor & angle_outf(const at::Tensor & self, at::Tensor & out) {
+    return at::_ops::angle_out::call(self, out);
+}
+
+}
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/any_native.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/any_native.h
new file mode 100644
index 0000000000000000000000000000000000000000..007aa045513c189e941d8b9acc03a8b18121b0dc
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/any_native.h
@@ -0,0 +1,34 @@
+#pragma once
+
+// @generated by torchgen/gen.py from NativeFunction.h
+
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+#include <c10/core/QScheme.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <tuple>
+#include <vector>
+#include <ATen/ops/any_meta.h>
+
+namespace at {
+namespace native {
+struct TORCH_API structured_any_out : public at::meta::structured_any_dim {
+void impl(const at::Tensor & self, int64_t dim, bool keepdim, const at::Tensor & out);
+};
+TORCH_API at::Tensor any_dims_default(const at::Tensor & self, at::OptionalIntArrayRef dim=c10::nullopt, bool keepdim=false);
+TORCH_API at::Tensor & any_dims_out_default(const at::Tensor & self, at::OptionalIntArrayRef dim, bool keepdim, at::Tensor & out);
+struct TORCH_API structured_any_dims_out : public at::meta::structured_any_dims {
+void impl(const at::Tensor & self, at::OptionalIntArrayRef dim, bool keepdim, const at::Tensor & out);
+};
+TORCH_API at::Tensor any(const at::Tensor & self, at::Dimname dim, bool keepdim=false);
+TORCH_API at::Tensor & any_out(const at::Tensor & self, at::Dimname dim, bool keepdim, at::Tensor & out);
+struct TORCH_API structured_any_all_out : public at::meta::structured_any {
+void impl(const at::Tensor & self, const at::Tensor & out);
+};
+TORCH_API at::Tensor any_sparse(const at::Tensor & self);
+} // namespace native
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool2d_backward_meta.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool2d_backward_meta.h
new file mode 100644
index 0000000000000000000000000000000000000000..5909575414c6f83fcab926abda20dfb153ec318a
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/avg_pool2d_backward_meta.h
@@ -0,0 +1,27 @@
+#pragma once
+
+// @generated by torchgen/gen.py from NativeMetaFunction.h
+
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+#include <c10/core/QScheme.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/TensorIterator.h>
+#include <ATen/TensorMeta.h>
+#include <tuple>
+#include <vector>
+
+namespace at {
+namespace meta {
+
+struct TORCH_API structured_avg_pool2d_backward : public at::impl::MetaBase {
+    
+    
+    void meta(const at::Tensor & grad_output, const at::Tensor & self, at::IntArrayRef kernel_size, at::IntArrayRef stride, at::IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional<int64_t> divisor_override);
+};
+
+} // namespace meta
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/bernoulli_meta_dispatch.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/bernoulli_meta_dispatch.h
new file mode 100644
index 0000000000000000000000000000000000000000..96424b9faaacf5e6d12e4fc914e06625fb27de90
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/bernoulli_meta_dispatch.h
@@ -0,0 +1,24 @@
+#pragma once
+// @generated by torchgen/gen.py from DispatchKeyFunction.h
+
+// NB: The implementing C++ file is RegisterDispatchKey.cpp
+
+// The only #includes we need are for custom classes that have defaults in the C++ API
+#include <c10/core/MemoryFormat.h>
+#include <c10/core/Scalar.h>
+#include <ATen/core/Reduction.h>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+
+namespace meta {
+
+TORCH_API at::Tensor & bernoulli_(at::Tensor & self, const at::Tensor & p, c10::optional<at::Generator> generator=c10::nullopt);
+TORCH_API at::Tensor & bernoulli_(at::Tensor & self, double p=0.5, c10::optional<at::Generator> generator=c10::nullopt);
+
+} // namespace meta
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/cumulative_trapezoid.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/cumulative_trapezoid.h
new file mode 100644
index 0000000000000000000000000000000000000000..01a21f83723e6c74df839a4fa68d02f2e7a6970f
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/cumulative_trapezoid.h
@@ -0,0 +1,35 @@
+#pragma once
+
+// @generated by torchgen/gen.py from Function.h
+
+#include <ATen/Context.h>
+#include <ATen/DeviceGuard.h>
+#include <ATen/TensorUtils.h>
+#include <ATen/TracerMode.h>
+#include <ATen/core/Generator.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+
+
+
+#include <ATen/ops/cumulative_trapezoid_ops.h>
+
+namespace at {
+
+
+// aten::cumulative_trapezoid.x(Tensor y, Tensor x, *, int dim=-1) -> Tensor
+inline at::Tensor cumulative_trapezoid(const at::Tensor & y, const at::Tensor & x, int64_t dim=-1) {
+    return at::_ops::cumulative_trapezoid_x::call(y, x, dim);
+}
+
+// aten::cumulative_trapezoid.dx(Tensor y, *, Scalar dx=1, int dim=-1) -> Tensor
+inline at::Tensor cumulative_trapezoid(const at::Tensor & y, const at::Scalar & dx=1, int64_t dim=-1) {
+    return at::_ops::cumulative_trapezoid_dx::call(y, dx, dim);
+}
+
+}
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/diag_embed_native.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/diag_embed_native.h
new file mode 100644
index 0000000000000000000000000000000000000000..3e2f3f4e044bbd4222b2254f5357e8fe2dbcf922
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/diag_embed_native.h
@@ -0,0 +1,22 @@
+#pragma once
+
+// @generated by torchgen/gen.py from NativeFunction.h
+
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+#include <c10/core/QScheme.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <tuple>
+#include <vector>
+
+
+namespace at {
+namespace native {
+TORCH_API at::Tensor & diag_embed_out(const at::Tensor & self, int64_t offset, int64_t dim1, int64_t dim2, at::Tensor & out);
+TORCH_API at::Tensor diag_embed(const at::Tensor & self, int64_t offset=0, int64_t dim1=-2, int64_t dim2=-1);
+} // namespace native
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_backward_native.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_backward_native.h
new file mode 100644
index 0000000000000000000000000000000000000000..f6b9aee974c9101d289672f4e7c1e75e052945e1
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/fake_quantize_per_channel_affine_cachemask_backward_native.h
@@ -0,0 +1,21 @@
+#pragma once
+
+// @generated by torchgen/gen.py from NativeFunction.h
+
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+#include <c10/core/QScheme.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <tuple>
+#include <vector>
+
+
+namespace at {
+namespace native {
+TORCH_API at::Tensor fake_quantize_per_channel_affine_cachemask_backward(const at::Tensor & grad, const at::Tensor & mask);
+} // namespace native
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/gcd.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/gcd.h
new file mode 100644
index 0000000000000000000000000000000000000000..ea16ad5d580e51176c508f6058b4263834b6c044
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/gcd.h
@@ -0,0 +1,44 @@
+#pragma once
+
+// @generated by torchgen/gen.py from Function.h
+
+#include <ATen/Context.h>
+#include <ATen/DeviceGuard.h>
+#include <ATen/TensorUtils.h>
+#include <ATen/TracerMode.h>
+#include <ATen/core/Generator.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+
+
+
+#include <ATen/ops/gcd_ops.h>
+
+namespace at {
+
+
+// aten::gcd.out(Tensor self, Tensor other, *, Tensor(a!) out) -> Tensor(a!)
+inline at::Tensor & gcd_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & other) {
+    return at::_ops::gcd_out::call(self, other, out);
+}
+// aten::gcd.out(Tensor self, Tensor other, *, Tensor(a!) out) -> Tensor(a!)
+inline at::Tensor & gcd_outf(const at::Tensor & self, const at::Tensor & other, at::Tensor & out) {
+    return at::_ops::gcd_out::call(self, other, out);
+}
+
+// aten::gcd(Tensor self, Tensor other) -> Tensor
+inline at::Tensor gcd(const at::Tensor & self, const at::Tensor & other) {
+    return at::_ops::gcd::call(self, other);
+}
+
+// aten::gcd_(Tensor(a!) self, Tensor other) -> Tensor(a!)
+inline at::Tensor & gcd_(at::Tensor & self, const at::Tensor & other) {
+    return at::_ops::gcd_::call(self, other);
+}
+
+}
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/gelu_backward_compositeexplicitautogradnonfunctional_dispatch.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/gelu_backward_compositeexplicitautogradnonfunctional_dispatch.h
new file mode 100644
index 0000000000000000000000000000000000000000..fa915dc91bc9194e93fcf99f124570314d57201b
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/gelu_backward_compositeexplicitautogradnonfunctional_dispatch.h
@@ -0,0 +1,23 @@
+#pragma once
+// @generated by torchgen/gen.py from DispatchKeyFunction.h
+
+// NB: The implementing C++ file is RegisterDispatchKey.cpp
+
+// The only #includes we need are for custom classes that have defaults in the C++ API
+#include <c10/core/MemoryFormat.h>
+#include <c10/core/Scalar.h>
+#include <ATen/core/Reduction.h>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+
+namespace compositeexplicitautogradnonfunctional {
+
+TORCH_API at::Tensor gelu_backward(const at::Tensor & grad_output, const at::Tensor & self, c10::string_view approximate="none");
+
+} // namespace compositeexplicitautogradnonfunctional
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/hardtanh_backward_cuda_dispatch.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/hardtanh_backward_cuda_dispatch.h
new file mode 100644
index 0000000000000000000000000000000000000000..c17b850d85b8b28e488a54b8875aeaeac8e15023
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/hardtanh_backward_cuda_dispatch.h
@@ -0,0 +1,25 @@
+#pragma once
+// @generated by torchgen/gen.py from DispatchKeyFunction.h
+
+// NB: The implementing C++ file is RegisterDispatchKey.cpp
+
+// The only #includes we need are for custom classes that have defaults in the C++ API
+#include <c10/core/MemoryFormat.h>
+#include <c10/core/Scalar.h>
+#include <ATen/core/Reduction.h>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+
+namespace cuda {
+
+TORCH_API at::Tensor hardtanh_backward(const at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & min_val, const at::Scalar & max_val);
+TORCH_API at::Tensor & hardtanh_backward_out(at::Tensor & grad_input, const at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & min_val, const at::Scalar & max_val);
+TORCH_API at::Tensor & hardtanh_backward_outf(const at::Tensor & grad_output, const at::Tensor & self, const at::Scalar & min_val, const at::Scalar & max_val, at::Tensor & grad_input);
+
+} // namespace cuda
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/maximum_ops.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/maximum_ops.h
new file mode 100644
index 0000000000000000000000000000000000000000..092c13d6725fba99cc11050758197e988f318a2c
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/maximum_ops.h
@@ -0,0 +1,39 @@
+#pragma once
+
+// @generated by torchgen/gen.py from Operator.h
+
+#include <tuple>
+#include <vector>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+namespace _ops {
+
+
+struct TORCH_API maximum {
+  using schema = at::Tensor (const at::Tensor &, const at::Tensor &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::maximum")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "maximum(Tensor self, Tensor other) -> Tensor")
+  static at::Tensor call(const at::Tensor & self, const at::Tensor & other);
+  static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other);
+};
+
+struct TORCH_API maximum_out {
+  using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, at::Tensor &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::maximum")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "maximum.out(Tensor self, Tensor other, *, Tensor(a!) out) -> Tensor(a!)")
+  static at::Tensor & call(const at::Tensor & self, const at::Tensor & other, at::Tensor & out);
+  static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other, at::Tensor & out);
+};
+
+}} // namespace at::_ops
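+
+// Usage sketch (illustrative): the public ATen wrappers route through these
+// generated structs, e.g. at::maximum(a, b) ends up in
+// at::_ops::maximum::call(a, b), and the out-variant in
+// at::_ops::maximum_out::call(a, b, out).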
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/nextafter_cpu_dispatch.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/nextafter_cpu_dispatch.h
new file mode 100644
index 0000000000000000000000000000000000000000..4c320c412a3566746310eaa57d4f8ef4ec42fa43
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/nextafter_cpu_dispatch.h
@@ -0,0 +1,26 @@
+#pragma once
+// @generated by torchgen/gen.py from DispatchKeyFunction.h
+
+// NB: The implementing C++ file is RegisterDispatchKey.cpp
+
+// The only #includes we need are for custom classes that have defaults in the C++ API
+#include <c10/core/MemoryFormat.h>
+#include <c10/core/Scalar.h>
+#include <ATen/core/Reduction.h>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+
+namespace cpu {
+
+TORCH_API at::Tensor nextafter(const at::Tensor & self, const at::Tensor & other);
+TORCH_API at::Tensor & nextafter_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & other);
+TORCH_API at::Tensor & nextafter_outf(const at::Tensor & self, const at::Tensor & other, at::Tensor & out);
+TORCH_API at::Tensor & nextafter_(at::Tensor & self, const at::Tensor & other);
+
+} // namespace cpu
+} // namespace at
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/pairwise_distance_native.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/pairwise_distance_native.h
new file mode 100644
index 0000000000000000000000000000000000000000..750f65e5d62312c5e399327c02371ef466389e79
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/pairwise_distance_native.h
@@ -0,0 +1,21 @@
+#pragma once
+
+// @generated by torchgen/gen.py from NativeFunction.h
+
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+#include <c10/core/QScheme.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <tuple>
+#include <vector>
+
+
+namespace at {
+namespace native {
+TORCH_API at::Tensor pairwise_distance(const at::Tensor & x1, const at::Tensor & x2, double p=2, double eps=1e-06, bool keepdim=false);
+} // namespace native
+} // namespace at
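The declaration records the defaults `p=2`, `eps=1e-06`, `keepdim=false`: the op computes the batched p-norm distance between corresponding rows of `x1` and `x2` (roughly `||x1 - x2 + eps||_p` along the last dimension). A small usage sketch:

```cpp
#include <ATen/ATen.h>

int main() {
  // Two batches of 4-dimensional vectors.
  at::Tensor x1 = at::randn({8, 4});
  at::Tensor x2 = at::randn({8, 4});

  // Uses the declared defaults p=2, eps=1e-6, keepdim=false.
  at::Tensor d = at::pairwise_distance(x1, x2);
  // d has shape {8}: one Euclidean distance per row pair.
  return 0;
}
```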
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/pow_cpu_dispatch.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/pow_cpu_dispatch.h
new file mode 100644
index 0000000000000000000000000000000000000000..41d76bb8ada02710eb71c57ff21d47f878cd3cc9
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/pow_cpu_dispatch.h
@@ -0,0 +1,33 @@
+#pragma once
+// @generated by torchgen/gen.py from DispatchKeyFunction.h
+
+// NB: The implementing C++ file is RegisterDispatchKey.cpp
+
+// The only #includes we need are for custom classes that have defaults in the C++ API
+#include <c10/core/MemoryFormat.h>
+#include <c10/core/Scalar.h>
+#include <ATen/core/Reduction.h>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+
+namespace cpu {
+
+TORCH_API at::Tensor pow(const at::Tensor & self, const at::Tensor & exponent);
+TORCH_API at::Tensor & pow_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & exponent);
+TORCH_API at::Tensor & pow_outf(const at::Tensor & self, const at::Tensor & exponent, at::Tensor & out);
+TORCH_API at::Tensor & pow_(at::Tensor & self, const at::Tensor & exponent);
+TORCH_API at::Tensor pow(const at::Scalar & self, const at::Tensor & exponent);
+TORCH_API at::Tensor & pow_out(at::Tensor & out, const at::Scalar & self, const at::Tensor & exponent);
+TORCH_API at::Tensor & pow_outf(const at::Scalar & self, const at::Tensor & exponent, at::Tensor & out);
+TORCH_API at::Tensor pow(const at::Tensor & self, const at::Scalar & exponent);
+TORCH_API at::Tensor & pow_out(at::Tensor & out, const at::Tensor & self, const at::Scalar & exponent);
+TORCH_API at::Tensor & pow_outf(const at::Tensor & self, const at::Scalar & exponent, at::Tensor & out);
+TORCH_API at::Tensor & pow_(at::Tensor & self, const at::Scalar & exponent);
+
+} // namespace cpu
+} // namespace at
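The overload set spells out the three argument combinations `pow` supports (Tensor^Tensor, Scalar^Tensor, Tensor^Scalar) plus the in-place forms. Via the top-level `at::` wrappers this looks like the following sketch:

```cpp
#include <ATen/ATen.h>

int main() {
  at::Tensor t = at::tensor({2.0f, 3.0f});

  at::Tensor a = at::pow(t, t);  // Tensor ^ Tensor, elementwise
  at::Tensor b = at::pow(t, 2);  // Tensor ^ Scalar
  at::Tensor c = at::pow(2, t);  // Scalar ^ Tensor
  t.pow_(2);                     // in-place Tensor ^ Scalar, mutates t
  return 0;
}
```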
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/pow_ops.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/pow_ops.h
new file mode 100644
index 0000000000000000000000000000000000000000..0c2805b9ee8711849ecf285cec10332944ec15c7
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/pow_ops.h
@@ -0,0 +1,105 @@
+#pragma once
+
+// @generated by torchgen/gen.py from Operator.h
+
+#include <tuple>
+#include <vector>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+namespace _ops {
+
+
+struct TORCH_API pow_Tensor_Tensor_out {
+  using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, at::Tensor &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::pow")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor_Tensor_out")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "pow.Tensor_Tensor_out(Tensor self, Tensor exponent, *, Tensor(a!) out) -> Tensor(a!)")
+  static at::Tensor & call(const at::Tensor & self, const at::Tensor & exponent, at::Tensor & out);
+  static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & exponent, at::Tensor & out);
+};
+
+struct TORCH_API pow_Tensor_Tensor {
+  using schema = at::Tensor (const at::Tensor &, const at::Tensor &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::pow")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor_Tensor")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "pow.Tensor_Tensor(Tensor self, Tensor exponent) -> Tensor")
+  static at::Tensor call(const at::Tensor & self, const at::Tensor & exponent);
+  static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & exponent);
+};
+
+struct TORCH_API pow_Scalar_out {
+  using schema = at::Tensor & (const at::Scalar &, const at::Tensor &, at::Tensor &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::pow")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar_out")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "pow.Scalar_out(Scalar self, Tensor exponent, *, Tensor(a!) out) -> Tensor(a!)")
+  static at::Tensor & call(const at::Scalar & self, const at::Tensor & exponent, at::Tensor & out);
+  static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Scalar & self, const at::Tensor & exponent, at::Tensor & out);
+};
+
+struct TORCH_API pow_Scalar {
+  using schema = at::Tensor (const at::Scalar &, const at::Tensor &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::pow")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "pow.Scalar(Scalar self, Tensor exponent) -> Tensor")
+  static at::Tensor call(const at::Scalar & self, const at::Tensor & exponent);
+  static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Scalar & self, const at::Tensor & exponent);
+};
+
+struct TORCH_API pow_Tensor_Scalar_out {
+  using schema = at::Tensor & (const at::Tensor &, const at::Scalar &, at::Tensor &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::pow")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor_Scalar_out")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "pow.Tensor_Scalar_out(Tensor self, Scalar exponent, *, Tensor(a!) out) -> Tensor(a!)")
+  static at::Tensor & call(const at::Tensor & self, const at::Scalar & exponent, at::Tensor & out);
+  static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Scalar & exponent, at::Tensor & out);
+};
+
+struct TORCH_API pow_Tensor_Scalar {
+  using schema = at::Tensor (const at::Tensor &, const at::Scalar &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::pow")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor_Scalar")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "pow.Tensor_Scalar(Tensor self, Scalar exponent) -> Tensor")
+  static at::Tensor call(const at::Tensor & self, const at::Scalar & exponent);
+  static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Scalar & exponent);
+};
+
+struct TORCH_API pow__Scalar {
+  using schema = at::Tensor & (at::Tensor &, const at::Scalar &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::pow_")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "pow_.Scalar(Tensor(a!) self, Scalar exponent) -> Tensor(a!)")
+  static at::Tensor & call(at::Tensor & self, const at::Scalar & exponent);
+  static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::Tensor & self, const at::Scalar & exponent);
+};
+
+struct TORCH_API pow__Tensor {
+  using schema = at::Tensor & (at::Tensor &, const at::Tensor &);
+  using ptr_schema = schema*;
+  // See Note [static constexpr char* members for windows NVCC]
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::pow_")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor")
+  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "pow_.Tensor(Tensor(a!) self, Tensor exponent) -> Tensor(a!)")
+  static at::Tensor & call(at::Tensor & self, const at::Tensor & exponent);
+  static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::Tensor & self, const at::Tensor & exponent);
+};
+
+}} // namespace at::_ops
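The `(name, overload_name)` pair baked into each struct is exactly the key under which the operator is registered with the dispatcher, so the same operator can be recovered at runtime from those strings. A sketch, assuming the dispatcher headers are available as in a normal libtorch build:

```cpp
#include <ATen/core/dispatch/Dispatcher.h>

int main() {
  // Matches the strings recorded in pow_Tensor_Scalar above.
  auto op = c10::Dispatcher::singleton()
                .findSchemaOrThrow("aten::pow", "Tensor_Scalar");
  // op is an OperatorHandle for
  // pow.Tensor_Scalar(Tensor self, Scalar exponent) -> Tensor.
  return 0;
}
```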
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_meta_dispatch.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_meta_dispatch.h
new file mode 100644
index 0000000000000000000000000000000000000000..d184797c64a87c74da40e1c90050ef5bfb1bc816
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/special_shifted_chebyshev_polynomial_u_meta_dispatch.h
@@ -0,0 +1,25 @@
+#pragma once
+// @generated by torchgen/gen.py from DispatchKeyFunction.h
+
+// NB: The implementing C++ file is RegisterDispatchKey.cpp
+
+// The only #includes we need are for custom classes that have defaults in the C++ API
+#include <c10/core/MemoryFormat.h>
+#include <c10/core/Scalar.h>
+#include <ATen/core/Reduction.h>
+
+// Forward declarations of any types needed in the operator signatures.
+// We can't directly include these classes because it will cause circular include dependencies.
+// This file is included by TensorBody.h, which defines the Tensor class.
+#include <ATen/core/ATen_fwd.h>
+
+namespace at {
+
+namespace meta {
+
+TORCH_API at::Tensor special_shifted_chebyshev_polynomial_u(const at::Tensor & x, const at::Tensor & n);
+TORCH_API at::Tensor & special_shifted_chebyshev_polynomial_u_out(at::Tensor & out, const at::Tensor & x, const at::Tensor & n);
+TORCH_API at::Tensor & special_shifted_chebyshev_polynomial_u_outf(const at::Tensor & x, const at::Tensor & n, at::Tensor & out);
+
+} // namespace meta
+} // namespace at
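The `at::meta::` entry points run only the shape- and dtype-inference half of a structured kernel, which is what executes when the inputs live on the `meta` device (tensors that carry metadata but no storage). A sketch of that behavior; treat the exact dtype promotion of this particular op as an assumption:

```cpp
#include <ATen/ATen.h>

int main() {
  // Meta tensors have shape and dtype but no allocated data.
  at::Tensor x = at::empty({4}, at::device(at::kMeta));
  at::Tensor n = at::empty({4}, at::device(at::kMeta));

  // Only shape inference runs; no polynomial is actually evaluated.
  at::Tensor y = at::special_shifted_chebyshev_polynomial_u(x, n);
  // y.sizes() == {4}, and y is itself a meta tensor.
  return 0;
}
```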
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/tensor.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/tensor.h
new file mode 100644
index 0000000000000000000000000000000000000000..2f72b7ef02637983d35f5049bdb6d1bd613cd2bd
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/tensor.h
@@ -0,0 +1,30 @@
+#pragma once
+#include <ATen/core/TensorBody.h>
+#include <ATen/core/ScalarType.h>
+
+namespace at {
+
+// These functions are defined in ATen/Utils.cpp.
+#define TENSOR(T, S)                                                          \
+  TORCH_API Tensor tensor(ArrayRef<T> values, const TensorOptions& options); \
+  inline Tensor tensor(                                                      \
+      std::initializer_list<T> values, const TensorOptions& options) {       \
+    return at::tensor(ArrayRef<T>(values), options);                         \
+  }                                                                          \
+  inline Tensor tensor(T value, const TensorOptions& options) {              \
+    return at::tensor(ArrayRef<T>(value), options);                          \
+  }                                                                          \
+  inline Tensor tensor(ArrayRef<T> values) {                                 \
+    return at::tensor(std::move(values), at::dtype(k##S));                   \
+  }                                                                          \
+  inline Tensor tensor(std::initializer_list<T> values) {                    \
+    return at::tensor(ArrayRef<T>(values));                                  \
+  }                                                                          \
+  inline Tensor tensor(T value) {                                            \
+    return at::tensor(ArrayRef<T>(value));                                   \
+  }
+AT_FORALL_SCALAR_TYPES_AND3(Bool, Half, BFloat16, TENSOR)
+AT_FORALL_COMPLEX_TYPES(TENSOR)
+#undef TENSOR
+
+}  // namespace at
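Each expansion of `TENSOR(T, S)` stamps out the `at::tensor` factory overloads for one scalar type, which is what makes literal-based construction work. A usage sketch (the dtype deduction follows the C++ literal type, an inference worth double-checking for your compiler):

```cpp
#include <ATen/ATen.h>

int main() {
  at::Tensor a = at::tensor({1, 2, 3});    // initializer_list<int> -> kInt
  at::Tensor b = at::tensor(3.14);         // double scalar -> kDouble
  at::Tensor c = at::tensor({1.0f, 2.0f},  // explicit-options variant
                            at::dtype(at::kFloat));
  return 0;
}
```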
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/threshold_native.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/threshold_native.h
new file mode 100644
index 0000000000000000000000000000000000000000..6c8ae67512a023c587ac03bdcdee03a5b0f1c057
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/threshold_native.h
@@ -0,0 +1,24 @@
+#pragma once
+
+// @generated by torchgen/gen.py from NativeFunction.h
+
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+#include <c10/core/QScheme.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <tuple>
+#include <vector>
+#include <ATen/ops/threshold_meta.h>
+
+namespace at {
+namespace native {
+struct TORCH_API structured_threshold_out : public at::meta::structured_threshold {
+void impl(const at::Tensor & self, const at::Scalar & threshold, const at::Scalar & value, const at::Tensor & out);
+};
+TORCH_API at::Tensor threshold_quantized_cpu(const at::Tensor & self, const at::Scalar & threshold, const at::Scalar & value);
+} // namespace native
+} // namespace at
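`structured_threshold_out` shows the structured-kernel split: the `at::meta::structured_threshold` base class it inherits performs shape checking and output allocation, and `impl` only fills the already-allocated `out`. The op itself keeps elements above the threshold and replaces the rest, as in this sketch:

```cpp
#include <ATen/ATen.h>

int main() {
  at::Tensor x = at::tensor({-1.0f, 0.5f, 2.0f});

  // Elements > threshold are kept; the rest become `value`.
  at::Tensor y = at::threshold(x, /*threshold=*/0.0, /*value=*/-9.0);
  // y == {-9.0, 0.5, 2.0}
  return 0;
}
```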
diff --git a/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/transpose_copy_native.h b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/transpose_copy_native.h
new file mode 100644
index 0000000000000000000000000000000000000000..6d833095b859aa46440fd4f652e21974bcd36ad4
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/include/ATen/ops/transpose_copy_native.h
@@ -0,0 +1,22 @@
+#pragma once
+
+// @generated by torchgen/gen.py from NativeFunction.h
+
+#include <c10/core/Scalar.h>
+#include <c10/core/Storage.h>
+#include <c10/core/TensorOptions.h>
+#include <c10/util/Deprecated.h>
+#include <c10/util/Optional.h>
+#include <c10/core/QScheme.h>
+#include <ATen/core/Reduction.h>
+#include <ATen/core/Tensor.h>
+#include <tuple>
+#include <vector>
+
+
+namespace at {
+namespace native {
+TORCH_API at::Tensor & transpose_copy_int_out(const at::Tensor & self, int64_t dim0, int64_t dim1, at::Tensor & out);
+TORCH_API at::Tensor transpose_copy_int(const at::Tensor & self, int64_t dim0, int64_t dim1);
+} // namespace native
+} // namespace at
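`transpose_copy` belongs to the `*_copy` family used by functionalization: it has the same semantics as `transpose` but returns a freshly materialized tensor instead of a view. The difference is observable through mutation, as sketched here:

```cpp
#include <ATen/ATen.h>

int main() {
  at::Tensor x = at::randn({2, 3});

  at::Tensor v = at::transpose(x, 0, 1);       // a view: shares x's storage
  at::Tensor c = at::transpose_copy(x, 0, 1);  // a copy: owns its storage

  x.fill_(0);
  // v now reads as all zeros; c still holds the original values.
  return 0;
}
```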
diff --git a/moondream/lib/python3.10/site-packages/torch/utils/hipify/__pycache__/cuda_to_hip_mappings.cpython-310.pyc b/moondream/lib/python3.10/site-packages/torch/utils/hipify/__pycache__/cuda_to_hip_mappings.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7ada5e0d6d5ce98ca490ea9b0dbbdde4cef289d5
--- /dev/null
+++ b/moondream/lib/python3.10/site-packages/torch/utils/hipify/__pycache__/cuda_to_hip_mappings.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dee7e29f003c207300d0fc7aa125748abcd903820960852c770ed48ef694ae4b
+size 450843