Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- venv/lib/python3.10/site-packages/Cython/Build/BuildExecutable.py +169 -0
- venv/lib/python3.10/site-packages/Cython/Build/Cache.py +199 -0
- venv/lib/python3.10/site-packages/Cython/Build/Cythonize.py +350 -0
- venv/lib/python3.10/site-packages/Cython/Build/Dependencies.py +1311 -0
- venv/lib/python3.10/site-packages/Cython/Build/Distutils.py +1 -0
- venv/lib/python3.10/site-packages/Cython/Build/Inline.py +463 -0
- venv/lib/python3.10/site-packages/Cython/Build/IpythonMagic.py +560 -0
- venv/lib/python3.10/site-packages/Cython/Build/SharedModule.py +76 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/TestCyCache.py +194 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/TestCythonizeArgsParser.py +481 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/TestDependencies.py +133 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/TestInline.py +177 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/TestIpythonMagic.py +287 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/TestRecythonize.py +212 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/TestStripLiterals.py +155 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/__init__.py +1 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestCyCache.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestCythonizeArgsParser.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestDependencies.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestInline.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestIpythonMagic.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestRecythonize.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestStripLiterals.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/__init__.py +11 -0
- venv/lib/python3.10/site-packages/Cython/Build/__pycache__/BuildExecutable.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Cache.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Cythonize.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Dependencies.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Distutils.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Inline.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/__pycache__/IpythonMagic.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/__pycache__/SharedModule.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/Build/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/Cython/CodeWriter.py +811 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/AnalysedTreeTransforms.py +97 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/Annotate.py +325 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/AutoDocTransforms.py +320 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/Buffer.py +680 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/Builtin.py +948 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/CmdLine.py +259 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/Code.pxd +148 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/Code.py +0 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/CodeGeneration.py +33 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/CythonScope.py +187 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/Dataclass.py +868 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/DebugFlags.py +24 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/Errors.py +295 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/ExprNodes.py +0 -0
- venv/lib/python3.10/site-packages/Cython/Compiler/FlowControl.pxd +97 -0
venv/lib/python3.10/site-packages/Cython/Build/BuildExecutable.py
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Compile a Python script into an executable that embeds CPython.
|
| 3 |
+
Requires CPython to be built as a shared library ('libpythonX.Y').
|
| 4 |
+
|
| 5 |
+
Basic usage:
|
| 6 |
+
|
| 7 |
+
python -m Cython.Build.BuildExecutable [ARGS] somefile.py
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
DEBUG = True
|
| 12 |
+
|
| 13 |
+
import sys
|
| 14 |
+
import os
|
| 15 |
+
if sys.version_info < (3, 9):
|
| 16 |
+
from distutils import sysconfig as _sysconfig
|
| 17 |
+
|
| 18 |
+
class sysconfig:
|
| 19 |
+
|
| 20 |
+
@staticmethod
|
| 21 |
+
def get_path(name):
|
| 22 |
+
assert name == 'include'
|
| 23 |
+
return _sysconfig.get_python_inc()
|
| 24 |
+
|
| 25 |
+
get_config_var = staticmethod(_sysconfig.get_config_var)
|
| 26 |
+
else:
|
| 27 |
+
# sysconfig can be trusted from cpython >= 3.8.7
|
| 28 |
+
import sysconfig
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def get_config_var(name, default=''):
|
| 32 |
+
return sysconfig.get_config_var(name) or default
|
| 33 |
+
|
| 34 |
+
INCDIR = sysconfig.get_path('include')
|
| 35 |
+
LIBDIR1 = get_config_var('LIBDIR')
|
| 36 |
+
LIBDIR2 = get_config_var('LIBPL')
|
| 37 |
+
PYLIB = get_config_var('LIBRARY')
|
| 38 |
+
PYLIB_DYN = get_config_var('LDLIBRARY')
|
| 39 |
+
if PYLIB_DYN == PYLIB:
|
| 40 |
+
# no shared library
|
| 41 |
+
PYLIB_DYN = ''
|
| 42 |
+
else:
|
| 43 |
+
PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0] # 'lib(XYZ).so' -> XYZ
|
| 44 |
+
|
| 45 |
+
CC = get_config_var('CC', os.environ.get('CC', ''))
|
| 46 |
+
CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '')
|
| 47 |
+
LINKCC = get_config_var('LINKCC', os.environ.get('LINKCC', CC))
|
| 48 |
+
LINKFORSHARED = get_config_var('LINKFORSHARED')
|
| 49 |
+
LIBS = get_config_var('LIBS')
|
| 50 |
+
SYSLIBS = get_config_var('SYSLIBS')
|
| 51 |
+
EXE_EXT = sysconfig.get_config_var('EXE')
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def _debug(msg, *args):
    """Write a diagnostic line to stderr, but only when DEBUG is enabled.

    When *args* are given, *msg* is treated as a %-format string.
    """
    if not DEBUG:
        return
    text = msg % args if args else msg
    sys.stderr.write(text + '\n')
+
|
| 61 |
+
def dump_config():
    """Echo every resolved build-configuration value to stderr (via _debug)."""
    for label, value in (
        ('INCDIR', INCDIR),
        ('LIBDIR1', LIBDIR1),
        ('LIBDIR2', LIBDIR2),
        ('PYLIB', PYLIB),
        ('PYLIB_DYN', PYLIB_DYN),
        ('CC', CC),
        ('CFLAGS', CFLAGS),
        ('LINKCC', LINKCC),
        ('LINKFORSHARED', LINKFORSHARED),
        ('LIBS', LIBS),
        ('SYSLIBS', SYSLIBS),
        ('EXE_EXT', EXE_EXT),
    ):
        _debug(label + ': %s', value)
+
|
| 75 |
+
|
| 76 |
+
def _parse_args(args):
|
| 77 |
+
cy_args = []
|
| 78 |
+
last_arg = None
|
| 79 |
+
for i, arg in enumerate(args):
|
| 80 |
+
if arg.startswith('-'):
|
| 81 |
+
cy_args.append(arg)
|
| 82 |
+
elif last_arg in ('-X', '--directive'):
|
| 83 |
+
cy_args.append(arg)
|
| 84 |
+
else:
|
| 85 |
+
input_file = arg
|
| 86 |
+
args = args[i+1:]
|
| 87 |
+
break
|
| 88 |
+
last_arg = arg
|
| 89 |
+
else:
|
| 90 |
+
raise ValueError('no input file provided')
|
| 91 |
+
|
| 92 |
+
return input_file, cy_args, args
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
def runcmd(cmd, shell=True):
    """Echo and run a command (list of strings); exit the process on failure.

    With shell=True the list is joined into a single shell command string.
    """
    command_line = ' '.join(cmd)
    _debug(command_line)
    if shell:
        cmd = command_line

    import subprocess
    returncode = subprocess.call(cmd, shell=shell)
    if returncode:
        # Propagate the child's failure as our own exit status.
        sys.exit(returncode)
| 108 |
+
|
| 109 |
+
def clink(basename):
    """Link 'basename.o' into an executable named 'basename' + EXE_EXT.

    Links against the shared libpython when one is available (PYLIB_DYN),
    otherwise falls back to the static library file path.
    """
    runcmd([LINKCC, '-o', basename + EXE_EXT, basename+'.o', '-L'+LIBDIR1, '-L'+LIBDIR2]
           + [PYLIB_DYN and ('-l'+PYLIB_DYN) or os.path.join(LIBDIR1, PYLIB)]
           + LIBS.split() + SYSLIBS.split() + LINKFORSHARED.split())
| 114 |
+
|
| 115 |
+
def ccompile(basename):
    """Compile 'basename.c' into 'basename.o' against the CPython headers."""
    runcmd([CC, '-c', '-o', basename+'.o', basename+'.c', '-I' + INCDIR] + CFLAGS.split())
| 118 |
+
|
| 119 |
+
def cycompile(input_file, options=()):
    """Run the Cython compiler on *input_file* with '--embed' (emit a main()).

    *options* are extra Cython command-line arguments. Exits the process
    with status 1 if compilation reports errors.
    """
    from ..Compiler import Version, CmdLine, Main
    options, sources = CmdLine.parse_command_line(list(options or ()) + ['--embed', input_file])
    _debug('Using Cython %s to compile %s', Version.version, input_file)
    result = Main.compile(sources, options)
    if result.num_errors > 0:
        sys.exit(1)
| 127 |
+
|
| 128 |
+
def exec_file(program_name, args=()):
    """Run the built executable (by absolute path) with *args*; shell=False
    avoids any shell quoting of the arguments."""
    runcmd([os.path.abspath(program_name)] + list(args), shell=False)
| 131 |
+
|
| 132 |
+
def build(input_file, compiler_args=(), force=False):
    """
    Build an executable program from a Cython module.

    Returns the name of the executable file.

    *compiler_args* are passed through to the Cython compiler.
    With force=False the build is skipped when the executable already
    exists and is newer than the input file.
    """
    basename = os.path.splitext(input_file)[0]
    exe_file = basename + EXE_EXT
    # On platforms where EXE_EXT is empty, output would overwrite the input.
    if not force and os.path.abspath(exe_file) == os.path.abspath(input_file):
        raise ValueError("Input and output file names are the same, refusing to overwrite")
    if (not force and os.path.exists(exe_file) and os.path.exists(input_file)
            and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
        _debug("File is up to date, not regenerating %s", exe_file)
        return exe_file
    # Full pipeline: Cython -> C, C -> object file, object file -> executable.
    cycompile(input_file, compiler_args)
    ccompile(basename)
    clink(basename)
    return exe_file
| 151 |
+
|
| 152 |
+
def build_and_run(args):
    """
    Build an executable program from a Cython module and run it.

    Arguments after the module name will be passed verbatim to the program.
    """
    executable, program_args = _build(args)
    exec_file(executable, program_args)
| 161 |
+
|
| 162 |
+
def _build(args):
    """Parse *args*, build the named module, and return
    (executable_path, remaining program arguments)."""
    source, cython_options, program_args = _parse_args(args)
    executable = build(source, cython_options)
    return executable, program_args
| 167 |
+
|
| 168 |
+
if __name__ == '__main__':
    # Script entry point: build (but do not run) the given module.
    _build(sys.argv[1:])
venv/lib/python3.10/site-packages/Cython/Build/Cache.py
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from dataclasses import dataclass
|
| 2 |
+
import sys
|
| 3 |
+
import os
|
| 4 |
+
import hashlib
|
| 5 |
+
import shutil
|
| 6 |
+
import subprocess
|
| 7 |
+
from ..Utils import safe_makedirs, cached_function
|
| 8 |
+
import zipfile
|
| 9 |
+
from .. import __version__
|
| 10 |
+
|
| 11 |
+
try:
|
| 12 |
+
import zlib
|
| 13 |
+
|
| 14 |
+
zipfile_compression_mode = zipfile.ZIP_DEFLATED
|
| 15 |
+
except ImportError:
|
| 16 |
+
zipfile_compression_mode = zipfile.ZIP_STORED
|
| 17 |
+
|
| 18 |
+
try:
|
| 19 |
+
import gzip
|
| 20 |
+
|
| 21 |
+
gzip_open = gzip.open
|
| 22 |
+
gzip_ext = ".gz"
|
| 23 |
+
except ImportError:
|
| 24 |
+
gzip_open = open
|
| 25 |
+
gzip_ext = ""
|
| 26 |
+
|
| 27 |
+
zip_ext = ".zip"
|
| 28 |
+
|
| 29 |
+
MAX_CACHE_SIZE = 1024 * 1024 * 100
|
| 30 |
+
|
| 31 |
+
join_path = cached_function(os.path.join)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@cached_function
def file_hash(filename):
    """Return a SHA-256 hex digest covering both a file's (normalized)
    path and its full contents, read in chunks."""
    normalized = os.path.normpath(filename)
    # Length-prefix the path so distinct (path, contents) pairs can never
    # collide by concatenation.
    digest = hashlib.sha256(("%d:%s" % (len(normalized), normalized)).encode("UTF-8"))
    with open(normalized, "rb") as stream:
        for chunk in iter(lambda: stream.read(65000), b""):
            digest.update(chunk)
    return digest.hexdigest()
| 46 |
+
|
| 47 |
+
@cached_function
def get_cython_cache_dir():
    r"""
    Return the base directory containing Cython's caches.

    Priority:

    1. CYTHON_CACHE_DIR
    2. (OS X): ~/Library/Caches/Cython
       (posix not OS X): XDG_CACHE_HOME/cython if XDG_CACHE_HOME defined
    3. ~/.cython

    """
    explicit = os.environ.get("CYTHON_CACHE_DIR")
    if explicit is not None:
        return explicit

    cache_parent = None
    if os.name == "posix":
        if sys.platform == "darwin":
            cache_parent = os.path.expanduser("~/Library/Caches")
        else:
            # this could fall back on ~/.cache
            cache_parent = os.environ.get("XDG_CACHE_HOME")

    if cache_parent and os.path.isdir(cache_parent):
        return join_path(cache_parent, "cython")

    # last fallback: ~/.cython
    return os.path.expanduser(join_path("~", ".cython"))
| 77 |
+
|
| 78 |
+
@dataclass
class FingerprintFlags:
    """Build flags that influence the generated code and therefore must be
    part of a cache fingerprint."""
    # Target language of the generated sources.
    language: str = "c"
    # Whether the limited C-API (abi3) is requested.
    py_limited_api: bool = False
    # Whether NumPy/Pythran integration is enabled.
    np_pythran: bool = False

    def get_fingerprint(self):
        """Return a stable string representation of these flags for hashing."""
        flag_tuple = (self.language, self.py_limited_api, self.np_pythran)
        return str(flag_tuple)
| 87 |
+
|
| 88 |
+
class Cache:
    """On-disk cache of cythonization results, keyed by content fingerprints.

    Entries are stored compressed under ``self.path``: a single generated
    file as a gzip file, multiple generated files as a zip archive.
    """

    def __init__(self, path, cache_size=None):
        # path=None selects the default location under the user cache dir.
        if path is None:
            self.path = join_path(get_cython_cache_dir(), "compiler")
        else:
            self.path = path
        # Size budget in bytes; falls back to the module default (100 MiB).
        self.cache_size = cache_size if cache_size is not None else MAX_CACHE_SIZE
        if not os.path.exists(self.path):
            os.makedirs(self.path)

    def transitive_fingerprint(
        self, filename, dependencies, compilation_options, flags=FingerprintFlags()
    ):
        r"""
        Return a fingerprint of a cython file that is about to be cythonized.

        Fingerprints are looked up in future compilations. If the fingerprint
        is found, the cythonization can be skipped. The fingerprint must
        incorporate everything that has an influence on the generated code.

        Returns None when any involved file cannot be read (OSError).
        """
        try:
            m = hashlib.sha256(__version__.encode("UTF-8"))
            m.update(file_hash(filename).encode("UTF-8"))
            for x in sorted(dependencies):
                # Plain C/C++ sources and headers do not change the code
                # that Cython generates, so they are excluded here.
                if os.path.splitext(x)[1] not in (".c", ".cpp", ".h"):
                    m.update(file_hash(x).encode("UTF-8"))
            # Include the module attributes that change the compilation result
            # in the fingerprint. We do not iterate over module.__dict__ and
            # include almost everything here as users might extend Extension
            # with arbitrary (random) attributes that would lead to cache
            # misses.
            m.update(flags.get_fingerprint().encode("UTF-8"))
            m.update(compilation_options.get_fingerprint().encode("UTF-8"))
            return m.hexdigest()
        except OSError:
            return None

    def fingerprint_file(self, cfile, fingerprint, ext):
        """Return the cache file path for (source basename, fingerprint, ext)."""
        return (
            join_path(self.path, "%s-%s" % (os.path.basename(cfile), fingerprint)) + ext
        )

    def lookup_cache(self, c_file, fingerprint):
        """Return the path of an existing cache entry for this fingerprint,
        preferring the gzip form over the zip form, or None on a miss."""
        # Cython-generated c files are highly compressible.
        # (E.g. a compression ratio of about 10 for Sage).
        if not os.path.exists(self.path):
            safe_makedirs(self.path)
        gz_fingerprint_file = self.fingerprint_file(c_file, fingerprint, gzip_ext)
        if os.path.exists(gz_fingerprint_file):
            return gz_fingerprint_file
        zip_fingerprint_file = self.fingerprint_file(c_file, fingerprint, zip_ext)
        if os.path.exists(zip_fingerprint_file):
            return zip_fingerprint_file
        return None

    def load_from_cache(self, c_file, cached):
        """Restore generated file(s) for *c_file* from the cache entry *cached*.

        Touches the entry's mtime/atime so LRU-style cleanup keeps hot entries.
        """
        ext = os.path.splitext(cached)[1]
        if ext == gzip_ext:
            os.utime(cached, None)
            with gzip_open(cached, "rb") as g:
                with open(c_file, "wb") as f:
                    shutil.copyfileobj(g, f)
        elif ext == zip_ext:
            os.utime(cached, None)
            dirname = os.path.dirname(c_file)
            with zipfile.ZipFile(cached) as z:
                for artifact in z.namelist():
                    # NOTE(review): ZipFile.extract()'s second argument is the
                    # *directory* to extract into, so this extracts each member
                    # into a subdirectory named after itself rather than into
                    # 'dirname' directly — confirm this is intended.
                    z.extract(artifact, join_path(dirname, artifact))
        else:
            raise ValueError(f"Unsupported cache file extension: {ext}")

    def store_to_cache(self, c_file, fingerprint, compilation_result):
        """Store the compilation's generated source files under *fingerprint*.

        A single artifact is gzip-compressed; multiple artifacts go into one
        zip archive. The entry is written to a '.tmp' file first and renamed
        into place to avoid exposing partially written entries.
        """
        artifacts = compilation_result.get_generated_source_files()
        if len(artifacts) == 1:
            fingerprint_file = self.fingerprint_file(c_file, fingerprint, gzip_ext)
            with open(c_file, "rb") as f:
                with gzip_open(fingerprint_file + ".tmp", "wb") as g:
                    shutil.copyfileobj(f, g)
        else:
            fingerprint_file = self.fingerprint_file(c_file, fingerprint, zip_ext)
            with zipfile.ZipFile(
                fingerprint_file + ".tmp", "w", zipfile_compression_mode
            ) as zip:
                for artifact in artifacts:
                    zip.write(artifact, os.path.basename(artifact))
        os.rename(fingerprint_file + ".tmp", fingerprint_file)

    def cleanup_cache(self, ratio=0.85):
        """Shrink the cache below its size budget by deleting entries.

        First tries the fast external 'du' to measure total size; if that
        fails (or exceeds the budget), sizes are computed via os.stat and
        entries are deleted until total size drops below cache_size * ratio.
        """
        try:
            completed_process = subprocess.run(
                ["du", "-s", "-k", os.path.abspath(self.path)], stdout=subprocess.PIPE
            )
            stdout = completed_process.stdout
            if completed_process.returncode == 0:
                total_size = 1024 * int(stdout.strip().split()[0])
                if total_size < self.cache_size:
                    return
        except (OSError, ValueError):
            pass
        total_size = 0
        all = []
        for file in os.listdir(self.path):
            path = join_path(self.path, file)
            s = os.stat(path)
            total_size += s.st_size
            all.append((s.st_atime, s.st_size, path))
        if total_size > self.cache_size:
            # NOTE(review): reversed(sorted(...)) visits entries with the
            # *newest* access time first, so the most recently used entries
            # are deleted before older ones — verify this ordering is the
            # intended eviction policy.
            for time, size, file in reversed(sorted(all)):
                os.unlink(file)
                total_size -= size
                if total_size < self.cache_size * ratio:
                    break
venv/lib/python3.10/site-packages/Cython/Build/Cythonize.py
ADDED
|
@@ -0,0 +1,350 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import concurrent.futures
|
| 2 |
+
import os
|
| 3 |
+
import shutil
|
| 4 |
+
import sys
|
| 5 |
+
import tempfile
|
| 6 |
+
from collections import defaultdict
|
| 7 |
+
from contextlib import contextmanager
|
| 8 |
+
|
| 9 |
+
from .Dependencies import cythonize, extended_iglob
|
| 10 |
+
from ..Utils import is_package_dir
|
| 11 |
+
from ..Compiler import Options
|
| 12 |
+
|
| 13 |
+
try:
|
| 14 |
+
import multiprocessing
|
| 15 |
+
parallel_compiles = int(multiprocessing.cpu_count() * 1.5)
|
| 16 |
+
except ImportError:
|
| 17 |
+
multiprocessing = None
|
| 18 |
+
parallel_compiles = 0
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def find_package_base(path):
    """Walk upwards from *path* past package directories.

    Returns (base_dir, package_path) where base_dir is the first ancestor
    that is not a package directory and package_path is the '/'-joined
    relative path from base_dir down to the original path.
    """
    base_dir, package_path = os.path.split(path)
    while is_package_dir(base_dir):
        base_dir, package_name = os.path.split(base_dir)
        package_path = '%s/%s' % (package_name, package_path)
    return base_dir, package_path
| 28 |
+
|
| 29 |
+
def cython_compile(path_pattern, options) -> None:
    """Cythonize all files matching *path_pattern*, then build the results.

    The pattern is expanded with extended_iglob and resolved to absolute
    paths; building honours ``options.parallel`` for worker count.

    Fix: the return annotation previously claimed ``-> dict`` although the
    function returns nothing (the dict is consumed by ``_build``).
    """
    all_paths = map(os.path.abspath, extended_iglob(path_pattern))
    ext_modules_by_basedir = _cython_compile_files(all_paths, options)
    _build(list(ext_modules_by_basedir.items()), options.parallel)
| 34 |
+
|
| 35 |
+
def _cython_compile_files(all_paths, options) -> dict:
    """Cythonize each path and group resulting Extension objects by base dir.

    Returns a dict mapping base_dir (or None when not building in place)
    to the list of extension modules to build from that directory.
    """
    ext_modules_to_build = defaultdict(list)

    for path in all_paths:
        if options.build_inplace:
            # Find the first ancestor that exists as a directory and is not
            # itself a package directory — builds run relative to it.
            base_dir = path
            while not os.path.isdir(base_dir) or is_package_dir(base_dir):
                base_dir = os.path.dirname(base_dir)
        else:
            base_dir = None

        if os.path.isdir(path):
            # recursively compiling a package
            paths = [os.path.join(path, '**', '*.{py,pyx}')]
        else:
            # assume it's a file(-like thing)
            paths = [path]

        ext_modules = cythonize(
            paths,
            nthreads=options.parallel,
            exclude_failures=options.keep_going,
            exclude=options.excludes,
            compiler_directives=options.directives,
            compile_time_env=options.compile_time_env,
            force=options.force,
            quiet=options.quiet,
            depfile=options.depfile,
            language=options.language,
            **options.options)

        if ext_modules and options.build:
            ext_modules_to_build[base_dir].extend(ext_modules)

    return dict(ext_modules_to_build)
| 71 |
+
|
| 72 |
+
@contextmanager
def _interruptible_pool(pool_cm):
    """Wrap a process-pool context manager so Ctrl-C tears the pool down.

    On KeyboardInterrupt the workers are terminated and pending futures
    cancelled before the interrupt is re-raised.
    """
    with pool_cm as proc_pool:
        try:
            yield proc_pool
        except KeyboardInterrupt:
            # NOTE(review): Executor.terminate_workers() only exists in very
            # recent CPython versions — confirm the supported Python range.
            proc_pool.terminate_workers()
            proc_pool.shutdown(cancel_futures=True)
            raise
| 82 |
+
|
| 83 |
+
def _build(ext_modules, parallel):
    """Build extension modules, either serially or via a process pool.

    *ext_modules* is a list of (base_dir, [Extension, ...]) pairs.
    Serial execution is used for a single module, for parallel < 2, or
    when a process pool cannot be created. Worker failures are collected
    and re-raised together after all tasks finish.
    """
    modcount = sum(len(modules) for _, modules in ext_modules)
    if not modcount:
        return

    serial_execution_mode = modcount == 1 or parallel < 2

    try:
        pool_cm = (
            None if serial_execution_mode
            else concurrent.futures.ProcessPoolExecutor(max_workers=parallel)
        )
    except (OSError, ImportError):
        # `OSError` is a historic exception in `multiprocessing`
        # `ImportError` happens e.g. under pyodide (`ModuleNotFoundError`)
        serial_execution_mode = True

    if serial_execution_mode:
        for ext in ext_modules:
            run_distutils(ext)
        return

    with _interruptible_pool(pool_cm) as proc_pool:
        # One task per extension module, so failures are isolated per module.
        compiler_tasks = [
            proc_pool.submit(run_distutils, (base_dir, [ext]))
            for base_dir, modules in ext_modules
            for ext in modules
        ]

        concurrent.futures.wait(compiler_tasks, return_when=concurrent.futures.FIRST_EXCEPTION)

        worker_exceptions = []
        for task in compiler_tasks:  # discover any crashes
            try:
                task.result()
            except BaseException as proc_err:  # could be SystemExit
                worker_exceptions.append(proc_err)

        if worker_exceptions:
            exc_msg = 'Compiling Cython modules failed with these errors:\n\n'
            exc_msg += '\n\t* '.join(('', *map(str, worker_exceptions)))
            exc_msg += '\n\n'

            # ExceptionGroup cannot carry BaseException-only members, so it is
            # used only when all collected failures are plain Exceptions.
            non_base_exceptions = [
                exc for exc in worker_exceptions
                if isinstance(exc, Exception)
            ]
            if sys.version_info[:2] >= (3, 11) and non_base_exceptions:
                raise ExceptionGroup(exc_msg, non_base_exceptions)
            else:
                raise RuntimeError(exc_msg) from worker_exceptions[0]
| 135 |
+
|
| 136 |
+
def run_distutils(args):
    """Build the given extension modules in place with distutils/setuptools.

    *args* is a (base_dir, ext_modules) pair. When base_dir is set, the
    build runs from that directory with a temporary build dir that is
    removed afterwards.
    """
    try:
        from distutils.core import setup
    except ImportError:
        # distutils was removed in Python 3.12; setuptools provides a shim.
        try:
            from setuptools import setup
        except ImportError:
            raise ImportError("'distutils' is not available. Please install 'setuptools' for binary builds.")

    base_dir, ext_modules = args
    script_args = ['build_ext', '-i']
    cwd = os.getcwd()
    temp_dir = None
    try:
        if base_dir:
            os.chdir(base_dir)
            temp_dir = tempfile.mkdtemp(dir=base_dir)
            script_args.extend(['--build-temp', temp_dir])
        setup(
            script_name='setup.py',
            script_args=script_args,
            ext_modules=ext_modules,
        )
    finally:
        # Always restore the working directory and clean up the temp dir.
        if base_dir:
            os.chdir(cwd)
            if temp_dir and os.path.isdir(temp_dir):
                shutil.rmtree(temp_dir)
| 165 |
+
|
| 166 |
+
def benchmark(code, setup_code=None, import_module=None, directives=None):
    """Compile and time a code snippet with cymeit, printing a timeit-style
    report (best/median per-loop time, plus a reliability warning when the
    slowest run is more than 4x the fastest).

    Fix: the "below nanoseconds" RuntimeError message was a plain string, so
    '{t:f}' was printed literally — it is now an f-string.
    """
    from Cython.Build.Inline import cymeit

    timings, number = cymeit(code, setup_code, import_module, directives, repeat=9)

    # Based on 'timeit.main()' in CPython 3.13.
    units = {"nsec": 1e-9, "usec": 1e-6, "msec": 1e-3, "sec": 1.0}
    scales = [(scale, unit) for unit, scale in reversed(units.items())]  # biggest first

    def format_time(t):
        # Pick the largest unit that keeps the value >= 1.
        for scale, unit in scales:
            if t >= scale:
                break
        else:
            raise RuntimeError(f"Timing is below nanoseconds: {t:f}")
        return f"{t / scale :.3f} {unit}"

    timings.sort()
    assert len(timings) & 1 == 1  # odd number of timings, for median position
    fastest, median, slowest = timings[0], timings[len(timings) // 2], timings[-1]

    print(f"{number} loops, best of {len(timings)}: {format_time(fastest)} per loop (median: {format_time(median)})")

    if slowest > fastest * 4:
        print(
            "The timings are likely unreliable. "
            f"The worst time ({format_time(slowest)}) was more than four times "
            f"slower than the best time ({format_time(fastest)}).")
| 195 |
+
|
| 196 |
+
def create_args_parser():
    """Build the argparse parser for the ``cythonize`` command-line tool."""
    from argparse import ArgumentParser, RawDescriptionHelpFormatter
    from ..Compiler.CmdLine import ParseDirectivesAction, ParseOptionsAction, ParseCompileTimeEnvAction

    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        epilog="""\
Environment variables:
  CYTHON_FORCE_REGEN: if set to 1, forces cythonize to regenerate the output files regardless
                      of modification times and changes.
  CYTHON_CACHE_DIR: the base directory containing Cython's caches.
  Environment variables accepted by setuptools are supported to configure the C compiler and build:
  https://setuptools.pypa.io/en/latest/userguide/ext_modules.html#compiler-and-linker-options"""
    )

    # Compilation configuration options.
    parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
                        dest='directives', default={}, type=str,
                        action=ParseDirectivesAction,
                        help='set a compiler directive')
    parser.add_argument('-E', '--compile-time-env', metavar='NAME=VALUE,...',
                        dest='compile_time_env', default={}, type=str,
                        action=ParseCompileTimeEnvAction,
                        help='set a compile time environment variable')
    parser.add_argument('-s', '--option', metavar='NAME=VALUE',
                        dest='options', default={}, type=str,
                        action=ParseOptionsAction,
                        help='set a cythonize option')
    parser.add_argument('-2', dest='language_level', action='store_const', const=2, default=None,
                        help='use Python 2 syntax mode by default')
    parser.add_argument('-3', dest='language_level', action='store_const', const=3,
                        help='use Python 3 syntax mode by default')
    parser.add_argument('--3str', dest='language_level', action='store_const', const=3,
                        help='use Python 3 syntax mode by default (deprecated alias for -3)')
    parser.add_argument('-+', '--cplus', dest='language', action='store_const', const='c++', default=None,
                        help='Compile as C++ rather than C')
    parser.add_argument('-a', '--annotate', action='store_const', const='default', dest='annotate',
                        help='Produce a colorized HTML version of the source.')
    parser.add_argument('--annotate-fullc', action='store_const', const='fullc', dest='annotate',
                        help='Produce a colorized HTML version of the source '
                             'which includes entire generated C/C++-code.')
    parser.add_argument('-x', '--exclude', metavar='PATTERN', dest='excludes',
                        action='append', default=[],
                        help='exclude certain file patterns from the compilation')

    # Build options.
    parser.add_argument('-b', '--build', dest='build', action='store_true', default=None,
                        help='build extension modules using distutils/setuptools')
    parser.add_argument('-i', '--inplace', dest='build_inplace', action='store_true', default=None,
                        help='build extension modules in place using distutils/setuptools (implies -b)')

    # Benchmarking options.
    # BUG FIX: the '--setup' help text referred to a non-existent '--bench'
    # option; the benchmark option is actually called '--timeit'.
    parser.add_argument('--timeit', dest='benchmark', metavar="CODESTRING", type=str, default=None,
                        help="build in place, then compile+run CODESTRING as benchmark in first module's namespace (implies -i)")
    parser.add_argument('--setup', dest='benchmark_setup', metavar="CODESTRING", type=str, default=None,
                        help="use CODESTRING as pre-benchmark setup code for --timeit")

    parser.add_argument('-j', '--parallel', dest='parallel', metavar='N',
                        type=int, default=parallel_compiles,
                        help=f'run builds in N parallel jobs (default: {parallel_compiles or 1})')
    parser.add_argument('-f', '--force', dest='force', action='store_true', default=None,
                        help='force recompilation')
    parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', default=None,
                        help='be less verbose during compilation')

    parser.add_argument('--lenient', dest='lenient', action='store_true', default=None,
                        help='increase Python compatibility by ignoring some compile time errors')
    parser.add_argument('-k', '--keep-going', dest='keep_going', action='store_true', default=None,
                        help='compile as much as possible, ignore compilation failures')
    parser.add_argument('--no-docstrings', dest='no_docstrings', action='store_true', default=None,
                        help='strip docstrings')
    parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
    parser.add_argument('sources', nargs='*')
    return parser
| 267 |
+
|
| 268 |
+
|
| 269 |
+
def parse_args_raw(parser, args):
    """Parse ``args`` with ``parser``, tolerating interspersed positionals.

    Returns a ``(options, sources)`` pair, where ``sources`` collects every
    positional argument.  A genuinely unknown option flag aborts via
    ``parser.error()``.
    """
    parsed, leftovers = parser.parse_known_args(args)
    sources = parsed.sources
    # Positional arguments placed between options end up in 'leftovers';
    # anything that looks like a flag there is a real unknown option.
    for extra in leftovers:
        if extra.startswith('-'):
            parser.error("unknown option " + extra)
        sources.append(extra)
    del parsed.sources
    return parsed, sources
| 281 |
+
|
| 282 |
+
|
| 283 |
+
def parse_args(args):
    """Parse the cythonize command line and apply global option side effects.

    Returns ``(options, sources)``.  Mutates the global ``Options`` module
    for --lenient, --annotate and --no-docstrings.
    """
    parser = create_args_parser()
    options, sources = parse_args_raw(parser, args)

    if options.benchmark is not None:
        # --timeit implies an in-place build, even without explicit sources.
        options.build_inplace = True
    elif not sources:
        parser.error("no source files provided")

    if options.build_inplace:
        options.build = True
    if multiprocessing is None:
        # No multiprocessing support available => serial builds only.
        options.parallel = 0
    if options.language_level:
        assert options.language_level in (2, 3, '3str')
        options.options['language_level'] = options.language_level

    if options.lenient:
        # increase Python compatibility by ignoring compile time errors
        Options.error_on_unknown_names = False
        Options.error_on_uninitialized = False
    if options.annotate:
        Options.annotate = options.annotate
    if options.no_docstrings:
        Options.docstrings = False

    return options, sources
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def main(args=None):
    """Command-line entry point: expand sources, cythonize, build, benchmark."""
    options, patterns = parse_args(args)

    # Expand each glob pattern to absolute paths; a pattern matching
    # nothing is a hard error.
    source_paths = []
    for pattern in patterns:
        matches = [os.path.abspath(match) for match in extended_iglob(pattern)]
        if not matches:
            print(f"{sys.argv[0]}: No such file or directory: '{pattern}'", file=sys.stderr)
            sys.exit(1)
        source_paths.extend(matches)

    ext_modules_by_basedir = _cython_compile_files(source_paths, options)

    if options.build and ext_modules_by_basedir:
        _build(list(ext_modules_by_basedir.items()), options.parallel)

    if options.benchmark is not None:
        # Use the first compiled extension module (if any) as the
        # namespace that the benchmark code runs in.
        base_dir = import_module = None
        if ext_modules_by_basedir:
            base_dir, extensions = ext_modules_by_basedir.popitem()
            if extensions:
                import_module = extensions[0].name

        if base_dir is not None:
            sys.path.insert(0, base_dir)

        benchmark(
            options.benchmark, options.benchmark_setup,
            import_module=import_module,
        )

        if base_dir is not None:
            sys.path.remove(base_dir)
+
|
| 349 |
+
# Allow running this module directly as the 'cythonize' command-line tool.
if __name__ == '__main__':
    main()
|
venv/lib/python3.10/site-packages/Cython/Build/Dependencies.py
ADDED
|
@@ -0,0 +1,1311 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import cython
|
| 2 |
+
|
| 3 |
+
import collections
|
| 4 |
+
import os
|
| 5 |
+
import re, sys, time
|
| 6 |
+
from glob import iglob
|
| 7 |
+
from io import StringIO
|
| 8 |
+
from os.path import relpath as _relpath
|
| 9 |
+
from .Cache import Cache, FingerprintFlags
|
| 10 |
+
|
| 11 |
+
from collections.abc import Iterable
|
| 12 |
+
|
| 13 |
+
# Pythran support is optional; fall back to None when it is missing or its
# import fails.  Catch Exception (not a bare 'except:') so that
# KeyboardInterrupt / SystemExit are not silently swallowed during import.
try:
    import pythran
except Exception:
    pythran = None
| 17 |
+
|
| 18 |
+
from .. import Utils
|
| 19 |
+
from ..Utils import (cached_function, cached_method, path_exists,
|
| 20 |
+
safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, write_depfile)
|
| 21 |
+
from ..Compiler import Errors
|
| 22 |
+
from ..Compiler.Main import Context
|
| 23 |
+
from ..Compiler import Options
|
| 24 |
+
from ..Compiler.Options import (CompilationOptions, default_options,
|
| 25 |
+
get_directive_defaults)
|
| 26 |
+
|
| 27 |
+
join_path = cached_function(os.path.join)
|
| 28 |
+
copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
|
| 29 |
+
safe_makedirs_once = cached_function(safe_makedirs)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def _make_relative(file_paths, base=None):
|
| 33 |
+
if not base:
|
| 34 |
+
base = os.getcwd()
|
| 35 |
+
if base[-1] != os.path.sep:
|
| 36 |
+
base += os.path.sep
|
| 37 |
+
return [_relpath(path, base) if path.startswith(base) else path
|
| 38 |
+
for path in file_paths]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def extended_iglob(pattern):
    """Yield paths matching *pattern*, an extended glob pattern.

    On top of plain ``glob`` syntax this supports brace alternation
    ("{a,b}") and recursive "**/" directory matching; results of a
    recursive match are de-duplicated.
    """
    if '{' in pattern:
        m = re.match('(.*){([^}]+)}(.*)', pattern)
        if m:
            before, switch, after = m.groups()
            # Expand each brace alternative recursively.
            for case in switch.split(','):
                yield from extended_iglob(before + case + after)
            return

    # We always accept '/' and also '\' on Windows,
    # because '/' is generally common for relative paths.
    if '**/' in pattern or os.sep == '\\' and '**\\' in pattern:
        seen = set()
        # BUG FIX: pass 'maxsplit' by keyword - passing it positionally to
        # re.split() is deprecated since Python 3.13.
        first, rest = re.split(r'\*\*[%s]' % ('/\\\\' if os.sep == '\\' else '/'), pattern, maxsplit=1)
        if first:
            first = iglob(first + os.sep)
        else:
            first = ['']
        for root in first:
            # '**/' matches zero directories ...
            for path in extended_iglob(join_path(root, rest)):
                if path not in seen:
                    seen.add(path)
                    yield path
            # ... or one-or-more directories.
            for path in extended_iglob(join_path(root, '*', '**', rest)):
                if path not in seen:
                    seen.add(path)
                    yield path
    else:
        yield from iglob(pattern)
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def nonempty(it, error_msg="expected non-empty iterator"):
    """Re-yield *it*, raising ``ValueError(error_msg)`` if it yields nothing.

    The error is raised only once the wrapped iterator is exhausted, i.e.
    when the consumer has fully drained this generator.
    """
    yielded_anything = False
    for item in it:
        yielded_anything = True
        yield item
    if not yielded_anything:
        raise ValueError(error_msg)
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def update_pythran_extension(ext):
    """Augment the distutils Extension *ext* with Pythran's build settings.

    Raises RuntimeError when Pythran is not installed.
    """
    if pythran is None:
        raise RuntimeError("You first need to install Pythran to use the np_pythran directive.")
    try:
        pythran_ext = pythran.config.make_extension(python=True)
    except TypeError:  # older pythran version only
        pythran_ext = pythran.config.make_extension()

    # Copy every list-valued build setting from the Pythran config onto
    # the extension, then force C++ compilation.
    for attribute in (
            'include_dirs', 'extra_compile_args', 'extra_link_args',
            'define_macros', 'undef_macros', 'library_dirs', 'libraries'):
        getattr(ext, attribute).extend(pythran_ext[attribute])
    ext.language = 'c++'

    # These options are not compatible with the way normal Cython extensions work
    for bad_option in ["-fwhole-program", "-fvisibility=hidden"]:
        try:
            ext.extra_compile_args.remove(bad_option)
        except ValueError:
            pass
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def parse_list(s):
    """
    >>> parse_list("")
    []
    >>> parse_list("a")
    ['a']
    >>> parse_list("a b c")
    ['a', 'b', 'c']
    >>> parse_list("[a, b, c]")
    ['a', 'b', 'c']
    >>> parse_list('a " " b')
    ['a', ' ', 'b']
    >>> parse_list('[a, ",a", "a,", ",", ]')
    ['a', ',a', 'a,', ',']
    """
    # Bracketed lists are comma-separated; bare lists are space-separated.
    if len(s) >= 2 and s[0] == '[' and s[-1] == ']':
        s, delimiter = s[1:-1], ','
    else:
        delimiter = ' '
    # Replace string literals by labels first, so that delimiters inside
    # quotes cannot split an item.
    s, literals = strip_string_literals(s)

    def unquote(item):
        item = item.strip()
        # Quoted items were replaced by labels; map them back to their text.
        return literals[item[1:-1]] if item[0] in "'\"" else item

    return [unquote(item) for item in s.split(delimiter) if item.strip()]
| 136 |
+
|
| 137 |
+
|
| 138 |
+
# Sentinels describing how a setting combines across dependencies when
# DistutilsInfo objects are merged (see DistutilsInfo.merge()):
transitive_str = object()   # inherited from a dependency unless set locally
transitive_list = object()  # union of local and dependency values
bool_or = object()          # logical OR of local and dependency values

# Recognized "# distutils: key=value" source-comment settings, mapped to
# their value type or merge behavior sentinel.
distutils_settings = {
    'name': str,
    'sources': list,
    'define_macros': list,
    'undef_macros': list,
    'libraries': transitive_list,
    'library_dirs': transitive_list,
    'runtime_library_dirs': transitive_list,
    'include_dirs': transitive_list,
    'extra_objects': list,
    'extra_compile_args': transitive_list,
    'extra_link_args': transitive_list,
    'export_symbols': list,
    'depends': transitive_list,
    'language': transitive_str,
    'np_pythran': bool_or
}
| 159 |
+
|
| 160 |
+
|
| 161 |
+
def _legacy_strtobool(val):
|
| 162 |
+
# Used to be "distutils.util.strtobool", adapted for deprecation warnings.
|
| 163 |
+
if val == "True":
|
| 164 |
+
return True
|
| 165 |
+
elif val == "False":
|
| 166 |
+
return False
|
| 167 |
+
|
| 168 |
+
import warnings
|
| 169 |
+
warnings.warn("The 'np_python' option requires 'True' or 'False'", category=DeprecationWarning)
|
| 170 |
+
val = val.lower()
|
| 171 |
+
if val in ('y', 'yes', 't', 'true', 'on', '1'):
|
| 172 |
+
return True
|
| 173 |
+
elif val in ('n', 'no', 'f', 'false', 'off', '0'):
|
| 174 |
+
return False
|
| 175 |
+
else:
|
| 176 |
+
raise ValueError("invalid truth value %r" % (val,))
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
class DistutilsInfo:
    """Build settings for one source file, collected from "# distutils: ..."
    header comments (or from an Extension object) and merged transitively
    across cimport dependencies according to ``distutils_settings``.
    """

    def __init__(self, source=None, exn=None):
        # Mapping of setting name -> parsed value for this source.
        self.values = {}
        if source is not None:
            # Scan only the leading comment block of the source: stop at the
            # first non-comment, non-blank line.
            source_lines = StringIO(source) if isinstance(source, str) else source
            for line in source_lines:
                line = line.lstrip()
                if not line:
                    continue
                if line[0] != '#':
                    break
                line = line[1:].lstrip()
                # Accept both "# distutils: key=value" and "# cython: key=value".
                kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None)
                if kind is not None:
                    key, _, value = [s.strip() for s in line[len(kind):].partition('=')]
                    type = distutils_settings.get(key, None)
                    # "cython:" lines may carry directives that are not
                    # distutils settings at all; skip those silently.
                    if line.startswith("cython:") and type is None: continue
                    if type in (list, transitive_list):
                        value = parse_list(value)
                        if key == 'define_macros':
                            # distutils expects (name, value-or-None) pairs.
                            value = [tuple(macro.split('=', 1))
                                     if '=' in macro else (macro, None)
                                     for macro in value]
                    if type is bool_or:
                        value = _legacy_strtobool(value)
                    self.values[key] = value
        elif exn is not None:
            # Harvest settings from an existing Extension-like object instead.
            for key in distutils_settings:
                if key in ('name', 'sources','np_pythran'):
                    continue
                value = getattr(exn, key, None)
                if value:
                    self.values[key] = value

    def merge(self, other):
        """Fold the transitive settings of dependency *other* into self.

        Merge behavior per key is defined by the sentinels in
        ``distutils_settings``.  Returns self.
        """
        if other is None:
            return self
        for key, value in other.values.items():
            type = distutils_settings[key]
            if type is transitive_str and key not in self.values:
                self.values[key] = value
            elif type is transitive_list:
                if key in self.values:
                    # Change a *copy* of the list (Trac #845)
                    all = self.values[key][:]
                    for v in value:
                        if v not in all:
                            all.append(v)
                    value = all
                self.values[key] = value
            elif type is bool_or:
                self.values[key] = self.values.get(key, False) | value
        return self

    def subs(self, aliases):
        """Return a new DistutilsInfo with *aliases* substituted into values.

        An alias mapped to a list is spliced into list-valued settings.
        Returns self unchanged when *aliases* is None.
        """
        if aliases is None:
            return self
        resolved = DistutilsInfo()
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                new_value_list = []
                for v in value:
                    if v in aliases:
                        v = aliases[v]
                    if isinstance(v, list):
                        new_value_list += v
                    else:
                        new_value_list.append(v)
                value = new_value_list
            else:
                if value in aliases:
                    value = aliases[value]
            resolved.values[key] = value
        return resolved

    def apply(self, extension):
        """Apply the collected settings onto a distutils *extension* object.

        List-valued settings are appended to the extension's existing
        values; everything else overwrites the attribute.
        """
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                value = getattr(extension, key) + list(value)
            setattr(extension, key, value)
| 263 |
+
|
| 264 |
+
# Pre-compiled scanners used by strip_string_literals(); each is the bound
# '.search' method of a verbose regex.
# Top-level code scanner: comment start, braces (for f-string bodies), and
# (optionally f-prefixed) runs of quote characters.
_FIND_TOKEN = cython.declare(object, re.compile(r"""
(?P<comment> [#] ) |
(?P<brace> [{}] ) |
(?P<fstring> f )? (?P<quote> '+ | "+ )
""", re.VERBOSE).search)

# Inside a plain string literal: backslash escapes before a quote, or quote runs.
_FIND_STRING_TOKEN = cython.declare(object, re.compile(r"""
(?P<escape> [\\]+ ) (?P<escaped_quote> ['"] ) |
(?P<fstring> f )? (?P<quote> '+ | "+ )
""", re.VERBOSE).search)

# Inside an f-string literal: additionally track brace runs for format fields.
_FIND_FSTRING_TOKEN = cython.declare(object, re.compile(r"""
(?P<braces> [{]+ | [}]+ ) |
(?P<escape> [\\]+ ) (?P<escaped_quote> ['"] ) |
(?P<fstring> f )? (?P<quote> '+ | "+ )
""", re.VERBOSE).search)
| 280 |
+
|
| 281 |
+
|
| 282 |
+
def strip_string_literals(code: str, prefix: str = '__Pyx_L'):
    """
    Normalizes every string literal to be of the form '__Pyx_Lxxx',
    returning the normalized code and a mapping of labels to
    string literals.

    Comments are replaced by labels as well.  F-string expression parts are
    kept as code (recursively processed); only their literal text segments
    are replaced.  The implementation is a small recursive-descent scanner
    over the token regexes _FIND_TOKEN / _FIND_STRING_TOKEN /
    _FIND_FSTRING_TOKEN.
    """
    new_code: list = []
    literals: dict = {}
    counter: cython.Py_ssize_t = 0
    find_token = _FIND_TOKEN

    def append_new_label(literal):
        # Replace 'literal' by a fresh, unique label and remember the mapping.
        nonlocal counter
        counter += 1
        label = f"{prefix}{counter}_"
        literals[label] = literal
        new_code.append(label)

    def parse_string(quote_type: str, start: cython.Py_ssize_t, is_fstring: cython.bint) -> cython.Py_ssize_t:
        # Scan a string body beginning at 'start' until its closing
        # 'quote_type'; returns the position just after the closing quote
        # (or -1 at EOF).
        charpos: cython.Py_ssize_t = start

        find_token = _FIND_FSTRING_TOKEN if is_fstring else _FIND_STRING_TOKEN

        while charpos != -1:
            token = find_token(code, charpos)
            if token is None:
                # This probably indicates an unclosed string literal, i.e. a broken file.
                append_new_label(code[start:])
                charpos = -1
                break
            charpos = token.end()

            if token['escape']:
                if len(token['escape']) % 2 == 0 and token['escaped_quote'] == quote_type[0]:
                    # Quote is not actually escaped and might be part of a terminator, look at it next.
                    charpos -= 1

            elif is_fstring and token['braces']:
                # Formats or brace(s) in fstring.
                if len(token['braces']) % 2 == 0:
                    # Normal brace characters in string.
                    continue
                if token['braces'][-1] == '{':
                    # Start of a format expression: emit the literal text so
                    # far, then recurse into code parsing for the expression.
                    if start < charpos-1:
                        append_new_label(code[start : charpos-1])
                    new_code.append('{')
                    start = charpos = parse_code(charpos, in_fstring=True)

            elif token['quote'].startswith(quote_type):
                # Closing quote found (potentially together with further, unrelated quotes).
                charpos = token.start('quote')
                if charpos > start:
                    append_new_label(code[start : charpos])
                new_code.append(quote_type)
                charpos += len(quote_type)
                break

        return charpos

    def parse_code(start: cython.Py_ssize_t, in_fstring: cython.bint = False) -> cython.Py_ssize_t:
        # Scan plain code beginning at 'start'; returns the position where
        # scanning stopped (EOF -> -1, or just after a closing '}' when
        # called for an f-string expression).
        charpos: cython.Py_ssize_t = start
        end: cython.Py_ssize_t
        quote: str

        while charpos != -1:
            token = find_token(code, charpos)
            if token is None:
                new_code.append(code[start:])
                charpos = -1
                break
            charpos = end = token.end()

            if token['quote']:
                quote = token['quote']
                if len(quote) >= 6:
                    # Ignore empty triple-quoted strings: '''''' or """"""
                    quote = quote[:len(quote) % 6]
                if quote and len(quote) != 2:
                    if len(quote) > 3:
                        # Opening quote plus string content starting with
                        # further quote characters; keep only the first 3.
                        end -= len(quote) - 3
                        quote = quote[:3]
                    new_code.append(code[start:end])
                    start = charpos = parse_string(quote, end, is_fstring=token['fstring'])

            elif token['comment']:
                # Replace the comment text (up to end of line) by a label.
                new_code.append(code[start:end])
                charpos = code.find('\n', end)
                append_new_label(code[end : charpos if charpos != -1 else None])
                if charpos == -1:
                    break  # EOF
                start = charpos

            elif in_fstring and token['brace']:
                if token['brace'] == '}':
                    # Closing '}' of f-string.
                    charpos = end = token.start() + 1
                    new_code.append(code[start:end])  # with '}'
                    break
                else:
                    # Starting a calculated format modifier inside of an f-string format.
                    end = token.start() + 1
                    new_code.append(code[start:end])  # with '{'
                    start = charpos = parse_code(end, in_fstring=True)

        return charpos

    parse_code(0)
    return "".join(new_code), literals
| 390 |
+
|
| 391 |
+
|
| 392 |
+
# We need to allow spaces to allow for conditional compilation like
# IF ...:
#     cimport ...
# One alternative per line: "from cython.cimports.X c?import",
# "from X cimport", "c?import cython.cimports.X", "cimport X[, Y...]",
# 'cdef extern from "header"', and 'include "file"'.
dependency_regex = re.compile(
    r"(?:^ [ \t\f]* from [ \t\f]+ cython\.cimports\.([\w.]+) [ \t\f]+ c?import ) |"
    r"(?:^ [ \t\f]* from [ \t\f]+ ([\w.]+) [ \t\f]+ cimport ) |"
    r"(?:^ [ \t\f]* c?import [ \t\f]+ cython\.cimports\.([\w.]+) ) |"
    r"(?:^ [ \t\f]* cimport [ \t\f]+ ([\w.]+ (?:[ \t\f]* , [ \t\f]* [\w.]+)*) ) |"
    r"(?:^ [ \t\f]* cdef [ \t\f]+ extern [ \t\f]+ from [ \t\f]+ ['\"] ([^'\"]+) ['\"] ) |"
    r"(?:^ [ \t\f]* include [ \t\f]+ ['\"] ([^'\"]+) ['\"] )",
    re.MULTILINE | re.VERBOSE)
# Captures the imported names following a "from X cimport" clause, either
# parenthesised or as a bare comma-separated list terminated by '#' or EOL.
dependency_after_from_regex = re.compile(
    r"(?:^ [ \t\f]+ \( ([\w., \t\f]*) \) [ \t\f]* [#\n]) |"
    r"(?:^ [ \t\f]+ ([\w., \t\f]*) [ \t\f]* [#\n])",
    re.MULTILINE | re.VERBOSE)
| 407 |
+
|
| 408 |
+
|
| 409 |
+
def normalize_existing(base_path, rel_paths):
    """Normalize *rel_paths* against the directory containing *base_path*.

    Thin wrapper that converts the paths into a hashable, de-duplicated
    tuple so the cached normalize_existing0() can be used.
    """
    base_dir = os.path.dirname(base_path)
    unique_paths = tuple(set(rel_paths))
    return normalize_existing0(base_dir, unique_paths)
| 411 |
+
|
| 412 |
+
|
| 413 |
+
@cached_function
def normalize_existing0(base_dir, rel_paths):
    """
    Given some base directory ``base_dir`` and a list of path names
    ``rel_paths``, normalize each relative path name ``rel`` by
    replacing it by ``os.path.join(base, rel)`` if that file exists.

    Return a couple ``(normalized, needed_base)`` where ``normalized``
    is the list of normalized file names and ``needed_base`` is
    ``base_dir`` if we actually needed ``base_dir``. If no paths were
    changed (for example, if all paths were already absolute), then
    ``needed_base`` is ``None``.
    """
    result = []
    used_base = None
    for rel in rel_paths:
        # Only relative paths that actually exist under base_dir get rewritten.
        candidate = None if os.path.isabs(rel) else join_path(base_dir, rel)
        if candidate is not None and path_exists(candidate):
            result.append(os.path.normpath(candidate))
            used_base = base_dir
        else:
            result.append(rel)
    return (result, used_base)
| 439 |
+
|
| 440 |
+
|
| 441 |
+
def resolve_depends(depends, include_dirs):
    """Resolve each entry of ``depends`` against ``include_dirs``.

    Entries that cannot be located (e.g. system-style ``<header>`` names)
    are silently dropped from the result.
    """
    include_dirs = tuple(include_dirs)
    candidates = (resolve_depend(depend, include_dirs) for depend in depends)
    return [path for path in candidates if path is not None]


@cached_function
def resolve_depend(depend, include_dirs):
    """Return the normalized path of ``depend`` found in ``include_dirs``.

    Returns ``None`` for system-style includes (``<header.h>``) and for
    dependencies that cannot be found in any of the directories.
    """
    # BUG FIX: the original indexed depend[0]/depend[-1], which raises
    # IndexError for an empty depends entry.  startswith/endswith are
    # equivalent for non-empty strings and safe for "".
    if depend.startswith('<') and depend.endswith('>'):
        return None
    for dir in include_dirs:
        path = join_path(dir, depend)
        if path_exists(path):
            return os.path.normpath(path)
    return None


@cached_function
def package(filename):
    """Return the dotted package path containing ``filename`` as a tuple.

    Walks up the directory tree as long as each parent looks like a
    package directory; returns ``()`` for a top-level (non-package) file.
    """
    parent = os.path.dirname(os.path.abspath(str(filename)))
    # Stop at the filesystem root (dirname is a fixed point there) or at
    # the first directory that is not a package.
    if parent == filename or not is_package_dir(parent):
        return ()
    return package(parent) + (os.path.basename(parent),)


@cached_function
def fully_qualified_name(filename):
    """Return the fully qualified module name for ``filename``.

    Combines the enclosing package path with the file's bare module name.
    """
    basename = os.path.basename(filename)
    module, _ = os.path.splitext(basename)
    return '.'.join(package(filename) + (module,))


@cached_function
def parse_dependencies(source_filename):
    """Scan a Cython source file for its compile-time dependencies.

    Returns ``(cimports, includes, externs, distutils_info)``.

    Actual parsing is way too slow, so regular expressions are used
    instead.  The only catch is that comments and string literals must
    be stripped ahead of time.
    """
    with Utils.open_source_file(source_filename, error_handling='ignore') as fh:
        source = fh.read()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    # Undo line continuations and normalize tabs so the regexes stay simple.
    source = source.replace('\\\n', ' ').replace('\t', ' ')

    # TODO: pure mode
    cimports = []
    includes = []
    externs = []
    for match in dependency_regex.finditer(source):
        (pure_from, from_module, pure_import,
         import_list, extern_file, include_file) = match.groups()
        # The pure-Python "cython.cimports.*" spellings get the same
        # treatment as their cimport counterparts.
        if pure_from:
            from_module = pure_from
        if pure_import:
            import_list = pure_import

        if from_module:
            cimports.append(from_module)
            after_from = dependency_after_from_regex.search(source, pos=match.end())
            if after_from:
                parenthesised, one_line = after_from.groups()
                names = parenthesised or one_line
                cimports.extend("{}.{}".format(from_module, name.strip())
                                for name in names.split(','))

        elif import_list:
            cimports.extend(entry.strip() for entry in import_list.split(","))
        elif extern_file:
            # String contents were replaced by labels; map back to the literal.
            externs.append(literals[extern_file])
        else:
            includes.append(literals[include_file])
    return cimports, includes, externs, distutils_info


class DependencyTree:
    """Resolves and caches the (transitive) dependencies of Cython sources.

    Wraps the module-level ``parse_dependencies()`` scanner with caching
    and path resolution against a compilation ``context``.
    """

    def __init__(self, context, quiet=False):
        self.context = context
        self.quiet = quiet
        # Maps (extract, merge) function pairs to per-node result caches
        # for transitive_merge().
        self._transitive_cache = {}

    def parse_dependencies(self, source_filename):
        """Parse dependencies of a single file (path normalized for caching)."""
        if path_exists(source_filename):
            source_filename = os.path.normpath(source_filename)
        return parse_dependencies(source_filename)

    @cached_method
    def included_files(self, filename):
        """Return the set of files transitively included by ``filename``."""
        # This is messy because included files are textually included, resolving
        # cimports (but not includes) relative to the including file.
        all = set()
        for include in self.parse_dependencies(filename)[1]:
            include_path = join_path(os.path.dirname(filename), include)
            if not path_exists(include_path):
                include_path = self.context.find_include_file(include, source_file_path=filename)
            if include_path:
                if '.' + os.path.sep in include_path:
                    include_path = os.path.normpath(include_path)
                all.add(include_path)
                all.update(self.included_files(include_path))
            elif not self.quiet:
                # BUG FIX: the arguments were swapped - it is the *include*
                # that could not be located, referenced from *filename*.
                print("Unable to locate '%s' referenced from '%s'" % (include, filename))
        return all

    @cached_method
    def cimports_externs_incdirs(self, filename):
        """Return (cimports, externs, incdirs) including those of included files."""
        # This is really ugly. Nested cimports are resolved with respect to the
        # includer, but includes are resolved with respect to the includee.
        cimports, includes, externs = self.parse_dependencies(filename)[:3]
        cimports = set(cimports)
        externs = set(externs)
        incdirs = set()
        for include in self.included_files(filename):
            included_cimports, included_externs, included_incdirs = self.cimports_externs_incdirs(include)
            cimports.update(included_cimports)
            externs.update(included_externs)
            incdirs.update(included_incdirs)
        externs, incdir = normalize_existing(filename, externs)
        if incdir:
            incdirs.add(incdir)
        return tuple(cimports), externs, incdirs

    def cimports(self, filename):
        """Return the (transitively collected) cimported module names."""
        return self.cimports_externs_incdirs(filename)[0]

    def package(self, filename):
        return package(filename)

    def fully_qualified_name(self, filename):
        return fully_qualified_name(filename)

    @cached_method
    def find_pxd(self, module, filename=None):
        """Locate the .pxd file for ``module``, or None if not found."""
        is_relative = module[0] == '.'
        if is_relative and not filename:
            raise NotImplementedError("New relative imports.")
        if filename is not None:
            module_path = module.split('.')
            if is_relative:
                module_path.pop(0)  # just explicitly relative
            package_path = list(self.package(filename))
            # Each further leading '.' climbs one package level.
            while module_path and not module_path[0]:
                try:
                    package_path.pop()
                except IndexError:
                    return None  # FIXME: error?
                module_path.pop(0)
            relative = '.'.join(package_path + module_path)
            pxd = self.context.find_pxd_file(relative, source_file_path=filename)
            if pxd:
                return pxd
        if is_relative:
            return None  # FIXME: error?
        return self.context.find_pxd_file(module, source_file_path=filename)

    @cached_method
    def cimported_files(self, filename):
        """Return the tuple of .pxd files that ``filename`` depends on."""
        filename_root, filename_ext = os.path.splitext(filename)
        if filename_ext in ('.pyx', '.py') and path_exists(filename_root + '.pxd'):
            # A source file implicitly depends on its own .pxd sibling.
            pxd_list = [filename_root + '.pxd']
        else:
            pxd_list = []
        # Cimports generates all possible combinations package.module
        # when imported as from package cimport module.
        for module in self.cimports(filename):
            if module[:7] == 'cython.' or module == 'cython':
                # Built-in cython modules have no .pxd files on disk.
                continue
            pxd_file = self.find_pxd(module, filename)
            if pxd_file is not None:
                pxd_list.append(pxd_file)
        return tuple(pxd_list)

    @cached_method
    def immediate_dependencies(self, filename):
        """Return the set of direct dependencies, including the file itself."""
        # FIX: the set must be seeded with the file itself - otherwise
        # newest_dependency()/all_dependencies() would not consider the
        # source file's own timestamp.  (The original line was corrupted.)
        all_deps = {filename}
        all_deps.update(self.cimported_files(filename))
        all_deps.update(self.included_files(filename))
        return all_deps

    def all_dependencies(self, filename):
        """Return the full transitive dependency set of ``filename``."""
        return self.transitive_merge(filename, self.immediate_dependencies, set.union)

    @cached_method
    def timestamp(self, filename):
        return os.path.getmtime(filename)

    def extract_timestamp(self, filename):
        return self.timestamp(filename), filename

    def newest_dependency(self, filename):
        """Return (mtime, path) of the most recently modified dependency."""
        return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)])

    def distutils_info0(self, filename):
        """Collect per-file distutils metadata, augmented with extern deps."""
        info = self.parse_dependencies(filename)[3]
        kwds = info.values
        cimports, externs, incdirs = self.cimports_externs_incdirs(filename)
        basedir = os.getcwd()
        # Add dependencies on "cdef extern from ..." files
        if externs:
            externs = _make_relative(externs, basedir)
            if 'depends' in kwds:
                kwds['depends'] = list(set(kwds['depends']).union(externs))
            else:
                kwds['depends'] = list(externs)
        # Add include_dirs to ensure that the C compiler will find the
        # "cdef extern from ..." files
        if incdirs:
            include_dirs = list(kwds.get('include_dirs', []))
            for inc in _make_relative(incdirs, basedir):
                if inc not in include_dirs:
                    include_dirs.append(inc)
            kwds['include_dirs'] = include_dirs
        return info

    def distutils_info(self, filename, aliases=None, base=None):
        """Return merged distutils metadata over the transitive closure."""
        return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge)
                .subs(aliases)
                .merge(base))

    def transitive_merge(self, node, extract, merge):
        """Merge extract() results over all nodes reachable from ``node``."""
        try:
            seen = self._transitive_cache[extract, merge]
        except KeyError:
            seen = self._transitive_cache[extract, merge] = {}
        return self.transitive_merge_helper(
            node, extract, merge, seen, {}, self.cimported_files)[0]

    def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing):
        """DFS worker for transitive_merge(); tolerates dependency cycles.

        ``stack`` maps in-progress nodes to their DFS depth; the second
        return value names the shallowest node of a detected cycle (results
        inside a cycle are not cached until the cycle head is reached).
        """
        if node in seen:
            return seen[node], None
        deps = extract(node)
        if node in stack:
            return deps, node
        try:
            stack[node] = len(stack)
            loop = None
            for next in outgoing(node):
                sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing)
                if sub_loop is not None:
                    # Remember the shallowest cycle entry point.
                    if loop is not None and stack[loop] < stack[sub_loop]:
                        pass
                    else:
                        loop = sub_loop
                deps = merge(deps, sub_deps)
            if loop == node:
                loop = None
            if loop is None:
                seen[node] = deps
            return deps, loop
        finally:
            del stack[node]


# Process-wide DependencyTree singleton, created lazily.
_dep_tree = None


def create_dependency_tree(ctx=None, quiet=False):
    """Return the shared DependencyTree, creating it on first use.

    A default Context rooted at the current directory is built when no
    ``ctx`` is supplied; later calls reuse the first tree regardless of
    arguments.
    """
    global _dep_tree
    if _dep_tree is not None:
        return _dep_tree
    if ctx is None:
        ctx = Context(["."], get_directive_defaults(),
                      options=CompilationOptions(default_options))
    _dep_tree = DependencyTree(ctx, quiet=quiet)
    return _dep_tree


# If this changes, change also docs/src/reference/compilation.rst
# which mentions this function
def default_create_extension(template, kwds):
    """Build an Extension of the same class as ``template`` from ``kwds``.

    Resolves and merges file dependencies into ``kwds['depends']``, then
    returns ``(extension, metadata)`` where ``metadata`` records the
    distutils keywords and module name.
    """
    if 'depends' in kwds:
        search_dirs = kwds.get('include_dirs', []) + ["."]
        resolved = resolve_depends(kwds['depends'], search_dirs)
        kwds['depends'] = sorted(set(resolved + template.depends))

    # Instantiate the same Extension subclass the template uses.
    ext = type(template)(**kwds)
    if hasattr(template, "py_limited_api"):
        ext.py_limited_api = template.py_limited_api
    metadata = dict(distutils=kwds, module_name=kwds['name'])
    return (ext, metadata)


# This may be useful for advanced users?
def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
                          exclude_failures=False):
    """Expand glob patterns / Extension templates into concrete Extensions.

    Returns ``(module_list, module_metadata)`` where ``module_metadata``
    maps module names to the metadata dict produced by the
    ``create_extension`` hook.
    """
    if language is not None:
        print('Warning: passing language={0!r} to cythonize() is deprecated. '
              'Instead, put "# distutils: language={0}" in your .pyx or .pxd file(s)'.format(language))
    if exclude is None:
        exclude = []
    if patterns is None:
        return [], {}
    elif isinstance(patterns, str) or not isinstance(patterns, Iterable):
        patterns = [patterns]

    from distutils.extension import Extension
    if 'setuptools' in sys.modules:
        # Support setuptools Extension instances as well.
        extension_classes = (
            Extension,  # should normally be the same as 'setuptools.extension._Extension'
            sys.modules['setuptools.extension']._Extension,
            sys.modules['setuptools'].Extension,
        )
    else:
        extension_classes = (Extension,)

    # Modules named explicitly via Extension objects win over glob matches.
    explicit_modules = {m.name for m in patterns if isinstance(m, extension_classes)}
    deps = create_dependency_tree(ctx, quiet=quiet)
    shared_utility_qualified_name = ctx.shared_utility_qualified_name

    to_exclude = set()
    if not isinstance(exclude, list):
        exclude = [exclude]
    for pattern in exclude:
        to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))

    module_list = []
    module_metadata = {}

    # if no create_extension() function is defined, use a simple
    # default function.
    create_extension = ctx.options.create_extension or default_create_extension

    seen = set()
    for pattern in patterns:
        if isinstance(pattern, str):
            filepattern = pattern
            template = Extension(pattern, [])  # Fake Extension without sources
            name = '*'
            base = None
            ext_language = language
        elif isinstance(pattern, extension_classes):
            cython_sources = [s for s in pattern.sources
                              if os.path.splitext(s)[1] in ('.py', '.pyx')]
            if cython_sources:
                filepattern = cython_sources[0]
                if len(cython_sources) > 1:
                    print("Warning: Multiple cython sources found for extension '%s': %s\n"
                          "See https://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
                          "for sharing declarations among Cython files." % (pattern.name, cython_sources))
            elif shared_utility_qualified_name and pattern.name == shared_utility_qualified_name:
                # This is the shared utility code file.
                m, _ = create_extension(pattern, dict(
                    name=shared_utility_qualified_name,
                    sources=pattern.sources or [
                        shared_utility_qualified_name.replace('.', os.sep) + ('.cpp' if pattern.language == 'c++' else '.c')],
                    language=pattern.language,
                ))
                m.np_pythran = False
                m.shared_utility_qualified_name = None
                module_list.append(m)
                continue
            else:
                # ignore non-cython modules
                module_list.append(pattern)
                continue
            template = pattern
            name = template.name
            base = DistutilsInfo(exn=template)
            ext_language = None  # do not override whatever the Extension says
        else:
            msg = str("pattern is not of type str nor subclass of Extension (%s)"
                      " but of type %s and class %s" % (repr(Extension),
                                                        type(pattern),
                                                        pattern.__class__))
            raise TypeError(msg)

        for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern):
            if os.path.abspath(file) in to_exclude:
                continue
            module_name = deps.fully_qualified_name(file)
            if '*' in name:
                # Glob template: skip modules already given explicitly.
                if module_name in explicit_modules:
                    continue
            elif name:
                module_name = name

            Utils.raise_error_if_module_name_forbidden(module_name)

            if module_name not in seen:
                try:
                    kwds = deps.distutils_info(file, aliases, base).values
                except Exception:
                    if exclude_failures:
                        continue
                    raise
                if base is not None:
                    # Fill in template-level settings the file did not override.
                    for key, value in base.values.items():
                        if key not in kwds:
                            kwds[key] = value

                kwds['name'] = module_name

                sources = [file] + [m for m in template.sources if m != filepattern]
                if 'sources' in kwds:
                    # allow users to add .c files etc.
                    for source in kwds['sources']:
                        if source not in sources:
                            sources.append(source)
                kwds['sources'] = sources

                if ext_language and 'language' not in kwds:
                    kwds['language'] = ext_language

                np_pythran = kwds.pop('np_pythran', False)

                # Create the new extension
                m, metadata = create_extension(template, kwds)
                m.np_pythran = np_pythran or getattr(m, 'np_pythran', False)
                m.shared_utility_qualified_name = shared_utility_qualified_name
                if m.np_pythran:
                    update_pythran_extension(m)
                module_list.append(m)

                # Store metadata (this will be written as JSON in the
                # generated C file but otherwise has no purpose)
                module_metadata[module_name] = metadata

                if file not in m.sources:
                    # Old setuptools unconditionally replaces .pyx with .c/.cpp
                    target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c')
                    try:
                        m.sources.remove(target_file)
                    except ValueError:
                        # never seen this in the wild, but probably better to warn about this unexpected case
                        print("Warning: Cython source file not found in sources list, adding %s" % file)
                    m.sources.insert(0, file)
                seen.add(name)
    return module_list, module_metadata


# This is the user-exposed entry point.
def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=None, language=None,
              exclude_failures=False, show_all_warnings=False, **options):
    """
    Compile a set of source modules into C/C++ files and return a list of distutils
    Extension objects for them.

    :param module_list: As module list, pass either a glob pattern, a list of glob
                        patterns or a list of Extension objects.  The latter
                        allows you to configure the extensions separately
                        through the normal distutils options.
                        You can also pass Extension objects that have
                        glob patterns as their sources. Then, cythonize
                        will resolve the pattern and create a
                        copy of the Extension for every matching file.

    :param exclude: When passing glob patterns as ``module_list``, you can exclude certain
                    module names explicitly by passing them into the ``exclude`` option.

    :param nthreads: The number of concurrent builds for parallel compilation
                     (requires the ``multiprocessing`` module).

    :param aliases: If you want to use compiler directives like ``# distutils: ...`` but
                    can only know at compile time (when running the ``setup.py``) which values
                    to use, you can use aliases and pass a dictionary mapping those aliases
                    to Python strings when calling :func:`cythonize`. As an example, say you
                    want to use the compiler
                    directive ``# distutils: include_dirs = ../static_libs/include/``
                    but this path isn't always fixed and you want to find it when running
                    the ``setup.py``. You can then do ``# distutils: include_dirs = MY_HEADERS``,
                    find the value of ``MY_HEADERS`` in the ``setup.py``, put it in a python
                    variable called ``foo`` as a string, and then call
                    ``cythonize(..., aliases={'MY_HEADERS': foo})``.

    :param quiet: If True, Cython won't print error, warning, or status messages during the
                  compilation.

    :param force: Forces the recompilation of the Cython modules, even if the timestamps
                  don't indicate that a recompilation is necessary.

    :param language: To globally enable C++ mode, you can pass ``language='c++'``. Otherwise, this
                     will be determined at a per-file level based on compiler directives.  This
                     affects only modules found based on file names.  Extension instances passed
                     into :func:`cythonize` will not be changed. It is recommended to rather
                     use the compiler directive ``# distutils: language = c++`` than this option.

    :param exclude_failures: For a broad 'try to compile' mode that ignores compilation
                             failures and simply excludes the failed extensions,
                             pass ``exclude_failures=True``. Note that this only
                             really makes sense for compiling ``.py`` files which can also
                             be used without compilation.

    :param show_all_warnings: By default, not all Cython warnings are printed.
                              Set to true to show all warnings.

    :param annotate: If ``True``, will produce a HTML file for each of the ``.pyx`` or ``.py``
                     files compiled. The HTML file gives an indication
                     of how much Python interaction there is in
                     each of the source code lines, compared to plain C code.
                     It also allows you to see the C/C++ code
                     generated for each line of Cython code. This report is invaluable when
                     optimizing a function for speed,
                     and for determining when to :ref:`release the GIL <nogil>`:
                     in general, a ``nogil`` block may contain only "white" code.
                     See examples in :ref:`determining_where_to_add_types` or
                     :ref:`primes`.


    :param annotate-fullc: If ``True`` will produce a colorized HTML version of
                           the source which includes entire generated C/C++-code.


    :param compiler_directives: Allow to set compiler directives in the ``setup.py`` like this:
                                ``compiler_directives={'embedsignature': True}``.
                                See :ref:`compiler-directives`.

    :param depfile: produce depfiles for the sources if True.
    :param cache: If ``True`` the cache enabled with default path. If the value is a path to a directory,
                  then the directory is used to cache generated ``.c``/``.cpp`` files. By default cache is disabled.
                  See :ref:`cython-cache`.
    """
    if exclude is None:
        exclude = []
    if 'include_path' not in options:
        options['include_path'] = ['.']
    if 'common_utility_include_dir' in options:
        safe_makedirs(options['common_utility_include_dir'])

    # Pop before building CompilationOptions - depfile is not a compiler option.
    depfile = options.pop('depfile', None)

    if pythran is None:
        pythran_options = None
    else:
        pythran_options = CompilationOptions(**options)
        pythran_options.cplus = True
        pythran_options.np_pythran = True

    if force is None:
        force = os.environ.get("CYTHON_FORCE_REGEN") == "1"  # allow global overrides for build systems

    c_options = CompilationOptions(**options)
    cpp_options = CompilationOptions(**options); cpp_options.cplus = True
    ctx = Context.from_options(c_options)
    options = c_options
    shared_utility_qualified_name = ctx.shared_utility_qualified_name
    module_list, module_metadata = create_extension_list(
        module_list,
        exclude=exclude,
        ctx=ctx,
        quiet=quiet,
        exclude_failures=exclude_failures,
        language=language,
        aliases=aliases)

    fix_windows_unicode_modules(module_list)

    deps = create_dependency_tree(ctx, quiet=quiet)
    build_dir = getattr(options, 'build_dir', None)
    if options.cache and not (options.annotate or Options.annotate):
        # cache is enabled when:
        # * options.cache is True (the default path to the cache base dir is used)
        # * options.cache is the explicit path to the cache base dir
        # * annotations are not generated
        cache_path = None if options.cache is True else options.cache
        cache = Cache(cache_path, getattr(options, 'cache_size', None))
    else:
        cache = None

    def copy_to_build_dir(filepath, root=os.getcwd()):
        # Mirror a dependency file into the out-of-place build directory.
        filepath_abs = os.path.abspath(filepath)
        if os.path.isabs(filepath):
            filepath = filepath_abs
        if filepath_abs.startswith(root):
            # distutil extension depends are relative to cwd
            mod_dir = join_path(build_dir,
                                os.path.dirname(_relpath(filepath, root)))
            copy_once_if_newer(filepath_abs, mod_dir)

    def file_in_build_dir(c_file):
        # Map a generated file path into build_dir (creating directories).
        if not build_dir:
            return c_file
        if os.path.isabs(c_file):
            c_file = os.path.splitdrive(c_file)[1]
            c_file = c_file.split(os.sep, 1)[1]
        c_file = os.path.join(build_dir, c_file)
        dir = os.path.dirname(c_file)
        safe_makedirs_once(dir)
        return c_file

    modules_by_cfile = collections.defaultdict(list)
    to_compile = []
    for m in module_list:
        if build_dir:
            for dep in m.depends:
                copy_to_build_dir(dep)

        cy_sources = [
            source for source in m.sources
            if os.path.splitext(source)[1] in ('.pyx', '.py')]
        if len(cy_sources) == 1:
            # normal "special" case: believe the Extension module name to allow user overrides
            full_module_name = m.name
        else:
            # infer FQMN from source files
            full_module_name = None

        np_pythran = getattr(m, 'np_pythran', False)
        py_limited_api = getattr(m, 'py_limited_api', False)

        if np_pythran:
            options = pythran_options
        elif m.language == 'c++':
            options = cpp_options
        else:
            options = c_options

        new_sources = []
        for source in m.sources:
            base, ext = os.path.splitext(source)
            if ext in ('.pyx', '.py'):
                c_file = base + ('.cpp' if m.language == 'c++' or np_pythran else '.c')

                # setup for out of place build directory if enabled
                c_file = file_in_build_dir(c_file)

                # write out the depfile, if requested
                if depfile:
                    dependencies = deps.all_dependencies(source)
                    write_depfile(c_file, source, dependencies)

                # Missing files and those generated by other Cython versions should always be recreated.
                if Utils.file_generated_by_this_cython(c_file):
                    c_timestamp = os.path.getmtime(c_file)
                else:
                    c_timestamp = -1

                # Priority goes first to modified files, second to direct
                # dependents, and finally to indirect dependents.
                if c_timestamp < deps.timestamp(source):
                    dep_timestamp, dep = deps.timestamp(source), source
                    priority = 0
                else:
                    dep_timestamp, dep = deps.newest_dependency(source)
                    priority = 2 - (dep in deps.immediate_dependencies(source))
                if force or c_timestamp < dep_timestamp:
                    if not quiet and not force:
                        if source == dep:
                            print("Compiling %s because it changed." % Utils.decode_filename(source))
                        else:
                            print("Compiling %s because it depends on %s." % (
                                Utils.decode_filename(source),
                                Utils.decode_filename(dep),
                            ))
                    if not force and cache:
                        fingerprint = cache.transitive_fingerprint(
                            source, deps.all_dependencies(source), options,
                            FingerprintFlags(m.language or 'c', py_limited_api, np_pythran)
                        )
                    else:
                        fingerprint = None
                    to_compile.append((
                        priority, source, c_file, fingerprint, quiet,
                        options, not exclude_failures, module_metadata.get(m.name),
                        full_module_name, show_all_warnings))
                modules_by_cfile[c_file].append(m)
            elif shared_utility_qualified_name and m.name == shared_utility_qualified_name:
                # Generate shared utility code module now.
                c_file = file_in_build_dir(source)
                module_options = CompilationOptions(
                    options, shared_c_file_path=c_file, shared_utility_qualified_name=None)
                if not Utils.is_cython_generated_file(c_file):
                    print(f"Warning: Shared module source file is not a Cython file - not creating '{m.name}' as '{c_file}'")
                elif force or not Utils.file_generated_by_this_cython(c_file):
                    from .SharedModule import generate_shared_module
                    if not quiet:
                        print(f"Generating shared module '{m.name}'")
                    generate_shared_module(module_options)
            else:
                c_file = source
                if build_dir:
                    copy_to_build_dir(source)

            new_sources.append(c_file)

        m.sources = new_sources

    to_compile.sort()
    N = len(to_compile)

    # Drop "priority" sorting component of "to_compile" entries
    # and add a simple progress indicator and the remaining arguments.
    build_progress_indicator = ("[{0:%d}/%d] " % (len(str(N)), N)).format
    to_compile = [
        task[1:] + (build_progress_indicator(i), cache)
        for i, task in enumerate(to_compile, 1)
    ]

    if N <= 1:
        nthreads = 0
    try:
        from concurrent.futures import ProcessPoolExecutor
    except ImportError:
        nthreads = 0

    if nthreads:
        with ProcessPoolExecutor(
            max_workers=nthreads,
            initializer=_init_multiprocessing_helper,
        ) as proc_pool:
            try:
                list(proc_pool.map(cythonize_one_helper, to_compile, chunksize=1))
            except KeyboardInterrupt:
                proc_pool.terminate_workers()
                proc_pool.shutdown(cancel_futures=True)
                raise
    else:
        for args in to_compile:
            cythonize_one(*args)

    if exclude_failures:
        failed_modules = set()
        for c_file, modules in modules_by_cfile.items():
            if not os.path.exists(c_file):
                failed_modules.update(modules)
            elif os.path.getsize(c_file) < 200:
                f = open(c_file, 'r', encoding='iso8859-1')
                try:
                    if f.read(len('#error ')) == '#error ':
                        # dead compilation result
                        failed_modules.update(modules)
                finally:
                    f.close()
        if failed_modules:
            for module in failed_modules:
                module_list.remove(module)
            print("Failed compilations: %s" % ', '.join(sorted([
                module.name for module in failed_modules])))

    if cache:
        cache.cleanup_cache()

    # cythonize() is often followed by the (non-Python-buffered)
    # compiler output, flush now to avoid interleaving output.
    sys.stdout.flush()
    return module_list


def fix_windows_unicode_modules(module_list):
    """Work around a distutils bug for non-ASCII module names on Windows.

    Only active on win32 with Python < 3.8.2; wraps each affected module's
    ``export_symbols`` so distutils does not emit a broken PyInit export.
    """
    # Hack around a distutils 3.[5678] bug on Windows for unicode module names.
    # https://bugs.python.org/issue39432
    if sys.platform != "win32":
        return
    if sys.version_info >= (3, 8, 2):
        return

    def make_filtered_list(ignored_symbol, old_entries):
        # Build a list that claims to "contain" ignored_symbol without listing it.
        class FilteredExportSymbols(list):
            # export_symbols for unicode filename cause link errors on Windows
            # Cython doesn't need them (it already defines PyInit with the correct linkage)
            # so use this class as a temporary fix to stop them from being generated
            def __contains__(self, val):
                # so distutils doesn't "helpfully" add PyInit_<name>
                return val == ignored_symbol or list.__contains__(self, val)

        filtered_list = FilteredExportSymbols(old_entries)
        if old_entries:
            # NOTE(review): the list is already initialised from old_entries above,
            # so this extend() appears to add every non-ignored name a second
            # time — confirm whether the constructor argument is intentional.
            filtered_list.extend(name for name in old_entries if name != ignored_symbol)
        return filtered_list

    for m in module_list:
        if m.name.isascii():
            continue
        # Hide "PyInit_<lastname>" from distutils for non-ASCII module names.
        m.export_symbols = make_filtered_list(
            "PyInit_" + m.name.rsplit(".", 1)[-1],
            m.export_symbols,
        )
|
| 1209 |
+
|
| 1210 |
+
|
| 1211 |
+
if os.environ.get('XML_RESULTS'):
    # When XML_RESULTS points at a directory, wrap each cythonize call so that
    # a JUnit-style XML result file is written per module (used on test/CI runs).
    compile_result_dir = os.environ['XML_RESULTS']
    def record_results(func):
        def with_record(*args):
            t = time.time()
            success = True
            try:
                try:
                    func(*args)
                except:
                    # Any failure (including SystemExit/KeyboardInterrupt) is
                    # recorded as a test failure; the XML is still written.
                    success = False
            finally:
                t = time.time() - t
                module = fully_qualified_name(args[0])
                name = "cythonize." + module
                failures = 1 - success
                if success:
                    failure_item = ""
                else:
                    failure_item = "failure"
                output = open(os.path.join(compile_result_dir, name + ".xml"), "w")
                output.write("""
                    <?xml version="1.0" ?>
                    <testsuite name="%(name)s" errors="0" failures="%(failures)s" tests="1" time="%(t)s">
                    <testcase classname="%(name)s" name="cythonize">
                    %(failure_item)s
                    </testcase>
                    </testsuite>
                """.strip() % locals())
                output.close()
        return with_record
else:
    # No XML_RESULTS configured: recording is a no-op pass-through decorator.
    def record_results(func):
        return func
|
| 1245 |
+
|
| 1246 |
+
|
| 1247 |
+
# TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results
def cythonize_one(pyx_file, c_file,
                  fingerprint=None, quiet=False, options=None,
                  raise_on_failure=True, embedded_metadata=None,
                  full_module_name=None, show_all_warnings=False,
                  progress="", cache=None):
    """Compile a single .pyx file to C/C++ code in ``c_file``.

    Prints a progress line (unless ``quiet``), consults the compilation cache
    when ``cache`` and ``fingerprint`` are given, and on failure either raises
    CompileError (``raise_on_failure=True``) or removes the stale output file
    so the caller can detect the failed module afterwards.
    """
    from ..Compiler.Main import compile_single, default_options
    from ..Compiler.Errors import CompileError, PyrexError

    if not quiet:
        if cache and fingerprint and cache.lookup_cache(c_file, fingerprint):
            print(f"{progress}Found compiled {pyx_file} in cache")
        else:
            print(f"{progress}Cythonizing {Utils.decode_filename(pyx_file)}")
    if options is None:
        options = CompilationOptions(default_options)
    options.output_file = c_file
    options.embedded_metadata = embedded_metadata

    # Temporarily lower the global warning threshold if all warnings were
    # requested; restored in the finally block below.
    old_warning_level = Errors.LEVEL
    if show_all_warnings:
        Errors.LEVEL = 0

    any_failures = 0
    try:
        result = compile_single(pyx_file, options, full_module_name=full_module_name, cache=cache, fingerprint=fingerprint)
        if result.num_errors > 0:
            any_failures = 1
    except (OSError, PyrexError) as e:
        # Expected compiler/file errors: report, keep going.
        sys.stderr.write('%s\n' % e)
        any_failures = 1
        # XXX
        import traceback
        traceback.print_exc()
    except Exception:
        # Unexpected internal errors: re-raise unless failures are tolerated.
        if raise_on_failure:
            raise
        import traceback
        traceback.print_exc()
        any_failures = 1
    finally:
        if show_all_warnings:
            Errors.LEVEL = old_warning_level

    if any_failures:
        if raise_on_failure:
            raise CompileError(None, pyx_file)
        elif os.path.exists(c_file):
            # Remove the broken output so a later build does not mistake it
            # for a valid compilation result.
            os.remove(c_file)
|
| 1297 |
+
|
| 1298 |
+
|
| 1299 |
+
def cythonize_one_helper(m):
    """Unpack one task tuple and run cythonize_one() on it.

    Worker entry point for parallel builds: exceptions are printed here so
    they appear in the worker's output, then re-raised to fail the task.
    """
    import traceback
    try:
        result = cythonize_one(*m)
    except Exception:
        traceback.print_exc()
        raise
    else:
        return result
|
| 1306 |
+
|
| 1307 |
+
|
| 1308 |
+
def _init_multiprocessing_helper():
|
| 1309 |
+
# KeyboardInterrupt kills workers, so don't let them get it
|
| 1310 |
+
import signal
|
| 1311 |
+
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
venv/lib/python3.10/site-packages/Cython/Build/Distutils.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from Cython.Distutils.build_ext import build_ext
|
venv/lib/python3.10/site-packages/Cython/Build/Inline.py
ADDED
|
@@ -0,0 +1,463 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gc
|
| 2 |
+
import hashlib
|
| 3 |
+
import inspect
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import sys
|
| 7 |
+
import time
|
| 8 |
+
|
| 9 |
+
from distutils.core import Distribution, Extension
|
| 10 |
+
from distutils.command.build_ext import build_ext
|
| 11 |
+
|
| 12 |
+
import Cython
|
| 13 |
+
from ..Compiler.Main import Context
|
| 14 |
+
from ..Compiler.Options import (default_options, CompilationOptions,
|
| 15 |
+
get_directive_defaults)
|
| 16 |
+
|
| 17 |
+
from ..Compiler.Visitor import CythonTransform, EnvTransform
|
| 18 |
+
from ..Compiler.ParseTreeTransforms import SkipDeclarations
|
| 19 |
+
from ..Compiler.TreeFragment import parse_from_strings
|
| 20 |
+
from .Dependencies import strip_string_literals, cythonize, cached_function
|
| 21 |
+
from .Cache import get_cython_cache_dir
|
| 22 |
+
from ..Compiler import Pipeline
|
| 23 |
+
import cython as cython_module
|
| 24 |
+
|
| 25 |
+
import importlib.util
|
| 26 |
+
from importlib.machinery import ExtensionFileLoader
|
| 27 |
+
|
| 28 |
+
def load_dynamic(name, path):
    """Import and return the extension module ``name`` from the shared-object
    file at ``path`` (replacement for the removed ``imp.load_dynamic()``).
    """
    spec = importlib.util.spec_from_file_location(name, loader=ExtensionFileLoader(name, path))
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class UnboundSymbols(EnvTransform, SkipDeclarations):
    """Tree visitor collecting names used in a code fragment that are not
    bound in any visible scope."""
    def __init__(self):
        # Deliberately bypass EnvTransform.__init__ (hence super(EnvTransform, ...)):
        # a bare fragment has no compilation context to pass along.
        super(EnvTransform, self).__init__(context=None)
        # Names the scope chain could not resolve.
        self.unbound = set()
    def visit_NameNode(self, node):
        # Record any name the current environment cannot look up.
        if not self.current_env().lookup(node.name):
            self.unbound.add(node.name)
        return node
    def __call__(self, node):
        # Run the transform over the tree, then hand back the collected names.
        super().__call__(node)
        return self.unbound
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@cached_function
def unbound_symbols(code, context=None):
    """Return the names referenced by ``code`` that are neither defined in it
    nor Python builtins, as a tuple (result cached per code string)."""
    if context is None:
        context = Context([], get_directive_defaults(),
                          options=CompilationOptions(default_options))
    from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
    tree = parse_from_strings('(tree fragment)', code)
    # Run the compiler pipeline only up to (and including) declaration
    # analysis — later phases are not needed to resolve names.
    for phase in Pipeline.create_pipeline(context, 'pyx'):
        if phase is None:
            continue
        tree = phase(tree)
        if isinstance(phase, AnalyseDeclarationsTransform):
            break
    import builtins
    return tuple(UnboundSymbols()(tree) - set(dir(builtins)))
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def unsafe_type(arg, context=None):
    """Infer a Cython type string for *arg*, mapping Python ints to C 'long'.

    "Unsafe" because a C long may overflow for large Python ints; everything
    else is delegated to safe_type().
    """
    if type(arg) is int:
        return 'long'
    return safe_type(arg, context)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def safe_type(arg, context=None):
    """Infer a safe Cython type declaration string for *arg*.

    Builtin containers and str map to their own names, numeric scalars to the
    matching C type, numpy arrays to a typed ndarray declaration.  For other
    objects the MRO is searched for a type known to *context*; the fallback
    is the generic 'object'.
    """
    py_type = type(arg)
    if py_type in (list, tuple, dict, str):
        return py_type.__name__
    scalar_types = {complex: 'double complex', float: 'double', bool: 'bint'}
    if py_type in scalar_types:
        return scalar_types[py_type]
    if 'numpy' in sys.modules and isinstance(arg, sys.modules['numpy'].ndarray):
        return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim)
    for base_type in py_type.__mro__:
        if base_type.__module__ in ('__builtin__', 'builtins'):
            # Reached a builtin base class without finding a cdef type.
            return 'object'
        module = context.find_module(base_type.__module__, need_pxd=False)
        if module:
            entry = module.lookup(base_type.__name__)
            if entry.is_type:
                return '%s.%s' % (base_type.__module__, base_type.__name__)
    return 'object'
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def _get_build_extension():
    """Create and return a finalized distutils ``build_ext`` command object,
    honouring the user's distutils configuration files."""
    dist = Distribution()
    # Ensure the build respects distutils configuration by parsing
    # the configuration files
    config_files = dist.find_config_files()
    dist.parse_config_files(config_files)
    build_extension = build_ext(dist)
    build_extension.finalize_options()
    return build_extension
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
@cached_function
def _create_context(cython_include_dirs):
    """Build (and cache) a Cython compilation Context for the given include
    directories.  The argument must be hashable (a tuple) for the caching."""
    return Context(
        list(cython_include_dirs),
        get_directive_defaults(),
        options=CompilationOptions(default_options)
    )


# Session-level cache used by cython_inline(): maps code strings to their
# unbound symbols, and (code, arg_sigs, key_hash) triples to compiled
# __invoke functions.
_cython_inline_cache = {}
_cython_inline_default_context = _create_context(('.',))
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None):
    """Fill ``kwds`` in place with values for the given unbound symbols.

    Values come from ``locals``/``globals`` or, when those are None, from the
    frame three levels up the stack — the depth is hard-wired, so this must
    only be called from its expected call sites in this module.  Symbols that
    cannot be found anywhere are reported but left out.
    """
    for symbol in unbound_symbols:
        if symbol not in kwds:
            if locals is None or globals is None:
                # Walk up past _populate_unbound and cython_inline to the
                # original caller's frame.
                calling_frame = inspect.currentframe().f_back.f_back.f_back
                if locals is None:
                    locals = calling_frame.f_locals
                if globals is None:
                    globals = calling_frame.f_globals
                if not isinstance(locals, dict):
                    # FrameLocalsProxy is stricter than dict on how it looks up keys
                    # and this means our "EncodedStrings" don't match the keys in locals.
                    # Therefore copy to a dict.
                    locals = dict(locals)
            if symbol in locals:
                kwds[symbol] = locals[symbol]
            elif symbol in globals:
                kwds[symbol] = globals[symbol]
            else:
                print("Couldn't find %r" % symbol)
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def _inline_key(orig_code, arg_sigs, language_level):
    """Return a stable hex digest identifying an inline compilation.

    The key covers the code, the typed argument signatures, the interpreter
    (version and executable), the language level and the Cython version, so a
    cached build is only reused in a fully matching environment.
    """
    key_parts = (orig_code, arg_sigs, sys.version_info, sys.executable,
                 language_level, Cython.__version__)
    return hashlib.sha256(str(key_parts).encode('utf-8')).hexdigest()
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def cython_inline(code, get_type=unsafe_type,
                  lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
                  cython_include_dirs=None, cython_compiler_directives=None,
                  force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds):
    """Compile ``code`` as the body of a Cython function and execute it.

    Unbound names in the code are resolved from ``kwds`` and the caller's
    locals/globals, typed via ``get_type``, and passed as arguments to a
    generated ``__invoke()`` function, whose ``locals()`` dict is returned.
    Builds are cached on disk under ``lib_dir``, keyed by code, argument
    signatures, interpreter and Cython version; ``force=True`` rebuilds.
    """
    if get_type is None:
        get_type = lambda x: 'object'
    ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context

    cython_compiler_directives = dict(cython_compiler_directives) if cython_compiler_directives else {}
    if language_level is None and 'language_level' not in cython_compiler_directives:
        language_level = '3'
    if language_level is not None:
        cython_compiler_directives['language_level'] = language_level

    key_hash = None

    # Fast path if this has been called in this session.
    _unbound_symbols = _cython_inline_cache.get(code)
    if _unbound_symbols is not None:
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
        args = sorted(kwds.items())
        arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args])
        key_hash = _inline_key(code, arg_sigs, language_level)
        invoke = _cython_inline_cache.get((code, arg_sigs, key_hash))
        if invoke is not None:
            arg_list = [arg[1] for arg in args]
            return invoke(*arg_list)

    orig_code = code
    code, literals = strip_string_literals(code)
    code = strip_common_indent(code)
    if locals is None:
        locals = inspect.currentframe().f_back.f_back.f_locals
    if globals is None:
        globals = inspect.currentframe().f_back.f_back.f_globals
    try:
        # Find the names the code fragment uses but does not define, and pull
        # their values from the caller's namespaces into kwds.
        _cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code)
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
    except AssertionError:
        if not quiet:
            # Parsing from strings not fully supported (e.g. cimports).
            print("Could not parse code as a string (to extract unbound symbols).")

    cimports = []
    for name, arg in list(kwds.items()):
        if arg is cython_module:
            # The 'cython' magic module becomes a cimport, not an argument.
            cimports.append('\ncimport cython as %s' % name)
            del kwds[name]
    arg_names = sorted(kwds)
    arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
    if key_hash is None:
        key_hash = _inline_key(orig_code, arg_sigs, language_level)
    module_name = "_cython_inline_" + key_hash

    if module_name in sys.modules:
        module = sys.modules[module_name]

    else:
        build_extension = None
        if cython_inline.so_ext is None:
            # Figure out and cache current extension suffix
            build_extension = _get_build_extension()
            cython_inline.so_ext = build_extension.get_ext_filename('')

        lib_dir = os.path.abspath(lib_dir)
        module_path = os.path.join(lib_dir, module_name + cython_inline.so_ext)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)
        if force or not os.path.isfile(module_path):
            cflags = []
            define_macros = []
            c_include_dirs = []
            # Qualified argument types ("pkg.Type") need a matching cimport.
            qualified = re.compile(r'([.\w]+)[.]')
            for type, _ in arg_sigs:
                m = qualified.match(type)
                if m:
                    cimports.append('\ncimport %s' % m.groups()[0])
                    # one special case
                    if m.groups()[0] == 'numpy':
                        import numpy
                        c_include_dirs.append(numpy.get_include())
                        define_macros.append(("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION"))
                        # cflags.append('-Wno-unused')
            module_body, func_body = extract_func_code(code)
            params = ', '.join(['%s %s' % a for a in arg_sigs])
            module_code = """
%(module_body)s
%(cimports)s
def __invoke(%(params)s):
%(func_body)s
    return locals()
            """ % {'cimports': '\n'.join(cimports),
                   'module_body': module_body,
                   'params': params,
                   'func_body': func_body }
            # Restore the string literals that strip_string_literals() replaced.
            for key, value in literals.items():
                module_code = module_code.replace(key, value)
            pyx_file = os.path.join(lib_dir, module_name + '.pyx')
            fh = open(pyx_file, 'w')
            try:
                fh.write(module_code)
            finally:
                fh.close()
            extension = Extension(
                name=module_name,
                sources=[pyx_file],
                include_dirs=c_include_dirs or None,
                extra_compile_args=cflags or None,
                define_macros=define_macros or None,
            )
            if build_extension is None:
                build_extension = _get_build_extension()
            build_extension.extensions = cythonize(
                [extension],
                include_path=cython_include_dirs or ['.'],
                compiler_directives=cython_compiler_directives,
                quiet=quiet)
            build_extension.build_temp = os.path.dirname(pyx_file)
            build_extension.build_lib = lib_dir
            build_extension.run()

        if sys.platform == 'win32' and sys.version_info >= (3, 8):
            # Windows needs the build dir on the DLL search path to load deps.
            with os.add_dll_directory(os.path.abspath(lib_dir)):
                module = load_dynamic(module_name, module_path)
        else:
            module = load_dynamic(module_name, module_path)

    _cython_inline_cache[orig_code, arg_sigs, key_hash] = module.__invoke
    arg_list = [kwds[arg] for arg in arg_names]
    return module.__invoke(*arg_list)
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
# The code template used for cymeit benchmark runs.
# We keep the benchmark repetition separate from the benchmarked code
# to prevent the C compiler from doing unhelpful loop optimisations.
# '{setup_code}' runs once per benchmark closure; '{benchmark_code}' is the
# timed body.  Both are substituted via str.format() in cymeit().
_CYMEIT_TEMPLATE = """
def __PYX_repeat_benchmark(benchmark, timer, size_t number):
    cdef size_t i

    t0 = timer()
    for i in range(number):
        benchmark()
    t1 = timer()
    return t1 - t0

def __PYX_make_benchmark():
    {setup_code}

    def __PYX_run_benchmark():
        {benchmark_code}

    return __PYX_run_benchmark
"""
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
def cymeit(code, setup_code=None, import_module=None, directives=None, timer=time.perf_counter, repeat=9):
    """Benchmark a Cython code string similar to 'timeit'.

    'setup_code': string of setup code that will be run before taking the timings.

    'import_module': a module namespace to run the benchmark in
    (usually a compiled Cython module).

    'directives': Cython directives to use when compiling the benchmark code.

    'timer': The timer function. Defaults to 'time.perf_counter', returning float seconds.
    Nanosecond timers are detected (and can only be used) if they return integers.

    'repeat': The number of timings to take and return.

    Returns a tuple: (list of single-loop timings, number of loops run for each)
    """
    import textwrap

    # Compile the benchmark code as an inline closure function.

    setup_code = strip_common_indent(setup_code) if setup_code else ''
    code = strip_common_indent(code) if code.strip() else 'pass'

    module_namespace = __import__(import_module).__dict__ if import_module else None

    cymeit_code = _CYMEIT_TEMPLATE.format(
        setup_code=textwrap.indent(setup_code, ' '*4).strip(),
        benchmark_code=textwrap.indent(code, ' '*8).strip(),

    )

    namespace = cython_inline(
        cymeit_code,
        cython_compiler_directives=directives,
        locals=module_namespace,
    )

    make_benchmark = namespace['__PYX_make_benchmark']
    repeat_benchmark = namespace['__PYX_repeat_benchmark']

    # Based on 'timeit' in CPython 3.13.

    def timeit(number):
        # Run the compiled benchmark 'number' times with GC disabled.
        benchmark = make_benchmark()

        gcold = gc.isenabled()
        gc.disable()
        try:
            timing = repeat_benchmark(benchmark, timer, number)
        finally:
            if gcold:
                gc.enable()
        return timing

    # Find a sufficiently large number of loops, warm up the system.

    timer_returns_nanoseconds = isinstance(timer(), int)
    one_second = 1_000_000_000 if timer_returns_nanoseconds else 1.0

    # Run for at least 0.2 seconds, either as integer nanoseconds or floating point seconds.
    min_runtime = one_second // 5 if timer_returns_nanoseconds else one_second / 5

    def autorange():
        # Grow the loop count in a 1-2-5 pattern until one run takes long
        # enough to measure reliably.
        i = 1
        while True:
            for j in 1, 2, 5:
                number = i * j
                time_taken = timeit(number)
                assert isinstance(time_taken, int if timer_returns_nanoseconds else float)
                if time_taken >= min_runtime:
                    return number
                elif timer_returns_nanoseconds and (time_taken < 10 and number >= 10):
                    # Arbitrary sanity check to prevent endless loops for non-ns timers.
                    raise RuntimeError(f"Timer seems to return non-ns timings: {timer}")
            i *= 10

    autorange()  # warmup
    number = autorange()

    # Run and repeat the benchmark.
    timings = [
        timeit(number)
        for _ in range(repeat)
    ]

    half = number // 2  # for integer rounding

    # Normalise each total to a per-loop timing.
    timings = [
        (timing + half) // number if timer_returns_nanoseconds else timing / number
        for timing in timings
    ]

    return (timings, number)
|
| 399 |
+
|
| 400 |
+
|
| 401 |
+
# Cached suffix used by cython_inline above. None should get
# overridden with actual value upon the first cython_inline invocation
cython_inline.so_ext = None

# Finds the first non-whitespace character of a line; used by
# strip_common_indent() to measure a line's indentation.
_find_non_space = re.compile(r'\S').search
|
| 406 |
+
|
| 407 |
+
|
| 408 |
+
def strip_common_indent(code):
    """Remove the smallest common indentation from all lines of ``code``.

    Blank lines are kept as-is, and comment lines (first non-space character
    '#') are neither counted for the common indent nor re-indented, since
    their indentation carries no meaning.  Returns the lines rejoined with
    newlines.
    """
    lines = code.splitlines()
    min_indent = None
    for line in lines:
        match = re.search(r'\S', line)
        if not match:
            continue  # blank line
        indent = match.start()
        if line[indent] == '#':
            continue  # comment line: ignore for the indent computation
        if min_indent is None or min_indent > indent:
            min_indent = indent
    if min_indent:
        for ix, line in enumerate(lines):
            match = re.search(r'\S', line)
            if not match:
                continue  # keep blank lines untouched
            # Bug fix: the original tested "line[indent:indent+1] == '#'" with
            # 'indent' left over from the previous loop, so comment detection
            # depended on the indentation of an unrelated line.  Use this
            # line's own first non-space position instead.
            if line[match.start()] == '#':
                continue  # keep comment lines untouched
            lines[ix] = line[min_indent:]
    return '\n'.join(lines)
|
| 426 |
+
|
| 427 |
+
|
| 428 |
+
module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))')
def extract_func_code(code):
    """Split inline code into module-level statements and function-body code.

    Lines that must live at module scope in Cython (cimports, 'cdef extern'/
    'cdef class' blocks, 'from x import *') go into the module part; every
    other top-level line — and any indented line following it — stays in the
    function part, which is re-indented for use inside a generated function.
    Returns a tuple (module_code, indented_function_body).
    """
    module_lines = []
    function_lines = []
    target = function_lines
    for line in code.replace('\t', ' ').split('\n'):
        if not line.startswith(' '):
            # A new unindented statement decides where it (and its indented
            # continuation lines) belongs.
            target = module_lines if module_statement.match(line) else function_lines
        target.append(line)
    return '\n'.join(module_lines), '    ' + '\n    '.join(function_lines)
|
| 443 |
+
|
| 444 |
+
|
| 445 |
+
def get_body(source):
    """Return the executable body of a function or lambda source string.

    For a lambda, the expression after ':' is turned into a 'return'
    statement; for a 'def', the text after the header's ':' is returned
    as-is.  Note: this uses the FIRST ':' in the source, so annotated
    parameters (e.g. 'def f(x: int):') would confuse it — callers are
    expected to pass simple signatures.
    """
    ix = source.index(':')
    if source.startswith('lambda'):
        # Bug fix: the original compared source[:5] (5 characters) with
        # 'lambda' (6 characters), which is always False, so the lambda
        # branch was unreachable.
        return "return %s" % source[ix+1:]
    else:
        return source[ix+1:]
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction:
    """Wrap a Python function so each call compiles and runs its body with
    Cython via cython_inline()."""

    def __init__(self, f):
        # f: the Python function to compile; its source body is extracted once.
        self._f = f
        self._body = get_body(inspect.getsource(f))

    def __call__(self, *args, **kwds):
        # Bind the actual call arguments to the function's parameter names,
        # then execute the stored body with those names populated.
        all = inspect.getcallargs(self._f, *args, **kwds)
        return cython_inline(self._body, locals=self._f.__globals__, globals=self._f.__globals__, **all)
|
venv/lib/python3.10/site-packages/Cython/Build/IpythonMagic.py
ADDED
|
@@ -0,0 +1,560 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
=====================
|
| 3 |
+
Cython related magics
|
| 4 |
+
=====================
|
| 5 |
+
|
| 6 |
+
Magic command interface for interactive work with Cython
|
| 7 |
+
|
| 8 |
+
.. note::
|
| 9 |
+
|
| 10 |
+
The ``Cython`` package needs to be installed separately. It
|
| 11 |
+
can be obtained using ``easy_install`` or ``pip``.
|
| 12 |
+
|
| 13 |
+
Usage
|
| 14 |
+
=====
|
| 15 |
+
|
| 16 |
+
To enable the magics below, execute ``%load_ext cython``.
|
| 17 |
+
|
| 18 |
+
``%%cython``
|
| 19 |
+
|
| 20 |
+
{CYTHON_DOC}
|
| 21 |
+
|
| 22 |
+
``%%cython_inline``
|
| 23 |
+
|
| 24 |
+
{CYTHON_INLINE_DOC}
|
| 25 |
+
|
| 26 |
+
``%%cython_pyximport``
|
| 27 |
+
|
| 28 |
+
{CYTHON_PYXIMPORT_DOC}
|
| 29 |
+
|
| 30 |
+
Author:
|
| 31 |
+
* Brian Granger
|
| 32 |
+
|
| 33 |
+
Code moved from IPython and adapted by:
|
| 34 |
+
* Martín Gaitán
|
| 35 |
+
|
| 36 |
+
Parts of this code were taken from Cython.inline.
|
| 37 |
+
"""
|
| 38 |
+
#-----------------------------------------------------------------------------
|
| 39 |
+
# Copyright (C) 2010-2011, IPython Development Team.
|
| 40 |
+
#
|
| 41 |
+
# Distributed under the terms of the Modified BSD License.
|
| 42 |
+
#
|
| 43 |
+
# The full license is in the file ipython-COPYING.rst, distributed with this software.
|
| 44 |
+
#-----------------------------------------------------------------------------
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
import io
|
| 48 |
+
import os
|
| 49 |
+
import re
|
| 50 |
+
import sys
|
| 51 |
+
import time
|
| 52 |
+
import copy
|
| 53 |
+
import distutils.log
|
| 54 |
+
import textwrap
|
| 55 |
+
|
| 56 |
+
IO_ENCODING = sys.getfilesystemencoding()
|
| 57 |
+
|
| 58 |
+
import hashlib
|
| 59 |
+
from distutils.core import Distribution, Extension
|
| 60 |
+
from distutils.command.build_ext import build_ext
|
| 61 |
+
|
| 62 |
+
from IPython.core import display
|
| 63 |
+
from IPython.core import magic_arguments
|
| 64 |
+
from IPython.core.magic import Magics, magics_class, cell_magic
|
| 65 |
+
try:
|
| 66 |
+
from IPython.paths import get_ipython_cache_dir
|
| 67 |
+
except ImportError:
|
| 68 |
+
# older IPython version
|
| 69 |
+
from IPython.utils.path import get_ipython_cache_dir
|
| 70 |
+
from IPython.utils.text import dedent
|
| 71 |
+
|
| 72 |
+
from ..Shadow import __version__ as cython_version
|
| 73 |
+
from ..Compiler.Errors import CompileError
|
| 74 |
+
from .Inline import cython_inline, load_dynamic
|
| 75 |
+
from .Dependencies import cythonize
|
| 76 |
+
from ..Utils import captured_fd, print_captured
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
PGO_CONFIG = {
|
| 80 |
+
'gcc': {
|
| 81 |
+
'gen': ['-fprofile-generate', '-fprofile-dir={TEMPDIR}'],
|
| 82 |
+
'use': ['-fprofile-use', '-fprofile-correction', '-fprofile-dir={TEMPDIR}'],
|
| 83 |
+
},
|
| 84 |
+
# blind copy from 'configure' script in CPython 3.7
|
| 85 |
+
'icc': {
|
| 86 |
+
'gen': ['-prof-gen'],
|
| 87 |
+
'use': ['-prof-use'],
|
| 88 |
+
}
|
| 89 |
+
}
|
| 90 |
+
PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc']
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
@magics_class
|
| 94 |
+
class CythonMagics(Magics):
|
| 95 |
+
|
| 96 |
+
def __init__(self, shell):
|
| 97 |
+
super().__init__(shell)
|
| 98 |
+
self._reloads = {}
|
| 99 |
+
self._code_cache = {}
|
| 100 |
+
self._pyximport_installed = False
|
| 101 |
+
|
| 102 |
+
def _import_all(self, module):
|
| 103 |
+
mdict = module.__dict__
|
| 104 |
+
if '__all__' in mdict:
|
| 105 |
+
keys = mdict['__all__']
|
| 106 |
+
else:
|
| 107 |
+
keys = [k for k in mdict if not k.startswith('_')]
|
| 108 |
+
|
| 109 |
+
for k in keys:
|
| 110 |
+
try:
|
| 111 |
+
self.shell.push({k: mdict[k]})
|
| 112 |
+
except KeyError:
|
| 113 |
+
msg = "'module' object has no attribute '%s'" % k
|
| 114 |
+
raise AttributeError(msg)
|
| 115 |
+
|
| 116 |
+
@cell_magic
|
| 117 |
+
def cython_inline(self, line, cell):
|
| 118 |
+
"""Compile and run a Cython code cell using Cython.inline.
|
| 119 |
+
|
| 120 |
+
This magic simply passes the body of the cell to Cython.inline
|
| 121 |
+
and returns the result. If the variables `a` and `b` are defined
|
| 122 |
+
in the user's namespace, here is a simple example that returns
|
| 123 |
+
their sum::
|
| 124 |
+
|
| 125 |
+
%%cython_inline
|
| 126 |
+
return a+b
|
| 127 |
+
|
| 128 |
+
For most purposes, we recommend the usage of the `%%cython` magic.
|
| 129 |
+
"""
|
| 130 |
+
locs = self.shell.user_global_ns
|
| 131 |
+
globs = self.shell.user_ns
|
| 132 |
+
return cython_inline(cell, locals=locs, globals=globs)
|
| 133 |
+
|
| 134 |
+
@cell_magic
|
| 135 |
+
def cython_pyximport(self, line, cell):
|
| 136 |
+
"""Compile and import a Cython code cell using pyximport.
|
| 137 |
+
|
| 138 |
+
The contents of the cell are written to a `.pyx` file in the current
|
| 139 |
+
working directory, which is then imported using `pyximport`. This
|
| 140 |
+
magic requires a module name to be passed::
|
| 141 |
+
|
| 142 |
+
%%cython_pyximport modulename
|
| 143 |
+
def f(x):
|
| 144 |
+
return 2.0*x
|
| 145 |
+
|
| 146 |
+
The compiled module is then imported and all of its symbols are
|
| 147 |
+
injected into the user's namespace. For most purposes, we recommend
|
| 148 |
+
the usage of the `%%cython` magic.
|
| 149 |
+
"""
|
| 150 |
+
module_name = line.strip()
|
| 151 |
+
if not module_name:
|
| 152 |
+
raise ValueError('module name must be given')
|
| 153 |
+
fname = module_name + '.pyx'
|
| 154 |
+
with open(fname, 'w', encoding='utf-8') as f:
|
| 155 |
+
f.write(cell)
|
| 156 |
+
if 'pyximport' not in sys.modules or not self._pyximport_installed:
|
| 157 |
+
import pyximport
|
| 158 |
+
pyximport.install()
|
| 159 |
+
self._pyximport_installed = True
|
| 160 |
+
if module_name in self._reloads:
|
| 161 |
+
module = self._reloads[module_name]
|
| 162 |
+
# Note: reloading extension modules is not actually supported
|
| 163 |
+
# (requires PEP-489 reinitialisation support).
|
| 164 |
+
# Don't know why this should ever have worked as it reads here.
|
| 165 |
+
# All we really need to do is to update the globals below.
|
| 166 |
+
#reload(module)
|
| 167 |
+
else:
|
| 168 |
+
__import__(module_name)
|
| 169 |
+
module = sys.modules[module_name]
|
| 170 |
+
self._reloads[module_name] = module
|
| 171 |
+
self._import_all(module)
|
| 172 |
+
|
| 173 |
+
@magic_arguments.magic_arguments()
|
| 174 |
+
@magic_arguments.argument(
|
| 175 |
+
'-a', '--annotate', action='store_const', const='default', dest='annotate',
|
| 176 |
+
help="Produce a colorized HTML version of the source."
|
| 177 |
+
)
|
| 178 |
+
@magic_arguments.argument(
|
| 179 |
+
'--annotate-fullc', action='store_const', const='fullc', dest='annotate',
|
| 180 |
+
help="Produce a colorized HTML version of the source "
|
| 181 |
+
"which includes entire generated C/C++-code."
|
| 182 |
+
)
|
| 183 |
+
@magic_arguments.argument(
|
| 184 |
+
'-+', '--cplus', action='store_true', default=False,
|
| 185 |
+
help="Output a C++ rather than C file."
|
| 186 |
+
)
|
| 187 |
+
@magic_arguments.argument(
|
| 188 |
+
'-3', dest='language_level', action='store_const', const=3, default=None,
|
| 189 |
+
help="Select Python 3 syntax."
|
| 190 |
+
)
|
| 191 |
+
@magic_arguments.argument(
|
| 192 |
+
'-2', dest='language_level', action='store_const', const=2, default=None,
|
| 193 |
+
help="Select Python 2 syntax."
|
| 194 |
+
)
|
| 195 |
+
@magic_arguments.argument(
|
| 196 |
+
'-f', '--force', action='store_true', default=False,
|
| 197 |
+
help="Force the compilation of a new module, even if the source has been "
|
| 198 |
+
"previously compiled."
|
| 199 |
+
)
|
| 200 |
+
@magic_arguments.argument(
|
| 201 |
+
'-c', '--compile-args', action='append', default=[],
|
| 202 |
+
help="Extra flags to pass to compiler via the `extra_compile_args` "
|
| 203 |
+
"Extension flag (can be specified multiple times)."
|
| 204 |
+
)
|
| 205 |
+
@magic_arguments.argument(
|
| 206 |
+
'--link-args', action='append', default=[],
|
| 207 |
+
help="Extra flags to pass to linker via the `extra_link_args` "
|
| 208 |
+
"Extension flag (can be specified multiple times)."
|
| 209 |
+
)
|
| 210 |
+
@magic_arguments.argument(
|
| 211 |
+
'-l', '--lib', action='append', default=[],
|
| 212 |
+
help="Add a library to link the extension against (can be specified "
|
| 213 |
+
"multiple times)."
|
| 214 |
+
)
|
| 215 |
+
@magic_arguments.argument(
|
| 216 |
+
'-n', '--name',
|
| 217 |
+
help="Specify a name for the Cython module."
|
| 218 |
+
)
|
| 219 |
+
@magic_arguments.argument(
|
| 220 |
+
'-L', dest='library_dirs', metavar='dir', action='append', default=[],
|
| 221 |
+
help="Add a path to the list of library directories (can be specified "
|
| 222 |
+
"multiple times)."
|
| 223 |
+
)
|
| 224 |
+
@magic_arguments.argument(
|
| 225 |
+
'-I', '--include', action='append', default=[],
|
| 226 |
+
help="Add a path to the list of include directories (can be specified "
|
| 227 |
+
"multiple times)."
|
| 228 |
+
)
|
| 229 |
+
@magic_arguments.argument(
|
| 230 |
+
'-S', '--src', action='append', default=[],
|
| 231 |
+
help="Add a path to the list of src files (can be specified "
|
| 232 |
+
"multiple times)."
|
| 233 |
+
)
|
| 234 |
+
@magic_arguments.argument(
|
| 235 |
+
'--pgo', dest='pgo', action='store_true', default=False,
|
| 236 |
+
help=("Enable profile guided optimisation in the C compiler. "
|
| 237 |
+
"Compiles the cell twice and executes it in between to generate a runtime profile.")
|
| 238 |
+
)
|
| 239 |
+
@magic_arguments.argument(
|
| 240 |
+
'--verbose', dest='quiet', action='store_false', default=True,
|
| 241 |
+
help=("Print debug information like generated .c/.cpp file location "
|
| 242 |
+
"and exact gcc/g++ command invoked.")
|
| 243 |
+
)
|
| 244 |
+
@cell_magic
|
| 245 |
+
def cython(self, line, cell):
|
| 246 |
+
"""Compile and import everything from a Cython code cell.
|
| 247 |
+
|
| 248 |
+
The contents of the cell are written to a `.pyx` file in the
|
| 249 |
+
directory returned by `get_ipython_cache_dir()/cython` using a filename
|
| 250 |
+
with the hash of the code. This file is then cythonized and compiled.
|
| 251 |
+
The resulting module is imported and all of its symbols are injected
|
| 252 |
+
into the user's namespace. The usage is similar to that of
|
| 253 |
+
`%%cython_pyximport` but you don't have to pass a module name::
|
| 254 |
+
|
| 255 |
+
%%cython
|
| 256 |
+
def f(x):
|
| 257 |
+
return 2.0*x
|
| 258 |
+
|
| 259 |
+
To compile OpenMP codes, pass the required `--compile-args`
|
| 260 |
+
and `--link-args`. For example with gcc::
|
| 261 |
+
|
| 262 |
+
%%cython --compile-args=-fopenmp --link-args=-fopenmp
|
| 263 |
+
...
|
| 264 |
+
|
| 265 |
+
To enable profile guided optimisation, pass the ``--pgo`` option.
|
| 266 |
+
Note that the cell itself needs to take care of establishing a suitable
|
| 267 |
+
profile when executed. This can be done by implementing the functions to
|
| 268 |
+
optimise, and then calling them directly in the same cell on some realistic
|
| 269 |
+
training data like this::
|
| 270 |
+
|
| 271 |
+
%%cython --pgo
|
| 272 |
+
def critical_function(data):
|
| 273 |
+
for item in data:
|
| 274 |
+
...
|
| 275 |
+
|
| 276 |
+
# execute function several times to build profile
|
| 277 |
+
from somewhere import some_typical_data
|
| 278 |
+
for _ in range(100):
|
| 279 |
+
critical_function(some_typical_data)
|
| 280 |
+
|
| 281 |
+
In Python 3.5 and later, you can distinguish between the profile and
|
| 282 |
+
non-profile runs as follows::
|
| 283 |
+
|
| 284 |
+
if "_pgo_" in __name__:
|
| 285 |
+
... # execute critical code here
|
| 286 |
+
"""
|
| 287 |
+
args = magic_arguments.parse_argstring(self.cython, line)
|
| 288 |
+
code = cell if cell.endswith('\n') else cell + '\n'
|
| 289 |
+
lib_dir = os.path.join(get_ipython_cache_dir(), 'cython')
|
| 290 |
+
key = (code, line, sys.version_info, sys.executable, cython_version)
|
| 291 |
+
|
| 292 |
+
if not os.path.exists(lib_dir):
|
| 293 |
+
os.makedirs(lib_dir)
|
| 294 |
+
|
| 295 |
+
if args.pgo:
|
| 296 |
+
key += ('pgo',)
|
| 297 |
+
if args.force:
|
| 298 |
+
# Force a new module name by adding the current time to the
|
| 299 |
+
# key which is hashed to determine the module name.
|
| 300 |
+
key += (time.time(),)
|
| 301 |
+
|
| 302 |
+
if args.name:
|
| 303 |
+
module_name = str(args.name) # no-op in Py3
|
| 304 |
+
else:
|
| 305 |
+
module_name = "_cython_magic_" + hashlib.sha256(str(key).encode('utf-8')).hexdigest()
|
| 306 |
+
html_file = os.path.join(lib_dir, module_name + '.html')
|
| 307 |
+
module_path = os.path.join(lib_dir, module_name + self.so_ext)
|
| 308 |
+
|
| 309 |
+
have_module = os.path.isfile(module_path)
|
| 310 |
+
need_cythonize = args.pgo or not have_module
|
| 311 |
+
|
| 312 |
+
if args.annotate:
|
| 313 |
+
if not os.path.isfile(html_file):
|
| 314 |
+
need_cythonize = True
|
| 315 |
+
|
| 316 |
+
extension = None
|
| 317 |
+
if need_cythonize:
|
| 318 |
+
extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
|
| 319 |
+
if extensions is None:
|
| 320 |
+
# Compilation failed and printed error message
|
| 321 |
+
return None
|
| 322 |
+
assert len(extensions) == 1
|
| 323 |
+
extension = extensions[0]
|
| 324 |
+
self._code_cache[key] = module_name
|
| 325 |
+
|
| 326 |
+
if args.pgo:
|
| 327 |
+
self._profile_pgo_wrapper(extension, lib_dir)
|
| 328 |
+
|
| 329 |
+
def print_compiler_output(stdout, stderr, where):
|
| 330 |
+
# On windows, errors are printed to stdout, we redirect both to sys.stderr.
|
| 331 |
+
print_captured(stdout, where, "Content of stdout:\n")
|
| 332 |
+
print_captured(stderr, where, "Content of stderr:\n")
|
| 333 |
+
|
| 334 |
+
get_stderr = get_stdout = None
|
| 335 |
+
try:
|
| 336 |
+
with captured_fd(1) as get_stdout:
|
| 337 |
+
with captured_fd(2) as get_stderr:
|
| 338 |
+
self._build_extension(
|
| 339 |
+
extension, lib_dir, pgo_step_name='use' if args.pgo else None, quiet=args.quiet)
|
| 340 |
+
except (distutils.errors.CompileError, distutils.errors.LinkError):
|
| 341 |
+
# Build failed, print error message from compiler/linker
|
| 342 |
+
print_compiler_output(get_stdout(), get_stderr(), sys.stderr)
|
| 343 |
+
return None
|
| 344 |
+
|
| 345 |
+
# Build seems ok, but we might still want to show any warnings that occurred
|
| 346 |
+
print_compiler_output(get_stdout(), get_stderr(), sys.stdout)
|
| 347 |
+
|
| 348 |
+
module = load_dynamic(module_name, module_path)
|
| 349 |
+
self._import_all(module)
|
| 350 |
+
|
| 351 |
+
if args.annotate:
|
| 352 |
+
try:
|
| 353 |
+
with open(html_file, encoding='utf-8') as f:
|
| 354 |
+
annotated_html = f.read()
|
| 355 |
+
except OSError as e:
|
| 356 |
+
# File could not be opened. Most likely the user has a version
|
| 357 |
+
# of Cython before 0.15.1 (when `cythonize` learned the
|
| 358 |
+
# `force` keyword argument) and has already compiled this
|
| 359 |
+
# exact source without annotation.
|
| 360 |
+
print('Cython completed successfully but the annotated '
|
| 361 |
+
'source could not be read.', file=sys.stderr)
|
| 362 |
+
print(e, file=sys.stderr)
|
| 363 |
+
else:
|
| 364 |
+
return display.HTML(self.clean_annotated_html(annotated_html))
|
| 365 |
+
|
| 366 |
+
def _profile_pgo_wrapper(self, extension, lib_dir):
|
| 367 |
+
"""
|
| 368 |
+
Generate a .c file for a separate extension module that calls the
|
| 369 |
+
module init function of the original module. This makes sure that the
|
| 370 |
+
PGO profiler sees the correct .o file of the final module, but it still
|
| 371 |
+
allows us to import the module under a different name for profiling,
|
| 372 |
+
before recompiling it into the PGO optimised module. Overwriting and
|
| 373 |
+
reimporting the same shared library is not portable.
|
| 374 |
+
"""
|
| 375 |
+
extension = copy.copy(extension) # shallow copy, do not modify sources in place!
|
| 376 |
+
module_name = extension.name
|
| 377 |
+
pgo_module_name = '_pgo_' + module_name
|
| 378 |
+
pgo_wrapper_c_file = os.path.join(lib_dir, pgo_module_name + '.c')
|
| 379 |
+
with open(pgo_wrapper_c_file, 'w', encoding='utf-8') as f:
|
| 380 |
+
f.write(textwrap.dedent("""
|
| 381 |
+
#include "Python.h"
|
| 382 |
+
extern PyMODINIT_FUNC PyInit_%(module_name)s(void);
|
| 383 |
+
PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void); /*proto*/
|
| 384 |
+
PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void) {
|
| 385 |
+
return PyInit_%(module_name)s();
|
| 386 |
+
}
|
| 387 |
+
""" % {'module_name': module_name, 'pgo_module_name': pgo_module_name}))
|
| 388 |
+
|
| 389 |
+
extension.sources = extension.sources + [pgo_wrapper_c_file] # do not modify in place!
|
| 390 |
+
extension.name = pgo_module_name
|
| 391 |
+
|
| 392 |
+
self._build_extension(extension, lib_dir, pgo_step_name='gen')
|
| 393 |
+
|
| 394 |
+
# import and execute module code to generate profile
|
| 395 |
+
so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
|
| 396 |
+
load_dynamic(pgo_module_name, so_module_path)
|
| 397 |
+
|
| 398 |
+
def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
|
| 399 |
+
pyx_file = os.path.join(lib_dir, module_name + '.pyx')
|
| 400 |
+
|
| 401 |
+
c_include_dirs = args.include
|
| 402 |
+
c_src_files = list(map(str, args.src))
|
| 403 |
+
if 'numpy' in code:
|
| 404 |
+
import numpy
|
| 405 |
+
c_include_dirs.append(numpy.get_include())
|
| 406 |
+
with open(pyx_file, 'w', encoding='utf-8') as f:
|
| 407 |
+
f.write(code)
|
| 408 |
+
extension = Extension(
|
| 409 |
+
name=module_name,
|
| 410 |
+
sources=[pyx_file] + c_src_files,
|
| 411 |
+
include_dirs=c_include_dirs,
|
| 412 |
+
library_dirs=args.library_dirs,
|
| 413 |
+
extra_compile_args=args.compile_args,
|
| 414 |
+
extra_link_args=args.link_args,
|
| 415 |
+
libraries=args.lib,
|
| 416 |
+
language='c++' if args.cplus else 'c',
|
| 417 |
+
)
|
| 418 |
+
try:
|
| 419 |
+
opts = dict(
|
| 420 |
+
quiet=quiet,
|
| 421 |
+
annotate=args.annotate,
|
| 422 |
+
force=True,
|
| 423 |
+
language_level=min(3, sys.version_info[0]),
|
| 424 |
+
)
|
| 425 |
+
if args.language_level is not None:
|
| 426 |
+
assert args.language_level in (2, 3)
|
| 427 |
+
opts['language_level'] = args.language_level
|
| 428 |
+
return cythonize([extension], **opts)
|
| 429 |
+
except CompileError:
|
| 430 |
+
return None
|
| 431 |
+
|
| 432 |
+
def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True):
|
| 433 |
+
build_extension = self._get_build_extension(
|
| 434 |
+
extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name)
|
| 435 |
+
old_threshold = None
|
| 436 |
+
try:
|
| 437 |
+
if not quiet:
|
| 438 |
+
old_threshold = distutils.log.set_threshold(distutils.log.DEBUG)
|
| 439 |
+
build_extension.run()
|
| 440 |
+
finally:
|
| 441 |
+
if not quiet and old_threshold is not None:
|
| 442 |
+
distutils.log.set_threshold(old_threshold)
|
| 443 |
+
|
| 444 |
+
def _add_pgo_flags(self, build_extension, step_name, temp_dir):
|
| 445 |
+
compiler_type = build_extension.compiler.compiler_type
|
| 446 |
+
if compiler_type == 'unix':
|
| 447 |
+
compiler_cmd = build_extension.compiler.compiler_so
|
| 448 |
+
# TODO: we could try to call "[cmd] --version" for better insights
|
| 449 |
+
if not compiler_cmd:
|
| 450 |
+
pass
|
| 451 |
+
elif 'clang' in compiler_cmd or 'clang' in compiler_cmd[0]:
|
| 452 |
+
compiler_type = 'clang'
|
| 453 |
+
elif 'icc' in compiler_cmd or 'icc' in compiler_cmd[0]:
|
| 454 |
+
compiler_type = 'icc'
|
| 455 |
+
elif 'gcc' in compiler_cmd or 'gcc' in compiler_cmd[0]:
|
| 456 |
+
compiler_type = 'gcc'
|
| 457 |
+
elif 'g++' in compiler_cmd or 'g++' in compiler_cmd[0]:
|
| 458 |
+
compiler_type = 'gcc'
|
| 459 |
+
config = PGO_CONFIG.get(compiler_type)
|
| 460 |
+
orig_flags = []
|
| 461 |
+
if config and step_name in config:
|
| 462 |
+
flags = [f.format(TEMPDIR=temp_dir) for f in config[step_name]]
|
| 463 |
+
for extension in build_extension.extensions:
|
| 464 |
+
orig_flags.append((extension.extra_compile_args, extension.extra_link_args))
|
| 465 |
+
extension.extra_compile_args = extension.extra_compile_args + flags
|
| 466 |
+
extension.extra_link_args = extension.extra_link_args + flags
|
| 467 |
+
else:
|
| 468 |
+
print("No PGO %s configuration known for C compiler type '%s'" % (step_name, compiler_type),
|
| 469 |
+
file=sys.stderr)
|
| 470 |
+
return orig_flags
|
| 471 |
+
|
| 472 |
+
@property
|
| 473 |
+
def so_ext(self):
|
| 474 |
+
"""The extension suffix for compiled modules."""
|
| 475 |
+
try:
|
| 476 |
+
return self._so_ext
|
| 477 |
+
except AttributeError:
|
| 478 |
+
self._so_ext = self._get_build_extension().get_ext_filename('')
|
| 479 |
+
return self._so_ext
|
| 480 |
+
|
| 481 |
+
def _clear_distutils_mkpath_cache(self):
|
| 482 |
+
"""clear distutils mkpath cache
|
| 483 |
+
|
| 484 |
+
prevents distutils from skipping re-creation of dirs that have been removed
|
| 485 |
+
"""
|
| 486 |
+
try:
|
| 487 |
+
from distutils.dir_util import _path_created
|
| 488 |
+
except ImportError:
|
| 489 |
+
pass
|
| 490 |
+
else:
|
| 491 |
+
_path_created.clear()
|
| 492 |
+
|
| 493 |
+
def _get_build_extension(self, extension=None, lib_dir=None, temp_dir=None,
|
| 494 |
+
pgo_step_name=None, _build_ext=build_ext):
|
| 495 |
+
self._clear_distutils_mkpath_cache()
|
| 496 |
+
dist = Distribution()
|
| 497 |
+
config_files = dist.find_config_files()
|
| 498 |
+
try:
|
| 499 |
+
config_files.remove('setup.cfg')
|
| 500 |
+
except ValueError:
|
| 501 |
+
pass
|
| 502 |
+
dist.parse_config_files(config_files)
|
| 503 |
+
|
| 504 |
+
if not temp_dir:
|
| 505 |
+
temp_dir = lib_dir
|
| 506 |
+
add_pgo_flags = self._add_pgo_flags
|
| 507 |
+
|
| 508 |
+
if pgo_step_name:
|
| 509 |
+
base_build_ext = _build_ext
|
| 510 |
+
class _build_ext(_build_ext):
|
| 511 |
+
def build_extensions(self):
|
| 512 |
+
add_pgo_flags(self, pgo_step_name, temp_dir)
|
| 513 |
+
base_build_ext.build_extensions(self)
|
| 514 |
+
|
| 515 |
+
build_extension = _build_ext(dist)
|
| 516 |
+
build_extension.finalize_options()
|
| 517 |
+
if temp_dir:
|
| 518 |
+
build_extension.build_temp = temp_dir
|
| 519 |
+
if lib_dir:
|
| 520 |
+
build_extension.build_lib = lib_dir
|
| 521 |
+
if extension is not None:
|
| 522 |
+
build_extension.extensions = [extension]
|
| 523 |
+
return build_extension
|
| 524 |
+
|
| 525 |
+
@staticmethod
|
| 526 |
+
def clean_annotated_html(html, include_style=True):
|
| 527 |
+
"""Clean up the annotated HTML source.
|
| 528 |
+
|
| 529 |
+
Strips the link to the generated C or C++ file, which we do not
|
| 530 |
+
present to the user.
|
| 531 |
+
|
| 532 |
+
Returns an HTML snippet (no <html>, <head>, or <body>),
|
| 533 |
+
containing only the style tag(s) and _contents_ of the body,
|
| 534 |
+
appropriate for embedding multiple times in cell output.
|
| 535 |
+
"""
|
| 536 |
+
# extract CSS and body, rather than full HTML document
|
| 537 |
+
chunks = []
|
| 538 |
+
if include_style:
|
| 539 |
+
styles = re.findall("<style.*</style>", html, re.MULTILINE | re.DOTALL)
|
| 540 |
+
chunks.extend(styles)
|
| 541 |
+
# extract body
|
| 542 |
+
body = re.search(
|
| 543 |
+
r"<body[^>]*>(.+)</body>", html, re.MULTILINE | re.DOTALL
|
| 544 |
+
).group(1)
|
| 545 |
+
|
| 546 |
+
# exclude link to generated file
|
| 547 |
+
r = re.compile('<p>Raw output: <a href="(.*)">(.*)</a>')
|
| 548 |
+
for line in body.splitlines():
|
| 549 |
+
if not r.match(line):
|
| 550 |
+
chunks.append(line)
|
| 551 |
+
return "\n".join(chunks)
|
| 552 |
+
|
| 553 |
+
__doc__ = __doc__.format(
|
| 554 |
+
# rST doesn't see the -+ flag as part of an option list, so we
|
| 555 |
+
# hide it from the module-level docstring.
|
| 556 |
+
CYTHON_DOC=dedent(CythonMagics.cython.__doc__
|
| 557 |
+
.replace('-+, --cplus', '--cplus ')),
|
| 558 |
+
CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__),
|
| 559 |
+
CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__),
|
| 560 |
+
)
|
venv/lib/python3.10/site-packages/Cython/Build/SharedModule.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import tempfile
|
| 2 |
+
import os
|
| 3 |
+
import shutil
|
| 4 |
+
|
| 5 |
+
from Cython.Compiler import (
|
| 6 |
+
MemoryView, Code, Options, Pipeline, Errors, Main, Symtab
|
| 7 |
+
)
|
| 8 |
+
from Cython.Compiler.StringEncoding import EncodedString
|
| 9 |
+
from Cython.Compiler.Scanning import FileSourceDescriptor
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def create_shared_library_pipeline(context, scope, options, result):
|
| 13 |
+
|
| 14 |
+
parse = Pipeline.parse_stage_factory(context)
|
| 15 |
+
|
| 16 |
+
def generate_tree_factory(context):
|
| 17 |
+
def generate_tree(compsrc):
|
| 18 |
+
tree = parse(compsrc)
|
| 19 |
+
|
| 20 |
+
tree.scope.use_utility_code(
|
| 21 |
+
MemoryView.get_view_utility_code(options.shared_utility_qualified_name))
|
| 22 |
+
|
| 23 |
+
tree.scope.use_utility_code(MemoryView._get_memviewslice_declare_code())
|
| 24 |
+
tree.scope.use_utility_code(MemoryView._get_typeinfo_to_format_code())
|
| 25 |
+
context.include_directories.append(Code.get_utility_dir())
|
| 26 |
+
return tree
|
| 27 |
+
|
| 28 |
+
return generate_tree
|
| 29 |
+
|
| 30 |
+
orig_cimport_from_pyx = Options.cimport_from_pyx
|
| 31 |
+
|
| 32 |
+
def set_cimport_from_pyx(cimport_from_pyx):
|
| 33 |
+
def inner(node):
|
| 34 |
+
Options.cimport_from_pyx = cimport_from_pyx
|
| 35 |
+
return node
|
| 36 |
+
return inner
|
| 37 |
+
|
| 38 |
+
return [
|
| 39 |
+
# "cimport_from_pyx=True" to force generating __Pyx_ExportFunction
|
| 40 |
+
set_cimport_from_pyx(True),
|
| 41 |
+
generate_tree_factory(context),
|
| 42 |
+
*Pipeline.create_pipeline(context, 'pyx', exclude_classes=()),
|
| 43 |
+
Pipeline.inject_pxd_code_stage_factory(context),
|
| 44 |
+
Pipeline.inject_utility_code_stage_factory(context, internalise_c_class_entries=False),
|
| 45 |
+
Pipeline.inject_utility_pxd_code_stage_factory(context),
|
| 46 |
+
Pipeline.abort_on_errors,
|
| 47 |
+
Pipeline.generate_pyx_code_stage_factory(options, result),
|
| 48 |
+
set_cimport_from_pyx(orig_cimport_from_pyx),
|
| 49 |
+
]
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def generate_shared_module(options):
|
| 53 |
+
Errors.init_thread()
|
| 54 |
+
Errors.open_listing_file(None)
|
| 55 |
+
|
| 56 |
+
dest_c_file = options.shared_c_file_path
|
| 57 |
+
module_name = os.path.splitext(os.path.basename(dest_c_file))[0]
|
| 58 |
+
|
| 59 |
+
context = Main.Context.from_options(options)
|
| 60 |
+
scope = Symtab.ModuleScope('MemoryView', parent_module = None, context = context, is_package=False)
|
| 61 |
+
|
| 62 |
+
with tempfile.TemporaryDirectory() as tmpdirname:
|
| 63 |
+
pyx_file = os.path.join(tmpdirname, f'{module_name}.pyx')
|
| 64 |
+
c_file = os.path.join(tmpdirname, f'{module_name}.c')
|
| 65 |
+
with open(pyx_file, 'w'):
|
| 66 |
+
pass
|
| 67 |
+
source_desc = FileSourceDescriptor(pyx_file)
|
| 68 |
+
comp_src = Main.CompilationSource(source_desc, EncodedString(module_name), os.getcwd())
|
| 69 |
+
result = Main.create_default_resultobj(comp_src, options)
|
| 70 |
+
|
| 71 |
+
pipeline = create_shared_library_pipeline(context, scope, options, result)
|
| 72 |
+
err, enddata = Pipeline.run_pipeline(pipeline, comp_src)
|
| 73 |
+
if err is None:
|
| 74 |
+
shutil.copy(c_file, dest_c_file)
|
| 75 |
+
|
| 76 |
+
return err, enddata
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/TestCyCache.py
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import difflib
|
| 2 |
+
import glob
|
| 3 |
+
import gzip
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
import tempfile
|
| 7 |
+
import unittest
|
| 8 |
+
|
| 9 |
+
import Cython.Build.Dependencies
|
| 10 |
+
import Cython.Compiler.Main
|
| 11 |
+
import Cython.Utils
|
| 12 |
+
from Cython.TestUtils import CythonTest
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TestCyCache(CythonTest):
|
| 16 |
+
|
| 17 |
+
def setUp(self):
|
| 18 |
+
CythonTest.setUp(self)
|
| 19 |
+
self.temp_dir = tempfile.mkdtemp(
|
| 20 |
+
prefix='cycache-test',
|
| 21 |
+
dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None)
|
| 22 |
+
self.src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 23 |
+
self.cache_dir = tempfile.mkdtemp(prefix='cache', dir=self.temp_dir)
|
| 24 |
+
|
| 25 |
+
def cache_files(self, file_glob):
|
| 26 |
+
return glob.glob(os.path.join(self.cache_dir, file_glob))
|
| 27 |
+
|
| 28 |
+
def fresh_cythonize(self, *args, **kwargs):
|
| 29 |
+
Cython.Utils.clear_function_caches()
|
| 30 |
+
Cython.Build.Dependencies._dep_tree = None # discard method caches
|
| 31 |
+
Cython.Build.Dependencies.cythonize(*args, **kwargs)
|
| 32 |
+
|
| 33 |
+
def fresh_compile(self, *args, **kwargs):
|
| 34 |
+
Cython.Utils.clear_function_caches()
|
| 35 |
+
Cython.Compiler.Main.compile(*args, **kwargs)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def _test_cycache_switch(self, compilation_method):
|
| 39 |
+
content1 = 'value = 1\n'
|
| 40 |
+
content2 = 'value = 2\n'
|
| 41 |
+
a_pyx = os.path.join(self.src_dir, 'a.pyx')
|
| 42 |
+
a_c = a_pyx[:-4] + '.c'
|
| 43 |
+
|
| 44 |
+
with open(a_pyx, 'w') as f:
|
| 45 |
+
f.write(content1)
|
| 46 |
+
|
| 47 |
+
compilation_method(a_pyx, cache=self.cache_dir)
|
| 48 |
+
compilation_method(a_pyx, cache=self.cache_dir)
|
| 49 |
+
|
| 50 |
+
self.assertEqual(1, len(self.cache_files('a.c*')))
|
| 51 |
+
with open(a_c) as f:
|
| 52 |
+
a_contents1 = f.read()
|
| 53 |
+
os.unlink(a_c)
|
| 54 |
+
|
| 55 |
+
with open(a_pyx, 'w') as f:
|
| 56 |
+
f.write(content2)
|
| 57 |
+
|
| 58 |
+
compilation_method(a_pyx, cache=self.cache_dir)
|
| 59 |
+
|
| 60 |
+
with open(a_c) as f:
|
| 61 |
+
a_contents2 = f.read()
|
| 62 |
+
os.unlink(a_c)
|
| 63 |
+
|
| 64 |
+
self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!')
|
| 65 |
+
self.assertEqual(2, len(self.cache_files('a.c*')))
|
| 66 |
+
|
| 67 |
+
with open(a_pyx, 'w') as f:
|
| 68 |
+
f.write(content1)
|
| 69 |
+
|
| 70 |
+
compilation_method(a_pyx, cache=self.cache_dir)
|
| 71 |
+
|
| 72 |
+
self.assertEqual(2, len(self.cache_files('a.c*')))
|
| 73 |
+
with open(a_c) as f:
|
| 74 |
+
a_contents = f.read()
|
| 75 |
+
self.assertEqual(
|
| 76 |
+
a_contents, a_contents1,
|
| 77 |
+
msg='\n'.join(list(difflib.unified_diff(
|
| 78 |
+
a_contents.split('\n'), a_contents1.split('\n')))[:10]))
|
| 79 |
+
|
| 80 |
+
def test_cycache_switch_cythonize(self):
|
| 81 |
+
self._test_cycache_switch(self.fresh_cythonize)
|
| 82 |
+
|
| 83 |
+
def test_cycache_switch_compile(self):
|
| 84 |
+
self._test_cycache_switch(self.fresh_compile)
|
| 85 |
+
|
| 86 |
+
def _test_cycache_uses_cache(self, compilation_method):
|
| 87 |
+
a_pyx = os.path.join(self.src_dir, 'a.pyx')
|
| 88 |
+
a_c = a_pyx[:-4] + '.c'
|
| 89 |
+
with open(a_pyx, 'w') as f:
|
| 90 |
+
f.write('pass')
|
| 91 |
+
|
| 92 |
+
compilation_method(a_pyx, cache=self.cache_dir)
|
| 93 |
+
|
| 94 |
+
a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0])
|
| 95 |
+
with gzip.GzipFile(a_cache, 'wb') as gzipfile:
|
| 96 |
+
gzipfile.write(b'fake stuff')
|
| 97 |
+
os.unlink(a_c)
|
| 98 |
+
|
| 99 |
+
compilation_method(a_pyx, cache=self.cache_dir)
|
| 100 |
+
|
| 101 |
+
with open(a_c) as f:
|
| 102 |
+
a_contents = f.read()
|
| 103 |
+
self.assertEqual(a_contents, 'fake stuff',
|
| 104 |
+
'Unexpected contents: %s...' % a_contents[:100])
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def test_cycache_uses_cache_cythonize(self):
|
| 108 |
+
self._test_cycache_uses_cache(self.fresh_cythonize)
|
| 109 |
+
|
| 110 |
+
def test_cycache_uses_cache_compile(self):
|
| 111 |
+
self._test_cycache_uses_cache(self.fresh_compile)
|
| 112 |
+
|
| 113 |
+
def _test_cycache_annotation(self, compilation_method):
|
| 114 |
+
a_pyx = os.path.join(self.src_dir, 'a.pyx')
|
| 115 |
+
a_c = a_pyx[:-4] + '.c'
|
| 116 |
+
a_html = a_pyx[:-4] + '.html'
|
| 117 |
+
with open(a_pyx, 'w') as f:
|
| 118 |
+
f.write('pass')
|
| 119 |
+
|
| 120 |
+
compilation_method(a_pyx, cache=self.cache_dir, annotate='default')
|
| 121 |
+
self.assertTrue(os.path.exists(a_html), a_html)
|
| 122 |
+
os.unlink(a_html)
|
| 123 |
+
os.unlink(a_c)
|
| 124 |
+
compilation_method(a_pyx, cache=self.cache_dir, annotate='default')
|
| 125 |
+
self.assertTrue(os.path.exists(a_html), a_html)
|
| 126 |
+
|
| 127 |
+
def test_cycache_annotation_cythonize(self):
|
| 128 |
+
self._test_cycache_annotation(self.fresh_cythonize)
|
| 129 |
+
|
| 130 |
+
def test_cycache_annotation_compile(self):
|
| 131 |
+
self._test_cycache_annotation(self.fresh_compile)
|
| 132 |
+
|
| 133 |
+
def _test_multi_file_output(self, compilation_method):
|
| 134 |
+
a_pyx = os.path.join(self.src_dir, 'a.pyx')
|
| 135 |
+
a_c = a_pyx[:-4] + '.c'
|
| 136 |
+
a_h = a_pyx[:-4] + '.h'
|
| 137 |
+
a_api_h = a_pyx[:-4] + '_api.h'
|
| 138 |
+
with open(a_pyx, 'w') as f:
|
| 139 |
+
f.write('cdef public api int foo(int x): return x\n')
|
| 140 |
+
|
| 141 |
+
compilation_method(a_pyx, cache=self.cache_dir)
|
| 142 |
+
|
| 143 |
+
expected = [a_c, a_h, a_api_h]
|
| 144 |
+
for output in expected:
|
| 145 |
+
self.assertTrue(os.path.exists(output), output)
|
| 146 |
+
os.unlink(output)
|
| 147 |
+
|
| 148 |
+
compilation_method(a_pyx, cache=self.cache_dir)
|
| 149 |
+
|
| 150 |
+
for output in expected:
|
| 151 |
+
self.assertTrue(os.path.exists(output), output)
|
| 152 |
+
|
| 153 |
+
def test_multi_file_output_cythonize(self):
|
| 154 |
+
self._test_multi_file_output(self.fresh_cythonize)
|
| 155 |
+
|
| 156 |
+
def test_multi_file_output_compile(self):
|
| 157 |
+
self._test_multi_file_output(self.fresh_compile)
|
| 158 |
+
|
| 159 |
+
def _test_options_invalidation(self, compilation_method):
|
| 160 |
+
hash_pyx = os.path.join(self.src_dir, 'options.pyx')
|
| 161 |
+
hash_c = hash_pyx[:-len('.pyx')] + '.c'
|
| 162 |
+
hash_cpp = hash_pyx[:-len('.pyx')] + '.cpp'
|
| 163 |
+
|
| 164 |
+
with open(hash_pyx, 'w') as f:
|
| 165 |
+
f.write('pass')
|
| 166 |
+
compilation_method(hash_pyx, cache=self.cache_dir, cplus=False)
|
| 167 |
+
self.assertEqual(1, len(self.cache_files('options.c*')))
|
| 168 |
+
|
| 169 |
+
os.unlink(hash_c)
|
| 170 |
+
|
| 171 |
+
compilation_method(hash_pyx, cache=self.cache_dir, cplus=True)
|
| 172 |
+
|
| 173 |
+
self.assertEqual(2, len(self.cache_files('options.c*')))
|
| 174 |
+
|
| 175 |
+
try:
|
| 176 |
+
os.unlink(hash_c)
|
| 177 |
+
except FileNotFoundError:
|
| 178 |
+
# fresh_cythonize() produces .c file, fresh_compile produces .cpp file
|
| 179 |
+
os.unlink(hash_cpp)
|
| 180 |
+
|
| 181 |
+
compilation_method(hash_pyx, cache=self.cache_dir, cplus=False, show_version=False)
|
| 182 |
+
|
| 183 |
+
self.assertEqual(2, len(self.cache_files('options.c*')))
|
| 184 |
+
|
| 185 |
+
os.unlink(hash_c)
|
| 186 |
+
|
| 187 |
+
compilation_method(hash_pyx, cache=self.cache_dir, cplus=False, show_version=True)
|
| 188 |
+
|
| 189 |
+
self.assertEqual(2, len(self.cache_files('options.c*')))
|
| 190 |
+
def test_options_invalidation_cythonize(self):
|
| 191 |
+
self._test_options_invalidation(self.fresh_cythonize)
|
| 192 |
+
|
| 193 |
+
def test_options_invalidation_compile(self):
|
| 194 |
+
self._test_options_invalidation(self.fresh_compile)
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/TestCythonizeArgsParser.py
ADDED
|
@@ -0,0 +1,481 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from Cython.Build.Cythonize import (
|
| 2 |
+
create_args_parser, parse_args_raw, parse_args,
|
| 3 |
+
parallel_compiles
|
| 4 |
+
)
|
| 5 |
+
|
| 6 |
+
from Cython.Compiler import Options
|
| 7 |
+
from Cython.Compiler.Tests.Utils import backup_Options, restore_Options, check_global_options
|
| 8 |
+
|
| 9 |
+
from unittest import TestCase
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
from io import StringIO
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TestCythonizeArgsParser(TestCase):
    """Unit tests for the cythonize command-line argument parser.

    Each test feeds an argv list through parse_args_raw() and checks both
    the option that should have been set and — via are_default() — that no
    other option was touched.
    """

    def setUp(self):
        TestCase.setUp(self)
        parser = create_args_parser()
        self.parse_args = lambda argv, parser=parser: parse_args_raw(parser, argv)

    def are_default(self, options, skip):
        """Return True if every option not listed in *skip* still has its
        default value; trip an assertion naming the offender otherwise."""
        container_names = ['directives', 'compile_time_env', 'options', 'excludes']
        none_names = ['language_level', 'annotate', 'build', 'build_inplace', 'force', 'quiet', 'lenient', 'keep_going', 'no_docstrings']
        for name in container_names:
            if len(getattr(options, name)) != 0 and (name not in skip):
                # Deliberately failing compare so the report names the option.
                self.assertEqual(name, "", msg="For option " + name)
                return False
        for name in none_names:
            if (getattr(options, name) is not None) and (name not in skip):
                self.assertEqual(name, "", msg="For option " + name)
                return False
        if options.parallel != parallel_compiles and ('parallel' not in skip):
            return False
        return True

    # --- directives ---
    def test_directive_short(self):
        opts, rest = self.parse_args(['-X', 'cdivision=True'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['directives']))
        self.assertEqual(opts.directives['cdivision'], True)

    def test_directive_long(self):
        opts, rest = self.parse_args(['--directive', 'cdivision=True'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['directives']))
        self.assertEqual(opts.directives['cdivision'], True)

    def test_directive_multiple(self):
        opts, rest = self.parse_args(['-X', 'cdivision=True', '-X', 'c_string_type=bytes'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['directives']))
        self.assertEqual(opts.directives['cdivision'], True)
        self.assertEqual(opts.directives['c_string_type'], 'bytes')

    def test_directive_multiple_v2(self):
        opts, rest = self.parse_args(['-X', 'cdivision=True,c_string_type=bytes'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['directives']))
        self.assertEqual(opts.directives['cdivision'], True)
        self.assertEqual(opts.directives['c_string_type'], 'bytes')

    def test_directive_value_yes(self):
        opts, rest = self.parse_args(['-X', 'cdivision=YeS'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['directives']))
        self.assertEqual(opts.directives['cdivision'], True)

    def test_directive_value_no(self):
        opts, rest = self.parse_args(['-X', 'cdivision=no'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['directives']))
        self.assertEqual(opts.directives['cdivision'], False)

    def test_directive_value_invalid(self):
        with self.assertRaises(ValueError):
            self.parse_args(['-X', 'cdivision=sadfasd'])

    def test_directive_key_invalid(self):
        with self.assertRaises(ValueError):
            self.parse_args(['-X', 'abracadabra'])

    def test_directive_no_value(self):
        with self.assertRaises(ValueError):
            self.parse_args(['-X', 'cdivision'])

    def test_directives_types(self):
        cases = [
            ('auto_pickle', True),
            ('c_string_type', 'bytearray'),
            ('c_string_type', 'bytes'),
            ('c_string_type', 'str'),
            ('c_string_type', 'bytearray'),
            ('c_string_type', 'unicode'),
            ('c_string_encoding', 'ascii'),
            ('language_level', '2'),
            ('language_level', '3'),
            #('language_level', '3str'),
            ('set_initial_path', 'my_initial_path'),
        ]
        for key, value in cases:
            spec = '{key}={value}'.format(key=key, value=str(value))
            opts, rest = self.parse_args(['-X', spec])
            self.assertFalse(rest)
            self.assertTrue(self.are_default(opts, ['directives']), msg="Error for option: " + spec)
            if value == 'unicode':
                value = 'str'
            self.assertEqual(opts.directives[key], value, msg="Error for option: " + spec)

    def test_directives_wrong(self):
        cases = [
            ('auto_pickle', 42),  # for bool type
            ('auto_pickle', 'NONONO'),  # for bool type
            ('c_string_type', 'bites'),
            #('c_string_encoding', 'a'),
            #('language_level', 4),
        ]
        for key, value in cases:
            spec = '{key}={value}'.format(key=key, value=str(value))
            with self.assertRaises(ValueError, msg="Error for option: " + spec):
                self.parse_args(['-X', spec])

    # --- compile-time environment ---
    def test_compile_time_env_short(self):
        opts, rest = self.parse_args(['-E', 'MYSIZE=10'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['compile_time_env']))
        self.assertEqual(opts.compile_time_env['MYSIZE'], 10)

    def test_compile_time_env_long(self):
        opts, rest = self.parse_args(['--compile-time-env', 'MYSIZE=10'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['compile_time_env']))
        self.assertEqual(opts.compile_time_env['MYSIZE'], 10)

    def test_compile_time_env_multiple(self):
        opts, rest = self.parse_args(['-E', 'MYSIZE=10', '-E', 'ARRSIZE=11'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['compile_time_env']))
        self.assertEqual(opts.compile_time_env['MYSIZE'], 10)
        self.assertEqual(opts.compile_time_env['ARRSIZE'], 11)

    def test_compile_time_env_multiple_v2(self):
        opts, rest = self.parse_args(['-E', 'MYSIZE=10,ARRSIZE=11'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['compile_time_env']))
        self.assertEqual(opts.compile_time_env['MYSIZE'], 10)
        self.assertEqual(opts.compile_time_env['ARRSIZE'], 11)

    # --- options ---
    def test_option_short(self):
        opts, rest = self.parse_args(['-s', 'docstrings=True'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], True)

    def test_option_long(self):
        opts, rest = self.parse_args(['--option', 'docstrings=True'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], True)

    def test_option_multiple(self):
        opts, rest = self.parse_args(['-s', 'docstrings=True', '-s', 'buffer_max_dims=8'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], True)
        self.assertEqual(opts.options['buffer_max_dims'], True)  # really?

    def test_option_multiple_v2(self):
        opts, rest = self.parse_args(['-s', 'docstrings=True,buffer_max_dims=8'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], True)
        self.assertEqual(opts.options['buffer_max_dims'], True)  # really?

    def test_option_value_yes(self):
        opts, rest = self.parse_args(['-s', 'docstrings=YeS'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], True)

    def test_option_value_4242(self):
        opts, rest = self.parse_args(['-s', 'docstrings=4242'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], True)

    def test_option_value_0(self):
        opts, rest = self.parse_args(['-s', 'docstrings=0'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], False)

    def test_option_value_emptystr(self):
        opts, rest = self.parse_args(['-s', 'docstrings='])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], True)

    def test_option_value_a_str(self):
        opts, rest = self.parse_args(['-s', 'docstrings=BB'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], True)

    def test_option_value_no(self):
        opts, rest = self.parse_args(['-s', 'docstrings=nO'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], False)

    def test_option_no_value(self):
        opts, rest = self.parse_args(['-s', 'docstrings'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['docstrings'], True)

    def test_option_any_key(self):
        opts, rest = self.parse_args(['-s', 'abracadabra'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['options']))
        self.assertEqual(opts.options['abracadabra'], True)

    # --- language level ---
    def test_language_level_2(self):
        opts, rest = self.parse_args(['-2'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['language_level']))
        self.assertEqual(opts.language_level, 2)

    def test_language_level_3(self):
        opts, rest = self.parse_args(['-3'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['language_level']))
        self.assertEqual(opts.language_level, 3)

    def test_language_level_3str(self):
        opts, rest = self.parse_args(['--3str'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['language_level']))
        self.assertEqual(opts.language_level, 3)

    # --- annotation ---
    def test_annotate_short(self):
        opts, rest = self.parse_args(['-a'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['annotate']))
        self.assertEqual(opts.annotate, 'default')

    def test_annotate_long(self):
        opts, rest = self.parse_args(['--annotate'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['annotate']))
        self.assertEqual(opts.annotate, 'default')

    def test_annotate_fullc(self):
        opts, rest = self.parse_args(['--annotate-fullc'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['annotate']))
        self.assertEqual(opts.annotate, 'fullc')

    def test_annotate_and_positional(self):
        opts, rest = self.parse_args(['-a', 'foo.pyx'])
        self.assertEqual(rest, ['foo.pyx'])
        self.assertTrue(self.are_default(opts, ['annotate']))
        self.assertEqual(opts.annotate, 'default')

    def test_annotate_and_optional(self):
        opts, rest = self.parse_args(['-a', '--3str'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['annotate', 'language_level']))
        self.assertEqual(opts.annotate, 'default')
        self.assertEqual(opts.language_level, 3)

    # --- excludes ---
    def test_exclude_short(self):
        opts, rest = self.parse_args(['-x', '*.pyx'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['excludes']))
        self.assertTrue('*.pyx' in opts.excludes)

    def test_exclude_long(self):
        opts, rest = self.parse_args(['--exclude', '*.pyx'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['excludes']))
        self.assertTrue('*.pyx' in opts.excludes)

    def test_exclude_multiple(self):
        opts, rest = self.parse_args(['--exclude', '*.pyx', '--exclude', '*.py', ])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['excludes']))
        self.assertEqual(opts.excludes, ['*.pyx', '*.py'])

    # --- simple boolean flags ---
    def test_build_short(self):
        opts, rest = self.parse_args(['-b'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['build']))
        self.assertEqual(opts.build, True)

    def test_build_long(self):
        opts, rest = self.parse_args(['--build'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['build']))
        self.assertEqual(opts.build, True)

    def test_inplace_short(self):
        opts, rest = self.parse_args(['-i'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['build_inplace']))
        self.assertEqual(opts.build_inplace, True)

    def test_inplace_long(self):
        opts, rest = self.parse_args(['--inplace'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['build_inplace']))
        self.assertEqual(opts.build_inplace, True)

    def test_parallel_short(self):
        opts, rest = self.parse_args(['-j', '42'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['parallel']))
        self.assertEqual(opts.parallel, 42)

    def test_parallel_long(self):
        opts, rest = self.parse_args(['--parallel', '42'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['parallel']))
        self.assertEqual(opts.parallel, 42)

    def test_force_short(self):
        opts, rest = self.parse_args(['-f'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['force']))
        self.assertEqual(opts.force, True)

    def test_force_long(self):
        opts, rest = self.parse_args(['--force'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['force']))
        self.assertEqual(opts.force, True)

    def test_quite_short(self):
        opts, rest = self.parse_args(['-q'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['quiet']))
        self.assertEqual(opts.quiet, True)

    def test_quite_long(self):
        opts, rest = self.parse_args(['--quiet'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['quiet']))
        self.assertEqual(opts.quiet, True)

    def test_lenient_long(self):
        opts, rest = self.parse_args(['--lenient'])
        self.assertTrue(self.are_default(opts, ['lenient']))
        self.assertFalse(rest)
        self.assertEqual(opts.lenient, True)

    def test_keep_going_short(self):
        opts, rest = self.parse_args(['-k'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['keep_going']))
        self.assertEqual(opts.keep_going, True)

    def test_keep_going_long(self):
        opts, rest = self.parse_args(['--keep-going'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['keep_going']))
        self.assertEqual(opts.keep_going, True)

    def test_no_docstrings_long(self):
        opts, rest = self.parse_args(['--no-docstrings'])
        self.assertFalse(rest)
        self.assertTrue(self.are_default(opts, ['no_docstrings']))
        self.assertEqual(opts.no_docstrings, True)

    # --- positional arguments ---
    def test_file_name(self):
        opts, rest = self.parse_args(['file1.pyx', 'file2.pyx'])
        self.assertEqual(len(rest), 2)
        self.assertEqual(rest[0], 'file1.pyx')
        self.assertEqual(rest[1], 'file2.pyx')
        self.assertTrue(self.are_default(opts, []))

    def test_option_first(self):
        opts, rest = self.parse_args(['-i', 'file.pyx'])
        self.assertEqual(rest, ['file.pyx'])
        self.assertEqual(opts.build_inplace, True)
        self.assertTrue(self.are_default(opts, ['build_inplace']))

    def test_file_inbetween(self):
        opts, rest = self.parse_args(['-i', 'file.pyx', '-a'])
        self.assertEqual(rest, ['file.pyx'])
        self.assertEqual(opts.build_inplace, True)
        self.assertEqual(opts.annotate, 'default')
        self.assertTrue(self.are_default(opts, ['build_inplace', 'annotate']))

    def test_option_trailing(self):
        opts, rest = self.parse_args(['file.pyx', '-i'])
        self.assertEqual(rest, ['file.pyx'])
        self.assertEqual(opts.build_inplace, True)
        self.assertTrue(self.are_default(opts, ['build_inplace']))

    def test_interspersed_positional(self):
        opts, srcs = self.parse_args([
            'file1.pyx', '-a',
            'file2.pyx'
        ])
        self.assertEqual(srcs, ['file1.pyx', 'file2.pyx'])
        self.assertEqual(opts.annotate, 'default')
        self.assertTrue(self.are_default(opts, ['annotate']))

    def test_interspersed_positional2(self):
        opts, srcs = self.parse_args([
            'file1.pyx', '-a',
            'file2.pyx', '-a', 'file3.pyx'
        ])
        self.assertEqual(srcs, ['file1.pyx', 'file2.pyx', 'file3.pyx'])
        self.assertEqual(opts.annotate, 'default')
        self.assertTrue(self.are_default(opts, ['annotate']))

    def test_interspersed_positional3(self):
        opts, srcs = self.parse_args([
            '-f', 'f1', 'f2', '-a',
            'f3', 'f4', '-a', 'f5'
        ])
        self.assertEqual(srcs, ['f1', 'f2', 'f3', 'f4', 'f5'])
        self.assertEqual(opts.annotate, 'default')
        self.assertEqual(opts.force, True)
        self.assertTrue(self.are_default(opts, ['annotate', 'force']))

    def test_wrong_option(self):
        """An unknown option must exit and leave a message on stderr."""
        captured = StringIO()
        saved_stderr = sys.stderr
        sys.stderr = captured
        try:
            self.assertRaises(SystemExit, self.parse_args,
                              ['--unknown-option']
                              )
        finally:
            sys.stderr = saved_stderr
        self.assertTrue(captured.getvalue())
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
class TestParseArgs(TestCase):
    """Tests for parse_args(): argv parsing that also mutates the global
    Cython.Compiler.Options state, which is saved/restored around each test."""

    def setUp(self):
        # Snapshot the global Options so each test starts from defaults.
        self._options_backup = backup_Options()

    def tearDown(self):
        restore_Options(self._options_backup)

    def check_default_global_options(self, white_list=None):
        """Assert that no global option outside *white_list* was changed.

        Note: the default is a None sentinel instead of a mutable ``[]``
        default argument (shared across calls) — behavior is unchanged.
        """
        if white_list is None:
            white_list = []
        self.assertEqual(check_global_options(self._options_backup, white_list), "")

    def test_build_set_for_inplace(self):
        # --inplace implies --build.
        options, args = parse_args(['foo.pyx', '-i'])
        self.assertEqual(options.build, True)
        self.check_default_global_options()

    def test_lenient(self):
        options, sources = parse_args(['foo.pyx', '--lenient'])
        self.assertEqual(sources, ['foo.pyx'])
        self.assertEqual(Options.error_on_unknown_names, False)
        self.assertEqual(Options.error_on_uninitialized, False)
        self.check_default_global_options(['error_on_unknown_names', 'error_on_uninitialized'])

    def test_annotate(self):
        options, sources = parse_args(['foo.pyx', '--annotate'])
        self.assertEqual(sources, ['foo.pyx'])
        self.assertEqual(Options.annotate, 'default')
        self.check_default_global_options(['annotate'])

    def test_annotate_fullc(self):
        options, sources = parse_args(['foo.pyx', '--annotate-fullc'])
        self.assertEqual(sources, ['foo.pyx'])
        self.assertEqual(Options.annotate, 'fullc')
        self.check_default_global_options(['annotate'])

    def test_no_docstrings(self):
        options, sources = parse_args(['foo.pyx', '--no-docstrings'])
        self.assertEqual(sources, ['foo.pyx'])
        self.assertEqual(Options.docstrings, False)
        self.check_default_global_options(['docstrings'])
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/TestDependencies.py
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import os.path
|
| 3 |
+
import tempfile
|
| 4 |
+
import unittest
|
| 5 |
+
from os.path import join as pjoin
|
| 6 |
+
|
| 7 |
+
from ..Dependencies import extended_iglob
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@contextlib.contextmanager
def writable_file(dir_path, filename):
    """Context manager: open *filename* inside *dir_path* for UTF-8 text
    writing and close it on exit."""
    handle = open(pjoin(dir_path, filename), "w", encoding="utf8")
    try:
        yield handle
    finally:
        handle.close()
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TestGlobbing(unittest.TestCase):
|
| 17 |
+
@classmethod
def setUpClass(cls):
    """Build a throw-away directory tree of .pyx/.py files and chdir into it.

    Layout: top-level dirs a, ax, b, bx, ... each with subdirs x, y, z;
    file1_* live directly in the top dirs, file2_* in each subdir.
    """
    cls._orig_dir = os.getcwd()
    cls._tmpdir = tempfile.TemporaryDirectory()
    os.chdir(cls._tmpdir.name)

    for letter in "abcd":
        for top in (letter, letter + 'x'):
            for sub in "xyz":
                leaf = pjoin(top, sub)
                os.makedirs(leaf)
                with writable_file(leaf, "file2_pyx.pyx") as f:
                    f.write('""" PYX """')
                with writable_file(leaf, "file2_py.py") as f:
                    f.write('""" PY """')

            with writable_file(top, "file1_pyx.pyx") as f:
                f.write('""" PYX """')
            with writable_file(top, "file1_py.py") as f:
                f.write('""" PY """')
|
| 38 |
+
|
| 39 |
+
@classmethod
def tearDownClass(cls):
    """Leave the temporary tree (so it can be deleted), then remove it."""
    os.chdir(cls._orig_dir)
    cls._tmpdir.cleanup()
|
| 43 |
+
|
| 44 |
+
def files_equal(self, pattern, expected_files):
    """Assert that extended_iglob(pattern) yields exactly *expected_files*."""
    wanted = sorted(expected_files)
    # It's the users's choice whether '/' will appear on Windows.
    found = sorted(path.replace('/', os.sep) for path in extended_iglob(pattern))
    self.assertListEqual(found, wanted)  # /

    # Special case for Windows: also support '\' in patterns.
    if os.sep == '\\' and '/' in pattern:
        found = sorted(extended_iglob(pattern.replace('/', '\\')))
        self.assertListEqual(found, wanted)  # \
|
| 54 |
+
|
| 55 |
+
def test_extended_iglob_simple(self):
    """Fixed-directory patterns, with and without {...} alternatives."""
    expected = [pjoin("a", "x", "file2_pyx.pyx"), pjoin("a", "x", "file2_py.py")]
    self.files_equal("a/x/*", expected)
    self.files_equal("a/x/*.c12", [])
    self.files_equal("a/x/*.{py,pyx,c12}", expected)
    self.files_equal("a/x/*.{py,pyx}", expected)
    self.files_equal("a/x/*.{pyx}", expected[:1])
    self.files_equal("a/x/*.pyx", expected[:1])
    self.files_equal("a/x/*.{py}", expected[1:])
    self.files_equal("a/x/*.py", expected[1:])
|
| 65 |
+
|
| 66 |
+
def test_extended_iglob_simple_star(self):
|
| 67 |
+
for basedir in "ad":
|
| 68 |
+
files = [
|
| 69 |
+
pjoin(basedir, dirname, filename)
|
| 70 |
+
for dirname in "xyz"
|
| 71 |
+
for filename in ["file2_pyx.pyx", "file2_py.py"]
|
| 72 |
+
]
|
| 73 |
+
self.files_equal(basedir + "/*/*", files)
|
| 74 |
+
self.files_equal(basedir + "/*/*.c12", [])
|
| 75 |
+
self.files_equal(basedir + "/*/*.{py,pyx,c12}", files)
|
| 76 |
+
self.files_equal(basedir + "/*/*.{py,pyx}", files)
|
| 77 |
+
self.files_equal(basedir + "/*/*.{pyx}", files[::2])
|
| 78 |
+
self.files_equal(basedir + "/*/*.pyx", files[::2])
|
| 79 |
+
self.files_equal(basedir + "/*/*.{py}", files[1::2])
|
| 80 |
+
self.files_equal(basedir + "/*/*.py", files[1::2])
|
| 81 |
+
|
| 82 |
+
for subdir in "xy*":
|
| 83 |
+
files = [
|
| 84 |
+
pjoin(basedir, dirname, filename)
|
| 85 |
+
for dirname in "xyz"
|
| 86 |
+
if subdir in ('*', dirname)
|
| 87 |
+
for filename in ["file2_pyx.pyx", "file2_py.py"]
|
| 88 |
+
]
|
| 89 |
+
path = basedir + '/' + subdir + '/'
|
| 90 |
+
self.files_equal(path + "*", files)
|
| 91 |
+
self.files_equal(path + "*.{py,pyx}", files)
|
| 92 |
+
self.files_equal(path + "*.{pyx}", files[::2])
|
| 93 |
+
self.files_equal(path + "*.pyx", files[::2])
|
| 94 |
+
self.files_equal(path + "*.{py}", files[1::2])
|
| 95 |
+
self.files_equal(path + "*.py", files[1::2])
|
| 96 |
+
|
| 97 |
+
def test_extended_iglob_double_star(self):
|
| 98 |
+
basedirs = os.listdir(".")
|
| 99 |
+
files = [
|
| 100 |
+
pjoin(basedir, dirname, filename)
|
| 101 |
+
for basedir in basedirs
|
| 102 |
+
for dirname in "xyz"
|
| 103 |
+
for filename in ["file2_pyx.pyx", "file2_py.py"]
|
| 104 |
+
]
|
| 105 |
+
all_files = [
|
| 106 |
+
pjoin(basedir, filename)
|
| 107 |
+
for basedir in basedirs
|
| 108 |
+
for filename in ["file1_pyx.pyx", "file1_py.py"]
|
| 109 |
+
] + files
|
| 110 |
+
self.files_equal("*/*/*", files)
|
| 111 |
+
self.files_equal("*/*/**/*", files)
|
| 112 |
+
self.files_equal("*/**/*.*", all_files)
|
| 113 |
+
self.files_equal("**/*.*", all_files)
|
| 114 |
+
self.files_equal("*/**/*.c12", [])
|
| 115 |
+
self.files_equal("**/*.c12", [])
|
| 116 |
+
self.files_equal("*/*/*.{py,pyx,c12}", files)
|
| 117 |
+
self.files_equal("*/*/**/*.{py,pyx,c12}", files)
|
| 118 |
+
self.files_equal("*/**/*/*.{py,pyx,c12}", files)
|
| 119 |
+
self.files_equal("**/*/*/*.{py,pyx,c12}", files)
|
| 120 |
+
self.files_equal("**/*.{py,pyx,c12}", all_files)
|
| 121 |
+
self.files_equal("*/*/*.{py,pyx}", files)
|
| 122 |
+
self.files_equal("**/*/*/*.{py,pyx}", files)
|
| 123 |
+
self.files_equal("*/**/*/*.{py,pyx}", files)
|
| 124 |
+
self.files_equal("**/*.{py,pyx}", all_files)
|
| 125 |
+
self.files_equal("*/*/*.{pyx}", files[::2])
|
| 126 |
+
self.files_equal("**/*.{pyx}", all_files[::2])
|
| 127 |
+
self.files_equal("*/**/*/*.pyx", files[::2])
|
| 128 |
+
self.files_equal("*/*/*.pyx", files[::2])
|
| 129 |
+
self.files_equal("**/*.pyx", all_files[::2])
|
| 130 |
+
self.files_equal("*/*/*.{py}", files[1::2])
|
| 131 |
+
self.files_equal("**/*.{py}", all_files[1::2])
|
| 132 |
+
self.files_equal("*/*/*.py", files[1::2])
|
| 133 |
+
self.files_equal("**/*.py", all_files[1::2])
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/TestInline.py
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import tempfile
|
| 3 |
+
import unittest
|
| 4 |
+
from Cython.Shadow import inline
|
| 5 |
+
from Cython.Build.Inline import safe_type, cymeit
|
| 6 |
+
from Cython.TestUtils import CythonTest
|
| 7 |
+
|
| 8 |
+
try:
|
| 9 |
+
import numpy
|
| 10 |
+
has_numpy = True
|
| 11 |
+
except:
|
| 12 |
+
has_numpy = False
|
| 13 |
+
|
| 14 |
+
test_kwds = dict(force=True, quiet=True)
|
| 15 |
+
|
| 16 |
+
global_value = 100
|
| 17 |
+
|
| 18 |
+
class TestInline(CythonTest):
    """Tests for Cython.Shadow.inline(): compiling and running small code snippets.

    Each test passes ``self._call_kwds`` (force/quiet plus a build directory)
    unless it deliberately exercises the default caching behaviour.
    """

    def setUp(self):
        CythonTest.setUp(self)
        self._call_kwds = dict(test_kwds)
        if os.path.isdir('TEST_TMP'):
            # A shared build directory is provided by the test runner.
            lib_dir = os.path.join('TEST_TMP', 'inline')
        else:
            # Fix: the original created a fresh mkdtemp() per test and never
            # removed it, leaking one temp directory per test run.  Register
            # a cleanup so the private build directory is deleted again.
            import shutil
            lib_dir = tempfile.mkdtemp(prefix='cython_inline_')
            self.addCleanup(shutil.rmtree, lib_dir, ignore_errors=True)
        self._call_kwds['lib_dir'] = lib_dir

    def test_simple(self):
        # A returned expression comes back as a plain Python value.
        self.assertEqual(inline("return 1+2", **self._call_kwds), 3)

    def test_types(self):
        # Argument types are inferred for the generated signature.
        self.assertEqual(inline("""
            cimport cython
            return cython.typeof(a), cython.typeof(b)
            """, a=1.0, b=[], **self._call_kwds), ('double', 'list object'))

    def test_locals(self):
        # Local variables of the caller are visible inside the snippet.
        a = 1
        b = 2
        self.assertEqual(inline("return a+b", **self._call_kwds), 3)

    def test_globals(self):
        # Module globals of the caller are visible inside the snippet.
        self.assertEqual(inline("return global_value + 1", **self._call_kwds), global_value + 1)

    def test_no_return(self):
        # Without a return statement, inline() returns the snippet's namespace.
        self.assertEqual(inline("""
            a = 1
            cdef double b = 2
            cdef c = []
            """, **self._call_kwds), dict(a=1, b=2.0, c=[]))

    def test_def_node(self):
        # Functions defined in the snippet appear in the returned namespace.
        foo = inline("def foo(x): return x * x", **self._call_kwds)['foo']
        self.assertEqual(foo(7), 49)

    def test_class_ref(self):
        # Classes from the calling scope can be referenced by name.
        class Type:
            pass
        tp = inline("Type")['Type']
        self.assertEqual(tp, Type)

    def test_pure(self):
        # Pure-Python mode declarations (cython.declare etc.) are supported.
        import cython as cy
        b = inline("""
            b = cy.declare(float, a)
            c = cy.declare(cy.pointer(cy.float), &b)
            return b
            """, a=3, **self._call_kwds)
        self.assertEqual(type(b), float)

    def test_compiler_directives(self):
        # Compiler directives are passed through to the compilation.
        self.assertEqual(
            inline('return sum(x)',
                   x=[1, 2, 3],
                   cython_compiler_directives={'boundscheck': False}),
            6
        )

    def test_lang_version(self):
        # GH-3419. Caching for inline code didn't always respect compiler directives.
        inline_divcode = "def f(int a, int b): return a/b"
        self.assertEqual(
            inline(inline_divcode, language_level=2)['f'](5, 2),
            2
        )
        self.assertEqual(
            inline(inline_divcode, language_level=3)['f'](5, 2),
            2.5
        )
        self.assertEqual(
            inline(inline_divcode, language_level=2)['f'](5, 2),
            2
        )

    def test_repeated_use(self):
        # Repeated calls hit the build cache and keep returning working code.
        inline_mulcode = "def f(int a, int b): return a * b"
        self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)
        self.assertEqual(inline(inline_mulcode)['f'](5, 3), 15)
        self.assertEqual(inline(inline_mulcode)['f'](6, 2), 12)
        self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)

        f = inline(inline_mulcode)['f']
        self.assertEqual(f(5, 2), 10)
        self.assertEqual(f(5, 3), 15)

    @unittest.skipIf(not has_numpy, "NumPy is not available")
    def test_numpy(self):
        # NumPy arrays are mapped to typed buffer arguments.
        import numpy
        a = numpy.ndarray((10, 20))
        a[0, 0] = 10
        self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
        self.assertEqual(inline("return a[0,0]", a=a, **self._call_kwds), 10.0)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class TestCymeit(unittest.TestCase):
    """Tests for the cymeit() benchmarking helper."""

    def _run(self, code, setup_code=None, **kwargs):
        """Benchmark *code*, sanity-check the measurements, return the timings."""
        timings, number = cymeit(code, setup_code=setup_code, **kwargs)

        self.assertGreater(min(timings), 0)

        # Guard that autoscaling leads to reasonable timings.
        # Note: we cannot compare against the expected 0.2 due to large timing variations on CI.
        worst = max(measured * number for measured in timings)
        if isinstance(worst, int):
            # integer timer result, e.g. nanoseconds
            self.assertGreaterEqual(worst, 100_000)
        else:
            # float timer result, e.g. seconds
            self.assertGreaterEqual(worst, 0.0001)
        self.assertGreater(number, 10)  # arbitrary lower bound for our very quick benchmarks

        return timings

    def test_benchmark_simple(self):
        self._run(
            "sum([num for num in numbers])",
            "numbers = list(range(0, 1000, 3))",
            repeat=3,
        )

    def test_benchmark_timer(self):
        import time
        timings = self._run(
            "sum([num for num in numbers])",
            "numbers = list(range(0, 1000, 3))",
            timer=time.perf_counter,
            repeat=3,
        )
        # perf_counter yields float seconds.
        for measured in timings:
            self.assertIsInstance(measured, float)

    def test_benchmark_timer_ns(self):
        import time
        timings = self._run(
            "sum([num for num in numbers])",
            "numbers = list(range(0, 1000, 3))",
            timer=time.perf_counter_ns,
            repeat=3,
        )
        # perf_counter_ns yields integer nanoseconds.
        for measured in timings:
            self.assertIsInstance(measured, int)

    def test_benchmark_multiline_setup(self):
        setup_code = """
        numbers = list(range(0, 100, 3))

        def csum(numbers):
            result = 0
            for number in numbers:
                result += number
            return result
        """
        self._run("csum(numbers)", setup_code)

    def test_benchmark_multiline_code(self):
        setup_code = "numbers = list(range(0, 100, 3))"
        self._run("""
        sum([
            num
            for num in numbers
        ])
        """,
            setup_code,
            repeat=3,
        )

    def test_benchmark_in_module(self):
        self._run("fsum(range(100))", import_module='math', repeat=2)
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/TestIpythonMagic.py
ADDED
|
@@ -0,0 +1,287 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# tag: ipython
|
| 2 |
+
|
| 3 |
+
"""Tests for the Cython magics extension."""
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import io
|
| 8 |
+
import sys
|
| 9 |
+
from contextlib import contextmanager
|
| 10 |
+
from unittest import skipIf
|
| 11 |
+
|
| 12 |
+
from Cython.Build import IpythonMagic
|
| 13 |
+
from Cython.TestUtils import CythonTest
|
| 14 |
+
from Cython.Compiler.Annotate import AnnotationCCodeWriter
|
| 15 |
+
|
| 16 |
+
try:
    import IPython.testing.globalipapp
except ImportError:
    # IPython is missing: turn the class decorator into a "drop the class"
    # no-op so the tests below are disabled entirely.
    def skip_if_not_installed(_):
        return None
else:
    # IPython is available: the decorator passes the class through unchanged.
    def skip_if_not_installed(c):
        return c

# not using IPython's decorators here because they depend on "nose"
skip_win32 = skipIf(sys.platform == 'win32', "Skip on Windows")

try:
    # disable IPython history thread before it gets started to avoid having to clean it up
    from IPython.core.history import HistoryManager
    HistoryManager.enabled = False
except ImportError:
    pass
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
@contextmanager
def capture_output():
    """Temporarily replace stdout/stderr with in-memory streams.

    Yields a list that is filled on exit with two strings: everything written
    to stdout and everything written to stderr while the block ran.
    """
    saved_stdout, saved_stderr = sys.stdout, sys.stderr
    try:
        replacement = [
            io.TextIOWrapper(io.BytesIO(), encoding=sys.stdout.encoding),
            io.TextIOWrapper(io.BytesIO(), encoding=sys.stderr.encoding),
        ]
        sys.stdout, sys.stderr = replacement
        output = []
        yield output
    finally:
        sys.stdout = saved_stdout
        sys.stderr = saved_stderr
        for stream in replacement:
            stream.seek(0)  # rewind to read back everything written
            output.append(stream.read())
            stream.close()
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
code = """\
|
| 57 |
+
def f(x):
|
| 58 |
+
return 2*x
|
| 59 |
+
"""
|
| 60 |
+
|
| 61 |
+
cython3_code = """\
|
| 62 |
+
def f(int x):
|
| 63 |
+
return 2 / x
|
| 64 |
+
|
| 65 |
+
def call(x):
|
| 66 |
+
return f(*(x,))
|
| 67 |
+
"""
|
| 68 |
+
|
| 69 |
+
pgo_cython3_code = cython3_code + """\
|
| 70 |
+
def main():
|
| 71 |
+
for _ in range(100): call(5)
|
| 72 |
+
main()
|
| 73 |
+
"""
|
| 74 |
+
|
| 75 |
+
compile_error_code = '''\
|
| 76 |
+
cdef extern from *:
|
| 77 |
+
"""
|
| 78 |
+
xxx a=1;
|
| 79 |
+
"""
|
| 80 |
+
int a;
|
| 81 |
+
def doit():
|
| 82 |
+
return a
|
| 83 |
+
'''
|
| 84 |
+
|
| 85 |
+
compile_warning_code = '''\
|
| 86 |
+
cdef extern from *:
|
| 87 |
+
"""
|
| 88 |
+
#pragma message ( "CWarning" )
|
| 89 |
+
int a = 42;
|
| 90 |
+
"""
|
| 91 |
+
int a;
|
| 92 |
+
def doit():
|
| 93 |
+
return a
|
| 94 |
+
'''
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
@skip_if_not_installed
class TestIPythonMagic(CythonTest):
    """Tests for the %%cython family of IPython cell magics."""

    @classmethod
    def setUpClass(cls):
        # One global IPython shell instance, shared by every test.
        CythonTest.setUpClass()
        cls._ip = IPython.testing.globalipapp.get_ipython()

    def setUp(self):
        CythonTest.setUp(self)
        self._ip.extension_manager.load_extension('cython')

    def test_cython_inline(self):
        shell = self._ip
        shell.ex('a=10; b=20')
        result = shell.run_cell_magic('cython_inline', '', 'return a+b')
        self.assertEqual(result, 30)

    @skip_win32
    def test_cython_pyximport(self):
        shell = self._ip
        module_name = '_test_cython_pyximport'
        # Build and use the module twice to exercise the reload path.
        shell.run_cell_magic('cython_pyximport', module_name, code)
        shell.ex('g = f(10)')
        self.assertEqual(shell.user_ns['g'], 20.0)
        shell.run_cell_magic('cython_pyximport', module_name, code)
        shell.ex('h = f(-10)')
        self.assertEqual(shell.user_ns['h'], -20.0)
        try:
            os.remove(module_name + '.pyx')
        except OSError:
            pass

    def test_cython(self):
        shell = self._ip
        shell.run_cell_magic('cython', '', code)
        shell.ex('g = f(10)')
        self.assertEqual(shell.user_ns['g'], 20.0)

    def test_cython_name(self):
        # The Cython module named 'mymodule' defines the function f.
        shell = self._ip
        shell.run_cell_magic('cython', '--name=mymodule', code)
        # This module can now be imported in the interactive namespace.
        shell.ex('import mymodule; g = mymodule.f(10)')
        self.assertEqual(shell.user_ns['g'], 20.0)

    def test_cython_language_level(self):
        # Default language level: f() uses true division.
        shell = self._ip
        shell.run_cell_magic('cython', '', cython3_code)
        shell.ex('g = f(10); h = call(10)')
        self.assertEqual(shell.user_ns['g'], 2.0 / 10.0)
        self.assertEqual(shell.user_ns['h'], 2.0 / 10.0)

    def test_cython3(self):
        # '-3': language level 3, so '/' is true division.
        shell = self._ip
        shell.run_cell_magic('cython', '-3', cython3_code)
        shell.ex('g = f(10); h = call(10)')
        self.assertEqual(shell.user_ns['g'], 2.0 / 10.0)
        self.assertEqual(shell.user_ns['h'], 2.0 / 10.0)

    def test_cython2(self):
        # '-2': language level 2, so int '/' floors.
        shell = self._ip
        shell.run_cell_magic('cython', '-2', cython3_code)
        shell.ex('g = f(10); h = call(10)')
        self.assertEqual(shell.user_ns['g'], 2 // 10)
        self.assertEqual(shell.user_ns['h'], 2 // 10)

    def test_cython_compile_error_shown(self):
        shell = self._ip
        with capture_output() as out:
            shell.run_cell_magic('cython', '-3', compile_error_code)
        captured_out, captured_err = out

        # it could be that c-level output is captured by distutil-extension
        # (and not by us) and is printed to stdout:
        captured_all = captured_out + "\n" + captured_err
        self.assertIn("error", captured_all, msg="error in " + captured_all)

    def test_cython_link_error_shown(self):
        shell = self._ip
        with capture_output() as out:
            shell.run_cell_magic('cython', '-3 -l=xxxxxxxx', code)
        captured_out, captured_err = out

        # it could be that c-level output is captured by distutil-extension
        # (and not by us) and is printed to stdout:
        captured_all = captured_out + "\n!" + captured_err
        self.assertIn("error", captured_all, msg="error in " + captured_all)

    def test_cython_warning_shown(self):
        shell = self._ip
        with capture_output() as out:
            # force rebuild, otherwise no warning as after the first success
            # no build step is performed
            shell.run_cell_magic('cython', '-3 -f', compile_warning_code)
        captured_out, captured_err = out

        # check that warning was printed to stdout even if build hasn't failed
        self.assertIn("CWarning", captured_out)

    @skip_win32
    def test_cython3_pgo(self):
        # PGO build of a cell that defines f(), call() and main().
        shell = self._ip
        shell.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code)
        shell.ex('g = f(10); h = call(10); main()')
        self.assertEqual(shell.user_ns['g'], 2.0 / 10.0)
        self.assertEqual(shell.user_ns['h'], 2.0 / 10.0)

    @skip_win32
    def test_extlibs(self):
        shell = self._ip
        code = """
from libc.math cimport sin
x = sin(0.0)
        """
        shell.user_ns['x'] = 1
        shell.run_cell_magic('cython', '-l m', code)
        self.assertEqual(shell.user_ns['x'], 0)

    def test_cython_verbose(self):
        shell = self._ip
        shell.run_cell_magic('cython', '--verbose', code)
        shell.ex('g = f(10)')
        self.assertEqual(shell.user_ns['g'], 20.0)

    def test_cython_verbose_thresholds(self):
        @contextmanager
        def mock_distutils():
            # Swap in a stub logger that records every threshold change.
            class MockLog:
                DEBUG = 1
                INFO = 2
                thresholds = [INFO]

                def set_threshold(self, val):
                    self.thresholds.append(val)
                    return self.thresholds[-2]

            mock_log = MockLog()
            original_log = IpythonMagic.distutils.log
            try:
                IpythonMagic.distutils.log = mock_log
                yield mock_log
            finally:
                IpythonMagic.distutils.log = original_log

        shell = self._ip
        with mock_distutils() as verbose_log:
            shell.run_cell_magic('cython', '--verbose', code)
            shell.ex('g = f(10)')
            self.assertEqual(shell.user_ns['g'], 20.0)
            self.assertEqual([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO],
                             verbose_log.thresholds)

        with mock_distutils() as normal_log:
            shell.run_cell_magic('cython', '', code)
            shell.ex('g = f(10)')
            self.assertEqual(shell.user_ns['g'], 20.0)
            self.assertEqual([normal_log.INFO], normal_log.thresholds)

    def test_cython_no_annotate(self):
        shell = self._ip
        html = shell.run_cell_magic('cython', '', code)
        self.assertIsNone(html)

    def test_cython_annotate(self):
        shell = self._ip
        html = shell.run_cell_magic('cython', '--annotate', code)
        # somewhat brittle way to differentiate between annotated htmls
        # with/without complete source code:
        self.assertNotIn(AnnotationCCodeWriter.COMPLETE_CODE_TITLE, html.data)

    def test_cython_annotate_default(self):
        shell = self._ip
        html = shell.run_cell_magic('cython', '-a', code)
        # somewhat brittle way to differentiate between annotated htmls
        # with/without complete source code:
        self.assertNotIn(AnnotationCCodeWriter.COMPLETE_CODE_TITLE, html.data)

    def test_cython_annotate_complete_c_code(self):
        shell = self._ip
        html = shell.run_cell_magic('cython', '--annotate-fullc', code)
        # somewhat brittle way to differentiate between annotated htmls
        # with/without complete source code:
        self.assertIn(AnnotationCCodeWriter.COMPLETE_CODE_TITLE, html.data)
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/TestRecythonize.py
ADDED
|
@@ -0,0 +1,212 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import shutil
|
| 2 |
+
import os
|
| 3 |
+
import tempfile
|
| 4 |
+
import time
|
| 5 |
+
|
| 6 |
+
import Cython.Build.Dependencies
|
| 7 |
+
import Cython.Utils
|
| 8 |
+
from Cython.TestUtils import CythonTest
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def fresh_cythonize(*args, **kwargs):
    """Run cythonize() with all cached state cleared first.

    Forces a fresh dependency scan so that recompilation decisions are based
    on the current file contents rather than on cached results.
    """
    Cython.Utils.clear_function_caches()
    Cython.Build.Dependencies._dep_tree = None  # discard method caches
    Cython.Build.Dependencies.cythonize(*args, **kwargs)
|
| 15 |
+
|
| 16 |
+
class TestRecythonize(CythonTest):
    """Check that cythonize() regenerates .c files when a .pxd dependency changes."""

    @staticmethod
    def _write(path, content):
        # Helper: (over)write a small source file.
        with open(path, 'w') as f:
            f.write(content)

    @staticmethod
    def _read(path):
        # Helper: read back a generated file.
        with open(path) as f:
            return f.read()

    def setUp(self):
        CythonTest.setUp(self)
        base_dir = 'TEST_TMP' if os.path.isdir('TEST_TMP') else None
        self.temp_dir = tempfile.mkdtemp(prefix='recythonize-test', dir=base_dir)

    def tearDown(self):
        CythonTest.tearDown(self)
        shutil.rmtree(self.temp_dir)

    def test_recythonize_pyx_on_pxd_change(self):
        src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)

        a_pxd = os.path.join(src_dir, 'a.pxd')
        a_pyx = os.path.join(src_dir, 'a.pyx')
        a_c = os.path.join(src_dir, 'a.c')
        dep_tree = Cython.Build.Dependencies.create_dependency_tree()

        self._write(a_pxd, 'cdef int value\n')
        self._write(a_pyx, 'value = 1\n')

        # The dependencies for "a.pyx" are "a.pxd" and "a.pyx".
        self.assertEqual({a_pxd, a_pyx}, dep_tree.all_dependencies(a_pyx))

        # Cythonize to create a.c
        fresh_cythonize(a_pyx)

        # Sleep to address coarse time-stamp precision.
        time.sleep(1)

        a_c_contents1 = self._read(a_c)

        # Change the declared type in the .pxd and recythonize.
        self._write(a_pxd, 'cdef double value\n')
        fresh_cythonize(a_pyx)

        a_c_contents2 = self._read(a_c)

        self.assertIn("__pyx_v_1a_value = 1;", a_c_contents1)
        self.assertNotIn("__pyx_v_1a_value = 1;", a_c_contents2)
        self.assertIn("__pyx_v_1a_value = 1.0;", a_c_contents2)
        self.assertNotIn("__pyx_v_1a_value = 1.0;", a_c_contents1)

    def test_recythonize_py_on_pxd_change(self):
        src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)

        a_pxd = os.path.join(src_dir, 'a.pxd')
        a_py = os.path.join(src_dir, 'a.py')
        a_c = os.path.join(src_dir, 'a.c')
        dep_tree = Cython.Build.Dependencies.create_dependency_tree()

        self._write(a_pxd, 'cdef int value\n')
        self._write(a_py, 'value = 1\n')

        # The dependencies for "a.py" are "a.pxd" and "a.py".
        self.assertEqual({a_pxd, a_py}, dep_tree.all_dependencies(a_py))

        # Cythonize to create a.c
        fresh_cythonize(a_py)

        # Sleep to address coarse time-stamp precision.
        time.sleep(1)

        a_c_contents1 = self._read(a_c)

        # Change the declared type in the .pxd and recythonize.
        self._write(a_pxd, 'cdef double value\n')
        fresh_cythonize(a_py)

        a_c_contents2 = self._read(a_c)

        self.assertIn("__pyx_v_1a_value = 1;", a_c_contents1)
        self.assertNotIn("__pyx_v_1a_value = 1;", a_c_contents2)
        self.assertIn("__pyx_v_1a_value = 1.0;", a_c_contents2)
        self.assertNotIn("__pyx_v_1a_value = 1.0;", a_c_contents1)

    def test_recythonize_pyx_on_dep_pxd_change(self):
        src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)

        a_pxd = os.path.join(src_dir, 'a.pxd')
        a_pyx = os.path.join(src_dir, 'a.pyx')
        b_pyx = os.path.join(src_dir, 'b.pyx')
        b_c = os.path.join(src_dir, 'b.c')
        dep_tree = Cython.Build.Dependencies.create_dependency_tree()

        self._write(a_pxd, 'cdef int value\n')
        self._write(a_pyx, 'value = 1\n')
        self._write(b_pyx, 'cimport a\n' + 'a.value = 2\n')

        # The dependencies for "b.pyx" are "a.pxd" and "b.pyx".
        self.assertEqual({a_pxd, b_pyx}, dep_tree.all_dependencies(b_pyx))

        # Cythonize to create b.c
        fresh_cythonize([a_pyx, b_pyx])

        # Sleep to address coarse time-stamp precision.
        time.sleep(1)

        b_c_contents1 = self._read(b_c)

        # Change the declared type in a.pxd; b.c must be regenerated too.
        self._write(a_pxd, 'cdef double value\n')
        fresh_cythonize([a_pyx, b_pyx])

        b_c_contents2 = self._read(b_c)

        self.assertIn("__pyx_v_1a_value = 2;", b_c_contents1)
        self.assertNotIn("__pyx_v_1a_value = 2;", b_c_contents2)
        self.assertIn("__pyx_v_1a_value = 2.0;", b_c_contents2)
        self.assertNotIn("__pyx_v_1a_value = 2.0;", b_c_contents1)

    def test_recythonize_py_on_dep_pxd_change(self):
        src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)

        a_pxd = os.path.join(src_dir, 'a.pxd')
        a_pyx = os.path.join(src_dir, 'a.pyx')
        b_pxd = os.path.join(src_dir, 'b.pxd')
        b_py = os.path.join(src_dir, 'b.py')
        b_c = os.path.join(src_dir, 'b.c')
        dep_tree = Cython.Build.Dependencies.create_dependency_tree()

        self._write(a_pxd, 'cdef int value\n')
        self._write(a_pyx, 'value = 1\n')
        self._write(b_pxd, 'cimport a\n')
        self._write(b_py, 'a.value = 2\n')

        # The dependencies for b.py are "a.pxd", "b.pxd" and "b.py".
        self.assertEqual({a_pxd, b_pxd, b_py}, dep_tree.all_dependencies(b_py))

        # Cythonize to create b.c
        fresh_cythonize([a_pyx, b_py])

        # Sleep to address coarse time-stamp precision.
        time.sleep(1)

        b_c_contents1 = self._read(b_c)

        # Change the declared type in a.pxd; b.c must be regenerated too.
        self._write(a_pxd, 'cdef double value\n')
        fresh_cythonize([a_pyx, b_py])

        b_c_contents2 = self._read(b_c)

        self.assertIn("__pyx_v_1a_value = 2;", b_c_contents1)
        self.assertNotIn("__pyx_v_1a_value = 2;", b_c_contents2)
        self.assertIn("__pyx_v_1a_value = 2.0;", b_c_contents2)
        self.assertNotIn("__pyx_v_1a_value = 2.0;", b_c_contents1)
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/TestStripLiterals.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pathlib
|
| 2 |
+
import re
|
| 3 |
+
import unittest
|
| 4 |
+
|
| 5 |
+
from ...Utils import open_source_file
|
| 6 |
+
from ..Dependencies import strip_string_literals
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TestStripLiterals(unittest.TestCase):
|
| 10 |
+
maxDiff = None
|
| 11 |
+
|
| 12 |
+
@staticmethod
|
| 13 |
+
def _rebuild_string(stripped, literals):
|
| 14 |
+
def lookup(match):
|
| 15 |
+
return literals[match.group()]
|
| 16 |
+
|
| 17 |
+
return re.sub("__Pyx_L[0-9]+_", lookup, stripped)
|
| 18 |
+
|
| 19 |
+
def test_strip_string_literals(self):
|
| 20 |
+
def strip_equals(s, expected):
|
| 21 |
+
stripped, literals = strip_string_literals(s)
|
| 22 |
+
self.assertEqual(expected, stripped)
|
| 23 |
+
|
| 24 |
+
recovered = self._rebuild_string(stripped, literals)
|
| 25 |
+
self.assertEqual(s, recovered)
|
| 26 |
+
|
| 27 |
+
unchanged = [
|
| 28 |
+
"",
|
| 29 |
+
"""abc""",
|
| 30 |
+
"""123""",
|
| 31 |
+
"""func(123)""",
|
| 32 |
+
""" '' """,
|
| 33 |
+
""" '''''''''''' """,
|
| 34 |
+
""" '''''''''''''' """,
|
| 35 |
+
]
|
| 36 |
+
|
| 37 |
+
tests = [(code, code) for code in unchanged] + [
|
| 38 |
+
# strings and quotes
|
| 39 |
+
('"x"',
|
| 40 |
+
'"__Pyx_L1_"'),
|
| 41 |
+
("'x'",
|
| 42 |
+
"'__Pyx_L1_'"),
|
| 43 |
+
(""" '"' "'" """,
|
| 44 |
+
""" '__Pyx_L1_' "__Pyx_L2_" """),
|
| 45 |
+
(""" '''' ''' """,
|
| 46 |
+
""" '''__Pyx_L1_''' """),
|
| 47 |
+
(''' """" """ ''',
|
| 48 |
+
''' """__Pyx_L1_""" '''),
|
| 49 |
+
(" '''a\n''' ",
|
| 50 |
+
" '''__Pyx_L1_''' "),
|
| 51 |
+
|
| 52 |
+
# escapes
|
| 53 |
+
(r"'a\'b'",
|
| 54 |
+
"'__Pyx_L1_'"),
|
| 55 |
+
(r"'a\\'",
|
| 56 |
+
"'__Pyx_L1_'"),
|
| 57 |
+
(r"'a\\\'b'",
|
| 58 |
+
"'__Pyx_L1_'"),
|
| 59 |
+
|
| 60 |
+
# string prefixes
|
| 61 |
+
("u'abc'",
|
| 62 |
+
"u'__Pyx_L1_'"),
|
| 63 |
+
(r"r'abc\\'",
|
| 64 |
+
"r'__Pyx_L1_'"),
|
| 65 |
+
(r"ru'abc\\'",
|
| 66 |
+
"ru'__Pyx_L1_'"),
|
| 67 |
+
|
| 68 |
+
# comments
|
| 69 |
+
("abc # foo",
|
| 70 |
+
"abc #__Pyx_L1_"),
|
| 71 |
+
("abc # 'x'",
|
| 72 |
+
"abc #__Pyx_L1_"),
|
| 73 |
+
("'abc#'",
|
| 74 |
+
"'__Pyx_L1_'"),
|
| 75 |
+
|
| 76 |
+
# special commands
|
| 77 |
+
("include 'a.pxi' # something here",
|
| 78 |
+
"include '__Pyx_L1_' #__Pyx_L2_"),
|
| 79 |
+
("cdef extern from 'a.h': # comment",
|
| 80 |
+
"cdef extern from '__Pyx_L1_': #__Pyx_L2_"),
|
| 81 |
+
|
| 82 |
+
# mixed strings
|
| 83 |
+
(""" func('xyz') + " " + "" '' # '' | "" "123" 'xyz' "' """,
|
| 84 |
+
""" func('__Pyx_L1_') + "__Pyx_L2_" + "" '' #__Pyx_L3_"""),
|
| 85 |
+
|
| 86 |
+
(""" f'f' """,
|
| 87 |
+
""" f'__Pyx_L1_' """),
|
| 88 |
+
|
| 89 |
+
(""" f'a{123}b' """,
|
| 90 |
+
""" f'__Pyx_L1_{123}__Pyx_L2_' """),
|
| 91 |
+
|
| 92 |
+
(""" f'{1}{f'xyz'}' """,
|
| 93 |
+
""" f'{1}{f'__Pyx_L1_'}' """),
|
| 94 |
+
|
| 95 |
+
(""" f'{f'''xyz{f\"""abc\"""}'''}' """,
|
| 96 |
+
""" f'{f'''__Pyx_L1_{f\"""__Pyx_L2_\"""}'''}' """),
|
| 97 |
+
|
| 98 |
+
(""" f'{{{{{"abc"}}}}}{{}}{{' == '{{abc}}{}{' """,
|
| 99 |
+
""" f'__Pyx_L1_{"__Pyx_L2_"}__Pyx_L3_' == '__Pyx_L4_' """),
|
| 100 |
+
|
| 101 |
+
("f'" + ('{x} ' * 250) + "{x:{width}} '",
|
| 102 |
+
"f'" + ''.join([f'{{x}}__Pyx_L{n}_' for n in range(1, 251)]) + "{x:{width}}__Pyx_L251_'")
|
| 103 |
+
]
|
| 104 |
+
|
| 105 |
+
for code, expected in tests:
|
| 106 |
+
with self.subTest(code=code):
|
| 107 |
+
strip_equals(code, expected) # plain
|
| 108 |
+
code = code.strip()
|
| 109 |
+
expected = expected.strip()
|
| 110 |
+
with self.subTest(code=code):
|
| 111 |
+
strip_equals(code, expected) # stripped
|
| 112 |
+
code += "\n"
|
| 113 |
+
expected += "\n"
|
| 114 |
+
with self.subTest(code=code):
|
| 115 |
+
strip_equals(code, expected) # +EOL
|
| 116 |
+
|
| 117 |
+
# GH-5977: unclosed string literal
|
| 118 |
+
strip_equals(
|
| 119 |
+
""" print("Say something: %s' % something) """,
|
| 120 |
+
""" print("__Pyx_L1_"""
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
def _test_all_files(self, base_dir, file_paths):
|
| 124 |
+
_find_leftover_string = re.compile(r"""[^_'"}](['"]+)[^_'"{]""").search
|
| 125 |
+
for file_path in sorted(file_paths):
|
| 126 |
+
with self.subTest(file=str(file_path.relative_to(base_dir))):
|
| 127 |
+
with open_source_file(str(file_path)) as f:
|
| 128 |
+
code = f.read()
|
| 129 |
+
stripped, literals = strip_string_literals(code)
|
| 130 |
+
|
| 131 |
+
match = _find_leftover_string(stripped)
|
| 132 |
+
if match and len(match.group(1)) != 2:
|
| 133 |
+
match_pos = match.start() + 1
|
| 134 |
+
self.fail(f"Leftover string found: {stripped[match_pos - 12 : match_pos + 12]!r}")
|
| 135 |
+
|
| 136 |
+
recovered = self._rebuild_string(stripped, literals)
|
| 137 |
+
self.assertEqual(code, recovered)
|
| 138 |
+
|
| 139 |
+
def test_strip_string_literals_py_files(self):
|
| 140 |
+
# process all .py files in the Cython package
|
| 141 |
+
package_dir = pathlib.Path(__file__).absolute().parents[2]
|
| 142 |
+
assert package_dir.name == 'Cython'
|
| 143 |
+
base_dir = package_dir.parent
|
| 144 |
+
self._test_all_files(base_dir, package_dir.rglob("*.py"))
|
| 145 |
+
|
| 146 |
+
def test_strip_string_literals_test_files(self):
|
| 147 |
+
# process all .py[x] files in the tests package
|
| 148 |
+
base_dir = pathlib.Path(__file__).absolute().parents[3]
|
| 149 |
+
tests_dir = base_dir / 'tests'
|
| 150 |
+
test_files = []
|
| 151 |
+
for test_subdir in tests_dir.iterdir():
|
| 152 |
+
if test_subdir.is_dir() and test_subdir.name != 'errors':
|
| 153 |
+
test_files.extend(test_subdir.rglob("*.py"))
|
| 154 |
+
test_files.extend(test_subdir.rglob("*.pyx"))
|
| 155 |
+
self._test_all_files(base_dir, test_files)
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# empty file
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestCyCache.cpython-310.pyc
ADDED
|
Binary file (6.95 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestCythonizeArgsParser.cpython-310.pyc
ADDED
|
Binary file (17.5 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestDependencies.cpython-310.pyc
ADDED
|
Binary file (5.33 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestInline.cpython-310.pyc
ADDED
|
Binary file (7.05 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestIpythonMagic.cpython-310.pyc
ADDED
|
Binary file (8.37 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestRecythonize.cpython-310.pyc
ADDED
|
Binary file (4.56 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/TestStripLiterals.cpython-310.pyc
ADDED
|
Binary file (5.78 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/Tests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (254 Bytes). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/__init__.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .Dependencies import cythonize
|
| 2 |
+
|
| 3 |
+
__all__ = ["cythonize"]
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def __getattr__(name):
|
| 7 |
+
if name == 'build_ext':
|
| 8 |
+
# Lazy import, fails if distutils is not available (in Python 3.12+).
|
| 9 |
+
from .Distutils import build_ext
|
| 10 |
+
return build_ext
|
| 11 |
+
raise AttributeError("module '%s' has no attribute '%s'" % (__name__, name))
|
venv/lib/python3.10/site-packages/Cython/Build/__pycache__/BuildExecutable.cpython-310.pyc
ADDED
|
Binary file (5.1 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Cache.cpython-310.pyc
ADDED
|
Binary file (6.12 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Cythonize.cpython-310.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Dependencies.cpython-310.pyc
ADDED
|
Binary file (36.5 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Distutils.cpython-310.pyc
ADDED
|
Binary file (309 Bytes). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/__pycache__/Inline.cpython-310.pyc
ADDED
|
Binary file (12.8 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/__pycache__/IpythonMagic.cpython-310.pyc
ADDED
|
Binary file (16.6 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/__pycache__/SharedModule.cpython-310.pyc
ADDED
|
Binary file (3.19 kB). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/Build/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (569 Bytes). View file
|
|
|
venv/lib/python3.10/site-packages/Cython/CodeWriter.py
ADDED
|
@@ -0,0 +1,811 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Serializes a Cython code tree to Cython code. This is primarily useful for
|
| 3 |
+
debugging and testing purposes.
|
| 4 |
+
The output is in a strict format, no whitespace or comments from the input
|
| 5 |
+
is preserved (and it could not be as it is not present in the code tree).
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
from .Compiler.Visitor import TreeVisitor
|
| 10 |
+
from .Compiler.ExprNodes import *
|
| 11 |
+
from .Compiler.Nodes import CSimpleBaseTypeNode
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class LinesResult:
|
| 15 |
+
def __init__(self):
|
| 16 |
+
self.lines = []
|
| 17 |
+
self.s = ""
|
| 18 |
+
|
| 19 |
+
def put(self, s):
|
| 20 |
+
self.s += s
|
| 21 |
+
|
| 22 |
+
def newline(self):
|
| 23 |
+
self.lines.append(self.s)
|
| 24 |
+
self.s = ""
|
| 25 |
+
|
| 26 |
+
def putline(self, s):
|
| 27 |
+
self.put(s)
|
| 28 |
+
self.newline()
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class DeclarationWriter(TreeVisitor):
|
| 32 |
+
"""
|
| 33 |
+
A Cython code writer that is limited to declarations nodes.
|
| 34 |
+
"""
|
| 35 |
+
|
| 36 |
+
indent_string = " "
|
| 37 |
+
|
| 38 |
+
def __init__(self, result=None):
|
| 39 |
+
super().__init__()
|
| 40 |
+
if result is None:
|
| 41 |
+
result = LinesResult()
|
| 42 |
+
self.result = result
|
| 43 |
+
self.numindents = 0
|
| 44 |
+
self.tempnames = {}
|
| 45 |
+
self.tempblockindex = 0
|
| 46 |
+
|
| 47 |
+
def write(self, tree):
|
| 48 |
+
self.visit(tree)
|
| 49 |
+
return self.result
|
| 50 |
+
|
| 51 |
+
def indent(self):
|
| 52 |
+
self.numindents += 1
|
| 53 |
+
|
| 54 |
+
def dedent(self):
|
| 55 |
+
self.numindents -= 1
|
| 56 |
+
|
| 57 |
+
def startline(self, s=""):
|
| 58 |
+
self.result.put(self.indent_string * self.numindents + s)
|
| 59 |
+
|
| 60 |
+
def put(self, s):
|
| 61 |
+
self.result.put(s)
|
| 62 |
+
|
| 63 |
+
def putline(self, s):
|
| 64 |
+
self.result.putline(self.indent_string * self.numindents + s)
|
| 65 |
+
|
| 66 |
+
def endline(self, s=""):
|
| 67 |
+
self.result.putline(s)
|
| 68 |
+
|
| 69 |
+
def line(self, s):
|
| 70 |
+
self.startline(s)
|
| 71 |
+
self.endline()
|
| 72 |
+
|
| 73 |
+
def comma_separated_list(self, items, output_rhs=False):
|
| 74 |
+
if len(items) > 0:
|
| 75 |
+
for item in items[:-1]:
|
| 76 |
+
self.visit(item)
|
| 77 |
+
if output_rhs and item.default is not None:
|
| 78 |
+
self.put(" = ")
|
| 79 |
+
self.visit(item.default)
|
| 80 |
+
self.put(", ")
|
| 81 |
+
self.visit(items[-1])
|
| 82 |
+
if output_rhs and items[-1].default is not None:
|
| 83 |
+
self.put(" = ")
|
| 84 |
+
self.visit(items[-1].default)
|
| 85 |
+
|
| 86 |
+
def _visit_indented(self, node):
|
| 87 |
+
self.indent()
|
| 88 |
+
self.visit(node)
|
| 89 |
+
self.dedent()
|
| 90 |
+
|
| 91 |
+
def visit_Node(self, node):
|
| 92 |
+
raise AssertionError("Node not handled by serializer: %r" % node)
|
| 93 |
+
|
| 94 |
+
def visit_ModuleNode(self, node):
|
| 95 |
+
self.visitchildren(node)
|
| 96 |
+
|
| 97 |
+
def visit_StatListNode(self, node):
|
| 98 |
+
self.visitchildren(node)
|
| 99 |
+
|
| 100 |
+
def visit_CDefExternNode(self, node):
|
| 101 |
+
if node.include_file is None:
|
| 102 |
+
file = '*'
|
| 103 |
+
else:
|
| 104 |
+
file = '"%s"' % node.include_file
|
| 105 |
+
self.putline("cdef extern from %s:" % file)
|
| 106 |
+
self._visit_indented(node.body)
|
| 107 |
+
|
| 108 |
+
def visit_CPtrDeclaratorNode(self, node):
|
| 109 |
+
self.put('*')
|
| 110 |
+
self.visit(node.base)
|
| 111 |
+
|
| 112 |
+
def visit_CReferenceDeclaratorNode(self, node):
|
| 113 |
+
self.put('&')
|
| 114 |
+
self.visit(node.base)
|
| 115 |
+
|
| 116 |
+
def visit_CArrayDeclaratorNode(self, node):
|
| 117 |
+
self.visit(node.base)
|
| 118 |
+
self.put('[')
|
| 119 |
+
if node.dimension is not None:
|
| 120 |
+
self.visit(node.dimension)
|
| 121 |
+
self.put(']')
|
| 122 |
+
|
| 123 |
+
def visit_CFuncDeclaratorNode(self, node):
|
| 124 |
+
# TODO: except, gil, etc.
|
| 125 |
+
self.visit(node.base)
|
| 126 |
+
self.put('(')
|
| 127 |
+
self.comma_separated_list(node.args)
|
| 128 |
+
self.endline(')')
|
| 129 |
+
|
| 130 |
+
def visit_CNameDeclaratorNode(self, node):
|
| 131 |
+
self.put(node.name)
|
| 132 |
+
|
| 133 |
+
def visit_CSimpleBaseTypeNode(self, node):
|
| 134 |
+
# See Parsing.p_sign_and_longness
|
| 135 |
+
if node.is_basic_c_type:
|
| 136 |
+
self.put(("unsigned ", "", "signed ")[node.signed])
|
| 137 |
+
if node.longness < 0:
|
| 138 |
+
self.put("short " * -node.longness)
|
| 139 |
+
elif node.longness > 0:
|
| 140 |
+
self.put("long " * node.longness)
|
| 141 |
+
if node.name is not None:
|
| 142 |
+
self.put(node.name)
|
| 143 |
+
|
| 144 |
+
def visit_CComplexBaseTypeNode(self, node):
|
| 145 |
+
self.visit(node.base_type)
|
| 146 |
+
self.visit(node.declarator)
|
| 147 |
+
|
| 148 |
+
def visit_CNestedBaseTypeNode(self, node):
|
| 149 |
+
self.visit(node.base_type)
|
| 150 |
+
self.put('.')
|
| 151 |
+
self.put(node.name)
|
| 152 |
+
|
| 153 |
+
def visit_TemplatedTypeNode(self, node):
|
| 154 |
+
self.visit(node.base_type_node)
|
| 155 |
+
self.put('[')
|
| 156 |
+
self.comma_separated_list(node.positional_args + node.keyword_args.key_value_pairs)
|
| 157 |
+
self.put(']')
|
| 158 |
+
|
| 159 |
+
def visit_CVarDefNode(self, node):
|
| 160 |
+
self.startline("cdef ")
|
| 161 |
+
self.visit(node.base_type)
|
| 162 |
+
self.put(" ")
|
| 163 |
+
self.comma_separated_list(node.declarators, output_rhs=True)
|
| 164 |
+
self.endline()
|
| 165 |
+
|
| 166 |
+
def _visit_container_node(self, node, decl, extras, attributes):
|
| 167 |
+
# TODO: visibility
|
| 168 |
+
self.startline(decl)
|
| 169 |
+
if node.name:
|
| 170 |
+
self.put(' ')
|
| 171 |
+
self.put(node.name)
|
| 172 |
+
if node.cname is not None:
|
| 173 |
+
self.put(' "%s"' % node.cname)
|
| 174 |
+
if extras:
|
| 175 |
+
self.put(extras)
|
| 176 |
+
self.endline(':')
|
| 177 |
+
self.indent()
|
| 178 |
+
if not attributes:
|
| 179 |
+
self.putline('pass')
|
| 180 |
+
else:
|
| 181 |
+
for attribute in attributes:
|
| 182 |
+
self.visit(attribute)
|
| 183 |
+
self.dedent()
|
| 184 |
+
|
| 185 |
+
def visit_CStructOrUnionDefNode(self, node):
|
| 186 |
+
if node.typedef_flag:
|
| 187 |
+
decl = 'ctypedef '
|
| 188 |
+
else:
|
| 189 |
+
decl = 'cdef '
|
| 190 |
+
if node.visibility == 'public':
|
| 191 |
+
decl += 'public '
|
| 192 |
+
if node.packed:
|
| 193 |
+
decl += 'packed '
|
| 194 |
+
decl += node.kind
|
| 195 |
+
self._visit_container_node(node, decl, None, node.attributes)
|
| 196 |
+
|
| 197 |
+
def visit_CppClassNode(self, node):
|
| 198 |
+
extras = ""
|
| 199 |
+
if node.templates:
|
| 200 |
+
extras = "[%s]" % ", ".join(node.templates)
|
| 201 |
+
if node.base_classes:
|
| 202 |
+
extras += "(%s)" % ", ".join(node.base_classes)
|
| 203 |
+
self._visit_container_node(node, "cdef cppclass", extras, node.attributes)
|
| 204 |
+
|
| 205 |
+
def visit_CEnumDefNode(self, node):
|
| 206 |
+
self._visit_container_node(node, "cdef enum", None, node.items)
|
| 207 |
+
|
| 208 |
+
def visit_CEnumDefItemNode(self, node):
|
| 209 |
+
self.startline(node.name)
|
| 210 |
+
if node.cname:
|
| 211 |
+
self.put(' "%s"' % node.cname)
|
| 212 |
+
if node.value:
|
| 213 |
+
self.put(" = ")
|
| 214 |
+
self.visit(node.value)
|
| 215 |
+
self.endline()
|
| 216 |
+
|
| 217 |
+
def visit_CClassDefNode(self, node):
|
| 218 |
+
assert not node.module_name
|
| 219 |
+
if node.decorators:
|
| 220 |
+
for decorator in node.decorators:
|
| 221 |
+
self.visit(decorator)
|
| 222 |
+
self.startline("cdef class ")
|
| 223 |
+
self.put(node.class_name)
|
| 224 |
+
if node.base_class_name:
|
| 225 |
+
self.put("(")
|
| 226 |
+
if node.base_class_module:
|
| 227 |
+
self.put(node.base_class_module)
|
| 228 |
+
self.put(".")
|
| 229 |
+
self.put(node.base_class_name)
|
| 230 |
+
self.put(")")
|
| 231 |
+
self.endline(":")
|
| 232 |
+
self._visit_indented(node.body)
|
| 233 |
+
|
| 234 |
+
def visit_CTypeDefNode(self, node):
|
| 235 |
+
self.startline("ctypedef ")
|
| 236 |
+
self.visit(node.base_type)
|
| 237 |
+
self.put(" ")
|
| 238 |
+
self.visit(node.declarator)
|
| 239 |
+
self.endline()
|
| 240 |
+
|
| 241 |
+
def visit_FuncDefNode(self, node):
|
| 242 |
+
# TODO: support cdef + cpdef functions
|
| 243 |
+
self.startline("def %s(" % node.name)
|
| 244 |
+
self.comma_separated_list(node.args)
|
| 245 |
+
self.endline("):")
|
| 246 |
+
self._visit_indented(node.body)
|
| 247 |
+
|
| 248 |
+
def visit_CFuncDefNode(self, node):
|
| 249 |
+
self.startline('cpdef ' if node.overridable else 'cdef ')
|
| 250 |
+
if node.modifiers:
|
| 251 |
+
self.put(' '.join(node.modifiers))
|
| 252 |
+
self.put(' ')
|
| 253 |
+
if node.visibility != 'private':
|
| 254 |
+
self.put(node.visibility)
|
| 255 |
+
self.put(' ')
|
| 256 |
+
if node.api:
|
| 257 |
+
self.put('api ')
|
| 258 |
+
|
| 259 |
+
if node.base_type:
|
| 260 |
+
self.visit(node.base_type)
|
| 261 |
+
if node.base_type.name is not None:
|
| 262 |
+
self.put(' ')
|
| 263 |
+
|
| 264 |
+
# visit the CFuncDeclaratorNode, but put a `:` at the end of line
|
| 265 |
+
self.visit(node.declarator.base)
|
| 266 |
+
self.put('(')
|
| 267 |
+
self.comma_separated_list(node.declarator.args)
|
| 268 |
+
self.endline('):')
|
| 269 |
+
|
| 270 |
+
self._visit_indented(node.body)
|
| 271 |
+
|
| 272 |
+
def visit_CArgDeclNode(self, node):
|
| 273 |
+
# For "CSimpleBaseTypeNode", the variable type may have been parsed as type.
|
| 274 |
+
# For other node types, the "name" is always None.
|
| 275 |
+
if not isinstance(node.base_type, CSimpleBaseTypeNode) or \
|
| 276 |
+
node.base_type.name is not None:
|
| 277 |
+
self.visit(node.base_type)
|
| 278 |
+
|
| 279 |
+
# If we printed something for "node.base_type", we may need to print an extra ' '.
|
| 280 |
+
#
|
| 281 |
+
# Special case: if "node.declarator" is a "CNameDeclaratorNode",
|
| 282 |
+
# its "name" might be an empty string, for example, for "cdef f(x)".
|
| 283 |
+
if node.declarator.declared_name():
|
| 284 |
+
self.put(" ")
|
| 285 |
+
self.visit(node.declarator)
|
| 286 |
+
if node.default is not None:
|
| 287 |
+
self.put(" = ")
|
| 288 |
+
self.visit(node.default)
|
| 289 |
+
|
| 290 |
+
def visit_CImportStatNode(self, node):
|
| 291 |
+
self.startline("cimport ")
|
| 292 |
+
self.put(node.module_name)
|
| 293 |
+
if node.as_name:
|
| 294 |
+
self.put(" as ")
|
| 295 |
+
self.put(node.as_name)
|
| 296 |
+
self.endline()
|
| 297 |
+
|
| 298 |
+
def visit_FromCImportStatNode(self, node):
|
| 299 |
+
self.startline("from ")
|
| 300 |
+
self.put(node.module_name)
|
| 301 |
+
self.put(" cimport ")
|
| 302 |
+
first = True
|
| 303 |
+
for pos, name, as_name, kind in node.imported_names:
|
| 304 |
+
assert kind is None
|
| 305 |
+
if first:
|
| 306 |
+
first = False
|
| 307 |
+
else:
|
| 308 |
+
self.put(", ")
|
| 309 |
+
self.put(name)
|
| 310 |
+
if as_name:
|
| 311 |
+
self.put(" as ")
|
| 312 |
+
self.put(as_name)
|
| 313 |
+
self.endline()
|
| 314 |
+
|
| 315 |
+
def visit_NameNode(self, node):
|
| 316 |
+
self.put(node.name)
|
| 317 |
+
|
| 318 |
+
def visit_DecoratorNode(self, node):
|
| 319 |
+
self.startline("@")
|
| 320 |
+
self.visit(node.decorator)
|
| 321 |
+
self.endline()
|
| 322 |
+
|
| 323 |
+
def visit_PassStatNode(self, node):
|
| 324 |
+
self.startline("pass")
|
| 325 |
+
self.endline()
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
class StatementWriter(DeclarationWriter):
|
| 329 |
+
"""
|
| 330 |
+
A Cython code writer for most language statement features.
|
| 331 |
+
"""
|
| 332 |
+
|
| 333 |
+
def visit_SingleAssignmentNode(self, node):
|
| 334 |
+
self.startline()
|
| 335 |
+
self.visit(node.lhs)
|
| 336 |
+
self.put(" = ")
|
| 337 |
+
self.visit(node.rhs)
|
| 338 |
+
self.endline()
|
| 339 |
+
|
| 340 |
+
def visit_CascadedAssignmentNode(self, node):
|
| 341 |
+
self.startline()
|
| 342 |
+
for lhs in node.lhs_list:
|
| 343 |
+
self.visit(lhs)
|
| 344 |
+
self.put(" = ")
|
| 345 |
+
self.visit(node.rhs)
|
| 346 |
+
self.endline()
|
| 347 |
+
|
| 348 |
+
def visit_PrintStatNode(self, node):
|
| 349 |
+
self.startline("print ")
|
| 350 |
+
self.comma_separated_list(node.arg_tuple.args)
|
| 351 |
+
if not node.append_newline:
|
| 352 |
+
self.put(",")
|
| 353 |
+
self.endline()
|
| 354 |
+
|
| 355 |
+
def visit_ForInStatNode(self, node):
|
| 356 |
+
self.startline("for ")
|
| 357 |
+
if node.target.is_sequence_constructor:
|
| 358 |
+
self.comma_separated_list(node.target.args)
|
| 359 |
+
else:
|
| 360 |
+
self.visit(node.target)
|
| 361 |
+
self.put(" in ")
|
| 362 |
+
self.visit(node.iterator.sequence)
|
| 363 |
+
self.endline(":")
|
| 364 |
+
self._visit_indented(node.body)
|
| 365 |
+
if node.else_clause is not None:
|
| 366 |
+
self.line("else:")
|
| 367 |
+
self._visit_indented(node.else_clause)
|
| 368 |
+
|
| 369 |
+
def visit_IfStatNode(self, node):
|
| 370 |
+
# The IfClauseNode is handled directly without a separate match
|
| 371 |
+
# for clariy.
|
| 372 |
+
self.startline("if ")
|
| 373 |
+
self.visit(node.if_clauses[0].condition)
|
| 374 |
+
self.endline(":")
|
| 375 |
+
self._visit_indented(node.if_clauses[0].body)
|
| 376 |
+
for clause in node.if_clauses[1:]:
|
| 377 |
+
self.startline("elif ")
|
| 378 |
+
self.visit(clause.condition)
|
| 379 |
+
self.endline(":")
|
| 380 |
+
self._visit_indented(clause.body)
|
| 381 |
+
if node.else_clause is not None:
|
| 382 |
+
self.line("else:")
|
| 383 |
+
self._visit_indented(node.else_clause)
|
| 384 |
+
|
| 385 |
+
def visit_WhileStatNode(self, node):
|
| 386 |
+
self.startline("while ")
|
| 387 |
+
self.visit(node.condition)
|
| 388 |
+
self.endline(":")
|
| 389 |
+
self._visit_indented(node.body)
|
| 390 |
+
if node.else_clause is not None:
|
| 391 |
+
self.line("else:")
|
| 392 |
+
self._visit_indented(node.else_clause)
|
| 393 |
+
|
| 394 |
+
def visit_ContinueStatNode(self, node):
|
| 395 |
+
self.line("continue")
|
| 396 |
+
|
| 397 |
+
def visit_BreakStatNode(self, node):
|
| 398 |
+
self.line("break")
|
| 399 |
+
|
| 400 |
+
def visit_SequenceNode(self, node):
|
| 401 |
+
self.comma_separated_list(node.args) # Might need to discover whether we need () around tuples...hmm...
|
| 402 |
+
|
| 403 |
+
def visit_ExprStatNode(self, node):
|
| 404 |
+
self.startline()
|
| 405 |
+
self.visit(node.expr)
|
| 406 |
+
self.endline()
|
| 407 |
+
|
| 408 |
+
def visit_InPlaceAssignmentNode(self, node):
|
| 409 |
+
self.startline()
|
| 410 |
+
self.visit(node.lhs)
|
| 411 |
+
self.put(" %s= " % node.operator)
|
| 412 |
+
self.visit(node.rhs)
|
| 413 |
+
self.endline()
|
| 414 |
+
|
| 415 |
+
def visit_WithStatNode(self, node):
|
| 416 |
+
self.startline()
|
| 417 |
+
self.put("with ")
|
| 418 |
+
self.visit(node.manager)
|
| 419 |
+
if node.target is not None:
|
| 420 |
+
self.put(" as ")
|
| 421 |
+
self.visit(node.target)
|
| 422 |
+
self.endline(":")
|
| 423 |
+
self._visit_indented(node.body)
|
| 424 |
+
|
| 425 |
+
def visit_TryFinallyStatNode(self, node):
|
| 426 |
+
self.line("try:")
|
| 427 |
+
self._visit_indented(node.body)
|
| 428 |
+
self.line("finally:")
|
| 429 |
+
self._visit_indented(node.finally_clause)
|
| 430 |
+
|
| 431 |
+
def visit_TryExceptStatNode(self, node):
|
| 432 |
+
self.line("try:")
|
| 433 |
+
self._visit_indented(node.body)
|
| 434 |
+
for x in node.except_clauses:
|
| 435 |
+
self.visit(x)
|
| 436 |
+
if node.else_clause is not None:
|
| 437 |
+
self.visit(node.else_clause)
|
| 438 |
+
|
| 439 |
+
def visit_ExceptClauseNode(self, node):
|
| 440 |
+
self.startline("except")
|
| 441 |
+
if node.pattern is not None:
|
| 442 |
+
self.put(" ")
|
| 443 |
+
self.visit(node.pattern)
|
| 444 |
+
if node.target is not None:
|
| 445 |
+
self.put(", ")
|
| 446 |
+
self.visit(node.target)
|
| 447 |
+
self.endline(":")
|
| 448 |
+
self._visit_indented(node.body)
|
| 449 |
+
|
| 450 |
+
def visit_ReturnStatNode(self, node):
|
| 451 |
+
self.startline("return")
|
| 452 |
+
if node.value is not None:
|
| 453 |
+
self.put(" ")
|
| 454 |
+
self.visit(node.value)
|
| 455 |
+
self.endline()
|
| 456 |
+
|
| 457 |
+
def visit_ReraiseStatNode(self, node):
|
| 458 |
+
self.line("raise")
|
| 459 |
+
|
| 460 |
+
def visit_ImportNode(self, node):
|
| 461 |
+
self.put("(import %s)" % node.module_name.value)
|
| 462 |
+
|
| 463 |
+
def visit_TempsBlockNode(self, node):
|
| 464 |
+
"""
|
| 465 |
+
Temporaries are output like $1_1', where the first number is
|
| 466 |
+
an index of the TempsBlockNode and the second number is an index
|
| 467 |
+
of the temporary which that block allocates.
|
| 468 |
+
"""
|
| 469 |
+
idx = 0
|
| 470 |
+
for handle in node.temps:
|
| 471 |
+
self.tempnames[handle] = "$%d_%d" % (self.tempblockindex, idx)
|
| 472 |
+
idx += 1
|
| 473 |
+
self.tempblockindex += 1
|
| 474 |
+
self.visit(node.body)
|
| 475 |
+
|
| 476 |
+
def visit_TempRefNode(self, node):
|
| 477 |
+
self.put(self.tempnames[node.handle])
|
| 478 |
+
|
| 479 |
+
|
| 480 |
+
class ExpressionWriter(TreeVisitor):
    """
    A Cython code writer that is intentionally limited to expressions.

    Serializes an expression subtree back into source text, accumulated
    in ``self.result``.  Parentheses are inserted based on an explicit
    operator-precedence stack.
    """

    def __init__(self, result=None):
        super().__init__()
        if result is None:
            result = ""
        self.result = result
        # Precedence stack; 0 is the top-level sentinel (never parenthesized).
        self.precedence = [0]

    def write(self, tree):
        """Serialize *tree* and return the accumulated source text."""
        self.visit(tree)
        return self.result

    def put(self, s):
        # Append raw text to the output.
        self.result += s

    def remove(self, s):
        # Drop *s* from the end of the output if present (used to strip a
        # trailing ", " after argument lists, see visit_GeneralCallNode).
        if self.result.endswith(s):
            self.result = self.result[:-len(s)]

    def comma_separated_list(self, items):
        # Emit items separated by ", " with no trailing separator.
        if len(items) > 0:
            for item in items[:-1]:
                self.visit(item)
                self.put(", ")
            self.visit(items[-1])

    def visit_Node(self, node):
        # Fallback: any node type without an explicit handler is an error.
        raise AssertionError("Node not handled by serializer: %r" % node)

    # TODO: Remove redundancy below. Most constants serialise fine as just "repr(node.value)".

    def visit_IntNode(self, node):
        self.put(node.value)

    def visit_FloatNode(self, node):
        self.put(node.value)

    def visit_NoneNode(self, node):
        self.put("None")

    def visit_NameNode(self, node):
        self.put(node.name)

    def visit_EllipsisNode(self, node):
        self.put("...")

    def visit_BoolNode(self, node):
        self.put(str(node.value))

    def visit_ConstNode(self, node):
        self.put(str(node.value))

    def visit_ImagNode(self, node):
        self.put(f"{node.value}j")

    def visit_BytesNode(self, node):
        self.put(repr(node.value))

    def visit_UnicodeNode(self, node):
        self.put(repr(node.value))

    def emit_sequence(self, node, parens=("", "")):
        # Emit the node's subexpressions as a comma list between the given
        # delimiters; ``parens`` may be any 2-item sequence, e.g. "[]".
        open_paren, close_paren = parens
        items = node.subexpr_nodes()
        self.put(open_paren)
        self.comma_separated_list(items)
        self.put(close_paren)

    def visit_ListNode(self, node):
        self.emit_sequence(node, "[]")

    def visit_TupleNode(self, node):
        self.emit_sequence(node, "()")

    def visit_SetNode(self, node):
        if len(node.subexpr_nodes()) > 0:
            self.emit_sequence(node, "{}")
        else:
            # "{}" would be a dict literal; an empty set needs the call form.
            self.put("set()")

    def visit_DictNode(self, node):
        self.emit_sequence(node, "{}")

    def visit_DictItemNode(self, node):
        self.visit(node.key)
        self.put(": ")
        self.visit(node.value)

    # Operator precedence tables (higher binds tighter), mirroring Python.
    unop_precedence = {
        'not': 3, '!': 3,
        '+': 11, '-': 11, '~': 11,
    }
    binop_precedence = {
        'or': 1,
        'and': 2,
        # unary: 'not': 3, '!': 3,
        'in': 4, 'not_in': 4, 'is': 4, 'is_not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4,
        '|': 5,
        '^': 6,
        '&': 7,
        '<<': 8, '>>': 8,
        '+': 9, '-': 9,
        '*': 10, '@': 10, '/': 10, '//': 10, '%': 10,
        # unary: '+': 11, '-': 11, '~': 11
        '**': 12,
    }

    def operator_enter(self, new_prec):
        # Open a parenthesis if this operator binds weaker than its context.
        old_prec = self.precedence[-1]
        if old_prec > new_prec:
            self.put("(")
        self.precedence.append(new_prec)

    def operator_exit(self):
        # Close the parenthesis opened by the matching operator_enter().
        old_prec, new_prec = self.precedence[-2:]
        if old_prec > new_prec:
            self.put(")")
        self.precedence.pop()

    def visit_NotNode(self, node):
        op = 'not'
        prec = self.unop_precedence[op]
        self.operator_enter(prec)
        self.put("not ")
        self.visit(node.operand)
        self.operator_exit()

    def visit_UnopNode(self, node):
        op = node.operator
        prec = self.unop_precedence[op]
        self.operator_enter(prec)
        self.put("%s" % node.operator)
        self.visit(node.operand)
        self.operator_exit()

    def visit_BinopNode(self, node):
        op = node.operator
        # Unknown operators default to precedence 0 (always parenthesized
        # inside any other operator).
        prec = self.binop_precedence.get(op, 0)
        self.operator_enter(prec)
        self.visit(node.operand1)
        # 'not_in' / 'is_not' are stored with underscores; print with spaces.
        self.put(" %s " % op.replace('_', ' '))
        self.visit(node.operand2)
        self.operator_exit()

    def visit_BoolBinopNode(self, node):
        self.visit_BinopNode(node)

    def visit_PrimaryCmpNode(self, node):
        self.visit_BinopNode(node)

    def visit_IndexNode(self, node):
        self.visit(node.base)
        self.put("[")
        if isinstance(node.index, TupleNode):
            if node.index.subexpr_nodes():
                # Tuple index: emit items without the tuple's own parentheses.
                self.emit_sequence(node.index)
            else:
                self.put("()")
        else:
            self.visit(node.index)
        self.put("]")

    def visit_SliceIndexNode(self, node):
        self.visit(node.base)
        self.put("[")
        if node.start:
            self.visit(node.start)
        self.put(":")
        if node.stop:
            self.visit(node.stop)
        if node.slice:
            self.put(":")
            self.visit(node.slice)
        self.put("]")

    def visit_SliceNode(self, node):
        # A bare slice expression; omitted bounds are NoneNodes.
        if not node.start.is_none:
            self.visit(node.start)
        self.put(":")
        if not node.stop.is_none:
            self.visit(node.stop)
        if not node.step.is_none:
            self.put(":")
            self.visit(node.step)

    def visit_CondExprNode(self, node):
        self.visit(node.true_val)
        self.put(" if ")
        self.visit(node.test)
        self.put(" else ")
        self.visit(node.false_val)

    def visit_AttributeNode(self, node):
        self.visit(node.obj)
        self.put(".%s" % node.attribute)

    def visit_SimpleCallNode(self, node):
        self.visit(node.function)
        self.put("(")
        self.comma_separated_list(node.args)
        self.put(")")

    def emit_pos_args(self, node):
        # Flatten the positional-argument tree: AddNode concatenations and
        # TupleNodes are expanded, AsTupleNode becomes "*arg".  Every item
        # is emitted with a trailing ", "; the caller strips the last one.
        if node is None:
            return
        if isinstance(node, AddNode):
            self.emit_pos_args(node.operand1)
            self.emit_pos_args(node.operand2)
        elif isinstance(node, TupleNode):
            for expr in node.subexpr_nodes():
                self.visit(expr)
                self.put(", ")
        elif isinstance(node, AsTupleNode):
            self.put("*")
            self.visit(node.arg)
            self.put(", ")
        else:
            self.visit(node)
            self.put(", ")

    def emit_kwd_args(self, node):
        # Emit keyword arguments: DictNodes become "name=value" pairs,
        # anything else becomes "**expr"; trailing ", " as in emit_pos_args.
        if node is None:
            return
        if isinstance(node, MergedDictNode):
            for expr in node.subexpr_nodes():
                self.emit_kwd_args(expr)
        elif isinstance(node, DictNode):
            for expr in node.subexpr_nodes():
                self.put("%s=" % expr.key.value)
                self.visit(expr.value)
                self.put(", ")
        else:
            self.put("**")
            self.visit(node)
            self.put(", ")

    def visit_GeneralCallNode(self, node):
        self.visit(node.function)
        self.put("(")
        self.emit_pos_args(node.positional_args)
        self.emit_kwd_args(node.keyword_args)
        # Strip the trailing separator left by the emit_* helpers.
        self.remove(", ")
        self.put(")")

    def emit_comprehension(self, body, target,
                           sequence, condition,
                           parens=("", "")):
        # Shared emitter for list/set/dict comprehensions and genexprs:
        # "<open> body for target in sequence [if condition] <close>".
        open_paren, close_paren = parens
        self.put(open_paren)
        self.visit(body)
        self.put(" for ")
        self.visit(target)
        self.put(" in ")
        self.visit(sequence)
        if condition:
            self.put(" if ")
            self.visit(condition)
        self.put(close_paren)

    def visit_ComprehensionAppendNode(self, node):
        self.visit(node.expr)

    def visit_DictComprehensionAppendNode(self, node):
        self.visit(node.key_expr)
        self.put(": ")
        self.visit(node.value_expr)

    def visit_ComprehensionNode(self, node):
        tpmap = {'list': "[]", 'dict': "{}", 'set': "{}"}
        parens = tpmap[node.type.py_type_name()]
        body = node.loop.body
        target = node.loop.target
        sequence = node.loop.iterator.sequence
        condition = None
        if hasattr(body, 'if_clauses'):
            # type(body) is Nodes.IfStatNode
            condition = body.if_clauses[0].condition
            body = body.if_clauses[0].body
        self.emit_comprehension(body, target, sequence, condition, parens)

    def visit_GeneratorExpressionNode(self, node):
        body = node.loop.body
        target = node.loop.target
        sequence = node.loop.iterator.sequence
        condition = None
        if hasattr(body, 'if_clauses'):
            # type(body) is Nodes.IfStatNode
            condition = body.if_clauses[0].condition
            body = body.if_clauses[0].body.expr.arg
        elif hasattr(body, 'expr'):
            # type(body) is Nodes.ExprStatNode
            body = body.expr.arg
        self.emit_comprehension(body, target, sequence, condition, "()")
|
| 778 |
+
|
| 779 |
+
class PxdWriter(DeclarationWriter, ExpressionWriter):
    """
    A Cython code writer for everything supported in pxd files.
    (currently unused)
    """

    def __call__(self, node):
        # NOTE(review): self.write() here resolves via ExpressionWriter,
        # whose result is a plain string without a .lines attribute — this
        # presumably expects DeclarationWriter's result object; verify before
        # reviving this (the class is documented as currently unused).
        print('\n'.join(self.write(node).lines))
        return node

    def visit_CFuncDefNode(self, node):
        # Emit a C function declaration: "cpdef"/"cdef", then optional
        # modifiers, visibility (when not private) and "api", then the
        # declarator itself.
        if node.overridable:
            self.startline('cpdef ')
        else:
            self.startline('cdef ')
        if node.modifiers:
            self.put(' '.join(node.modifiers))
            self.put(' ')
        if node.visibility != 'private':
            self.put(node.visibility)
            self.put(' ')
        if node.api:
            self.put('api ')
        self.visit(node.declarator)

    def visit_StatNode(self, node):
        # Statements carry no declaration information for a .pxd file.
        pass
| 807 |
+
|
| 808 |
+
class CodeWriter(StatementWriter, ExpressionWriter):
    """
    A complete Cython code writer.

    Combines statement serialization (StatementWriter) with expression
    serialization (ExpressionWriter); all behavior is inherited.
    """
venv/lib/python3.10/site-packages/Cython/Compiler/AnalysedTreeTransforms.py
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .Visitor import ScopeTrackingTransform
|
| 2 |
+
from .Nodes import StatListNode, SingleAssignmentNode, CFuncDefNode, DefNode
|
| 3 |
+
from .ExprNodes import DictNode, DictItemNode, NameNode, UnicodeNode
|
| 4 |
+
from .PyrexTypes import py_object_type
|
| 5 |
+
from .StringEncoding import EncodedString
|
| 6 |
+
from . import Symtab
|
| 7 |
+
|
| 8 |
+
class AutoTestDictTransform(ScopeTrackingTransform):
    """
    Handles the ``autotestdict`` directive: collects docstrings that look
    like doctests and appends a module-level ``__test__`` dict assignment
    so the doctest module can discover them.
    """

    # Special methods whose docstrings are never collected for cclass scopes.
    excludelist = ['__cinit__', '__dealloc__', '__richcmp__',
                   '__nonzero__', '__bool__',
                   '__len__', '__contains__']

    def visit_ModuleNode(self, node):
        """Set up collection state and append the ``__test__`` assignment."""
        if node.is_pxd:
            return node
        self.scope_type = 'module'
        self.scope_node = node

        if not self.current_directives['autotestdict']:
            return node
        self.all_docstrings = self.current_directives['autotestdict.all']
        self.cdef_docstrings = self.all_docstrings or self.current_directives['autotestdict.cdef']

        assert isinstance(node.body, StatListNode)

        # First see if __test__ is already created
        if '__test__' in node.scope.entries:
            # Do nothing
            return node

        pos = node.pos

        self.tests = []
        self.testspos = node.pos

        test_dict_entry = node.scope.declare_var(EncodedString('__test__'),
                                                 py_object_type,
                                                 pos,
                                                 visibility='public')
        # The DictNode shares self.tests, so entries added while visiting
        # children below end up in the assignment appended afterwards.
        create_test_dict_assignment = SingleAssignmentNode(pos,
            lhs=NameNode(pos, name=EncodedString('__test__'),
                         entry=test_dict_entry),
            rhs=DictNode(pos, key_value_pairs=self.tests))
        self.visitchildren(node)
        node.body.stats.append(create_test_dict_assignment)
        return node

    def add_test(self, testpos, path, doctest):
        """Record one docstring under the key ``"<path> (line <n>)"``."""
        pos = self.testspos
        keystr = EncodedString(f'{path} (line {testpos[1]:d})')
        key = UnicodeNode(pos, value=keystr)
        value = UnicodeNode(pos, value=doctest)
        self.tests.append(DictItemNode(pos, key=key, value=value))

    def visit_ExprNode(self, node):
        # expressions cannot contain functions and lambda expressions
        # do not have a docstring
        return node

    def visit_FuncDefNode(self, node):
        """Collect the function's docstring if the directives allow it."""
        if not node.doc or (isinstance(node, DefNode) and node.fused_py_func):
            return node
        if not self.cdef_docstrings:
            # Plain cdef functions are skipped unless explicitly enabled.
            if isinstance(node, CFuncDefNode) and not node.py_func:
                return node
        if not self.all_docstrings and '>>>' not in node.doc:
            return node

        if self.scope_type == 'module':
            path = node.entry.name
        elif self.scope_type in ('pyclass', 'cclass'):
            if isinstance(node, CFuncDefNode):
                if node.py_func is not None:
                    name = node.py_func.name
                else:
                    name = node.entry.name
            else:
                name = node.name
            if self.scope_type == 'cclass' and name in self.excludelist:
                return node
            if self.scope_type == 'pyclass':
                class_name = self.scope_node.name
            else:
                class_name = self.scope_node.class_name
            if isinstance(node.entry.scope, Symtab.PropertyScope):
                # Property getters/setters get an extra path segment.
                path = "%s.%s.%s" % (class_name, node.entry.scope.name,
                                     node.entry.name)
            else:
                path = "%s.%s" % (class_name, node.entry.name)
        else:
            assert False
        self.add_test(node.pos, path, node.doc)
        return node
venv/lib/python3.10/site-packages/Cython/Compiler/Annotate.py
ADDED
|
@@ -0,0 +1,325 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Note: Work in progress
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
import os
|
| 5 |
+
import os.path
|
| 6 |
+
import re
|
| 7 |
+
import textwrap
|
| 8 |
+
from datetime import datetime
|
| 9 |
+
from functools import partial
|
| 10 |
+
from collections import defaultdict
|
| 11 |
+
from xml.sax.saxutils import escape as html_escape
|
| 12 |
+
from io import StringIO
|
| 13 |
+
|
| 14 |
+
from . import Version
|
| 15 |
+
from .Code import CCodeWriter
|
| 16 |
+
from .. import Utils
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class AnnotationCCodeWriter(CCodeWriter):
    """
    A CCodeWriter that additionally records, per source file and line,
    the generated C code, annotations and scopes, and can render the
    result as an annotated HTML page.
    """

    # also used as marker for detection of complete code emission in tests
    COMPLETE_CODE_TITLE = "Complete cythonized code"

    def __init__(self, create_from=None, buffer=None, copy_formatting=True, show_entire_c_code=False, source_desc=None):
        CCodeWriter.__init__(self, create_from, buffer, copy_formatting=copy_formatting)
        self.show_entire_c_code = show_entire_c_code
        if create_from is None:
            self.annotation_buffer = StringIO()
            self.last_annotated_pos = None
            # annotations[filename][line] -> [(column, AnnotationItem)*]
            self.annotations = defaultdict(partial(defaultdict, list))
            # code[filename][line] -> str
            self.code = defaultdict(partial(defaultdict, str))
            # scopes[filename][line] -> set(scopes)
            self.scopes = defaultdict(partial(defaultdict, set))
        else:
            # When creating an insertion point, keep references to the same database
            self.annotation_buffer = create_from.annotation_buffer
            self.annotations = create_from.annotations
            self.code = create_from.code
            self.scopes = create_from.scopes
            self.last_annotated_pos = create_from.last_annotated_pos

    def create_new(self, create_from, buffer, copy_formatting):
        return AnnotationCCodeWriter(create_from, buffer, copy_formatting)

    def _write_to_buffer(self, s):
        # Mirror everything into the annotation buffer so mark_pos() can
        # attribute it to the current source line.
        self.buffer.write(s)
        self.annotation_buffer.write(s)

    def mark_pos(self, pos, trace=True):
        """Flush buffered C code to the previous position and switch to *pos*."""
        if pos is not None:
            CCodeWriter.mark_pos(self, pos, trace)
            if self.funcstate and self.funcstate.scope:
                # lambdas and genexprs can result in multiple scopes per line => keep them in a set
                self.scopes[pos[0].filename][pos[1]].add(self.funcstate.scope)
        if self.last_annotated_pos:
            source_desc, line, _ = self.last_annotated_pos
            pos_code = self.code[source_desc.filename]
            pos_code[line] += self.annotation_buffer.getvalue()
        self.annotation_buffer = StringIO()
        self.last_annotated_pos = pos

    def annotate(self, pos, item):
        self.annotations[pos[0].filename][pos[1]].append((pos[2], item))

    def _css(self):
        """css template will later allow to choose a colormap"""
        css = [self._css_template]
        for i in range(255):
            color_shade = int(255.0 // (1.0 + i/10.0))
            css.append(f'.cython.score-{i:d} {{background-color: #FFFF{color_shade:02x};}}')
        try:
            from pygments.formatters import HtmlFormatter
        except ImportError:
            pass
        else:
            css.append(HtmlFormatter().get_style_defs('.cython'))
        return '\n'.join(css)

    _css_template = textwrap.dedent("""
        body.cython { font-family: courier; font-size: 12; }

        .cython.tag  {  }
        .cython.line { color: #000000; margin: 0em }
        .cython.code { font-size: 9; color: #444444; display: none; margin: 0px 0px 0px 8px; border-left: 8px none; }

        .cython.line .run { background-color: #B0FFB0; }
        .cython.line .mis { background-color: #FFB0B0; }
        .cython.code.run  { border-left: 8px solid #B0FFB0; }
        .cython.code.mis  { border-left: 8px solid #FFB0B0; }

        .cython.code .py_c_api  { color: red; }
        .cython.code .py_macro_api  { color: #FF7000; }
        .cython.code .pyx_c_api  { color: #FF3000; }
        .cython.code .pyx_macro_api  { color: #FF7000; }
        .cython.code .refnanny  { color: #FFA000; }
        .cython.code .trace  { color: #FFA000; }
        .cython.code .error_goto  { color: #FFA000; }

        .cython.code .coerce  { color: #008000; border: 1px dotted #008000 }
        .cython.code .py_attr { color: #FF0000; font-weight: bold; }
        .cython.code .c_attr  { color: #0000FF; }
        .cython.code .py_call { color: #FF0000; font-weight: bold; }
        .cython.code .c_call  { color: #0000FF; }
    """)

    # on-click toggle function to show/hide C source code
    _onclick_attr = ' onclick="{}"'.format((
        "(function(s){"
        "    s.display =  s.display === 'block' ? 'none' : 'block'"
        "})(this.nextElementSibling.style)"
        ).replace(' ', '')  # poor dev's JS minification
    )

    def save_annotation(self, source_filename, target_filename, coverage_xml=None):
        """Render the annotated HTML for *source_filename* next to *target_filename*."""
        with Utils.open_source_file(source_filename) as f:
            code = f.read()
        generated_code = self.code.get(source_filename, {})
        c_file = Utils.decode_filename(os.path.basename(target_filename))
        html_filename = os.path.splitext(target_filename)[0] + ".html"

        with open(html_filename, "w", encoding="UTF-8") as out_buffer:
            out_buffer.write(self._save_annotation(code, generated_code, c_file, source_filename, coverage_xml))

    def _save_annotation_header(self, c_file, source_filename, coverage_timestamp=None):
        coverage_info = ''
        if coverage_timestamp:
            coverage_info = ' with coverage data from {timestamp}'.format(
                timestamp=datetime.fromtimestamp(int(coverage_timestamp) // 1000))

        outlist = [
            # Fix: the title used a literal placeholder while 'filename' was
            # passed to .format() below but never interpolated; use it.
            textwrap.dedent('''\
            <!DOCTYPE html>
            <!-- Generated by Cython {watermark} -->
            <html>
            <head>
                <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
                <title>Cython: {filename}</title>
                <style type="text/css">
                {css}
                </style>
            </head>
            <body class="cython">
            <p><span style="border-bottom: solid 1px grey;">Generated by Cython {watermark}</span>{more_info}</p>
            <p>
                <span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
                Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
            </p>
            ''').format(css=self._css(), watermark=Version.watermark,
                        filename=os.path.basename(source_filename) if source_filename else '',
                        more_info=coverage_info)
        ]
        if c_file:
            outlist.append('<p>Raw output: <a href="%s">%s</a></p>\n' % (c_file, c_file))
        return outlist

    def _save_annotation_footer(self):
        return ('</body></html>\n',)

    def _save_annotation(self, code, generated_code, c_file=None, source_filename=None, coverage_xml=None):
        """
        lines : original cython source code split by lines
        generated_code : generated c code keyed by line number in original file
        target filename : name of the file in which to store the generated html
        c_file : filename in which the c_code has been written
        """
        if coverage_xml is not None and source_filename:
            coverage_timestamp = coverage_xml.get('timestamp', '').strip()
            covered_lines = self._get_line_coverage(coverage_xml, source_filename)
        else:
            coverage_timestamp = covered_lines = None
        annotation_items = dict(self.annotations[source_filename])
        scopes = dict(self.scopes[source_filename])

        outlist = []
        outlist.extend(self._save_annotation_header(c_file, source_filename, coverage_timestamp))
        outlist.extend(self._save_annotation_body(code, generated_code, annotation_items, scopes, covered_lines))
        outlist.extend(self._save_annotation_footer())
        return ''.join(outlist)

    def _get_line_coverage(self, coverage_xml, source_filename):
        """Map line number -> hit count for *source_filename*, or None."""
        coverage_data = None
        for entry in coverage_xml.iterfind('.//class'):
            if not entry.get('filename'):
                continue
            if (entry.get('filename') == source_filename or
                    os.path.abspath(entry.get('filename')) == source_filename):
                coverage_data = entry
                break
            elif source_filename.endswith(entry.get('filename')):
                coverage_data = entry  # but we might still find a better match...
        if coverage_data is None:
            return None
        return {
            int(line.get('number')): int(line.get('hits'))
            for line in coverage_data.iterfind('lines/line')
        }

    def _htmlify_code(self, code, language):
        try:
            from pygments import highlight
            from pygments.lexers import CythonLexer, CppLexer
            from pygments.formatters import HtmlFormatter
        except ImportError:
            # no Pygments, just escape the code
            return html_escape(code)

        if language == "cython":
            lexer = CythonLexer(stripnl=False, stripall=False)
        elif language == "c/cpp":
            lexer = CppLexer(stripnl=False, stripall=False)
        else:
            # unknown language, use fallback
            return html_escape(code)
        html_code = highlight(
            code, lexer,
            HtmlFormatter(nowrap=True))
        return html_code

    def _save_annotation_body(self, cython_code, generated_code, annotation_items, scopes, covered_lines=None):
        outlist = ['<div class="cython">']
        pos_comment_marker = '/* \N{HORIZONTAL ELLIPSIS} */\n'
        new_calls_map = {
            name: 0 for name in
            'refnanny trace py_macro_api py_c_api pyx_macro_api pyx_c_api error_goto'.split()
        }.copy

        self.mark_pos(None)

        def annotate(match):
            group_name = match.lastgroup
            calls[group_name] += 1
            return f"<span class='{group_name}'>{match.group(group_name)}</span>"

        lines = self._htmlify_code(cython_code, "cython").splitlines()
        lineno_width = len(str(len(lines)))
        if not covered_lines:
            covered_lines = None

        for k, line in enumerate(lines, 1):
            try:
                c_code = generated_code[k]
            except KeyError:
                c_code = ''
            else:
                c_code = _replace_pos_comment(pos_comment_marker, c_code)
                if c_code.startswith(pos_comment_marker):
                    c_code = c_code[len(pos_comment_marker):]
                c_code = html_escape(c_code)

            calls = new_calls_map()
            c_code = _parse_code(annotate, c_code)
            # Weighted count of Python C-API usage — higher means more
            # Python interaction, shown as a more saturated yellow line.
            score = (5 * calls['py_c_api'] + 2 * calls['pyx_c_api'] +
                     calls['py_macro_api'] + calls['pyx_macro_api'])

            if c_code:
                onclick = self._onclick_attr
                expandsymbol = '+'
            else:
                onclick = ''
                expandsymbol = ' '

            covered = ''
            if covered_lines is not None and k in covered_lines:
                hits = covered_lines[k]
                if hits is not None:
                    covered = 'run' if hits else 'mis'

            outlist.append(
                f'<pre class="cython line score-{score}"{onclick}>'
                # generate line number with expand symbol in front,
                # and the right number of digit
                f'{expandsymbol}<span class="{covered}">{k:0{lineno_width}d}</span>: {line.rstrip()}</pre>\n'
            )
            if c_code:
                outlist.append(f"<pre class='cython code score-{score} {covered}'>{c_code}</pre>")
        outlist.append("</div>")

        # now the whole c-code if needed:
        if self.show_entire_c_code:
            complete_code_as_html = self._htmlify_code(self.buffer.getvalue(), "c/cpp")
            outlist.append(
                '<p><div class="cython">'
                f"<pre class='cython line'{self._onclick_attr}>+ {AnnotationCCodeWriter.COMPLETE_CODE_TITLE}</pre>\n"
                f"<pre class='cython code'>{complete_code_as_html}</pre>"
                "</div></p>"
            )

        return outlist
| 292 |
+
|
| 293 |
+
# Bound .sub of the regex that classifies C-API calls in generated code;
# used with a callable replacement to wrap each match in a styled <span>.
# The group name of each alternative becomes the CSS class / call counter key.
_parse_code = re.compile((
    br'(?P<refnanny>__Pyx_X?(?:GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)|'
    br'(?P<trace>__Pyx_Trace[A-Za-z]+)|'
    br'(?:'
    br'(?P<pyx_macro_api>__Pyx_[A-Z][A-Z_]+)|'
    br'(?P<pyx_c_api>(?:__Pyx_[A-Z][a-z_][A-Za-z_]*)|__pyx_convert_[A-Za-z_]*)|'
    br'(?P<py_macro_api>Py[A-Z][a-z]+_[A-Z][A-Z_]+)|'
    br'(?P<py_c_api>Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)'
    br')(?=\()|'  # look-ahead to exclude subsequent '(' from replacement
    br'(?P<error_goto>(?:(?<=;) *if [^;]* +)?__PYX_ERR\([^)]+\))'
).decode('ascii')).sub
| 305 |
+
|
| 306 |
+
# Bound .sub of the regex matching Cython's multi-line position-marker
# comments at the start of generated code blocks (replaced by an ellipsis
# marker in the annotation output).
_replace_pos_comment = re.compile(
    # this matches what Cython generates as code line marker comment
    br'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n'.decode('ascii'),
    re.M
).sub
|
| 312 |
+
|
| 313 |
+
class AnnotationItem:
    """One annotation span to be rendered into the annotated HTML output."""

    def __init__(self, style, text, tag="", size=0):
        self.style = style  # CSS class suffix for the span
        self.text = text    # tooltip (title attribute) content
        self.tag = tag      # visible text inside the span
        self.size = size    # length of the annotated source region

    def start(self):
        """Return the opening ``<span>`` markup for this annotation."""
        return f"<span class='cython tag {self.style}' title='{self.text}'>{self.tag}"

    def end(self):
        """Return ``(size, closing markup)`` for this annotation."""
        return self.size, "</span>"
venv/lib/python3.10/site-packages/Cython/Compiler/AutoDocTransforms.py
ADDED
|
@@ -0,0 +1,320 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import inspect
|
| 2 |
+
|
| 3 |
+
from .Visitor import CythonTransform
|
| 4 |
+
from .StringEncoding import EncodedString
|
| 5 |
+
from . import Options
|
| 6 |
+
from . import PyrexTypes
|
| 7 |
+
from ..CodeWriter import ExpressionWriter
|
| 8 |
+
from .Errors import warning
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class AnnotationWriter(ExpressionWriter):
    """
    A Cython code writer for Python expressions in argument/variable
    annotations.

    Nodes that cannot be rendered back to source are replaced by a
    placeholder and recorded via the ``incomplete`` flag.
    """

    def __init__(self, description=None):
        """``description`` is optional.  If given, it names the context in
        warning messages for nodes that don't convert to a string properly;
        if omitted, no warnings are generated.
        """
        ExpressionWriter.__init__(self)
        self.description = description
        self.incomplete = False  # becomes True once any node failed to convert

    def _warn_unconvertible(self, node, what):
        # Emit a level-1 warning, but only when a context description was given.
        if self.description:
            warning(node.pos,
                    "Failed to convert {} to string representation in {}".format(
                        what, self.description), level=1)

    def visit_Node(self, node):
        # Fallback for any node type without a dedicated visit method.
        self.put("<???>")
        self.incomplete = True
        self._warn_unconvertible(node, "code")

    def visit_LambdaNode(self, node):
        # XXX Should we do better?
        self.put("<lambda>")
        self.incomplete = True
        self._warn_unconvertible(node, "lambda")

    def visit_AnnotationNode(self, node):
        # Annotations keep their original source text — emit it verbatim.
        self.put(node.string.value)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class EmbedSignature(CythonTransform):
    """Prepend a textual signature to the docstring of functions, methods
    and properties, controlled by the ``embedsignature`` directive.

    The signature layout depends on ``embedsignature.format``:
    'c', 'python' or 'clinic' (Argument-Clinic style, used by CPython's
    signature parsing).
    """

    def __init__(self, context):
        super().__init__(context)
        # Name and node of the class currently being visited (None at
        # module level); maintained as a stack via visit_ClassDefNode.
        self.class_name = None
        self.class_node = None

    def _fmt_expr(self, node):
        # Render an arbitrary expression node (e.g. a default value)
        # back to source text.
        writer = ExpressionWriter()
        result = writer.write(node)
        # print(type(node).__name__, '-->', result)
        return result

    def _fmt_annotation(self, node):
        # Render an annotation expression; unconvertible nodes become
        # placeholders (see AnnotationWriter).
        writer = AnnotationWriter()
        result = writer.write(node)
        # print(type(node).__name__, '-->', result)
        return result

    def _setup_format(self):
        # Cache the signature format directive as three booleans for the
        # duration of one node visit.
        signature_format = self.current_directives['embedsignature.format']
        self.is_format_c = signature_format == 'c'
        self.is_format_python = signature_format == 'python'
        self.is_format_clinic = signature_format == 'clinic'

    def _fmt_arg(self, arg):
        """Format one regular argument as ``name[: annotation][ = default]``
        (clinic format uses ``$self``/``$type`` for the implicit first arg).
        """
        arg_doc = arg.name
        annotation = None
        defaultval = None
        if arg.is_self_arg:
            if self.is_format_clinic:
                arg_doc = '$self'
        elif arg.is_type_arg:
            if self.is_format_clinic:
                arg_doc = '$type'
        elif self.is_format_c:
            # C format shows the C declaration unless it is a plain object.
            if arg.type is not PyrexTypes.py_object_type:
                arg_doc = arg.type.declaration_code(arg.name, for_display=1)
        elif self.is_format_python:
            # Derive an annotation from the C type when none was written.
            if not arg.annotation:
                annotation = self._fmt_type(arg.type)
        if arg.annotation:
            # An explicit annotation wins over the derived one (clinic
            # format never shows annotations).
            if not self.is_format_clinic:
                annotation = self._fmt_annotation(arg.annotation)
        if arg.default:
            defaultval = self._fmt_expr(arg.default)
        if annotation:
            arg_doc = arg_doc + (': %s' % annotation)
            if defaultval:
                # PEP 8: spaces around '=' when an annotation is present.
                arg_doc = arg_doc + (' = %s' % defaultval)
        elif defaultval:
            arg_doc = arg_doc + ('=%s' % defaultval)
        return arg_doc

    def _fmt_star_arg(self, arg):
        """Format a ``*args``/``**kwargs`` argument (name plus optional
        annotation; the star prefix is added by the caller)."""
        arg_doc = arg.name
        if arg.annotation:
            if not self.is_format_clinic:
                annotation = self._fmt_annotation(arg.annotation)
                arg_doc = arg_doc + (': %s' % annotation)
        return arg_doc

    def _fmt_arglist(self, args,
                     npoargs=0, npargs=0, pargs=None,
                     nkargs=0, kargs=None,
                     hide_self=False):
        """Format the complete argument list, inserting the ``/`` and ``*``
        markers for positional-only and keyword-only arguments.

        npoargs/npargs/nkargs count positional-only, regular positional and
        keyword-only args; pargs/kargs are the ``*args``/``**kwargs`` nodes.
        """
        arglist = []
        for arg in args:
            if not hide_self or not arg.entry.is_self_arg:
                arg_doc = self._fmt_arg(arg)
                arglist.append(arg_doc)
        if pargs:
            arg_doc = self._fmt_star_arg(pargs)
            arglist.insert(npargs + npoargs, '*%s' % arg_doc)
        elif nkargs:
            # Keyword-only args without *args still need a bare '*' marker.
            arglist.insert(npargs + npoargs, '*')
        if npoargs:
            arglist.insert(npoargs, '/')
        if kargs:
            arg_doc = self._fmt_star_arg(kargs)
            arglist.append('**%s' % arg_doc)
        return arglist

    def _fmt_type(self, type):
        """Derive a printable type name from a Cython type, or None for a
        plain Python object (which needs no annotation)."""
        if type is PyrexTypes.py_object_type:
            return None
        elif self.is_format_c:
            code = type.declaration_code("", for_display=1)
            return code
        elif self.is_format_python:
            annotation = None
            if type.is_string:
                annotation = self.current_directives['c_string_type']
            elif type.is_numeric:
                annotation = type.py_type_name()
            if annotation is None:
                # Fall back to a mangled C declaration that is at least a
                # valid identifier ('*' -> 'p', spaces -> '_').
                code = type.declaration_code('', for_display=1)
                annotation = code.replace(' ', '_').replace('*', 'p')
            return annotation
        return None

    def _fmt_signature(self, cls_name, func_name, args,
                       npoargs=0, npargs=0, pargs=None,
                       nkargs=0, kargs=None,
                       return_expr=None, return_type=None,
                       hide_self=False):
        """Assemble ``[cls.]name(args) [-> ret]`` from the pieces above.
        The return part is omitted in clinic format."""
        arglist = self._fmt_arglist(
            args, npoargs, npargs, pargs, nkargs, kargs,
            hide_self=hide_self,
        )
        arglist_doc = ', '.join(arglist)
        func_doc = '%s(%s)' % (func_name, arglist_doc)
        if self.is_format_c and cls_name:
            func_doc = '%s.%s' % (cls_name, func_doc)
        if not self.is_format_clinic:
            ret_doc = None
            if return_expr:
                ret_doc = self._fmt_annotation(return_expr)
            elif return_type:
                ret_doc = self._fmt_type(return_type)
            if ret_doc:
                func_doc = '%s -> %s' % (func_doc, ret_doc)
        return func_doc

    def _embed_signature(self, signature, node_doc):
        """Combine the formatted signature with the existing docstring.

        Clinic format inserts the ``--`` separator CPython's docstring
        parser expects; with the ``binding`` directive active, clinic
        signatures are skipped entirely (the binding wrapper provides
        introspection already).
        """
        if self.is_format_clinic and self.current_directives['binding']:
            return node_doc
        if node_doc:
            if self.is_format_clinic:
                docfmt = "%s\n--\n\n%s"
            else:
                docfmt = "%s\n\n%s"
            node_doc = inspect.cleandoc(node_doc)
            return docfmt % (signature, node_doc)
        else:
            if self.is_format_clinic:
                docfmt = "%s\n--\n\n"
            else:
                docfmt = "%s"
            return docfmt % signature

    def __call__(self, node):
        # Docstrings disabled globally -> nothing to embed into.
        if not Options.docstrings:
            return node
        else:
            return super().__call__(node)

    def visit_ClassDefNode(self, node):
        # Track the enclosing class (name differs between Python and
        # cdef classes), restoring the previous one afterwards.
        oldname = self.class_name
        oldclass = self.class_node
        self.class_node = node
        try:
            # PyClassDefNode
            self.class_name = node.name
        except AttributeError:
            # CClassDefNode
            self.class_name = node.class_name
        self.visitchildren(node)
        self.class_name = oldname
        self.class_node = oldclass
        return node

    def visit_LambdaNode(self, node):
        # lambda expressions do not have signature or inner functions
        return node

    def visit_DefNode(self, node):
        """Embed the signature into a Python-level function/method."""
        if not self.current_directives['embedsignature']:
            return node
        self._setup_format()

        is_constructor = False
        hide_self = False
        if node.entry.is_special:
            # Only __init__ gets a signature among special methods; in C
            # format it is presented as the class itself, without 'self'.
            is_constructor = self.class_node and node.name == '__init__'
            if is_constructor:
                class_name = None
                func_name = node.name
                if self.is_format_c:
                    func_name = self.class_name
                    hide_self = True
            else:
                class_name, func_name = self.class_name, node.name
        else:
            class_name, func_name = self.class_name, node.name

        npoargs = getattr(node, 'num_posonly_args', 0)
        nkargs = getattr(node, 'num_kwonly_args', 0)
        npargs = len(node.args) - nkargs - npoargs
        signature = self._fmt_signature(
            class_name, func_name, node.args,
            npoargs, npargs, node.star_arg,
            nkargs, node.starstar_arg,
            return_expr=node.return_type_annotation,
            return_type=None, hide_self=hide_self)
        if signature:
            # For a C-format constructor the docstring lives on the class
            # scope rather than on the function entry.
            if is_constructor and self.is_format_c:
                doc_holder = self.class_node.entry.type.scope
            else:
                doc_holder = node.entry
            if doc_holder.doc is not None:
                old_doc = doc_holder.doc
            elif not is_constructor and getattr(node, 'py_func', None) is not None:
                old_doc = node.py_func.entry.doc
            else:
                old_doc = None
            new_doc = self._embed_signature(signature, old_doc)
            if not node.entry.is_special or is_constructor or node.entry.wrapperbase_cname is not None:
                # TODO: the wrapperbase must be generated for __doc__ to exist;
                # however this phase is run later in the pipeline than
                # Compiler/Nodes.py:declare_pyfunction, so wrapperbase_cname
                # may already be set to None
                doc_holder.doc = EncodedString(new_doc)
            if not is_constructor and getattr(node, 'py_func', None) is not None:
                node.py_func.entry.doc = EncodedString(new_doc)
        return node

    def visit_CFuncDefNode(self, node):
        """Embed the signature into a ``cpdef`` function (plain ``cdef``
        functions have no Python docstring to carry one)."""
        if not node.overridable:  # not cpdef FOO(...):
            return node
        if not self.current_directives['embedsignature']:
            return node
        self._setup_format()

        signature = self._fmt_signature(
            self.class_name, node.declarator.base.name,
            node.declarator.args,
            return_type=node.return_type)
        if signature:
            if node.entry.doc is not None:
                old_doc = node.entry.doc
            elif getattr(node, 'py_func', None) is not None:
                old_doc = node.py_func.entry.doc
            else:
                old_doc = None
            new_doc = self._embed_signature(signature, old_doc)
            node.entry.doc = EncodedString(new_doc)
            # Mirror the docstring onto the Python wrapper, if one exists.
            py_func = getattr(node, 'py_func', None)
            if py_func is not None:
                py_func.entry.doc = EncodedString(new_doc)
        return node

    def visit_PropertyNode(self, node):
        """Embed ``name: type`` into a property docstring, deriving the
        type from the attribute declaration or the getter's annotation."""
        if not self.current_directives['embedsignature']:
            return node
        self._setup_format()

        entry = node.entry
        body = node.body
        prop_name = entry.name
        type_name = None
        if entry.visibility == 'public':
            if self.is_format_c:
                # property synthesised from a cdef public attribute
                type_name = entry.type.declaration_code("", for_display=1)
                if not entry.type.is_pyobject:
                    type_name = "'%s'" % type_name
                elif entry.type.is_extension_type:
                    type_name = entry.type.module_name + '.' + type_name
            elif self.is_format_python:
                type_name = self._fmt_type(entry.type)
        if type_name is None:
            # Fall back to the __get__ method's return annotation.
            for stat in body.stats:
                if stat.name != '__get__':
                    continue
                if self.is_format_c:
                    prop_name = '%s.%s' % (self.class_name, prop_name)
                ret_annotation = stat.return_type_annotation
                if ret_annotation:
                    type_name = self._fmt_annotation(ret_annotation)
        if type_name is not None :
            signature = '%s: %s' % (prop_name, type_name)
            new_doc = self._embed_signature(signature, entry.doc)
            if not self.is_format_clinic:
                entry.doc = EncodedString(new_doc)
        return node
|
venv/lib/python3.10/site-packages/Cython/Compiler/Buffer.py
ADDED
|
@@ -0,0 +1,680 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .Visitor import CythonTransform
|
| 2 |
+
from .ModuleNode import ModuleNode
|
| 3 |
+
from .Errors import CompileError
|
| 4 |
+
from .UtilityCode import CythonUtilityCode
|
| 5 |
+
from .Code import UtilityCode, TempitaUtilityCode
|
| 6 |
+
|
| 7 |
+
from . import Options
|
| 8 |
+
from . import Interpreter
|
| 9 |
+
from . import PyrexTypes
|
| 10 |
+
from . import Naming
|
| 11 |
+
from . import Symtab
|
| 12 |
+
|
| 13 |
+
def dedent(text, reindent=0):
    """Strip the common leading whitespace from ``text``, then optionally
    re-indent every line by ``reindent`` spaces."""
    import textwrap
    stripped = textwrap.dedent(text)
    if reindent <= 0:
        return stripped
    pad = " " * reindent
    return '\n'.join(pad + line for line in stripped.split('\n'))
|
| 20 |
+
|
| 21 |
+
class IntroduceBufferAuxiliaryVars(CythonTransform):
|
| 22 |
+
|
| 23 |
+
#
|
| 24 |
+
# Entry point
|
| 25 |
+
#
|
| 26 |
+
|
| 27 |
+
buffers_exists = False
|
| 28 |
+
using_memoryview = False
|
| 29 |
+
|
| 30 |
+
def __call__(self, node):
|
| 31 |
+
assert isinstance(node, ModuleNode)
|
| 32 |
+
self.max_ndim = 0
|
| 33 |
+
result = super().__call__(node)
|
| 34 |
+
if self.buffers_exists:
|
| 35 |
+
use_bufstruct_declare_code(node.scope)
|
| 36 |
+
|
| 37 |
+
return result
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
#
|
| 41 |
+
# Basic operations for transforms
|
| 42 |
+
#
|
| 43 |
+
def handle_scope(self, node, scope):
|
| 44 |
+
# For all buffers, insert extra variables in the scope.
|
| 45 |
+
# The variables are also accessible from the buffer_info
|
| 46 |
+
# on the buffer entry
|
| 47 |
+
scope_items = scope.entries.items()
|
| 48 |
+
bufvars = [entry for name, entry in scope_items if entry.type.is_buffer]
|
| 49 |
+
if len(bufvars) > 0:
|
| 50 |
+
bufvars.sort(key=lambda entry: entry.name)
|
| 51 |
+
self.buffers_exists = True
|
| 52 |
+
|
| 53 |
+
memviewslicevars = [entry for name, entry in scope_items if entry.type.is_memoryviewslice]
|
| 54 |
+
if len(memviewslicevars) > 0:
|
| 55 |
+
self.buffers_exists = True
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
for (name, entry) in scope_items:
|
| 59 |
+
if name == 'memoryview' and isinstance(entry.utility_code_definition, CythonUtilityCode):
|
| 60 |
+
self.using_memoryview = True
|
| 61 |
+
break
|
| 62 |
+
del scope_items
|
| 63 |
+
|
| 64 |
+
if isinstance(node, ModuleNode) and len(bufvars) > 0:
|
| 65 |
+
# for now...note that pos is wrong
|
| 66 |
+
raise CompileError(node.pos, "Buffer vars not allowed in module scope")
|
| 67 |
+
for entry in bufvars:
|
| 68 |
+
if entry.type.dtype.is_ptr:
|
| 69 |
+
raise CompileError(node.pos, "Buffers with pointer types not yet supported.")
|
| 70 |
+
|
| 71 |
+
name = entry.name
|
| 72 |
+
buftype = entry.type
|
| 73 |
+
if buftype.ndim > Options.buffer_max_dims:
|
| 74 |
+
raise CompileError(node.pos,
|
| 75 |
+
"Buffer ndims exceeds Options.buffer_max_dims = %d" % Options.buffer_max_dims)
|
| 76 |
+
if buftype.ndim > self.max_ndim:
|
| 77 |
+
self.max_ndim = buftype.ndim
|
| 78 |
+
|
| 79 |
+
# Declare auxiliary vars
|
| 80 |
+
def decvar(type, prefix):
|
| 81 |
+
cname = scope.mangle(prefix, name)
|
| 82 |
+
aux_var = scope.declare_var(name=None, cname=cname,
|
| 83 |
+
type=type, pos=node.pos)
|
| 84 |
+
if entry.is_arg:
|
| 85 |
+
aux_var.used = True # otherwise, NameNode will mark whether it is used
|
| 86 |
+
|
| 87 |
+
return aux_var
|
| 88 |
+
|
| 89 |
+
auxvars = ((PyrexTypes.c_pyx_buffer_nd_type, Naming.pybuffernd_prefix),
|
| 90 |
+
(PyrexTypes.c_pyx_buffer_type, Naming.pybufferstruct_prefix))
|
| 91 |
+
pybuffernd, rcbuffer = [decvar(type, prefix) for (type, prefix) in auxvars]
|
| 92 |
+
|
| 93 |
+
entry.buffer_aux = Symtab.BufferAux(pybuffernd, rcbuffer)
|
| 94 |
+
|
| 95 |
+
scope.buffer_entries = bufvars
|
| 96 |
+
self.scope = scope
|
| 97 |
+
|
| 98 |
+
def visit_ModuleNode(self, node):
|
| 99 |
+
self.handle_scope(node, node.scope)
|
| 100 |
+
self.visitchildren(node)
|
| 101 |
+
return node
|
| 102 |
+
|
| 103 |
+
def visit_FuncDefNode(self, node):
|
| 104 |
+
self.handle_scope(node, node.local_scope)
|
| 105 |
+
self.visitchildren(node)
|
| 106 |
+
return node
|
| 107 |
+
|
| 108 |
+
#
|
| 109 |
+
# Analysis
|
| 110 |
+
#
|
| 111 |
+
buffer_options = ("dtype", "ndim", "mode", "negative_indices", "cast") # ordered!
|
| 112 |
+
buffer_defaults = {"ndim": 1, "mode": "full", "negative_indices": True, "cast": False}
|
| 113 |
+
buffer_positional_options_count = 1 # anything beyond this needs keyword argument
|
| 114 |
+
|
| 115 |
+
ERR_BUF_OPTION_UNKNOWN = '"%s" is not a buffer option'
|
| 116 |
+
ERR_BUF_TOO_MANY = 'Too many buffer options'
|
| 117 |
+
ERR_BUF_DUP = '"%s" buffer option already supplied'
|
| 118 |
+
ERR_BUF_MISSING = '"%s" missing'
|
| 119 |
+
ERR_BUF_MODE = 'Only allowed buffer modes are: "c", "fortran", "full", "strided" (as a compile-time string)'
|
| 120 |
+
ERR_BUF_NDIM = 'ndim must be a non-negative integer'
|
| 121 |
+
ERR_BUF_DTYPE = 'dtype must be "object", numeric type or a struct'
|
| 122 |
+
ERR_BUF_BOOL = '"%s" must be a boolean'
|
| 123 |
+
|
| 124 |
+
def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, need_complete=True):
    """
    Must be called during type analysis, as analyse is called
    on the dtype argument.

    posargs and dictargs should consist of a list and a dict
    of tuples (value, pos). Defaults should be a dict of values.

    Returns a dict containing all the options a buffer can have and
    its value (with the positions stripped).

    Raises CompileError for unknown/duplicate/invalid options, and for
    missing options when ``need_complete`` is true.
    """
    if defaults is None:
        defaults = buffer_defaults

    posargs, dictargs = Interpreter.interpret_compiletime_options(
        posargs, dictargs, type_env=env, type_args=(0, 'dtype'))

    if len(posargs) > buffer_positional_options_count:
        raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY)

    options = {}
    for name, (value, pos) in dictargs.items():
        if name not in buffer_options:
            raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
        options[name] = value

    # Positional arguments map onto buffer_options in declaration order;
    # names taken from buffer_options are known-valid by construction, so
    # only duplicates need checking here.
    for name, (value, pos) in zip(buffer_options, posargs):
        if name in options:
            raise CompileError(pos, ERR_BUF_DUP % name)
        options[name] = value

    # Check that they are all there and copy defaults
    for name in buffer_options:
        if name not in options:
            try:
                options[name] = defaults[name]
            except KeyError:
                # Missing default is expected control flow here - suppress
                # the KeyError context in the reported error.
                if need_complete:
                    raise CompileError(globalpos, ERR_BUF_MISSING % name) from None

    dtype = options.get("dtype")
    if dtype and dtype.is_extension_type:
        raise CompileError(globalpos, ERR_BUF_DTYPE)

    ndim = options.get("ndim")
    if ndim and (not isinstance(ndim, int) or ndim < 0):
        raise CompileError(globalpos, ERR_BUF_NDIM)

    mode = options.get("mode")
    if mode and mode not in ('full', 'strided', 'c', 'fortran'):
        raise CompileError(globalpos, ERR_BUF_MODE)

    def assert_bool(name):
        # Validate strictly-boolean options (True/False only, not 0/1).
        x = options.get(name)
        if not isinstance(x, bool):
            raise CompileError(globalpos, ERR_BUF_BOOL % name)

    assert_bool('negative_indices')
    assert_bool('cast')

    return options
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
#
|
| 190 |
+
# Code generation
|
| 191 |
+
#
|
| 192 |
+
|
| 193 |
+
class BufferEntry:
    """Code-generation helper wrapping a buffer-typed symbol-table entry.

    Exposes the C expressions for the buffer's data pointer and its
    per-dimension shape/stride/suboffset caches, and generates element
    lookup expressions.
    """

    def __init__(self, entry):
        self.entry = entry
        self.type = entry.type
        # cname of the __Pyx_LocalBuf_ND struct declared for this buffer.
        self.cname = entry.buffer_aux.buflocal_nd_var.cname
        # C expression for the raw data pointer inside the Py_buffer.
        self.buf_ptr = "%s.rcbuffer->pybuffer.buf" % self.cname
        self.buf_ptr_type = entry.type.buffer_ptr_type
        self.init_attributes()

    def init_attributes(self):
        # Pre-compute the per-dimension C expressions once.
        self.shape = self.get_buf_shapevars()
        self.strides = self.get_buf_stridevars()
        self.suboffsets = self.get_buf_suboffsetvars()

    def get_buf_suboffsetvars(self):
        """C expressions for the suboffset of each dimension."""
        return self._for_all_ndim("%s.diminfo[%d].suboffsets")

    def get_buf_stridevars(self):
        """C expressions for the stride of each dimension."""
        return self._for_all_ndim("%s.diminfo[%d].strides")

    def get_buf_shapevars(self):
        """C expressions for the extent of each dimension."""
        return self._for_all_ndim("%s.diminfo[%d].shape")

    def _for_all_ndim(self, s):
        # Instantiate the template once per dimension.
        return [s % (self.cname, i) for i in range(self.type.ndim)]

    def generate_buffer_lookup_code(self, code, index_cnames):
        """Return a C expression computing the address of one buffer
        element, emitting the matching lookup helper on first use."""
        # Create buffer lookup and return it
        # This is done via utility macros/inline functions, which vary
        # according to the access mode used.
        params = []
        nd = self.type.ndim
        mode = self.type.mode
        if mode == 'full':
            # 'full' (indirect) access needs stride + suboffset per index.
            for i, s, o in zip(index_cnames,
                               self.get_buf_stridevars(),
                               self.get_buf_suboffsetvars()):
                params.append(i)
                params.append(s)
                params.append(o)
            funcname = "__Pyx_BufPtrFull%dd" % nd
            funcgen = buf_lookup_full_code
        else:
            # Direct access modes need only index + stride.
            if mode == 'strided':
                funcname = "__Pyx_BufPtrStrided%dd" % nd
                funcgen = buf_lookup_strided_code
            elif mode == 'c':
                funcname = "__Pyx_BufPtrCContig%dd" % nd
                funcgen = buf_lookup_c_code
            elif mode == 'fortran':
                funcname = "__Pyx_BufPtrFortranContig%dd" % nd
                funcgen = buf_lookup_fortran_code
            else:
                assert False
            for i, s in zip(index_cnames, self.get_buf_stridevars()):
                params.append(i)
                params.append(s)

        # Make sure the utility code is available
        if funcname not in code.globalstate.utility_codes:
            code.globalstate.utility_codes.add(funcname)
            protocode = code.globalstate['utility_code_proto']
            defcode = code.globalstate['utility_code_def']
            funcgen(protocode, defcode, name=funcname, nd=nd)

        buf_ptr_type_code = self.buf_ptr_type.empty_declaration_code()
        ptrcode = "%s(%s, %s, %s)" % (funcname, buf_ptr_type_code, self.buf_ptr,
                                      ", ".join(params))
        return ptrcode
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def get_flags(buffer_aux, buffer_type):
    """Return the C expression for the PyBUF_* flag combination matching
    the buffer's access mode (plus writability when required)."""
    mode_flags = {
        'full': '| PyBUF_INDIRECT',
        'strided': '| PyBUF_STRIDES',
        'c': '| PyBUF_C_CONTIGUOUS',
        'fortran': '| PyBUF_F_CONTIGUOUS',
    }
    assert buffer_type.mode in mode_flags
    flags = 'PyBUF_FORMAT' + mode_flags[buffer_type.mode]
    if buffer_aux.writable_needed:
        flags += "| PyBUF_WRITABLE"
    return flags
|
| 279 |
+
|
| 280 |
+
def used_buffer_aux_vars(entry):
    """Mark both auxiliary variables of a buffer entry as used so their
    declarations are emitted."""
    aux = entry.buffer_aux
    for aux_var in (aux.buflocal_nd_var, aux.rcbuf_var):
        aux_var.used = True
|
| 284 |
+
|
| 285 |
+
def put_unpack_buffer_aux_into_scope(buf_entry, code):
    """Emit C code copying the Py_buffer's shape/stride (and, in 'full'
    mode, suboffset) arrays into the per-dimension ``diminfo`` cache of
    the local buffer struct."""
    struct_cname = buf_entry.buffer_aux.buflocal_nd_var.cname
    mode = buf_entry.type.mode

    # Only indirect ('full') buffers track suboffsets.
    fields = ['strides', 'shape'] + (['suboffsets'] if mode == 'full' else [])

    assignments = [
        f"{struct_cname}.diminfo[{dim}].{field} = "
        f"{struct_cname}.rcbuffer->pybuffer.{field}[{dim}];"
        for dim in range(buf_entry.type.ndim)
        for field in fields
    ]
    code.putln(' '.join(assignments))
|
| 303 |
+
|
| 304 |
+
def put_init_vars(entry, code):
    """Emit C statements initialising the buffer aux structs to a safe
    'no buffer acquired' state."""
    aux = entry.buffer_aux
    nd_struct = aux.buflocal_nd_var.cname
    rc_struct = aux.rcbuf_var.cname
    # Reset the refcounted Py_buffer holder first ...
    code.putln(f"{rc_struct}.pybuffer.buf = NULL;")
    code.putln(f"{rc_struct}.refcount = 0;")
    # ... then point the nd struct at it, with no data yet.
    # (initialising the buffer *object* itself is handled elsewhere)
    code.putln(f"{nd_struct}.data = NULL;")
    code.putln(f"{nd_struct}.rcbuffer = &{rc_struct};")
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def put_acquire_arg_buffer(entry, code, pos):
    """Emit code acquiring the buffer of a function argument, then unpack
    its dimension info into the local aux struct."""
    getbuffer_expr = get_getbuffer_call(code, entry.cname, entry.buffer_aux, entry.type)

    # A nested C scope keeps the format-parsing stack local to this call.
    code.putln("{")
    nesting_depth = entry.type.dtype.struct_nesting_depth()
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % nesting_depth)
    code.putln(code.error_goto_if("%s == -1" % getbuffer_expr, pos))
    code.putln("}")
    # An exception raised in arg parsing cannot be caught, so no
    # need to care about the buffer then.
    put_unpack_buffer_aux_into_scope(entry, code)
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def put_release_buffer_code(code, entry):
    # Emit a release of the buffer held in entry's auxiliary struct.
    # __Pyx_SafeReleaseBuffer is provided by the "BufferGetAndValidate"
    # utility code (acquire_utility_code, loaded at module level).
    code.globalstate.use_utility_code(acquire_utility_code)
    code.putln("__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);" % entry.buffer_aux.buflocal_nd_var.cname)
+
|
| 336 |
+
|
| 337 |
+
def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type):
    """Return (as a string) the C call expression that acquires and
    validates the buffer of *obj_cname* into the entry's auxiliary struct.

    Callers may pass "%s" as *obj_cname* and substitute the real object
    later via string formatting (see put_assign_to_buffer).
    """
    # Evaluate in the same order as before: flags first, then the
    # (side-effecting) type-info emission, then the utility-code pull-in.
    ndim = buffer_type.ndim
    cast_flag = int(buffer_type.cast)
    flags = get_flags(buffer_aux, buffer_type)
    nd_struct = buffer_aux.buflocal_nd_var.cname

    dtype_typeinfo = get_type_information_cname(code, buffer_type.dtype)

    code.globalstate.use_utility_code(acquire_utility_code)
    return (
        "__Pyx_GetBufferAndValidate(&%s.rcbuffer->pybuffer, "
        "(PyObject*)%s, &%s, %s, %d, %d, __pyx_stack)"
        % (nd_struct, obj_cname, dtype_typeinfo, flags, ndim, cast_flag))
+
|
| 350 |
+
|
| 351 |
+
def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
                         is_initialized, pos, code):
    """
    Generate code for reassigning a buffer variable. This only deals with getting
    the buffer auxiliary structure and variables set up correctly, the assignment
    itself and refcounting is the responsibility of the caller.

    However, the assignment operation may throw an exception so that the reassignment
    never happens.

    Depending on the circumstances there are two possible outcomes:
    - Old buffer released, new acquired, rhs assigned to lhs
    - Old buffer released, new acquired which fails, reacquire old lhs buffer
      (which may or may not succeed).
    """

    buffer_aux, buffer_type = buf_entry.buffer_aux, buf_entry.type
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
    flags = get_flags(buffer_aux, buffer_type)

    code.putln("{")  # Set up necessary stack for getbuffer
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth())

    getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type)  # fill in object below

    if is_initialized:
        # Release any existing buffer
        code.putln('__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);' % pybuffernd_struct)
        # Acquire
        retcode_cname = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = %s;" % (retcode_cname, getbuffer % rhs_cname))
        code.putln('if (%s) {' % (code.unlikely("%s < 0" % retcode_cname)))
        # If acquisition failed, attempt to reacquire the old buffer
        # before raising the exception. A failure of reacquisition
        # will cause the reacquisition exception to be reported, one
        # can consider working around this later.
        # The three temps hold the fetched exception (type, value, tb);
        # they are plain C temps, not refnannied.
        exc_temps = tuple(code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=False)
                          for _ in range(3))
        code.putln('PyErr_Fetch(&%s, &%s, &%s);' % exc_temps)
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % lhs_cname)))
        code.putln('Py_XDECREF(%s); Py_XDECREF(%s); Py_XDECREF(%s);' % exc_temps)  # Do not refnanny these!
        code.globalstate.use_utility_code(raise_buffer_fallback_code)
        code.putln('__Pyx_RaiseBufferFallbackError();')
        code.putln('} else {')
        # Reacquisition succeeded: restore the original exception.
        code.putln('PyErr_Restore(%s, %s, %s);' % exc_temps)
        code.putln('}')
        code.putln('%s = %s = %s = 0;' % exc_temps)
        for t in exc_temps:
            code.funcstate.release_temp(t)
        code.putln('}')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln(code.error_goto_if_neg(retcode_cname, pos))
        code.funcstate.release_temp(retcode_cname)
    else:
        # Our entry had no previous value, so set to None when acquisition fails.
        # In this case, auxiliary vars should be set up right in initialization to a zero-buffer,
        # so it suffices to set the buf field to NULL.
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % rhs_cname)))
        code.putln('%s = %s; __Pyx_INCREF(Py_None); %s.rcbuffer->pybuffer.buf = NULL;' %
                   (lhs_cname,
                    PyrexTypes.typecast(buffer_type, PyrexTypes.py_object_type, "Py_None"),
                    pybuffernd_struct))
        code.putln(code.error_goto(pos))
        code.put('} else {')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln('}')

    code.putln("}")  # Release stack
+
|
| 422 |
+
|
| 423 |
+
def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives,
                           pos, code, negative_indices, in_nogil_context):
    """
    Generates code to process indices and calculate an offset into
    a buffer. Returns a C string which gives a pointer which can be
    read from or written to at will (it is an expression so caller should
    store it in a temporary if it is used more than once).

    As the bounds checking can have any number of combinations of unsigned
    arguments, smart optimizations etc. we insert it directly in the function
    body. The lookup however is delegated to an inline function that is instantiated
    once per ndim (lookup with suboffsets tend to get quite complicated).

    entry is a BufferEntry
    """
    # Wraparound (negative-index) handling is only done when both the
    # directive and the caller allow it.
    negative_indices = directives['wraparound'] and negative_indices

    if directives['boundscheck']:
        # Check bounds and fix negative indices.
        # We allocate a temporary which is initialized to -1, meaning OK (!).
        # If an error occurs, the temp is set to the index dimension the
        # error is occurring at.
        failed_dim_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = -1;" % failed_dim_temp)
        for dim, (signed, cname, shape) in enumerate(zip(index_signeds, index_cnames, entry.get_buf_shapevars())):
            if signed != 0:
                # not unsigned, deal with negative index
                code.putln("if (%s < 0) {" % cname)
                if negative_indices:
                    code.putln("%s += %s;" % (cname, shape))
                    code.putln("if (%s) %s = %d;" % (
                        code.unlikely("%s < 0" % cname),
                        failed_dim_temp, dim))
                else:
                    # Negative index with wraparound disabled is an error.
                    code.putln("%s = %d;" % (failed_dim_temp, dim))
                code.put("} else ")
            # check bounds in positive direction
            if signed != 0:
                cast = ""
            else:
                # Unsigned index: compare against the shape as size_t to
                # avoid signed/unsigned comparison warnings.
                cast = "(size_t)"
            code.putln("if (%s) %s = %d;" % (
                code.unlikely("%s >= %s%s" % (cname, cast, shape)),
                failed_dim_temp, dim))

        if in_nogil_context:
            code.globalstate.use_utility_code(raise_indexerror_nogil)
            func = '__Pyx_RaiseBufferIndexErrorNogil'
        else:
            code.globalstate.use_utility_code(raise_indexerror_code)
            func = '__Pyx_RaiseBufferIndexError'

        # Raise IndexError if any dimension recorded a failure.
        code.putln("if (%s) {" % code.unlikely("%s != -1" % failed_dim_temp))
        code.putln('%s(%s);' % (func, failed_dim_temp))
        code.putln(code.error_goto(pos))
        code.putln('}')
        code.funcstate.release_temp(failed_dim_temp)
    elif negative_indices:
        # Only fix negative indices.
        for signed, cname, shape in zip(index_signeds, index_cnames, entry.get_buf_shapevars()):
            if signed != 0:
                code.putln("if (%s < 0) %s += %s;" % (cname, cname, shape))

    return entry.generate_buffer_lookup_code(code, index_cnames)
+
|
| 488 |
+
|
| 489 |
+
def use_bufstruct_declare_code(env):
    # Make the "BufferStructDeclare" utility code (buffer bookkeeping
    # struct declarations) available in *env*.
    env.use_utility_code(buffer_struct_declare_code)
+
|
| 492 |
+
|
| 493 |
+
def buf_lookup_full_code(proto, defin, name, nd):
    """
    Generates a buffer lookup function for the right number
    of dimensions. The function gives back a void* at the right location.
    """
    # _i_ndex, _s_tride, sub_o_ffset
    # The macro simply casts and delegates to an inline per-ndim helper.
    macroargs = ", ".join(["i%d, s%d, o%d" % (i, i, i) for i in range(nd)])
    proto.putln("#define %s(type, buf, %s) (type)(%s_imp(buf, %s))" % (name, macroargs, name, macroargs))

    funcargs = ", ".join(["Py_ssize_t i%d, Py_ssize_t s%d, Py_ssize_t o%d" % (i, i, i) for i in range(nd)])
    proto.putln("static CYTHON_INLINE void* %s_imp(void* buf, %s);" % (name, funcargs))
    # Per dimension: advance by stride*index, then follow the suboffset
    # indirection when the suboffset is non-negative.
    defin.putln(dedent("""
        static CYTHON_INLINE void* %s_imp(void* buf, %s) {
          char* ptr = (char*)buf;
        """) % (name, funcargs) + "".join([dedent("""\
               ptr += s%d * i%d;
               if (o%d >= 0) ptr = *((char**)ptr) + o%d;
        """) % (i, i, i, i) for i in range(nd)]
        ) + "\nreturn ptr;\n}")
+
|
| 513 |
+
|
| 514 |
+
def buf_lookup_strided_code(proto, defin, name, nd):
    """
    Write the element-lookup macro for a strided buffer of *nd*
    dimensions. The macro expands to a typed pointer at the requested
    element, computed as base + sum(index * stride) over all dimensions.
    """
    # i<k> = index in dimension k, s<k> = stride of dimension k.
    arg_pairs = []
    offset_terms = []
    for dim in range(nd):
        arg_pairs.append("i%d, s%d" % (dim, dim))
        offset_terms.append("i%d * s%d" % (dim, dim))
    proto.putln("#define %s(type, buf, %s) (type)((char*)buf + %s)" % (
        name, ", ".join(arg_pairs), " + ".join(offset_terms)))
+
|
| 524 |
+
|
| 525 |
+
def buf_lookup_c_code(proto, defin, name, nd):
    """
    Like the strided lookup, but the last dimension's index is added
    without a stride multiplication (its stride is implicitly one
    element). The macro keeps the same (index, stride) argument pairs
    as the strided variant for a uniform signature.
    """
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
        return
    arg_pairs = ", ".join("i%d, s%d" % (dim, dim) for dim in range(nd))
    byte_offset = " + ".join("i%d * s%d" % (dim, dim) for dim in range(nd - 1))
    proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (
        name, arg_pairs, byte_offset, nd - 1))
+
|
| 538 |
+
|
| 539 |
+
def buf_lookup_fortran_code(proto, defin, name, nd):
    """
    Like the C lookup, but the *first* index is the one added without a
    stride multiplication instead of the last.
    """
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
        return
    arg_pairs = ", ".join("i%d, s%d" % (dim, dim) for dim in range(nd))
    byte_offset = " + ".join("i%d * s%d" % (dim, dim) for dim in range(1, nd))
    proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (
        name, arg_pairs, byte_offset, 0))
+
|
| 550 |
+
|
| 551 |
+
def mangle_dtype_name(dtype):
    """Return a C-identifier fragment for *dtype*.

    User-defined types (typedefs, structs, unions) get an "nn_" prefix so
    they cannot collide with builtin specialisation names
    (consider "typedef float unsigned_int").
    """
    if dtype.is_pyobject:
        return "object"
    if dtype.is_ptr:
        return "ptr"
    prefix = "nn_" if (dtype.is_typedef or dtype.is_struct_or_union) else ""
    return prefix + dtype.specialization_name()
+
|
| 565 |
+
def get_type_information_cname(code, dtype, maxdepth=None):
    """
    Output the run-time type information (__Pyx_TypeInfo) for given dtype,
    and return the name of the type info struct.

    Structs with two floats of the same size are encoded as complex numbers.
    One can separate between complex numbers declared as struct or with native
    encoding by inspecting to see if the fields field of the type is
    filled in.
    """
    namesuffix = mangle_dtype_name(dtype)
    name = "__Pyx_TypeInfo_%s" % namesuffix
    structinfo_name = "__Pyx_StructFields_%s" % namesuffix

    if dtype.is_error: return "<error>"

    # It's critical that walking the type info doesn't use more stack
    # depth than dtype.struct_nesting_depth() returns, so use an assertion for this
    if maxdepth is None: maxdepth = dtype.struct_nesting_depth()
    if maxdepth <= 0:
        assert False

    # Emit each typeinfo struct only once per module.
    if name not in code.globalstate.utility_codes:
        code.globalstate.utility_codes.add(name)
        typecode = code.globalstate['typeinfo']

        # Peel off C array dimensions; the element type remains in dtype.
        arraysizes = []
        if dtype.is_array:
            while dtype.is_array:
                arraysizes.append(dtype.size)
                dtype = dtype.base_type

        complex_possible = dtype.is_struct_or_union and dtype.can_be_complex()

        declcode = dtype.empty_declaration_code()
        if dtype.is_simple_buffer_dtype():
            # Scalars need no field table.
            structinfo_name = "NULL"
        elif dtype.is_struct:
            struct_scope = dtype.scope
            if dtype.is_cv_qualified:
                struct_scope = struct_scope.base_type_scope
            # Must pre-call all used types in order not to recurse during utility code writing.
            fields = struct_scope.var_entries
            assert len(fields) > 0
            types = [get_type_information_cname(code, f.type, maxdepth - 1)
                     for f in fields]
            typecode.putln("static const __Pyx_StructField %s[] = {" % structinfo_name, safe=True)

            if dtype.is_cv_qualified:
                # roughly speaking, remove "const" from struct_type
                struct_type = dtype.cv_base_type.empty_declaration_code()
            else:
                struct_type = dtype.empty_declaration_code()

            for f, typeinfo in zip(fields, types):
                typecode.putln(' {&%s, "%s", offsetof(%s, %s)},' %
                               (typeinfo, f.name, struct_type, f.cname), safe=True)

            # NULL-terminated field table.
            typecode.putln(' {NULL, NULL, 0}', safe=True)
            typecode.putln("};", safe=True)
        else:
            assert False

        rep = str(dtype)

        # Classify the dtype into a one-character type group used by the
        # buffer-format checking code.
        flags = "0"
        is_unsigned = "0"
        if dtype is PyrexTypes.c_char_type:
            is_unsigned = "__PYX_IS_UNSIGNED(%s)" % declcode
            typegroup = "'H'"
        elif dtype.is_int:
            is_unsigned = "__PYX_IS_UNSIGNED(%s)" % declcode
            typegroup = "%s ? 'U' : 'I'" % is_unsigned
        elif complex_possible or dtype.is_complex:
            typegroup = "'C'"
        elif dtype.is_float:
            typegroup = "'R'"
        elif dtype.is_struct:
            typegroup = "'S'"
            if dtype.packed:
                flags = "__PYX_BUF_FLAGS_PACKED_STRUCT"
        elif dtype.is_pyobject:
            typegroup = "'O'"
        else:
            assert False, dtype

        typeinfo = ('static const __Pyx_TypeInfo %s = '
                    '{ "%s", %s, sizeof(%s), { %s }, %s, %s, %s, %s };')
        tup = (name, rep, structinfo_name, declcode,
               ', '.join([str(x) for x in arraysizes]) or '0', len(arraysizes),
               typegroup, is_unsigned, flags)
        typecode.putln(typeinfo % tup, safe=True)

    return name
+
|
| 660 |
+
def load_buffer_utility(util_code_name, context=None, **kwargs):
    """Load a named utility-code section from "Buffer.c".

    A Tempita-templated load is used when a *context* dict is supplied,
    a plain load otherwise.
    """
    if context is not None:
        return TempitaUtilityCode.load(util_code_name, "Buffer.c", context=context, **kwargs)
    return UtilityCode.load(util_code_name, "Buffer.c", **kwargs)
+
|
| 666 |
+
# Tempita context shared by the templated buffer utilities below: the
# maximum number of buffer dimensions supported, from the global options.
context = dict(max_dims=Options.buffer_max_dims)
buffer_struct_declare_code = load_buffer_utility("BufferStructDeclare", context=context)
buffer_formats_declare_code = load_buffer_utility("BufferFormatStructs")

# Utility function to set the right exception
# The caller should immediately goto_error
raise_indexerror_code = load_buffer_utility("BufferIndexError")
raise_indexerror_nogil = load_buffer_utility("BufferIndexErrorNogil")
raise_buffer_fallback_code = load_buffer_utility("BufferFallbackError")

acquire_utility_code = load_buffer_utility("BufferGetAndValidate", context=context)
buffer_format_check_code = load_buffer_utility("BufferFormatCheck", context=context)

# See utility code BufferFormatFromTypeInfo
_typeinfo_to_format_code = load_buffer_utility("TypeInfoToFormat")
|
venv/lib/python3.10/site-packages/Cython/Compiler/Builtin.py
ADDED
|
@@ -0,0 +1,948 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Builtin Definitions
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
from .StringEncoding import EncodedString
|
| 7 |
+
from .Symtab import BuiltinScope, CClassScope, StructOrUnionScope, ModuleScope, Entry
|
| 8 |
+
from .Code import UtilityCode, TempitaUtilityCode
|
| 9 |
+
from .TypeSlots import Signature
|
| 10 |
+
from . import PyrexTypes
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
# C-level implementations of builtin types, functions and methods

# Pre-loaded utility-code snippets backing the builtin overrides below.
iter_next_utility_code = UtilityCode.load("IterNext", "ObjectHandling.c")
getattr_utility_code = UtilityCode.load("GetAttr", "ObjectHandling.c")
getattr3_utility_code = UtilityCode.load("GetAttr3", "Builtins.c")
pyexec_utility_code = UtilityCode.load("PyExec", "Builtins.c")
pyexec_globals_utility_code = UtilityCode.load("PyExecGlobals", "Builtins.c")
globals_utility_code = UtilityCode.load("Globals", "Builtins.c")
include_std_lib_h_utility_code = UtilityCode.load("IncludeStdlibH", "ModuleSetupCode.c")
pysequence_multiply_utility_code = UtilityCode.load("PySequenceMultiply", "ObjectHandling.c")
slice_accessor_utility_code = UtilityCode.load("PySliceAccessors", "Builtins.c")

# mapping from builtins to their C-level equivalents
| 27 |
+
class _BuiltinOverride:
|
| 28 |
+
def __init__(self, py_name, args, ret_type, cname, py_equiv="*",
|
| 29 |
+
utility_code=None, sig=None, func_type=None,
|
| 30 |
+
is_strict_signature=False, builtin_return_type=None,
|
| 31 |
+
nogil=None, specialiser=None):
|
| 32 |
+
self.py_name, self.cname, self.py_equiv = py_name, cname, py_equiv
|
| 33 |
+
self.args, self.ret_type = args, ret_type
|
| 34 |
+
self.func_type, self.sig = func_type, sig
|
| 35 |
+
self.builtin_return_type = builtin_return_type
|
| 36 |
+
self.is_strict_signature = is_strict_signature
|
| 37 |
+
self.utility_code = utility_code
|
| 38 |
+
self.nogil = nogil
|
| 39 |
+
self.specialiser = specialiser
|
| 40 |
+
|
| 41 |
+
def build_func_type(self, sig=None, self_arg=None):
|
| 42 |
+
if sig is None:
|
| 43 |
+
sig = Signature(self.args, self.ret_type, nogil=self.nogil)
|
| 44 |
+
sig.exception_check = False # not needed for the current builtins
|
| 45 |
+
func_type = sig.function_type(self_arg)
|
| 46 |
+
if self.is_strict_signature:
|
| 47 |
+
func_type.is_strict_signature = True
|
| 48 |
+
if self.builtin_return_type:
|
| 49 |
+
func_type.return_type = builtin_types[self.builtin_return_type]
|
| 50 |
+
return func_type
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class BuiltinAttribute:
    """A data attribute exposed on a builtin type, mapped to a C
    struct field."""

    def __init__(self, py_name, cname=None, field_type=None, field_type_name=None):
        self.py_name = py_name
        self.cname = cname or py_name  # default: same name as in Python
        self.field_type_name = field_type_name  # can't do the lookup before the type is declared!
        self.field_type = field_type

    def declare_in_type(self, self_type):
        """Declare this attribute as a private variable entry on the
        scope of *self_type*."""
        if self.field_type_name is None:
            attr_type = self.field_type or PyrexTypes.py_object_type
        else:
            # lazy type lookup
            attr_type = builtin_scope.lookup(self.field_type_name).type
        entry = self_type.scope.declare(self.py_name, self.cname, attr_type, None, 'private')
        entry.is_variable = True
|
| 69 |
+
|
| 70 |
+
class BuiltinFunction(_BuiltinOverride):
    """A builtin free function overridden by a C implementation."""

    def declare_in_scope(self, scope):
        """Register this function override in *scope*, building the C
        function type lazily when no explicit one was supplied."""
        ftype = self.func_type
        if ftype is None:
            ftype = self.build_func_type(self.sig)
        scope.declare_builtin_cfunction(
            self.py_name, ftype, self.cname, self.py_equiv, self.utility_code,
            specialiser=self.specialiser,
        )
+
|
| 80 |
+
|
| 81 |
+
class BuiltinMethod(_BuiltinOverride):
    """A method of a builtin type that is overridden by a C implementation."""

    def declare_in_type(self, self_type):
        # Use the explicitly supplied function type when present;
        # otherwise build one, typing the implicit first argument as
        # *self_type*.
        method_type, sig = self.func_type, self.sig
        if method_type is None:
            # override 'self' type (first argument)
            self_arg = PyrexTypes.CFuncTypeArg("", self_type, None)
            self_arg.not_none = True
            self_arg.accept_builtin_subtypes = True
            method_type = self.build_func_type(sig, self_arg)
        self_type.scope.declare_builtin_cfunction(
            self.py_name, method_type, self.cname, utility_code=self.utility_code)
+
|
| 93 |
+
|
| 94 |
+
class BuiltinProperty:
    """A C-implemented property of a builtin type."""
    # read only for now

    def __init__(self, py_name, property_type, call_cname,
                 exception_value=None, exception_check=None, utility_code=None):
        self.py_name = py_name
        self.property_type = property_type
        self.call_cname = call_cname
        self.exception_value = exception_value
        self.exception_check = exception_check
        self.utility_code = utility_code

    def declare_in_type(self, self_type):
        """Register this property on the scope of *self_type*."""
        self_type.scope.declare_cproperty(
            self.py_name, self.property_type, self.call_cname,
            exception_value=self.exception_value,
            exception_check=self.exception_check,
            utility_code=self.utility_code,
        )
+
|
| 115 |
+
|
| 116 |
+
### Special builtin implementations generated at runtime.
|
| 117 |
+
|
| 118 |
+
def _generate_divmod_function(scope, argument_types):
    # Generate (or reuse) a C specialisation of divmod() for the given
    # two argument types; returns the builtin-scope entry, or None when
    # the argument types are unsupported.
    if len(argument_types) != 2:
        return None
    type_op1, type_op2 = argument_types

    # Resolve internal typedefs to avoid useless code duplication.
    if type_op1.is_typedef:
        type_op1 = type_op1.resolve_known_type()
    if type_op2.is_typedef:
        type_op2 = type_op2.resolve_known_type()

    # NOTE(review): unparenthesized mix of 'or'/'and' — 'and' binds
    # tighter, so the float path is taken when op1 is a C float or the
    # Python float type, or when op2 is a C float and op1 is integral.
    # A Python-float op2 combined with a C-int op1 falls through and
    # returns None; confirm this asymmetry is intended.
    if type_op1.is_float or type_op1 is float_type or type_op2.is_float and (type_op1.is_int or type_op1 is int_type):
        impl = "float"
        # TODO: support 'long double'? Currently fails to handle the error return value.
        number_type = PyrexTypes.c_double_type
    elif type_op1.is_int and type_op2.is_int:
        impl = "int"
        # Use the wider of the two integer types.
        number_type = type_op1 if type_op1.rank >= type_op2.rank else type_op2
    else:
        return None

    nogil = scope.nogil
    cfunc_suffix = f"{'nogil_' if nogil else ''}{impl}_{'td_' if number_type.is_typedef else ''}{number_type.specialization_name()}"
    function_cname = f"__Pyx_divmod_{cfunc_suffix}"

    # Reuse an existing specialisation, if available.
    builtin_scope = scope.builtin_scope()
    existing_entry = builtin_scope.lookup_here("divmod")
    if existing_entry is not None:
        for entry in existing_entry.all_alternatives():
            if entry.cname == function_cname:
                return entry

    # Generate a new specialisation.
    # divmod returns a pair, modelled as a ctuple of two number_type values.
    ctuple_entry = scope.declare_tuple_type(None, [number_type]*2)
    ctuple_entry.used = True
    return_type = ctuple_entry.type

    function_type = PyrexTypes.CFuncType(
        return_type, [
            PyrexTypes.CFuncTypeArg("a", number_type, None),
            PyrexTypes.CFuncTypeArg("b", number_type, None),
        ],
        exception_value=f"__Pyx_divmod_ERROR_VALUE_{cfunc_suffix}",
        exception_check=True,
        is_strict_signature=True,
        nogil=nogil,
    )

    utility_code = TempitaUtilityCode.load(
        f"divmod_{impl}", "Builtins.c", context={
            'CFUNC_SUFFIX': cfunc_suffix,
            'MATH_SUFFIX': number_type.math_h_modifier if number_type.is_float else '',
            'TYPE': number_type.empty_declaration_code(),
            'RETURN_TYPE': return_type.empty_declaration_code(),
            'NOGIL': nogil,
        })

    entry = builtin_scope.declare_builtin_cfunction(
        "divmod", function_type, function_cname, utility_code=utility_code)

    return entry
+
|
| 181 |
+
|
| 182 |
+
### List of builtin functions and their implementation.
|
| 183 |
+
|
| 184 |
+
# Table of builtin functions that Cython can call directly at the C level.
# Signature letters: "O" = object, "b" = bint, "i"/"l" = C int/long,
# "d"/"f" = C double/float, "z" = Py_ssize_t, "h" = Py_hash_t, "r" = C int
# error return, "" = no arguments.  Entries with func_type=None derive their
# signature from the letter codes.
builtin_function_table = [
    # name, args, return, C API func, py equiv = "*"
    BuiltinFunction('abs', "d", "d", "fabs",
                    is_strict_signature=True, nogil=True,
                    utility_code=include_std_lib_h_utility_code),
    BuiltinFunction('abs', "f", "f", "fabsf",
                    is_strict_signature=True, nogil=True,
                    utility_code=include_std_lib_h_utility_code),
    BuiltinFunction('abs', "i", "i", "abs",
                    is_strict_signature=True, nogil=True,
                    utility_code=include_std_lib_h_utility_code),
    BuiltinFunction('abs', "l", "l", "labs",
                    is_strict_signature=True, nogil=True,
                    utility_code=include_std_lib_h_utility_code),
    BuiltinFunction('abs', None, None, "__Pyx_abs_longlong",
                    utility_code = UtilityCode.load("abs_longlong", "Builtins.c"),
                    func_type = PyrexTypes.CFuncType(
                        PyrexTypes.c_longlong_type, [
                            PyrexTypes.CFuncTypeArg("arg", PyrexTypes.c_longlong_type, None)
                        ],
                        is_strict_signature = True, nogil=True)),
] + list(
    # abs() of an unsigned type is the identity; the cname is a no-op marker.
    BuiltinFunction('abs', None, None, "/*abs_{}*/".format(t.specialization_name()),
                    func_type = PyrexTypes.CFuncType(
                        t,
                        [PyrexTypes.CFuncTypeArg("arg", t, None)],
                        is_strict_signature = True, nogil=True))
    for t in (PyrexTypes.c_uint_type, PyrexTypes.c_ulong_type, PyrexTypes.c_ulonglong_type)
) + list(
    # abs() of a complex number returns its real-typed magnitude.
    BuiltinFunction('abs', None, None, "__Pyx_c_abs{}".format(t.funcsuffix),
                    func_type = PyrexTypes.CFuncType(
                        t.real_type, [
                            PyrexTypes.CFuncTypeArg("arg", t, None)
                        ],
                        is_strict_signature = True, nogil=True))
    for t in (PyrexTypes.c_float_complex_type,
              PyrexTypes.c_double_complex_type,
              PyrexTypes.c_longdouble_complex_type)
) + [
    BuiltinFunction('abs', "O", "O", "__Pyx_PyNumber_Absolute",
                    utility_code=UtilityCode.load("py_abs", "Builtins.c")),
    #('all', "", "", ""),
    #('any', "", "", ""),
    #('ascii', "", "", ""),
    #('bin', "", "", ""),
    BuiltinFunction('callable', "O", "b", "__Pyx_PyCallable_Check",
                    utility_code = UtilityCode.load("CallableCheck", "ObjectHandling.c")),
    BuiltinFunction('chr', "i", "O", "PyUnicode_FromOrdinal", builtin_return_type='str'),
    #('cmp', "", "", "", ""), # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result)
    #('compile', "", "", ""), # PyObject* Py_CompileString( char *str, char *filename, int start)
    BuiltinFunction('delattr', "OO", "r", "__Pyx_PyObject_DelAttr",
                    utility_code=UtilityCode.load("PyObjectDelAttr", "ObjectHandling.c")),
    BuiltinFunction('dir', "O", "O", "PyObject_Dir"),
    BuiltinFunction('divmod', "OO", "O", "PyNumber_Divmod",
                    specialiser=_generate_divmod_function),
    BuiltinFunction('exec', "O", "O", "__Pyx_PyExecGlobals",
                    utility_code = pyexec_globals_utility_code),
    BuiltinFunction('exec', "OO", "O", "__Pyx_PyExec2",
                    utility_code = pyexec_utility_code),
    BuiltinFunction('exec', "OOO", "O", "__Pyx_PyExec3",
                    utility_code = pyexec_utility_code),
    #('eval', "", "", ""),
    #('execfile', "", "", ""),
    #('filter', "", "", ""),
    BuiltinFunction('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr",
                    utility_code=getattr3_utility_code),  # Pyrex legacy
    BuiltinFunction('getattr', "OOO", "O", "__Pyx_GetAttr3",
                    utility_code=getattr3_utility_code),
    BuiltinFunction('getattr', "OO", "O", "__Pyx_GetAttr",
                    utility_code=getattr_utility_code),
    BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr",
                    utility_code = UtilityCode.load("HasAttr", "Builtins.c")),
    BuiltinFunction('hash', "O", "h", "PyObject_Hash"),
    #('hex', "", "", ""),
    #('id', "", "", ""),
    #('input', "", "", ""),
    BuiltinFunction('intern', "O", "O", "__Pyx_Intern",
                    utility_code = UtilityCode.load("Intern", "Builtins.c")),
    BuiltinFunction('isinstance', "OO", "b", "PyObject_IsInstance"),
    BuiltinFunction('issubclass', "OO", "b", "PyObject_IsSubclass"),
    BuiltinFunction('iter', "OO", "O", "PyCallIter_New"),
    BuiltinFunction('iter', "O", "O", "PyObject_GetIter"),
    BuiltinFunction('len', "O", "z", "PyObject_Length"),
    BuiltinFunction('locals', "", "O", "__pyx_locals"),
    #('map', "", "", ""),
    #('max', "", "", ""),
    #('min', "", "", ""),
    BuiltinFunction('next', "O", "O", "__Pyx_PyIter_Next",
                    utility_code = iter_next_utility_code),  # not available in Py2 => implemented here
    BuiltinFunction('next', "OO", "O", "__Pyx_PyIter_Next2",
                    utility_code = iter_next_utility_code),  # not available in Py2 => implemented here
    #('oct', "", "", ""),
    #('open', "ss", "O", "PyFile_FromString"),  # not in Py3
] + [
    # ord() on known single-character C types is a plain C cast.
    BuiltinFunction('ord', None, None, "__Pyx_long_cast",
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_long_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)],
                        is_strict_signature=True))
    for c_type in [PyrexTypes.c_py_ucs4_type, PyrexTypes.c_py_unicode_type]
] + [
    BuiltinFunction('ord', None, None, "__Pyx_uchar_cast",
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_uchar_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)],
                        is_strict_signature=True))
    for c_type in [PyrexTypes.c_char_type, PyrexTypes.c_schar_type, PyrexTypes.c_uchar_type]
] + [
    BuiltinFunction('ord', None, None, "__Pyx_PyObject_Ord",
                    utility_code=UtilityCode.load_cached("object_ord", "Builtins.c"),
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_long_type, [
                            PyrexTypes.CFuncTypeArg("c", PyrexTypes.py_object_type, None)
                        ],
                        exception_value="(long)(Py_UCS4)-1")),
    BuiltinFunction('pow', "OOO", "O", "PyNumber_Power"),
    BuiltinFunction('pow', "OO", "O", "__Pyx_PyNumber_Power2",
                    utility_code = UtilityCode.load("pow2", "Builtins.c")),
    #('range', "", "", ""),
    #('raw_input', "", "", ""),
    #('reduce', "", "", ""),
    BuiltinFunction('reload', "O", "O", "PyImport_ReloadModule"),
    BuiltinFunction('repr', "O", "O", "PyObject_Repr", builtin_return_type='str'),
    #('round', "", "", ""),
    BuiltinFunction('setattr', "OOO", "r", "PyObject_SetAttr"),
    #('sum', "", "", ""),
    #('sorted', "", "", ""),
    #('type', "O", "O", "PyObject_Type"),
    BuiltinFunction('unichr', "i", "O", "PyUnicode_FromOrdinal", builtin_return_type='str'),
    #('vars', "", "", ""),
    #('zip', "", "", ""),
    #  Can't do these easily until we have builtin type entries.
    #('typecheck', "OO", "i", "PyObject_TypeCheck", False),
    #('issubtype', "OO", "i", "PyType_IsSubtype", False),

    # Put in namespace append optimization.
    BuiltinFunction('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"),

    # This is conditionally looked up based on a compiler directive.
    BuiltinFunction('__Pyx_Globals', "", "O", "__Pyx_Globals",
                    utility_code=globals_utility_code),
]
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
# Builtin types
|
| 327 |
+
# bool
|
| 328 |
+
# buffer
|
| 329 |
+
# classmethod
|
| 330 |
+
# dict
|
| 331 |
+
# enumerate
|
| 332 |
+
# file
|
| 333 |
+
# float
|
| 334 |
+
# int
|
| 335 |
+
# list
|
| 336 |
+
# long
|
| 337 |
+
# object
|
| 338 |
+
# property
|
| 339 |
+
# slice
|
| 340 |
+
# staticmethod
|
| 341 |
+
# super
|
| 342 |
+
# str
|
| 343 |
+
# tuple
|
| 344 |
+
# type
|
| 345 |
+
# xrange
|
| 346 |
+
|
| 347 |
+
# Table of builtin types: (python name, C expression for the type object,
# list of BuiltinMethod/BuiltinAttribute/BuiltinProperty descriptors).
# Signature letters: "T" = the type itself, "O" = object, "b" = bint,
# "z" = Py_ssize_t, "r" = C int error return.
builtin_types_table = [

    ("type", "&PyType_Type", []),

    # This conflicts with the C++ bool type, and unfortunately
    # C++ is too liberal about PyObject* <-> bool conversions,
    # resulting in unintuitive runtime behavior and segfaults.
    # ("bool", "&PyBool_Type", []),

    ("int", "&PyLong_Type", []),
    ("float", "&PyFloat_Type", []),

    ("complex", "&PyComplex_Type", [BuiltinAttribute('cval', field_type_name = 'Py_complex'),
                                    BuiltinAttribute('real', 'cval.real', field_type = PyrexTypes.c_double_type),
                                    BuiltinAttribute('imag', 'cval.imag', field_type = PyrexTypes.c_double_type),
                                    ]),

    ("bytearray", "&PyByteArray_Type", [
        BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                      utility_code=pysequence_multiply_utility_code),
    ]),
    ("bytes", "&PyBytes_Type", [BuiltinMethod("join", "TO", "O", "__Pyx_PyBytes_Join",
                                              utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
                                BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                                              utility_code=pysequence_multiply_utility_code),
                                ]),
    ("str", "&PyUnicode_Type", [BuiltinMethod("__contains__", "TO", "b", "PyUnicode_Contains"),
                                BuiltinMethod("join", "TO", "T", "PyUnicode_Join"),
                                BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                                              utility_code=pysequence_multiply_utility_code),
                                ]),

    ("tuple", "&PyTuple_Type", [BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                                              utility_code=pysequence_multiply_utility_code),
                                ]),

    ("list", "&PyList_Type", [BuiltinMethod("insert", "TzO", "r", "PyList_Insert"),
                              BuiltinMethod("reverse", "T", "r", "PyList_Reverse"),
                              BuiltinMethod("append", "TO", "r", "__Pyx_PyList_Append",
                                            utility_code=UtilityCode.load("ListAppend", "Optimize.c")),
                              BuiltinMethod("extend", "TO", "r", "__Pyx_PyList_Extend",
                                            utility_code=UtilityCode.load("ListExtend", "Optimize.c")),
                              BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                                            utility_code=pysequence_multiply_utility_code),
                              ]),

    ("dict", "&PyDict_Type", [BuiltinMethod("__contains__", "TO", "b", "PyDict_Contains"),
                              BuiltinMethod("has_key", "TO", "b", "PyDict_Contains"),
                              BuiltinMethod("items", "T", "O", "__Pyx_PyDict_Items",
                                            utility_code=UtilityCode.load("py_dict_items", "Builtins.c")),
                              BuiltinMethod("keys", "T", "O", "__Pyx_PyDict_Keys",
                                            utility_code=UtilityCode.load("py_dict_keys", "Builtins.c")),
                              BuiltinMethod("values", "T", "O", "__Pyx_PyDict_Values",
                                            utility_code=UtilityCode.load("py_dict_values", "Builtins.c")),
                              BuiltinMethod("iteritems", "T", "O", "__Pyx_PyDict_IterItems",
                                            utility_code=UtilityCode.load("py_dict_iteritems", "Builtins.c")),
                              BuiltinMethod("iterkeys", "T", "O", "__Pyx_PyDict_IterKeys",
                                            utility_code=UtilityCode.load("py_dict_iterkeys", "Builtins.c")),
                              BuiltinMethod("itervalues", "T", "O", "__Pyx_PyDict_IterValues",
                                            utility_code=UtilityCode.load("py_dict_itervalues", "Builtins.c")),
                              BuiltinMethod("viewitems", "T", "O", "__Pyx_PyDict_ViewItems",
                                            utility_code=UtilityCode.load("py_dict_viewitems", "Builtins.c")),
                              BuiltinMethod("viewkeys", "T", "O", "__Pyx_PyDict_ViewKeys",
                                            utility_code=UtilityCode.load("py_dict_viewkeys", "Builtins.c")),
                              BuiltinMethod("viewvalues", "T", "O", "__Pyx_PyDict_ViewValues",
                                            utility_code=UtilityCode.load("py_dict_viewvalues", "Builtins.c")),
                              BuiltinMethod("clear", "T", "r", "__Pyx_PyDict_Clear",
                                            utility_code=UtilityCode.load("py_dict_clear", "Optimize.c")),
                              BuiltinMethod("copy", "T", "T", "PyDict_Copy")]),

    ("slice", "&PySlice_Type", [BuiltinProperty("start", PyrexTypes.py_object_type, '__Pyx_PySlice_Start',
                                                utility_code=slice_accessor_utility_code),
                                BuiltinProperty("stop", PyrexTypes.py_object_type, '__Pyx_PySlice_Stop',
                                                utility_code=slice_accessor_utility_code),
                                BuiltinProperty("step", PyrexTypes.py_object_type, '__Pyx_PySlice_Step',
                                                utility_code=slice_accessor_utility_code),
                                ]),

    ("set", "&PySet_Type", [BuiltinMethod("clear", "T", "r", "PySet_Clear"),
                            # discard() and remove() have a special treatment for unhashable values
                            BuiltinMethod("discard", "TO", "r", "__Pyx_PySet_Discard",
                                          utility_code=UtilityCode.load("py_set_discard", "Optimize.c")),
                            BuiltinMethod("remove", "TO", "r", "__Pyx_PySet_Remove",
                                          utility_code=UtilityCode.load("py_set_remove", "Optimize.c")),
                            # update is actually variadic (see Github issue #1645)
                            # BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update",
                            #               utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")),
                            BuiltinMethod("add", "TO", "r", "PySet_Add"),
                            BuiltinMethod("pop", "T", "O", "PySet_Pop")]),
    ("frozenset", "&PyFrozenSet_Type", []),
    ("BaseException", "((PyTypeObject*)PyExc_BaseException)", []),
    ("Exception", "((PyTypeObject*)PyExc_Exception)", []),
    ("memoryview", "&PyMemoryView_Type", [
        # TODO - format would be nice, but hard to get
        # __len__ can be accessed through a direct lookup of the buffer (but probably in Optimize.c)
        # error checking would ideally be limited api only
        BuiltinProperty("ndim", PyrexTypes.c_int_type, '__Pyx_PyMemoryView_Get_ndim',
                        exception_value=-1, exception_check=True,
                        utility_code=TempitaUtilityCode.load_cached(
                            "memoryview_get_from_buffer", "Builtins.c",
                            context=dict(name="ndim")
                        )
        ),
        BuiltinProperty("readonly", PyrexTypes.c_bint_type, '__Pyx_PyMemoryView_Get_readonly',
                        exception_value=-1, exception_check=True,
                        utility_code=TempitaUtilityCode.load_cached(
                            "memoryview_get_from_buffer", "Builtins.c",
                            context=dict(name="readonly")
                        )
        ),
        BuiltinProperty("itemsize", PyrexTypes.c_py_ssize_t_type, '__Pyx_PyMemoryView_Get_itemsize',
                        exception_value=-1, exception_check=True,
                        utility_code=TempitaUtilityCode.load_cached(
                            "memoryview_get_from_buffer", "Builtins.c",
                            context=dict(name="itemsize")
                        )
        )]
    )
]
|
| 466 |
+
|
| 467 |
+
|
| 468 |
+
types_that_construct_their_instance = frozenset({
    # some builtin types do not always return an instance of
    # themselves - these do:
    'type', 'bool', 'int', 'float', 'complex',
    'bytes', 'unicode', 'bytearray', 'str',  # 'unicode' presumably kept as legacy alias of 'str' — TODO confirm
    'tuple', 'list', 'dict', 'set', 'frozenset',
    'memoryview'
})
|
| 476 |
+
|
| 477 |
+
|
| 478 |
+
# When updating this mapping, also update "unsafe_compile_time_methods" below
# if methods are added that are not safe to evaluate at compile time.
#
# Value convention (resolved in find_return_type_of_builtin_method below):
# 'T' stands for the owning builtin type itself; a "[...]" suffix is a
# generics annotation that is currently stripped before lookup.
inferred_method_return_types = {
    'complex': dict(
        conjugate='complex',
    ),
    'int': dict(
        as_integer_ratio='tuple[int,int]',
        bit_count='T',
        bit_length='T',
        conjugate='T',
        from_bytes='T',  # classmethod
        is_integer='bint',
        to_bytes='bytes',
    ),
    'float': dict(
        as_integer_ratio='tuple[int,int]',
        conjugate='T',
        fromhex='T',  # classmethod
        hex='str',
        is_integer='bint',
    ),
    'list': dict(
        copy='T',
        count='Py_ssize_t',
        index='Py_ssize_t',
    ),
    'tuple': dict(
        count='Py_ssize_t',
        index='Py_ssize_t',
    ),
    'str': dict(
        capitalize='T',
        casefold='T',
        center='T',
        count='Py_ssize_t',
        encode='bytes',
        endswith='bint',
        expandtabs='T',
        find='Py_ssize_t',
        format='T',
        format_map='T',
        index='Py_ssize_t',
        isalnum='bint',
        isalpha='bint',
        isascii='bint',
        isdecimal='bint',
        isdigit='bint',
        isidentifier='bint',
        islower='bint',
        isnumeric='bint',
        isprintable='bint',
        isspace='bint',
        istitle='bint',
        isupper='bint',
        join='T',
        ljust='T',
        lower='T',
        lstrip='T',
        maketrans='dict[int,object]',  # staticmethod
        partition='tuple[T,T,T]',
        removeprefix='T',
        removesuffix='T',
        replace='T',
        rfind='Py_ssize_t',
        rindex='Py_ssize_t',
        rjust='T',
        rpartition='tuple[T,T,T]',
        rsplit='list[T]',
        rstrip='T',
        split='list[T]',
        splitlines='list[T]',
        startswith='bint',
        strip='T',
        swapcase='T',
        title='T',
        translate='T',
        upper='T',
        zfill='T',
    ),
    'bytes': dict(
        capitalize='T',
        center='T',
        count='Py_ssize_t',
        decode='str',
        endswith='bint',
        expandtabs='T',
        find='Py_ssize_t',
        fromhex='T',  # classmethod
        hex='str',
        index='Py_ssize_t',
        isalnum='bint',
        isalpha='bint',
        isascii='bint',
        isdigit='bint',
        islower='bint',
        isspace='bint',
        istitle='bint',
        isupper='bint',
        join='T',
        ljust='T',
        lower='T',
        lstrip='T',
        maketrans='bytes',  # staticmethod
        partition='tuple[T,T,T]',
        removeprefix='T',
        removesuffix='T',
        replace='T',
        rfind='Py_ssize_t',
        rindex='Py_ssize_t',
        rjust='T',
        rpartition='tuple[T,T,T]',
        rsplit='list[T]',
        rstrip='T',
        split='list[T]',
        splitlines='list[T]',
        startswith='bint',
        strip='T',
        swapcase='T',
        title='T',
        translate='T',
        upper='T',
        zfill='T',
    ),
    'bytearray': dict(
        # Inherited from 'bytes' below.
    ),
    'memoryview': dict(
        cast='T',
        hex='str',
        tobytes='bytes',
        tolist='list',
        toreadonly='T',
    ),
    'set': dict(
        copy='T',
        difference='T',
        intersection='T',
        isdisjoint='bint',
        issubset='bint',
        issuperset='bint',
        symmetric_difference='T',
        union='T',
    ),
    'frozenset': dict(
        # Inherited from 'set' below.
    ),
    'dict': dict(
        copy='T',
        fromkeys='T',  # classmethod
        popitem='tuple',
    ),
}
|
| 631 |
+
|
| 632 |
+
# 'bytearray' and 'frozenset' inherit the method tables of 'bytes' and 'set';
# their own entries above are deliberately declared empty for this purpose.
inferred_method_return_types['bytearray'].update(inferred_method_return_types['bytes'])
inferred_method_return_types['frozenset'].update(inferred_method_return_types['set'])
|
| 634 |
+
|
| 635 |
+
|
| 636 |
+
def find_return_type_of_builtin_method(builtin_type, method_name):
    """Infer the return type of a method call on a builtin type.

    Looks the method up in 'inferred_method_return_types'; unknown types or
    methods fall back to plain 'object'.  In the table, 'T' stands for the
    builtin type itself.
    """
    method_table = inferred_method_return_types.get(builtin_type.name)
    if method_table is None or method_name not in method_table:
        # Unknown type or method => generic Python object.
        return PyrexTypes.py_object_type

    # TODO: Keep the "[...]" part when we add support for generics.
    return_type_name = method_table[method_name].partition('[')[0]

    if return_type_name == 'T':
        return builtin_type
    if 'T' in return_type_name:
        return_type_name = return_type_name.replace('T', builtin_type.name)

    if return_type_name == 'bint':
        return PyrexTypes.c_bint_type
    if return_type_name == 'Py_ssize_t':
        return PyrexTypes.c_py_ssize_t_type
    return builtin_scope.lookup(return_type_name).type
|
| 655 |
+
|
| 656 |
+
|
| 657 |
+
# Methods (per literal builtin type) that must NOT be constant-folded at
# compile time, keyed by type name; types missing from this mapping are
# treated as non-literal by is_safe_compile_time_method() below.
unsafe_compile_time_methods = {
    # We name here only unsafe and non-portable methods if:
    # - the type has a literal representation, allowing for constant folding.
    # - the return type is not None (thus excluding modifier methods)
    #   and is listed in 'inferred_method_return_types' above.
    #
    # See the consistency check in TestBuiltin.py.
    #
    'complex': set(),
    'int': {
        'bit_count',  # Py3.10+
        'from_bytes',  # classmethod
        'is_integer',  # Py3.12+
        'to_bytes',  # changed in Py3.11
    },
    'float': {
        'fromhex',  # classmethod
    },
    'list': {
        'copy',
    },
    'tuple': set(),
    'str': {
        'replace',  # changed in Py3.13+
        'maketrans',  # staticmethod
        'removeprefix',  # Py3.9+
        'removesuffix',  # Py3.9+
    },
    'bytes': {
        'fromhex',  # classmethod
        'maketrans',  # staticmethod
        'removeprefix',  # Py3.9+
        'removesuffix',  # Py3.9+
    },
    'set': set(),
}
|
| 693 |
+
|
| 694 |
+
|
| 695 |
+
def is_safe_compile_time_method(builtin_type_name: str, method_name: str):
    """Return True if the method may safely be evaluated at compile time.

    Requires the type to be a literal type (i.e. present in
    'unsafe_compile_time_methods'), the method not to be black-listed there,
    and the method to be known in 'inferred_method_return_types'.
    """
    unsafe_methods = unsafe_compile_time_methods.get(builtin_type_name)
    if unsafe_methods is None or method_name in unsafe_methods:
        # Not a literal type, or an explicitly unsafe method.
        return False
    # Only known methods qualify.
    known_methods = inferred_method_return_types.get(builtin_type_name) or ()
    return method_name in known_methods
|
| 708 |
+
|
| 709 |
+
|
| 710 |
+
# C structs that are predeclared in the builtin scope:
# (python-visible name, C name, list of (field name, field type)).
builtin_structs_table = [
    ('Py_buffer', 'Py_buffer',
     [("buf", PyrexTypes.c_void_ptr_type),
      ("obj", PyrexTypes.py_object_type),
      ("len", PyrexTypes.c_py_ssize_t_type),
      ("itemsize", PyrexTypes.c_py_ssize_t_type),
      ("readonly", PyrexTypes.c_bint_type),
      ("ndim", PyrexTypes.c_int_type),
      ("format", PyrexTypes.c_char_ptr_type),
      ("shape", PyrexTypes.c_py_ssize_t_ptr_type),
      ("strides", PyrexTypes.c_py_ssize_t_ptr_type),
      ("suboffsets", PyrexTypes.c_py_ssize_t_ptr_type),
      ("smalltable", PyrexTypes.CArrayType(PyrexTypes.c_py_ssize_t_type, 2)),
      ("internal", PyrexTypes.c_void_ptr_type),
      ]),
    ('Py_complex', 'Py_complex',
     [('real', PyrexTypes.c_double_type),
      ('imag', PyrexTypes.c_double_type),
      ])
]
|
| 730 |
+
|
| 731 |
+
# set up builtin scope

# Module-level singleton holding all builtin names; populated by the
# init_builtin_*() functions and init_builtins() below.
builtin_scope = BuiltinScope()
|
| 734 |
+
|
| 735 |
+
def init_builtin_funcs():
    """Declare every entry of 'builtin_function_table' in the builtin scope."""
    for builtin_function in builtin_function_table:
        builtin_function.declare_in_scope(builtin_scope)
|
| 738 |
+
|
| 739 |
+
# Maps type name -> declared builtin type; filled by init_builtin_types()
# (plus the special-cased 'bool' entry added in init_builtins()).
builtin_types = {}
|
| 740 |
+
|
| 741 |
+
def init_builtin_types():
    """Declare all types from 'builtin_types_table' in the builtin scope.

    Also records each declared type in the module-level 'builtin_types' dict
    and registers the type's method/attribute/property descriptors on it.
    """
    global builtin_types
    # Object-struct names that do not follow the 'Py%sObject' naming pattern.
    irregular_objstruct_cnames = {
        'frozenset': 'PySetObject',
        'bytearray': 'PyByteArrayObject',
        'int': 'PyLongObject',
        'str': 'PyUnicodeObject',
        'bool': 'PyLongObject',
        'BaseException': "PyBaseExceptionObject",
        'Exception': "PyBaseExceptionObject",
    }
    for name, cname, methods in builtin_types_table:
        objstruct_cname = irregular_objstruct_cnames.get(
            name, 'Py%sObject' % name.capitalize())

        # Container constructors get dedicated type classes.
        if name == 'tuple':
            type_class = PyrexTypes.PythonTupleTypeConstructor
        elif name in ('dict', 'list', 'set', 'frozenset'):
            type_class = PyrexTypes.BuiltinTypeConstructorObjectType
        else:
            type_class = PyrexTypes.BuiltinObjectType

        the_type = builtin_scope.declare_builtin_type(
            name, cname, objstruct_cname=objstruct_cname, type_class=type_class)
        builtin_types[name] = the_type
        for method in methods:
            method.declare_in_type(the_type)
|
| 770 |
+
|
| 771 |
+
|
| 772 |
+
def init_builtin_structs():
    """Declare the C structs from 'builtin_structs_table' in the builtin scope."""
    for struct_name, struct_cname, field_specs in builtin_structs_table:
        member_scope = StructOrUnionScope(struct_name)
        for field_name, field_type in field_specs:
            member_scope.declare_var(field_name, field_type, None,
                                     field_name, allow_pyobject=True)
        builtin_scope.declare_struct_or_union(
            struct_name, "struct", member_scope, 1, None, cname=struct_cname)
|
| 780 |
+
|
| 781 |
+
|
| 782 |
+
def init_builtins():
    """Populate the builtin scope and initialise the module-level type globals.

    Must run before any of the *_type globals are used; executed once at
    import time (see the call below).  The order matters: structs first
    (Py_buffer is looked up at the end), then types, then functions.
    """
    #Errors.init_thread()  # hopefully not needed - we should not emit warnings ourselves
    init_builtin_structs()
    init_builtin_types()
    init_builtin_funcs()

    # '__debug__' maps to the C runtime check for enabled assertions.
    entry = builtin_scope.declare_var(
        '__debug__', PyrexTypes.c_const_type(PyrexTypes.c_bint_type),
        pos=None, cname='__pyx_assertions_enabled()', is_cdef=True)
    entry.utility_code = UtilityCode.load_cached("AssertionsEnabled", "Exceptions.c")

    global type_type, list_type, tuple_type, dict_type, set_type, frozenset_type, slice_type
    global bytes_type, unicode_type, bytearray_type
    global float_type, int_type, bool_type, complex_type
    global memoryview_type, py_buffer_type
    global sequence_types
    type_type = builtin_scope.lookup('type').type
    list_type = builtin_scope.lookup('list').type
    tuple_type = builtin_scope.lookup('tuple').type
    dict_type = builtin_scope.lookup('dict').type
    set_type = builtin_scope.lookup('set').type
    frozenset_type = builtin_scope.lookup('frozenset').type
    slice_type = builtin_scope.lookup('slice').type

    bytes_type = builtin_scope.lookup('bytes').type
    # Note: 'unicode_type' is the builtin 'str' type.
    unicode_type = builtin_scope.lookup('str').type
    bytearray_type = builtin_scope.lookup('bytearray').type
    memoryview_type = builtin_scope.lookup('memoryview').type

    float_type = builtin_scope.lookup('float').type
    int_type = builtin_scope.lookup('int').type
    #bool_type = builtin_scope.lookup('bool').type
    complex_type = builtin_scope.lookup('complex').type

    # Most entries are initialized via "declare_builtin_type()", except for "bool"
    # which is apparently a special case because it conflicts with C++ bool.
    # Here, we only declare it as builtin name, not as actual type.
    bool_type = PyrexTypes.BuiltinObjectType(EncodedString('bool'), "((PyObject*)&PyBool_Type)", "PyLongObject")
    scope = CClassScope('bool', outer_scope=None, visibility='extern', parent_type=bool_type)
    bool_type.set_scope(scope)
    bool_type.is_final_type = True
    bool_type.entry = builtin_scope.declare_var(EncodedString('bool'), bool_type, pos=None, cname="((PyObject*)&PyBool_Type)")
    builtin_types['bool'] = bool_type

    sequence_types = (
        list_type,
        tuple_type,
        bytes_type,
        unicode_type,
        bytearray_type,
        memoryview_type,
    )

    # Set up type inference links between equivalent Python/C types
    assert bool_type.name == 'bool', bool_type.name
    bool_type.equivalent_type = PyrexTypes.c_bint_type
    PyrexTypes.c_bint_type.equivalent_type = bool_type

    assert float_type.name == 'float', float_type.name
    float_type.equivalent_type = PyrexTypes.c_double_type
    PyrexTypes.c_double_type.equivalent_type = float_type

    assert complex_type.name == 'complex', complex_type.name
    complex_type.equivalent_type = PyrexTypes.c_double_complex_type
    PyrexTypes.c_double_complex_type.equivalent_type = complex_type

    # Declared by init_builtin_structs() above.
    py_buffer_type = builtin_scope.lookup('Py_buffer').type
|
| 849 |
+
|
| 850 |
+
|
| 851 |
+
# Populate the builtin scope and the module-level *_type globals at import time.
init_builtins()
|
| 852 |
+
|
| 853 |
+
##############################
# Support for a few standard library modules that Cython understands (currently typing and dataclasses)
##############################

# Cache: module name -> ModuleScope, filled lazily by
# get_known_standard_library_module_scope() below.
_known_module_scopes = {}
|
| 857 |
+
|
| 858 |
+
def get_known_standard_library_module_scope(module_name):
    """Return a synthetic ModuleScope for a stdlib module that Cython
    understands specially ("typing", "dataclasses", "functools").

    Scopes are built lazily on first request and cached in
    _known_module_scopes.  Returns None for any other module name.
    """
    cached = _known_module_scopes.get(module_name)
    if cached:
        return cached

    def add_type_entry(scope, type_name, tp, with_is_variable=True):
        # Declare 'type_name' as a type in 'scope' and attach a shadowing
        # Python variable entry so the name can also be used as an object.
        type_name = EncodedString(type_name)
        entry = scope.declare_type(type_name, tp, pos=None)
        var_entry = Entry(type_name, None, PyrexTypes.py_object_type)
        var_entry.is_pyglobal = True
        if with_is_variable:
            var_entry.is_variable = True
        var_entry.scope = scope
        entry.as_variable = var_entry
        entry.known_standard_library_import = "%s.%s" % (module_name, type_name)

    mod = None
    if module_name == "typing":
        mod = ModuleScope(module_name, None, None)
        # Generic aliases of the builtin container types.
        for type_name, builtin_type in (
                ('Dict', dict_type),
                ('List', list_type),
                ('Tuple', tuple_type),
                ('Set', set_type),
                ('FrozenSet', frozenset_type)):
            add_type_entry(mod, type_name, builtin_type)
        # Special constructs that only make sense when subscripted.
        for special_name in ('ClassVar', 'Optional', 'Union'):
            special_type = PyrexTypes.SpecialPythonTypeConstructor(
                EncodedString("typing." + special_name))
            add_type_entry(mod, special_name, special_type)
        _known_module_scopes[module_name] = mod
    elif module_name == "dataclasses":
        mod = ModuleScope(module_name, None, None)
        initvar_type = PyrexTypes.SpecialPythonTypeConstructor(
            EncodedString("dataclasses.InitVar"))
        add_type_entry(mod, "InitVar", initvar_type, with_is_variable=False)
        for func_name in ("dataclass", "field"):
            mod.declare_var(EncodedString(func_name), PyrexTypes.py_object_type, pos=None)
        _known_module_scopes[module_name] = mod
    elif module_name == "functools":
        mod = ModuleScope(module_name, None, None)
        for func_name in ("total_ordering",):
            mod.declare_var(EncodedString(func_name), PyrexTypes.py_object_type, pos=None)
        _known_module_scopes[module_name] = mod

    return mod
|
| 912 |
+
|
| 913 |
+
|
| 914 |
+
def get_known_standard_library_entry(qualified_name):
    """Look up the entry for a dotted name like "typing.Dict" in one of
    the specially-modelled stdlib module scopes.

    Returns None for unknown modules, bare module names, and names that
    are nested more than one level deep.
    """
    head, _sep, tail = qualified_name.partition(".")
    if "." in tail:
        # for now, we don't know how to deal with any nested modules
        return None

    scope = get_known_standard_library_module_scope(EncodedString(head))

    # eventually handle more sophisticated multiple lookups if needed
    if scope and tail:
        return scope.lookup_here(tail)
    return None
|
| 928 |
+
|
| 929 |
+
|
| 930 |
+
def exprnode_to_known_standard_library_name(node, env):
    """Resolve an attribute/name expression node to a known standard
    library qualified name (e.g. "typing.Dict"), or None if the base
    name was not imported from a specially-modelled stdlib module.
    """
    attribute_chain = []
    # Walk down "base.attr1.attr2" collecting attribute names until
    # reaching the base name node.
    while node.is_attribute:
        attribute_chain.append(node.attribute)
        node = node.obj
    if not node.is_name:
        return None

    entry = env.lookup(node.name)
    if not (entry and entry.known_standard_library_import):
        return None

    imported = entry.known_standard_library_import
    if get_known_standard_library_entry(imported):
        # The base name already refers to a known entry, e.g. "typing.Dict".
        return imported

    module_scope = get_known_standard_library_module_scope(imported)
    if module_scope:
        # The base name refers to a known module; rebuild the full
        # dotted name from the collected attribute chain.
        attribute_chain.append(module_scope.name)
        return ".".join(reversed(attribute_chain))
    return None
|
venv/lib/python3.10/site-packages/Cython/Compiler/CmdLine.py
ADDED
|
@@ -0,0 +1,259 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Cython - Command Line Parsing
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from argparse import ArgumentParser, Action, SUPPRESS, RawDescriptionHelpFormatter
|
| 8 |
+
from . import Options
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class ParseDirectivesAction(Action):
    """argparse action for -X/--directive: parses a "NAME=VALUE,..." list
    and merges it into the directives accumulated so far, starting from
    the compiler's directive defaults on first use."""

    def __call__(self, parser, namespace, values, option_string=None):
        current = getattr(namespace, self.dest, Options.get_directive_defaults())
        merged = Options.parse_directive_list(
            values, relaxed_bool=True, current_settings=dict(current))
        setattr(namespace, self.dest, merged)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class ParseOptionsAction(Action):
    """argparse action for comma-separated "NAME[=VALUE]" option lists.

    Each VALUE is interpreted as a boolean: "false", "f", "0" and "no"
    (case-insensitively) mean False, anything else — including a bare
    NAME without a value — means True.  Repeated uses accumulate into
    one dict on the namespace.
    """

    _FALSE_VALUES = ('false', 'f', '0', 'no')

    def __call__(self, parser, namespace, values, option_string=None):
        accumulated = dict(getattr(namespace, self.dest, {}))
        for item in values.split(','):
            name, eq, raw_value = item.partition('=')
            if eq:
                accumulated[name] = raw_value.lower() not in self._FALSE_VALUES
            else:
                accumulated[name] = True
        setattr(namespace, self.dest, accumulated)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class ParseCompileTimeEnvAction(Action):
    """argparse action for -E/--compile-time-env: accumulates NAME=VALUE
    compile-time constants (like DEF would define them)."""

    def __call__(self, parser, namespace, values, option_string=None):
        previous = dict(getattr(namespace, self.dest, {}))
        setattr(namespace, self.dest,
                Options.parse_compile_time_env(values, current_settings=previous))
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class ActivateAllWarningsAction(Action):
    """argparse action for -Wextra: enables all extra warning directives
    on top of any compiler directives already collected."""

    def __call__(self, parser, namespace, values, option_string=None):
        # Mutate the existing directives dict in place (if any), then
        # make sure the attribute exists on the namespace.
        collected = getattr(namespace, 'compiler_directives', {})
        collected.update(Options.extra_warnings)
        namespace.compiler_directives = collected
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class SetLenientAction(Action):
    """argparse action for --lenient: demote some compile-time errors to
    runtime errors to improve Python compatibility."""

    def __call__(self, parser, namespace, values, option_string=None):
        for flag in ('error_on_unknown_names', 'error_on_uninitialized'):
            setattr(namespace, flag, False)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class SetGDBDebugAction(Action):
    """argparse action for --gdb: enable cygdb debug output, written to
    the current directory."""

    def __call__(self, parser, namespace, values, option_string=None):
        for attr, value in (('gdb_debug', True), ('output_dir', os.curdir)):
            setattr(namespace, attr, value)
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class SetGDBDebugOutputAction(Action):
    """argparse action for --gdb-outdir DIR: enable cygdb debug output
    and direct it to DIR (implies --gdb)."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.output_dir = values
        namespace.gdb_debug = True
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class SetAnnotateCoverageAction(Action):
    """argparse action for --annotate-coverage XML: turn on source
    annotation and record the coverage XML file to merge into it."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.annotate_coverage_xml = values
        namespace.annotate = True
|
| 69 |
+
|
| 70 |
+
def create_cython_argparser():
    """Build the ArgumentParser for the "cython" command line tool.

    The parser uses argument_default=SUPPRESS so that only explicitly
    passed options appear in the namespace; "--embed[=NAME]" and stray
    source files are handled separately in parse_command_line_raw().
    """
    description = "Cython (https://cython.org/) is a compiler for code written in the "\
                  "Cython language. Cython is based on Pyrex by Greg Ewing."

    parser = ArgumentParser(
        description=description,
        argument_default=SUPPRESS,
        formatter_class=RawDescriptionHelpFormatter,
        epilog="""\
Environment variables:
  CYTHON_CACHE_DIR: the base directory containing Cython's caches."""
    )

    parser.add_argument("-V", "--version", dest='show_version', action='store_const', const=1,
                        help='Display version number of cython compiler')
    parser.add_argument("-l", "--create-listing", dest='use_listing_file', action='store_const', const=1,
                        help='Write error messages to a listing file')
    parser.add_argument("-I", "--include-dir", dest='include_path', action='append',
                        help='Search for include files in named directory '
                             '(multiple include directories are allowed).')
    parser.add_argument("-o", "--output-file", dest='output_file', action='store', type=str,
                        help='Specify name of generated C file')
    # -t and -f share a dest: -f overrides -t by storing 0 into 'timestamps'.
    parser.add_argument("-t", "--timestamps", dest='timestamps', action='store_const', const=1,
                        help='Only compile newer source files')
    parser.add_argument("-f", "--force", dest='timestamps', action='store_const', const=0,
                        help='Compile all source files (overrides implied -t)')
    parser.add_argument("-v", "--verbose", dest='verbose', action='count',
                        help='Be verbose, print file names on multiple compilation')
    parser.add_argument("-p", "--embed-positions", dest='embed_pos_in_docstring', action='store_const', const=1,
                        help='If specified, the positions in Cython files of each '
                             'function definition is embedded in its docstring.')
    parser.add_argument("--cleanup", dest='generate_cleanup_code', action='store', type=int,
                        help='Release interned objects on python exit, for memory debugging. '
                             'Level indicates aggressiveness, default 0 releases nothing.')
    parser.add_argument("--cache", dest='cache', action='store_true',
                        help='Enables Cython compilation cache.')
    parser.add_argument("-w", "--working", dest='working_path', action='store', type=str,
                        help='Sets the working directory for Cython (the directory modules are searched from)')
    # --gdb/--gdb-outdir use custom actions that also set output_dir.
    parser.add_argument("--gdb", action=SetGDBDebugAction, nargs=0,
                        help='Output debug information for cygdb')
    parser.add_argument("--gdb-outdir", action=SetGDBDebugOutputAction, type=str,
                        help='Specify gdb debug information output directory. Implies --gdb.')
    parser.add_argument("-D", "--no-docstrings", dest='docstrings', action='store_false',
                        help='Strip docstrings from the compiled module.')
    # -a and --annotate-fullc share the 'annotate' dest with different consts.
    parser.add_argument('-a', '--annotate', action='store_const', const='default', dest='annotate',
                        help='Produce a colorized HTML version of the source.')
    parser.add_argument('--annotate-fullc', action='store_const', const='fullc', dest='annotate',
                        help='Produce a colorized HTML version of the source '
                             'which includes entire generated C/C++-code.')
    parser.add_argument("--annotate-coverage", dest='annotate_coverage_xml', action=SetAnnotateCoverageAction, type=str,
                        help='Annotate and include coverage information from cov.xml.')
    parser.add_argument("--line-directives", dest='emit_linenums', action='store_true',
                        help='Produce #line directives pointing to the .pyx source')
    parser.add_argument("-+", "--cplus", dest='cplus', action='store_const', const=1,
                        help='Output a C++ rather than C file.')
    # NOTE: "--embed=NAME" is re-parsed by hand in parse_command_line_raw().
    parser.add_argument('--embed', action='store_const', const='main',
                        help='Generate a main() function that embeds the Python interpreter. '
                             'Pass --embed=<method_name> for a name other than main().')
    parser.add_argument('-2', dest='language_level', action='store_const', const=2,
                        help='Compile based on Python-2 syntax and code semantics.')
    parser.add_argument('-3', dest='language_level', action='store_const', const=3,
                        help='Compile based on Python-3 syntax and code semantics.')
    parser.add_argument('--3str', dest='language_level', action='store_const', const='3',
                        help='Compile based on Python-3 syntax and code semantics (same as -3 since Cython 3.1).')
    parser.add_argument("--lenient", action=SetLenientAction, nargs=0,
                        help='Change some compile time errors to runtime errors to '
                             'improve Python compatibility')
    parser.add_argument("--capi-reexport-cincludes", dest='capi_reexport_cincludes', action='store_true',
                        help='Add cincluded headers to any auto-generated header files.')
    parser.add_argument("--fast-fail", dest='fast_fail', action='store_true',
                        help='Abort the compilation on the first error')
    parser.add_argument("-Werror", "--warning-errors", dest='warning_errors', action='store_true',
                        help='Make all warnings into errors')
    parser.add_argument("-Wextra", "--warning-extra", action=ActivateAllWarningsAction, nargs=0,
                        help='Enable extra warnings')

    parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
                        dest='compiler_directives', type=str,
                        action=ParseDirectivesAction,
                        help='Overrides a compiler directive')
    parser.add_argument('-E', '--compile-time-env', metavar='NAME=VALUE,...',
                        dest='compile_time_env', type=str,
                        action=ParseCompileTimeEnvAction,
                        help='Provides compile time env like DEF would do.')
    parser.add_argument("--module-name",
                        dest='module_name', type=str, action='store',
                        help='Fully qualified module name. If not given, is '
                             'deduced from the import path if source file is in '
                             'a package, or equals the filename otherwise.')
    parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
    parser.add_argument("--generate-shared", dest='shared_c_file_path', action='store', type=str,
                        help='Generates shared module with specified name.')
    parser.add_argument("--shared", dest='shared_utility_qualified_name', action='store', type=str,
                        help='Imports utility code from shared module specified by fully qualified module name.')

    parser.add_argument('sources', nargs='*', default=[])

    # TODO: add help
    parser.add_argument("-z", "--pre-import", dest='pre_import', action='store', type=str, help=SUPPRESS)
    parser.add_argument("--convert-range", dest='convert_range', action='store_true', help=SUPPRESS)
    parser.add_argument("--no-c-in-traceback", dest='c_line_in_traceback', action='store_false', help=SUPPRESS)
    parser.add_argument("--cimport-from-pyx", dest='cimport_from_pyx', action='store_true', help=SUPPRESS)
    parser.add_argument("--old-style-globals", dest='old_style_globals', action='store_true', help=SUPPRESS)

    # debug stuff:
    # Every DebugFlags attribute named "debug*" gets a hidden "--debug-*" flag.
    from . import DebugFlags
    for name in vars(DebugFlags):
        if name.startswith("debug"):
            option_name = name.replace('_', '-')
            parser.add_argument("--" + option_name, action='store_true', help=SUPPRESS)

    return parser
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
def parse_command_line_raw(parser, args):
    """Parse *args* with *parser* and return (namespace, sources).

    Handles two things argparse cannot: the optional-value option
    "--embed[=NAME]" (extracted and processed by hand) and source file
    names that end up in the "unknown" leftovers.
    """
    # special handling for --embed and --embed=xxxx as they aren't correctly parsed
    def _is_embed_option(arg):
        return arg == '--embed' or arg.startswith('--embed=')

    embed_args = [arg for arg in args if _is_embed_option(arg)]
    other_args = [arg for arg in args if not _is_embed_option(arg)]

    arguments, unknown = parser.parse_known_args(other_args)

    # The positional sources are returned separately, not on the namespace.
    sources = arguments.sources
    del arguments.sources

    # Leftovers are either additional source files or genuinely unknown options.
    for extra in unknown:
        if extra.startswith('-'):
            parser.error("unknown option " + extra)
        else:
            sources.append(extra)

    # A bare "--embed" means "embed as main()"; the last occurrence wins.
    for embed_arg in embed_args:
        arguments.embed = 'main' if embed_arg == '--embed' else embed_arg[len('--embed='):]

    return arguments, sources
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def parse_command_line(args):
    """Parse the cython command line and return (CompilationOptions, sources).

    Parsed values are distributed to three targets:
      * names starting with "debug" toggle flags on the DebugFlags module,
      * names that exist on the Options module set process-global options,
      * everything else lands on the per-run CompilationOptions object.
    Usage errors go through parser.error(), which prints and exits;
    missing source files raise FileNotFoundError.
    """
    parser = create_cython_argparser()
    arguments, sources = parse_command_line_raw(parser, args)

    # Check that all source files exist (relative to --working, if given)
    # before doing any work.  Note that 'source' is only rebound locally —
    # the entries in 'sources' stay exactly as given on the command line.
    work_dir = getattr(arguments, 'working_path', '')
    for source in sources:
        if work_dir and not os.path.isabs(source):
            source = os.path.join(work_dir, source)
        if not os.path.exists(source):
            import errno
            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), source)

    options = Options.CompilationOptions(Options.default_options)
    for name, value in vars(arguments).items():
        if name.startswith('debug'):
            from . import DebugFlags
            if name in dir(DebugFlags):
                setattr(DebugFlags, name, value)
            else:
                parser.error("Unknown debug flag: %s\n" % name)
        elif hasattr(Options, name):
            # Global option shared by all compilations in this process.
            setattr(Options, name, value)
        else:
            # Per-run compilation option.
            setattr(options, name, value)

    # Mutual-exclusion / arity checks that argparse cannot express directly.
    if options.use_listing_file and len(sources) > 1:
        parser.error("cython: Only one source file allowed when using -o\n")
    if options.shared_c_file_path:
        if len(sources) > 0:
            parser.error("cython: Source file not allowed when using --generate-shared\n")
    elif len(sources) == 0 and not options.show_version:
        parser.error("cython: Need at least one source file\n")
    if Options.embed and len(sources) > 1:
        parser.error("cython: Only one source file allowed when using --embed\n")
    if options.module_name:
        if options.timestamps:
            parser.error("cython: Cannot use --module-name with --timestamps\n")
        if len(sources) > 1:
            parser.error("cython: Only one source file allowed when using --module-name\n")
    return options, sources
|
venv/lib/python3.10/site-packages/Cython/Compiler/Code.pxd
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cimport cython
|
| 2 |
+
from ..StringIOTree cimport StringIOTree
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
# C-level declarations for Code.py (see the matching .py file for the
# implementations).  Only attribute types and cpdef/cdef signatures are
# declared here; all semantics live in Code.py.

cdef class AbstractUtilityCode:
    pass


cdef class UtilityCodeBase(AbstractUtilityCode):
    cpdef format_code(self, code_string, replace_empty_lines=*)


cdef class UtilityCode(UtilityCodeBase):
    cdef public object name
    cdef public object proto
    cdef public object impl
    cdef public object init
    cdef public object cleanup
    cdef public object proto_block
    cdef public object module_state_decls
    cdef public object requires
    cdef public dict _cache
    cdef public list specialize_list
    cdef public object file
    cdef public tuple _parts_tuple

    cpdef none_or_sub(self, s, context)
    # TODO - Signature not compatible with previous declaration
    #@cython.final
    #cdef bint _put_code_section(self, writer, code_type: str) except -1


cdef class FunctionState:
    cdef public set names_taken
    cdef public object owner
    cdef public object scope

    # Label bookkeeping for the function being generated.
    cdef public object error_label
    cdef public size_t label_counter
    cdef public set labels_used
    cdef public object return_label
    cdef public object continue_label
    cdef public object break_label
    cdef public list yield_labels

    cdef public object return_from_error_cleanup_label # not used in __init__ ?

    cdef public object exc_vars
    cdef public object current_except
    cdef public bint can_trace
    cdef public bint gil_owned

    # Temporary variable allocation state.
    cdef public list temps_allocated
    cdef public dict temps_free
    cdef public dict temps_used_type
    cdef public set zombie_temps
    cdef public size_t temp_counter
    cdef public list collect_temps_stack

    cdef public object closure_temps
    cdef public bint should_declare_error_indicator
    cdef public bint uses_error_indicator
    cdef public bint error_without_exception

    cdef public bint needs_refnanny

    cpdef new_label(self, name=*)
    cpdef tuple get_loop_labels(self)
    cpdef set_loop_labels(self, labels)
    cpdef tuple get_all_labels(self)
    cpdef set_all_labels(self, labels)
    cpdef start_collecting_temps(self)
    cpdef stop_collecting_temps(self)

    cpdef list temps_in_use(self)

cdef class IntConst:
    cdef public object cname
    cdef public object value
    cdef public bint is_long

cdef class PyObjectConst:
    cdef public object cname
    cdef public object type

cdef class StringConst:
    cdef public object cname
    cdef public object text
    cdef public object escaped_value
    cdef public dict py_strings
    cdef public list py_versions

    cpdef get_py_string_const(self, encoding, identifier=*)

## cdef class PyStringConst:
##     cdef public object cname
##     cdef public object encoding
##     cdef public bint is_str
##     cdef public bint is_unicode
##     cdef public bint intern

#class GlobalState(object):

#def funccontext_property(name):

cdef class CCodeWriter(object):
    cdef readonly StringIOTree buffer
    cdef readonly list pyclass_stack
    cdef readonly object globalstate
    cdef readonly object funcstate
    cdef object code_config
    cdef tuple last_pos
    cdef tuple last_marked_pos
    cdef Py_ssize_t level
    cdef public Py_ssize_t call_level # debug-only, see Nodes.py
    cdef bint bol

    cpdef write(self, s)
    @cython.final
    cdef _write_lines(self, s)
    cpdef _write_to_buffer(self, s)
    cpdef put(self, code)
    cpdef put_safe(self, code)
    cpdef putln(self, code=*, bint safe=*)
    @cython.final
    cdef emit_marker(self)
    @cython.final
    cdef _build_marker(self, tuple pos)
    @cython.final
    cdef increase_indent(self)
    @cython.final
    cdef decrease_indent(self)
    @cython.final
    cdef indent(self)


cdef class PyrexCodeWriter:
    cdef public object f
    cdef public Py_ssize_t level


cdef class PyxCodeWriter:
    cdef public StringIOTree buffer
    cdef public object context
    cdef object encoding
    cdef Py_ssize_t level
    cdef Py_ssize_t original_level
    cdef dict _insertion_points
venv/lib/python3.10/site-packages/Cython/Compiler/Code.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
venv/lib/python3.10/site-packages/Cython/Compiler/CodeGeneration.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .Visitor import VisitorTransform
|
| 2 |
+
from .Nodes import StatListNode
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class ExtractPxdCode(VisitorTransform):
    """
    Finds nodes in a pxd file that should generate code, and
    returns them in a StatListNode.

    The result is a tuple (StatListNode, ModuleScope), i.e.
    everything that is needed from the pxd after it is processed.

    A purer approach would be to separately compile the pxd code,
    but the result would have to be slightly more sophisticated
    than pure strings (functions + wanted interned strings +
    wanted utility code + wanted cached objects) so for now this
    approach is taken.
    """

    def __call__(self, root):
        # Collect every function definition found under 'root' into a
        # single statement list, paired with the module's scope.
        self.funcs = []
        self.visitchildren(root)
        return (StatListNode(root.pos, stats=self.funcs), root.scope)

    def visit_FuncDefNode(self, node):
        # Record the function node for code generation.
        self.funcs.append(node)
        # Do not visit children, nested funcdefnodes will
        # also be moved by this action...
        return node

    def visit_Node(self, node):
        # Generic fallback: recurse into children looking for functions.
        self.visitchildren(node)
        return node
|
venv/lib/python3.10/site-packages/Cython/Compiler/CythonScope.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .Symtab import ModuleScope
|
| 2 |
+
from .PyrexTypes import *
|
| 3 |
+
from .UtilityCode import CythonUtilityCode
|
| 4 |
+
from .Errors import error
|
| 5 |
+
from .Scanning import StringSourceDescriptor
|
| 6 |
+
from . import MemoryView
|
| 7 |
+
from .StringEncoding import EncodedString
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class CythonScope(ModuleScope):
|
| 11 |
+
is_cython_builtin = 1
|
| 12 |
+
_cythonscope_initialized = False
|
| 13 |
+
|
| 14 |
+
def __init__(self, context):
|
| 15 |
+
ModuleScope.__init__(self, 'cython', None, None)
|
| 16 |
+
self.pxd_file_loaded = True
|
| 17 |
+
self.populate_cython_scope()
|
| 18 |
+
# The Main.Context object
|
| 19 |
+
self._context = context
|
| 20 |
+
|
| 21 |
+
for fused_type in (cy_integral_type, cy_floating_type, cy_numeric_type):
|
| 22 |
+
entry = self.declare_typedef(fused_type.name,
|
| 23 |
+
fused_type,
|
| 24 |
+
None,
|
| 25 |
+
cname='<error>')
|
| 26 |
+
entry.in_cinclude = True
|
| 27 |
+
|
| 28 |
+
entry = self.declare_type(
|
| 29 |
+
"pymutex", cy_pymutex_type, None,
|
| 30 |
+
cname="__Pyx_Locks_PyMutex")
|
| 31 |
+
entry = self.declare_type(
|
| 32 |
+
"pythread_type_lock", cy_pythread_type_lock_type, None,
|
| 33 |
+
cname="__Pyx_Locks_PyThreadTypeLock")
|
| 34 |
+
|
| 35 |
+
def is_cpp(self):
|
| 36 |
+
# Allow C++ utility code in C++ contexts.
|
| 37 |
+
return self.context.cpp
|
| 38 |
+
|
| 39 |
+
def lookup_type(self, name):
|
| 40 |
+
# This function should go away when types are all first-level objects.
|
| 41 |
+
type = parse_basic_type(name)
|
| 42 |
+
if type:
|
| 43 |
+
return type
|
| 44 |
+
|
| 45 |
+
return super().lookup_type(name)
|
| 46 |
+
|
| 47 |
+
def lookup(self, name):
|
| 48 |
+
entry = super().lookup(name)
|
| 49 |
+
|
| 50 |
+
if entry is None and not self._cythonscope_initialized:
|
| 51 |
+
self.load_cythonscope()
|
| 52 |
+
entry = super().lookup(name)
|
| 53 |
+
|
| 54 |
+
return entry
|
| 55 |
+
|
| 56 |
+
def find_module(self, module_name, pos):
|
| 57 |
+
error("cython.%s is not available" % module_name, pos)
|
| 58 |
+
|
| 59 |
+
def find_submodule(self, module_name, as_package=False):
|
| 60 |
+
entry = self.entries.get(module_name, None)
|
| 61 |
+
if not entry:
|
| 62 |
+
self.load_cythonscope()
|
| 63 |
+
entry = self.entries.get(module_name, None)
|
| 64 |
+
|
| 65 |
+
if entry and entry.as_module:
|
| 66 |
+
return entry.as_module
|
| 67 |
+
else:
|
| 68 |
+
# TODO: fix find_submodule control flow so that we're not
|
| 69 |
+
# expected to create a submodule here (to protect CythonScope's
|
| 70 |
+
# possible immutability). Hack ourselves out of the situation
|
| 71 |
+
# for now.
|
| 72 |
+
raise error((StringSourceDescriptor("cython", ""), 0, 0),
|
| 73 |
+
"cython.%s is not available" % module_name)
|
| 74 |
+
|
| 75 |
+
def lookup_qualified_name(self, qname):
|
| 76 |
+
# ExprNode.as_cython_attribute generates qnames and we untangle it here...
|
| 77 |
+
name_path = qname.split('.')
|
| 78 |
+
scope = self
|
| 79 |
+
while len(name_path) > 1:
|
| 80 |
+
scope = scope.lookup_here(name_path[0])
|
| 81 |
+
if scope:
|
| 82 |
+
scope = scope.as_module
|
| 83 |
+
del name_path[0]
|
| 84 |
+
if scope is None:
|
| 85 |
+
return None
|
| 86 |
+
else:
|
| 87 |
+
return scope.lookup_here(name_path[0])
|
| 88 |
+
|
| 89 |
+
def populate_cython_scope(self):
    """
    Declare the always-available C-level entries of the cython scope:
    a ``PyTypeObject`` typedef and ``PyObject_TypeCheck``.
    """
    # These are used to optimize isinstance in FinalOptimizePhase
    type_object = self.declare_typedef(
        'PyTypeObject',
        base_type = c_void_type,
        pos = None,
        cname = 'PyTypeObject')
    type_object.is_void = True
    type_object_type = type_object.type

    # PyObject_TypeCheck(o, t) maps directly onto the CPython C API macro.
    self.declare_cfunction(
        'PyObject_TypeCheck',
        CFuncType(c_bint_type, [CFuncTypeArg("o", py_object_type, None),
                                CFuncTypeArg("t", c_ptr_type(type_object_type), None)]),
        pos = None,
        defining = 1,
        cname = 'PyObject_TypeCheck')
|
| 106 |
+
|
| 107 |
+
def load_cythonscope(self):
    """
    Creates some entries for testing purposes and entries for
    cython.array() and for cython.view.*.

    Idempotent: guarded by self._cythonscope_initialized so repeated
    lookups only pay the construction cost once.
    """
    if self._cythonscope_initialized:
        return

    # Set the flag first so that declare_in_scope() below cannot recurse
    # back into this method via lookup().
    self._cythonscope_initialized = True
    cython_testscope_utility_code.declare_in_scope(
        self, cython_scope=self)
    cython_test_extclass_utility_code.declare_in_scope(
        self, cython_scope=self)

    #
    # The view sub-scope
    #
    self.viewscope = viewscope = ModuleScope('view', self, None)
    self.declare_module('view', viewscope, None).as_module = viewscope
    viewscope.is_cython_builtin = True
    viewscope.pxd_file_loaded = True

    cythonview_testscope_utility_code.declare_in_scope(
        viewscope, cython_scope=self)

    # Declare the memoryview utility types (array, memoryview, ...)
    # inside cython.view, restricted by the allowlist.
    view_utility_scope = MemoryView.get_view_utility_code(
        self.context.shared_utility_qualified_name
    ).declare_in_scope(
        self.viewscope, cython_scope=self, allowlist=MemoryView.view_utility_allowlist)

    # Marks the types as being cython_builtin_type so that they can be
    # extended from without Cython attempting to import cython.view
    ext_types = [ entry.type
                  for entry in view_utility_scope.entries.values()
                  if entry.type.is_extension_type ]
    for ext_type in ext_types:
        ext_type.is_cython_builtin_type = 1

    # self.entries["array"] = view_utility_scope.entries.pop("array")

    # dataclasses scope
    dc_str = EncodedString('dataclasses')
    dataclassesscope = ModuleScope(dc_str, self, context=None)
    self.declare_module(dc_str, dataclassesscope, pos=None).as_module = dataclassesscope
    dataclassesscope.is_cython_builtin = True
    dataclassesscope.pxd_file_loaded = True
    # doesn't actually have any contents
|
| 155 |
+
|
| 156 |
+
def create_cython_scope(context):
    """Return a fresh CythonScope bound to *context*."""
    # One could in fact probably make it a singleton,
    # but not sure yet whether any code mutates it (which would kill reusing
    # it across different contexts)
    return CythonScope(context)
|
| 161 |
+
|
| 162 |
+
# Load test utilities for the cython scope

def load_testscope_utility(cy_util_name, **kwargs):
    """Load the named Cython utility from TestCythonScope.pyx (tests only)."""
    return CythonUtilityCode.load(cy_util_name, "TestCythonScope.pyx", **kwargs)
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
# C prototypes for the TestClass methods: these are defined in utility code
# rather than generated by Cython, so their prototypes must be supplied
# manually for the generated module to compile.
undecorated_methods_protos = UtilityCode(proto="""
    /* These methods are undecorated and have therefore no prototype */
    static PyObject *__pyx_TestClass_cdef_method(
            struct __pyx_TestClass_obj *self, int value);
    static PyObject *__pyx_TestClass_cpdef_method(
            struct __pyx_TestClass_obj *self, int value, int skip_dispatch);
    static PyObject *__pyx_TestClass_def_method(
            PyObject *self, PyObject *value);
""")

# Test-only utility code objects, declared into the cython scope by
# load_cythonscope() above.
cython_testscope_utility_code = load_testscope_utility("TestScope")

test_cython_utility_dep = load_testscope_utility("TestDep")

cython_test_extclass_utility_code = \
    load_testscope_utility("TestClass", name="TestClass",
                           requires=[undecorated_methods_protos,
                                     test_cython_utility_dep])

cythonview_testscope_utility_code = load_testscope_utility("View.TestScope")
|
venv/lib/python3.10/site-packages/Cython/Compiler/Dataclass.py
ADDED
|
@@ -0,0 +1,868 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# functions to transform a c class into a dataclass
|
| 2 |
+
|
| 3 |
+
from collections import OrderedDict
|
| 4 |
+
from textwrap import dedent
|
| 5 |
+
import operator
|
| 6 |
+
|
| 7 |
+
from . import ExprNodes
|
| 8 |
+
from . import Nodes
|
| 9 |
+
from . import PyrexTypes
|
| 10 |
+
from . import Builtin
|
| 11 |
+
from . import Naming
|
| 12 |
+
from .Errors import error, warning
|
| 13 |
+
from .Code import UtilityCode, TempitaUtilityCode, PyxCodeWriter
|
| 14 |
+
from .Visitor import VisitorTransform
|
| 15 |
+
from .StringEncoding import EncodedString
|
| 16 |
+
from .TreeFragment import TreeFragment
|
| 17 |
+
from .ParseTreeTransforms import NormalizeTree, SkipDeclarations
|
| 18 |
+
from .Options import copy_inherited_directives
|
| 19 |
+
|
| 20 |
+
# Module-level cache: the loader utility code only needs to be built once
# per compiler run.
_dataclass_loader_utilitycode = None

def make_dataclasses_module_callnode(pos):
    """
    Return an expression node that, at runtime, yields the ``dataclasses``
    module (falling back to a bundled pure-Python substitute when the
    stdlib module is unavailable).
    """
    global _dataclass_loader_utilitycode
    if not _dataclass_loader_utilitycode:
        # Embed the fallback Python implementation as a C string literal
        # inside the module-loader utility code.
        python_utility_code = UtilityCode.load_cached("Dataclasses_fallback", "Dataclasses.py")
        python_utility_code = EncodedString(python_utility_code.impl)
        _dataclass_loader_utilitycode = TempitaUtilityCode.load(
            "SpecificModuleLoader", "Dataclasses.c",
            context={'cname': "dataclasses", 'py_code': python_utility_code.as_c_string_literal()})
    return ExprNodes.PythonCapiCallNode(
        pos, "__Pyx_Load_dataclasses_Module",
        PyrexTypes.CFuncType(PyrexTypes.py_object_type, []),
        utility_code=_dataclass_loader_utilitycode,
        args=[],
    )
|
| 36 |
+
|
| 37 |
+
def make_dataclass_call_helper(pos, callable, kwds):
    """
    Build an expression node calling ``__Pyx_DataclassesCallHelper(callable, kwds)``,
    which invokes *callable* with the keyword dict *kwds* at runtime.
    """
    helper_utility = UtilityCode.load_cached("DataclassesCallHelper", "Dataclasses.c")
    arg_declarations = [
        PyrexTypes.CFuncTypeArg("callable", PyrexTypes.py_object_type, None),
        PyrexTypes.CFuncTypeArg("kwds", PyrexTypes.py_object_type, None),
    ]
    helper_signature = PyrexTypes.CFuncType(PyrexTypes.py_object_type, arg_declarations)
    return ExprNodes.PythonCapiCallNode(
        pos,
        function_name="__Pyx_DataclassesCallHelper",
        func_type=helper_signature,
        utility_code=helper_utility,
        args=[callable, kwds],
    )
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class RemoveAssignmentsToNames(VisitorTransform, SkipDeclarations):
    """
    Cython (and Python) normally treats

    class A:
        x = 1

    as generating a class attribute. However for dataclasses the `= 1` should be interpreted as
    a default value to initialize an instance attribute with.
    This transform therefore removes the `x=1` assignment so that the class attribute isn't
    generated, while recording what it has removed so that it can be used in the initialization.
    """
    def __init__(self, names):
        # names: the attribute names whose assignments should be captured.
        super().__init__()
        self.names = names
        # name -> rhs expression node of the (last) removed assignment
        self.removed_assignments = {}

    def visit_CClassNode(self, node):
        # Descend into the cdef class body — that is where the dataclass
        # field assignments live.
        self.visitchildren(node)
        return node

    def visit_PyClassNode(self, node):
        # Nested Python classes keep their class attributes untouched.
        return node  # go no further

    def visit_FuncDefNode(self, node):
        # Assignments inside methods are ordinary local/attribute
        # assignments, not field defaults.
        return node  # go no further

    def visit_SingleAssignmentNode(self, node):
        if node.lhs.is_name and node.lhs.name in self.names:
            if node.lhs.name in self.removed_assignments:
                warning(node.pos, ("Multiple assignments for '%s' in dataclass; "
                                   "using most recent") % node.lhs.name, 1)
            self.removed_assignments[node.lhs.name] = node.rhs
            # Returning [] deletes the statement from the tree.
            return []
        return node

    # I believe cascaded assignment is always a syntax error with annotations
    # so there's no need to define visit_CascadedAssignmentNode

    def visit_Node(self, node):
        # Default: recurse.
        self.visitchildren(node)
        return node
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class TemplateCode:
    """
    Adds the ability to keep track of placeholder argument names to PyxCodeWriter.

    Also adds extra_stats which are nodes bundled at the end when this
    is converted to a tree.
    """
    # Class-level counter used as a starting point when generating unique
    # placeholder names (uniqueness is ultimately enforced by the lookup
    # in _new_placeholder_name, not by this counter alone).
    _placeholder_count = 0

    def __init__(self, writer=None, placeholders=None, extra_stats=None):
        # writer/placeholders/extra_stats are shared with insertion points
        # created from this instance (see insertion_point()).
        self.writer = PyxCodeWriter() if writer is None else writer
        self.placeholders = {} if placeholders is None else placeholders
        self.extra_stats = [] if extra_stats is None else extra_stats

    def add_code_line(self, code_line):
        self.writer.putln(code_line)

    def add_code_chunk(self, code_chunk):
        self.writer.put_chunk(code_chunk)

    def reset(self):
        # don't attempt to reset placeholders - it really doesn't matter if
        # we have unused placeholders
        self.writer.reset()

    def empty(self):
        return self.writer.empty()

    def indent(self):
        self.writer.indent()

    def dedent(self):
        self.writer.dedent()

    def indenter(self, block_opener_line):
        # Context manager: writes block_opener_line, indents, and dedents on exit.
        return self.writer.indenter(block_opener_line)

    def new_placeholder(self, field_names, value):
        """Register *value* (a node) under a fresh placeholder name and return the name."""
        name = self._new_placeholder_name(field_names)
        self.placeholders[name] = value
        return name

    def add_extra_statements(self, statements):
        # NOTE(review): __init__ never leaves extra_stats as None, so this
        # guard appears unreachable as written; kept for safety.
        if self.extra_stats is None:
            assert False, "Can only use add_extra_statements on top-level writer"
        self.extra_stats.extend(statements)

    def _new_placeholder_name(self, field_names):
        while True:
            name = f"DATACLASS_PLACEHOLDER_{self._placeholder_count:d}"
            if (name not in self.placeholders
                    and name not in field_names):
                # make sure name isn't already used and doesn't
                # conflict with a variable name (which is unlikely but possible)
                break
            self._placeholder_count += 1
        return name

    def generate_tree(self, level='c_class'):
        """Parse the accumulated code, substitute placeholders, and return the stat list node."""
        stat_list_node = TreeFragment(
            self.writer.getvalue(),
            level=level,
            pipeline=[NormalizeTree(None)],
        ).substitute(self.placeholders)

        stat_list_node.stats += self.extra_stats
        return stat_list_node

    def insertion_point(self):
        """Return a TemplateCode writing at the current position, sharing placeholders/extra_stats."""
        new_writer = self.writer.insertion_point()
        return TemplateCode(
            writer=new_writer,
            placeholders=self.placeholders,
            extra_stats=self.extra_stats
        )
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
# Sentinel for "no value supplied" — mirrors dataclasses.MISSING so that
# None remains usable as a genuine default value.
class _MISSING_TYPE:
    pass
MISSING = _MISSING_TYPE()
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
class Field:
    """
    Field is based on the dataclasses.field class from the standard library module.
    It is used internally during the generation of Cython dataclasses to keep track
    of the settings for individual attributes.

    Attributes of this class are stored as nodes so they can be used in code construction
    more readily (i.e. we store BoolNode rather than bool)
    """
    # Class-level defaults; only overridden on the instance when the user
    # supplies a value (see __init__).
    default = MISSING
    default_factory = MISSING
    private = False

    # Attributes that must be literal nodes (validated in __init__).
    literal_keys = ("repr", "hash", "init", "compare", "metadata")

    # default values are defined by the CPython dataclasses.field
    def __init__(self, pos, default=MISSING, default_factory=MISSING,
                 repr=None, hash=None, init=None,
                 compare=None, metadata=None,
                 is_initvar=False, is_classvar=False,
                 **additional_kwds):
        if default is not MISSING:
            self.default = default
        if default_factory is not MISSING:
            self.default_factory = default_factory
        # Fall back to the CPython defaults, expressed as literal nodes.
        self.repr = repr or ExprNodes.BoolNode(pos, value=True)
        self.hash = hash or ExprNodes.NoneNode(pos)
        self.init = init or ExprNodes.BoolNode(pos, value=True)
        self.compare = compare or ExprNodes.BoolNode(pos, value=True)
        self.metadata = metadata or ExprNodes.NoneNode(pos)
        self.is_initvar = is_initvar
        self.is_classvar = is_classvar

        for k, v in additional_kwds.items():
            # There should not be any additional keywords!
            error(v.pos, "cython.dataclasses.field() got an unexpected keyword argument '%s'" % k)

        for field_name in self.literal_keys:
            field_value = getattr(self, field_name)
            if not field_value.is_literal:
                error(field_value.pos,
                      "cython.dataclasses.field parameter '%s' must be a literal value" % field_name)

    def iterate_record_node_arguments(self):
        """Yield (name, node) pairs for every setting that has a value (MISSING skipped)."""
        for key in (self.literal_keys + ('default', 'default_factory')):
            value = getattr(self, key)
            if value is not MISSING:
                yield key, value
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
def process_class_get_fields(node):
    """
    Collect the dataclass fields of a cdef class node.

    Strips default-value assignments from the class body (they become
    instance defaults, not class attributes), merges inherited fields from
    the closest dataclass base, and returns an OrderedDict name -> Field.
    The result is also stored on node.entry.type.dataclass_fields.
    """
    var_entries = node.scope.var_entries
    # order of definition is used in the dataclass
    var_entries = sorted(var_entries, key=operator.attrgetter('pos'))
    var_names = [entry.name for entry in var_entries]

    # don't treat `x = 1` as an assignment of a class attribute within the dataclass
    transform = RemoveAssignmentsToNames(var_names)
    transform(node)
    default_value_assignments = transform.removed_assignments

    # Walk up the base classes until we find one that carries dataclass
    # fields; inherited fields come first in the ordering.
    base_type = node.base_type
    fields = OrderedDict()
    while base_type:
        if base_type.is_external or not base_type.scope.implemented:
            warning(node.pos, "Cannot reliably handle Cython dataclasses with base types "
                    "in external modules since it is not possible to tell what fields they have", 2)
        if base_type.dataclass_fields:
            fields = base_type.dataclass_fields.copy()
            break
        base_type = base_type.base_type

    for entry in var_entries:
        name = entry.name
        is_initvar = entry.declared_with_pytyping_modifier("dataclasses.InitVar")
        # TODO - classvars aren't included in "var_entries" so are missed here
        # and thus this code is never triggered
        is_classvar = entry.declared_with_pytyping_modifier("typing.ClassVar")
        if name in default_value_assignments:
            assignment = default_value_assignments[name]
            # A default of cython.dataclasses.field(...) carries per-field
            # settings; anything else is a plain default value.
            if (isinstance(assignment, ExprNodes.CallNode) and (
                    assignment.function.as_cython_attribute() == "dataclasses.field" or
                    Builtin.exprnode_to_known_standard_library_name(
                        assignment.function, node.scope) == "dataclasses.field")):
                # I believe most of this is well-enforced when it's treated as a directive
                # but it doesn't hurt to make sure
                valid_general_call = (isinstance(assignment, ExprNodes.GeneralCallNode)
                                      and isinstance(assignment.positional_args, ExprNodes.TupleNode)
                                      and not assignment.positional_args.args
                                      and (assignment.keyword_args is None or isinstance(assignment.keyword_args, ExprNodes.DictNode)))
                valid_simple_call = (isinstance(assignment, ExprNodes.SimpleCallNode) and not assignment.args)
                if not (valid_general_call or valid_simple_call):
                    error(assignment.pos, "Call to 'cython.dataclasses.field' must only consist "
                          "of compile-time keyword arguments")
                    continue
                keyword_args = assignment.keyword_args.as_python_dict() if valid_general_call and assignment.keyword_args else {}
                if 'default' in keyword_args and 'default_factory' in keyword_args:
                    error(assignment.pos, "cannot specify both default and default_factory")
                    continue
                field = Field(node.pos, **keyword_args)
            else:
                if assignment.type in [Builtin.list_type, Builtin.dict_type, Builtin.set_type]:
                    # The standard library module generates a TypeError at runtime
                    # in this situation.
                    # Error message is copied from CPython
                    error(assignment.pos, "mutable default <class '{}'> for field {} is not allowed: "
                          "use default_factory".format(assignment.type.name, name))

                field = Field(node.pos, default=assignment)
        else:
            field = Field(node.pos)
        field.is_initvar = is_initvar
        field.is_classvar = is_classvar
        if entry.visibility == "private":
            field.private = True
        fields[name] = field
    # Cache on the type so derived dataclasses can inherit these fields.
    node.entry.type.dataclass_fields = fields
    return fields
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform):
    """
    Implement @cython.dataclasses.dataclass on a cdef class.

    Validates the decorator arguments, collects the fields, synthesises
    __dataclass_params__/__dataclass_fields__ plus the generated methods
    (__init__, __repr__, __eq__, ...), and appends them to the class body.
    """
    # default argument values from https://docs.python.org/3/library/dataclasses.html
    kwargs = dict(init=True, repr=True, eq=True,
                  order=False, unsafe_hash=False,
                  frozen=False, kw_only=False, match_args=True)
    if dataclass_args is not None:
        if dataclass_args[0]:
            error(node.pos, "cython.dataclasses.dataclass takes no positional arguments")
        for k, v in dataclass_args[1].items():
            if k not in kwargs:
                error(node.pos,
                      "cython.dataclasses.dataclass() got an unexpected keyword argument '%s'" % k)
            if not isinstance(v, ExprNodes.BoolNode):
                error(node.pos,
                      "Arguments passed to cython.dataclasses.dataclass must be True or False")
            kwargs[k] = v.value

    kw_only = kwargs['kw_only']

    fields = process_class_get_fields(node)

    dataclass_module = make_dataclasses_module_callnode(node.pos)

    # create __dataclass_params__ attribute. I try to use the exact
    # `_DataclassParams` class defined in the standard library module if at all possible
    # for maximum duck-typing compatibility.
    dataclass_params_func = ExprNodes.AttributeNode(node.pos, obj=dataclass_module,
                                                    attribute=EncodedString("_DataclassParams"))
    dataclass_params_keywords = ExprNodes.DictNode.from_pairs(
        node.pos,
        [ (ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(k)),
           ExprNodes.BoolNode(node.pos, value=v))
          for k, v in kwargs.items() ] +
        [ (ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(k)),
           ExprNodes.BoolNode(node.pos, value=v))
          for k, v in [('kw_only', kw_only),
                       ('slots', False), ('weakref_slot', False)]
        ])
    dataclass_params = make_dataclass_call_helper(
        node.pos, dataclass_params_func, dataclass_params_keywords)
    dataclass_params_assignment = Nodes.SingleAssignmentNode(
        node.pos,
        lhs = ExprNodes.NameNode(node.pos, name=EncodedString("__dataclass_params__")),
        rhs = dataclass_params)

    dataclass_fields_stats = _set_up_dataclass_fields(node, fields, dataclass_module)

    stats = Nodes.StatListNode(node.pos,
                               stats=[dataclass_params_assignment] + dataclass_fields_stats)

    # Generate the method bodies as Cython source text, then parse them.
    code = TemplateCode()
    generate_init_code(code, kwargs['init'], node, fields, kw_only)
    generate_match_args(code, kwargs['match_args'], node, fields, kw_only)
    generate_repr_code(code, kwargs['repr'], node, fields)
    generate_eq_code(code, kwargs['eq'], node, fields)
    generate_order_code(code, kwargs['order'], node, fields)
    generate_hash_code(code, kwargs['unsafe_hash'], kwargs['eq'], kwargs['frozen'], node, fields)

    stats.stats += code.generate_tree().stats

    # turn off annotation typing, so all arguments to __init__ are accepted as
    # generic objects and thus can accept _HAS_DEFAULT_FACTORY.
    # Type conversion comes later
    comp_directives = Nodes.CompilerDirectivesNode(node.pos,
        directives=copy_inherited_directives(node.scope.directives, annotation_typing=False),
        body=stats)

    comp_directives.analyse_declarations(node.scope)
    # probably already in this scope, but it doesn't hurt to make sure
    analyse_decs_transform.enter_scope(node, node.scope)
    analyse_decs_transform.visit(comp_directives)
    analyse_decs_transform.exit_scope()

    node.body.stats.append(comp_directives)
|
| 374 |
+
|
| 375 |
+
|
| 376 |
+
def generate_init_code(code, init, node, fields, kw_only):
    """
    Notes on CPython generated "__init__":
    * Implemented in `_init_fn`.
    * The use of the `dataclasses._HAS_DEFAULT_FACTORY` sentinel value as
      the default argument for fields that need constructing with a factory
      function is copied from the CPython implementation. (`None` isn't
      suitable because it could also be a value for the user to pass.)
      There's no real reason why it needs importing from the dataclasses module
      though - it could equally be a value generated by Cython when the module loads.
    * seen_default and the associated error message are copied directly from Python
    * Call to user-defined __post_init__ function (if it exists) is copied from
      CPython.

    Cython behaviour deviates a little here (to be decided if this is right...)
    Because the class variable from the assignment does not exist Cython fields will
    return None (or whatever their type default is) if not initialized while Python
    dataclasses will fall back to looking up the class variable.
    """
    # Skipped entirely when init=False or the user defined __init__ themselves.
    if not init or node.scope.lookup_here("__init__"):
        return

    # selfname behaviour copied from the cpython module
    selfname = "__dataclass_self__" if "self" in fields else "self"
    args = [selfname]

    if kw_only:
        # Bare "*" makes every field argument keyword-only.
        args.append("*")

    # The "def __init__(...)" line can only be written once all arguments
    # are known, so reserve an insertion point for it and write the body
    # one level of indentation deeper.
    function_start_point = code.insertion_point()
    code = code.insertion_point()
    code.indent()

    # create a temp to get _HAS_DEFAULT_FACTORY
    dataclass_module = make_dataclasses_module_callnode(node.pos)
    has_default_factory = ExprNodes.AttributeNode(
        node.pos,
        obj=dataclass_module,
        attribute=EncodedString("_HAS_DEFAULT_FACTORY")
    )

    default_factory_placeholder = code.new_placeholder(fields, has_default_factory)

    seen_default = False
    for name, field in fields.items():
        entry = node.scope.lookup(name)
        if entry.annotation:
            annotation = f": {entry.annotation.string.value}"
        else:
            annotation = ""
        assignment = ''
        if field.default is not MISSING or field.default_factory is not MISSING:
            if field.init.value:
                seen_default = True
            if field.default_factory is not MISSING:
                # Factory-backed fields default to the sentinel; the factory
                # call is emitted in the body below.
                ph_name = default_factory_placeholder
            else:
                ph_name = code.new_placeholder(fields, field.default)  # 'default' should be a node
            assignment = f" = {ph_name}"
        elif seen_default and not kw_only and field.init.value:
            error(entry.pos, ("non-default argument '%s' follows default argument "
                              "in dataclass __init__") % name)
            # Abandon the partially-written body.
            code.reset()
            return

        if field.init.value:
            args.append(f"{name}{annotation}{assignment}")

        if field.is_initvar:
            # InitVars are passed to __post_init__ but never stored.
            continue
        elif field.default_factory is MISSING:
            if field.init.value:
                code.add_code_line(f"{selfname}.{name} = {name}")
            elif assignment:
                # not an argument to the function, but is still initialized
                code.add_code_line(f"{selfname}.{name}{assignment}")
        else:
            ph_name = code.new_placeholder(fields, field.default_factory)
            if field.init.value:
                # close to:
                # def __init__(self, name=_PLACEHOLDER_VALUE):
                #     self.name = name_default_factory() if name is _PLACEHOLDER_VALUE else name
                code.add_code_line(
                    f"{selfname}.{name} = {ph_name}() if {name} is {default_factory_placeholder} else {name}"
                )
            else:
                # still need to use the default factory to initialize
                code.add_code_line(f"{selfname}.{name} = {ph_name}()")

    if node.scope.lookup("__post_init__"):
        post_init_vars = ", ".join(name for name, field in fields.items()
                                   if field.is_initvar)
        code.add_code_line(f"{selfname}.__post_init__({post_init_vars})")

    if code.empty():
        code.add_code_line("pass")

    # Now that the full argument list is known, emit the def line at the
    # reserved insertion point above the body.
    args = ", ".join(args)
    function_start_point.add_code_line(f"def __init__({args}):")
|
| 475 |
+
|
| 476 |
+
|
| 477 |
+
def generate_match_args(code, match_args, node, fields, global_kw_only):
|
| 478 |
+
"""
|
| 479 |
+
Generates a tuple containing what would be the positional args to __init__
|
| 480 |
+
|
| 481 |
+
Note that this is generated even if the user overrides init
|
| 482 |
+
"""
|
| 483 |
+
if not match_args or node.scope.lookup_here("__match_args__"):
|
| 484 |
+
return
|
| 485 |
+
positional_arg_names = []
|
| 486 |
+
for field_name, field in fields.items():
|
| 487 |
+
# TODO hasattr and global_kw_only can be removed once full kw_only support is added
|
| 488 |
+
field_is_kw_only = global_kw_only or (
|
| 489 |
+
hasattr(field, 'kw_only') and field.kw_only.value
|
| 490 |
+
)
|
| 491 |
+
if not field_is_kw_only:
|
| 492 |
+
positional_arg_names.append(field_name)
|
| 493 |
+
code.add_code_line("__match_args__ = %s" % str(tuple(positional_arg_names)))
|
| 494 |
+
|
| 495 |
+
|
| 496 |
+
def generate_repr_code(code, repr, node, fields):
|
| 497 |
+
"""
|
| 498 |
+
The core of the CPython implementation is just:
|
| 499 |
+
['return self.__class__.__qualname__ + f"(' +
|
| 500 |
+
', '.join([f"{f.name}={{self.{f.name}!r}}"
|
| 501 |
+
for f in fields]) +
|
| 502 |
+
')"'],
|
| 503 |
+
|
| 504 |
+
The only notable difference here is self.__class__.__qualname__ -> type(self).__name__
|
| 505 |
+
which is because Cython currently supports Python 2.
|
| 506 |
+
|
| 507 |
+
However, it also has some guards for recursive repr invocations. In the standard
|
| 508 |
+
library implementation they're done with a wrapper decorator that captures a set
|
| 509 |
+
(with the set keyed by id and thread). Here we create a set as a thread local
|
| 510 |
+
variable and key only by id.
|
| 511 |
+
"""
|
| 512 |
+
if not repr or node.scope.lookup("__repr__"):
|
| 513 |
+
return
|
| 514 |
+
|
| 515 |
+
# The recursive guard is likely a little costly, so skip it if possible.
|
| 516 |
+
# is_gc_simple defines where it can contain recursive objects
|
| 517 |
+
needs_recursive_guard = False
|
| 518 |
+
for name in fields.keys():
|
| 519 |
+
entry = node.scope.lookup(name)
|
| 520 |
+
type_ = entry.type
|
| 521 |
+
if type_.is_memoryviewslice:
|
| 522 |
+
type_ = type_.dtype
|
| 523 |
+
if not type_.is_pyobject:
|
| 524 |
+
continue # no GC
|
| 525 |
+
if not type_.is_gc_simple:
|
| 526 |
+
needs_recursive_guard = True
|
| 527 |
+
break
|
| 528 |
+
|
| 529 |
+
if needs_recursive_guard:
|
| 530 |
+
code.add_code_chunk("""
|
| 531 |
+
__pyx_recursive_repr_guard = __import__('threading').local()
|
| 532 |
+
__pyx_recursive_repr_guard.running = set()
|
| 533 |
+
""")
|
| 534 |
+
|
| 535 |
+
with code.indenter("def __repr__(self):"):
|
| 536 |
+
if needs_recursive_guard:
|
| 537 |
+
code.add_code_chunk("""
|
| 538 |
+
key = id(self)
|
| 539 |
+
guard_set = self.__pyx_recursive_repr_guard.running
|
| 540 |
+
if key in guard_set: return '...'
|
| 541 |
+
guard_set.add(key)
|
| 542 |
+
try:
|
| 543 |
+
""")
|
| 544 |
+
code.indent()
|
| 545 |
+
|
| 546 |
+
strs = ["%s={self.%s!r}" % (name, name)
|
| 547 |
+
for name, field in fields.items()
|
| 548 |
+
if field.repr.value and not field.is_initvar]
|
| 549 |
+
format_string = ", ".join(strs)
|
| 550 |
+
|
| 551 |
+
code.add_code_chunk(f'''
|
| 552 |
+
name = getattr(type(self), "__qualname__", None) or type(self).__name__
|
| 553 |
+
return f'{{name}}({format_string})'
|
| 554 |
+
''')
|
| 555 |
+
if needs_recursive_guard:
|
| 556 |
+
code.dedent()
|
| 557 |
+
with code.indenter("finally:"):
|
| 558 |
+
code.add_code_line("guard_set.remove(key)")
|
| 559 |
+
|
| 560 |
+
|
| 561 |
+
def generate_cmp_code(code, op, funcname, node, fields):
|
| 562 |
+
if node.scope.lookup_here(funcname):
|
| 563 |
+
return
|
| 564 |
+
|
| 565 |
+
names = [name for name, field in fields.items() if (field.compare.value and not field.is_initvar)]
|
| 566 |
+
|
| 567 |
+
with code.indenter(f"def {funcname}(self, other):"):
|
| 568 |
+
code.add_code_chunk(f"""
|
| 569 |
+
if other.__class__ is not self.__class__: return NotImplemented
|
| 570 |
+
|
| 571 |
+
cdef {node.class_name} other_cast
|
| 572 |
+
other_cast = <{node.class_name}>other
|
| 573 |
+
""")
|
| 574 |
+
|
| 575 |
+
# The Python implementation of dataclasses.py does a tuple comparison
|
| 576 |
+
# (roughly):
|
| 577 |
+
# return self._attributes_to_tuple() {op} other._attributes_to_tuple()
|
| 578 |
+
#
|
| 579 |
+
# For the Cython implementation a tuple comparison isn't an option because
|
| 580 |
+
# not all attributes can be converted to Python objects and stored in a tuple
|
| 581 |
+
#
|
| 582 |
+
# TODO - better diagnostics of whether the types support comparison before
|
| 583 |
+
# generating the code. Plus, do we want to convert C structs to dicts and
|
| 584 |
+
# compare them that way (I think not, but it might be in demand)?
|
| 585 |
+
checks = []
|
| 586 |
+
op_without_equals = op.replace('=', '')
|
| 587 |
+
|
| 588 |
+
for name in names:
|
| 589 |
+
if op != '==':
|
| 590 |
+
# tuple comparison rules - early elements take precedence
|
| 591 |
+
code.add_code_line(f"if self.{name} {op_without_equals} other_cast.{name}: return True")
|
| 592 |
+
code.add_code_line(f"if self.{name} != other_cast.{name}: return False")
|
| 593 |
+
code.add_code_line(f"return {'True' if '=' in op else 'False'}") # "() == ()" is True
|
| 594 |
+
|
| 595 |
+
|
| 596 |
+
def generate_eq_code(code, eq, node, fields):
|
| 597 |
+
if not eq:
|
| 598 |
+
return
|
| 599 |
+
generate_cmp_code(code, "==", "__eq__", node, fields)
|
| 600 |
+
|
| 601 |
+
|
| 602 |
+
def generate_order_code(code, order, node, fields):
|
| 603 |
+
if not order:
|
| 604 |
+
return
|
| 605 |
+
|
| 606 |
+
for op, name in [("<", "__lt__"),
|
| 607 |
+
("<=", "__le__"),
|
| 608 |
+
(">", "__gt__"),
|
| 609 |
+
(">=", "__ge__")]:
|
| 610 |
+
generate_cmp_code(code, op, name, node, fields)
|
| 611 |
+
|
| 612 |
+
|
| 613 |
+
def generate_hash_code(code, unsafe_hash, eq, frozen, node, fields):
|
| 614 |
+
"""
|
| 615 |
+
Copied from CPython implementation - the intention is to follow this as far as
|
| 616 |
+
is possible:
|
| 617 |
+
# +------------------- unsafe_hash= parameter
|
| 618 |
+
# | +----------- eq= parameter
|
| 619 |
+
# | | +--- frozen= parameter
|
| 620 |
+
# | | |
|
| 621 |
+
# v v v | | |
|
| 622 |
+
# | no | yes | <--- class has explicitly defined __hash__
|
| 623 |
+
# +=======+=======+=======+========+========+
|
| 624 |
+
# | False | False | False | | | No __eq__, use the base class __hash__
|
| 625 |
+
# +-------+-------+-------+--------+--------+
|
| 626 |
+
# | False | False | True | | | No __eq__, use the base class __hash__
|
| 627 |
+
# +-------+-------+-------+--------+--------+
|
| 628 |
+
# | False | True | False | None | | <-- the default, not hashable
|
| 629 |
+
# +-------+-------+-------+--------+--------+
|
| 630 |
+
# | False | True | True | add | | Frozen, so hashable, allows override
|
| 631 |
+
# +-------+-------+-------+--------+--------+
|
| 632 |
+
# | True | False | False | add | raise | Has no __eq__, but hashable
|
| 633 |
+
# +-------+-------+-------+--------+--------+
|
| 634 |
+
# | True | False | True | add | raise | Has no __eq__, but hashable
|
| 635 |
+
# +-------+-------+-------+--------+--------+
|
| 636 |
+
# | True | True | False | add | raise | Not frozen, but hashable
|
| 637 |
+
# +-------+-------+-------+--------+--------+
|
| 638 |
+
# | True | True | True | add | raise | Frozen, so hashable
|
| 639 |
+
# +=======+=======+=======+========+========+
|
| 640 |
+
# For boxes that are blank, __hash__ is untouched and therefore
|
| 641 |
+
# inherited from the base class. If the base is object, then
|
| 642 |
+
# id-based hashing is used.
|
| 643 |
+
|
| 644 |
+
The Python implementation creates a tuple of all the fields, then hashes them.
|
| 645 |
+
This implementation creates a tuple of all the hashes of all the fields and hashes that.
|
| 646 |
+
The reason for this slight difference is to avoid to-Python conversions for anything
|
| 647 |
+
that Cython knows how to hash directly (It doesn't look like this currently applies to
|
| 648 |
+
anything though...).
|
| 649 |
+
"""
|
| 650 |
+
|
| 651 |
+
hash_entry = node.scope.lookup_here("__hash__")
|
| 652 |
+
if hash_entry:
|
| 653 |
+
# TODO ideally assignment of __hash__ to None shouldn't trigger this
|
| 654 |
+
# but difficult to get the right information here
|
| 655 |
+
if unsafe_hash:
|
| 656 |
+
# error message taken from CPython dataclasses module
|
| 657 |
+
error(node.pos, "Cannot overwrite attribute __hash__ in class %s" % node.class_name)
|
| 658 |
+
return
|
| 659 |
+
|
| 660 |
+
if not unsafe_hash:
|
| 661 |
+
if not eq:
|
| 662 |
+
return
|
| 663 |
+
if not frozen:
|
| 664 |
+
code.add_extra_statements([
|
| 665 |
+
Nodes.SingleAssignmentNode(
|
| 666 |
+
node.pos,
|
| 667 |
+
lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__hash__")),
|
| 668 |
+
rhs=ExprNodes.NoneNode(node.pos),
|
| 669 |
+
)
|
| 670 |
+
])
|
| 671 |
+
return
|
| 672 |
+
|
| 673 |
+
names = [
|
| 674 |
+
name for name, field in fields.items()
|
| 675 |
+
if not field.is_initvar and (
|
| 676 |
+
field.compare.value if field.hash.value is None else field.hash.value)
|
| 677 |
+
]
|
| 678 |
+
|
| 679 |
+
# make a tuple of the hashes
|
| 680 |
+
hash_tuple_items = ", ".join("self.%s" % name for name in names)
|
| 681 |
+
if hash_tuple_items:
|
| 682 |
+
hash_tuple_items += "," # ensure that one arg form is a tuple
|
| 683 |
+
|
| 684 |
+
# if we're here we want to generate a hash
|
| 685 |
+
with code.indenter("def __hash__(self):"):
|
| 686 |
+
code.add_code_line(f"return hash(({hash_tuple_items}))")
|
| 687 |
+
|
| 688 |
+
|
| 689 |
+
def get_field_type(pos, entry):
|
| 690 |
+
"""
|
| 691 |
+
sets the .type attribute for a field
|
| 692 |
+
|
| 693 |
+
Returns the annotation if possible (since this is what the dataclasses
|
| 694 |
+
module does). If not (for example, attributes defined with cdef) then
|
| 695 |
+
it creates a string fallback.
|
| 696 |
+
"""
|
| 697 |
+
if entry.annotation:
|
| 698 |
+
# Right now it doesn't look like cdef classes generate an
|
| 699 |
+
# __annotations__ dict, therefore it's safe to just return
|
| 700 |
+
# entry.annotation
|
| 701 |
+
# (TODO: remove .string if we ditch PEP563)
|
| 702 |
+
return entry.annotation.string
|
| 703 |
+
# If they do in future then we may need to look up into that
|
| 704 |
+
# to duplicating the node. The code below should do this:
|
| 705 |
+
#class_name_node = ExprNodes.NameNode(pos, name=entry.scope.name)
|
| 706 |
+
#annotations = ExprNodes.AttributeNode(
|
| 707 |
+
# pos, obj=class_name_node,
|
| 708 |
+
# attribute=EncodedString("__annotations__")
|
| 709 |
+
#)
|
| 710 |
+
#return ExprNodes.IndexNode(
|
| 711 |
+
# pos, base=annotations,
|
| 712 |
+
# index=ExprNodes.UnicodeNode(pos, value=entry.name)
|
| 713 |
+
#)
|
| 714 |
+
else:
|
| 715 |
+
# it's slightly unclear what the best option is here - we could
|
| 716 |
+
# try to return PyType_Type. This case should only happen with
|
| 717 |
+
# attributes defined with cdef so Cython is free to make it's own
|
| 718 |
+
# decision
|
| 719 |
+
s = EncodedString(entry.type.declaration_code("", for_display=1))
|
| 720 |
+
return ExprNodes.UnicodeNode(pos, value=s)
|
| 721 |
+
|
| 722 |
+
|
| 723 |
+
class FieldRecordNode(ExprNodes.ExprNode):
|
| 724 |
+
"""
|
| 725 |
+
__dataclass_fields__ contains a bunch of field objects recording how each field
|
| 726 |
+
of the dataclass was initialized (mainly corresponding to the arguments passed to
|
| 727 |
+
the "field" function). This node is used for the attributes of these field objects.
|
| 728 |
+
|
| 729 |
+
If possible, coerces `arg` to a Python object.
|
| 730 |
+
Otherwise, generates a sensible backup string.
|
| 731 |
+
"""
|
| 732 |
+
subexprs = ['arg']
|
| 733 |
+
|
| 734 |
+
def __init__(self, pos, arg):
|
| 735 |
+
super().__init__(pos, arg=arg)
|
| 736 |
+
|
| 737 |
+
def analyse_types(self, env):
|
| 738 |
+
self.arg.analyse_types(env)
|
| 739 |
+
self.type = self.arg.type
|
| 740 |
+
return self
|
| 741 |
+
|
| 742 |
+
def coerce_to_pyobject(self, env):
|
| 743 |
+
if self.arg.type.can_coerce_to_pyobject(env):
|
| 744 |
+
return self.arg.coerce_to_pyobject(env)
|
| 745 |
+
else:
|
| 746 |
+
# A string representation of the code that gave the field seems like a reasonable
|
| 747 |
+
# fallback. This'll mostly happen for "default" and "default_factory" where the
|
| 748 |
+
# type may be a C-type that can't be converted to Python.
|
| 749 |
+
return self._make_string()
|
| 750 |
+
|
| 751 |
+
def _make_string(self):
|
| 752 |
+
from .AutoDocTransforms import AnnotationWriter
|
| 753 |
+
writer = AnnotationWriter(description="Dataclass field")
|
| 754 |
+
string = writer.write(self.arg)
|
| 755 |
+
return ExprNodes.UnicodeNode(self.pos, value=EncodedString(string))
|
| 756 |
+
|
| 757 |
+
def generate_evaluation_code(self, code):
|
| 758 |
+
return self.arg.generate_evaluation_code(code)
|
| 759 |
+
|
| 760 |
+
|
| 761 |
+
def _set_up_dataclass_fields(node, fields, dataclass_module):
|
| 762 |
+
# For defaults and default_factories containing things like lambda,
|
| 763 |
+
# they're already declared in the class scope, and it creates a big
|
| 764 |
+
# problem if multiple copies are floating around in both the __init__
|
| 765 |
+
# function, and in the __dataclass_fields__ structure.
|
| 766 |
+
# Therefore, create module-level constants holding these values and
|
| 767 |
+
# pass those around instead
|
| 768 |
+
#
|
| 769 |
+
# If possible we use the `Field` class defined in the standard library
|
| 770 |
+
# module so that the information stored here is as close to a regular
|
| 771 |
+
# dataclass as is possible.
|
| 772 |
+
variables_assignment_stats = []
|
| 773 |
+
for name, field in fields.items():
|
| 774 |
+
if field.private:
|
| 775 |
+
continue # doesn't appear in the public interface
|
| 776 |
+
for attrname in [ "default", "default_factory" ]:
|
| 777 |
+
field_default = getattr(field, attrname)
|
| 778 |
+
if field_default is MISSING or field_default.is_literal or field_default.is_name:
|
| 779 |
+
# some simple cases where we don't need to set up
|
| 780 |
+
# the variable as a module-level constant
|
| 781 |
+
continue
|
| 782 |
+
global_scope = node.scope.global_scope()
|
| 783 |
+
module_field_name = global_scope.mangle(
|
| 784 |
+
global_scope.mangle(Naming.dataclass_field_default_cname, node.class_name),
|
| 785 |
+
name)
|
| 786 |
+
# create an entry in the global scope for this variable to live
|
| 787 |
+
field_node = ExprNodes.NameNode(field_default.pos, name=EncodedString(module_field_name))
|
| 788 |
+
field_node.entry = global_scope.declare_var(
|
| 789 |
+
field_node.name, type=field_default.type or PyrexTypes.unspecified_type,
|
| 790 |
+
pos=field_default.pos, cname=field_node.name, is_cdef=True,
|
| 791 |
+
# TODO: do we need to set 'pytyping_modifiers' here?
|
| 792 |
+
)
|
| 793 |
+
# replace the field so that future users just receive the namenode
|
| 794 |
+
setattr(field, attrname, field_node)
|
| 795 |
+
|
| 796 |
+
variables_assignment_stats.append(
|
| 797 |
+
Nodes.SingleAssignmentNode(field_default.pos, lhs=field_node, rhs=field_default))
|
| 798 |
+
|
| 799 |
+
placeholders = {}
|
| 800 |
+
field_func = ExprNodes.AttributeNode(node.pos, obj=dataclass_module,
|
| 801 |
+
attribute=EncodedString("field"))
|
| 802 |
+
dc_fields = ExprNodes.DictNode(node.pos, key_value_pairs=[])
|
| 803 |
+
dc_fields_namevalue_assignments = []
|
| 804 |
+
|
| 805 |
+
for name, field in fields.items():
|
| 806 |
+
if field.private:
|
| 807 |
+
continue # doesn't appear in the public interface
|
| 808 |
+
type_placeholder_name = "PLACEHOLDER_%s" % name
|
| 809 |
+
placeholders[type_placeholder_name] = get_field_type(
|
| 810 |
+
node.pos, node.scope.entries[name]
|
| 811 |
+
)
|
| 812 |
+
|
| 813 |
+
# defining these make the fields introspect more like a Python dataclass
|
| 814 |
+
field_type_placeholder_name = "PLACEHOLDER_FIELD_TYPE_%s" % name
|
| 815 |
+
if field.is_initvar:
|
| 816 |
+
placeholders[field_type_placeholder_name] = ExprNodes.AttributeNode(
|
| 817 |
+
node.pos, obj=dataclass_module,
|
| 818 |
+
attribute=EncodedString("_FIELD_INITVAR")
|
| 819 |
+
)
|
| 820 |
+
elif field.is_classvar:
|
| 821 |
+
# TODO - currently this isn't triggered
|
| 822 |
+
placeholders[field_type_placeholder_name] = ExprNodes.AttributeNode(
|
| 823 |
+
node.pos, obj=dataclass_module,
|
| 824 |
+
attribute=EncodedString("_FIELD_CLASSVAR")
|
| 825 |
+
)
|
| 826 |
+
else:
|
| 827 |
+
placeholders[field_type_placeholder_name] = ExprNodes.AttributeNode(
|
| 828 |
+
node.pos, obj=dataclass_module,
|
| 829 |
+
attribute=EncodedString("_FIELD")
|
| 830 |
+
)
|
| 831 |
+
|
| 832 |
+
dc_field_keywords = ExprNodes.DictNode.from_pairs(
|
| 833 |
+
node.pos,
|
| 834 |
+
[(ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(k)),
|
| 835 |
+
FieldRecordNode(node.pos, arg=v))
|
| 836 |
+
for k, v in field.iterate_record_node_arguments()]
|
| 837 |
+
|
| 838 |
+
)
|
| 839 |
+
dc_field_call = make_dataclass_call_helper(
|
| 840 |
+
node.pos, field_func, dc_field_keywords
|
| 841 |
+
)
|
| 842 |
+
dc_fields.key_value_pairs.append(
|
| 843 |
+
ExprNodes.DictItemNode(
|
| 844 |
+
node.pos,
|
| 845 |
+
key=ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(name)),
|
| 846 |
+
value=dc_field_call))
|
| 847 |
+
dc_fields_namevalue_assignments.append(
|
| 848 |
+
dedent(f"""\
|
| 849 |
+
__dataclass_fields__[{name!r}].name = {name!r}
|
| 850 |
+
__dataclass_fields__[{name!r}].type = {type_placeholder_name}
|
| 851 |
+
__dataclass_fields__[{name!r}]._field_type = {field_type_placeholder_name}
|
| 852 |
+
"""))
|
| 853 |
+
|
| 854 |
+
dataclass_fields_assignment = \
|
| 855 |
+
Nodes.SingleAssignmentNode(node.pos,
|
| 856 |
+
lhs = ExprNodes.NameNode(node.pos,
|
| 857 |
+
name=EncodedString("__dataclass_fields__")),
|
| 858 |
+
rhs = dc_fields)
|
| 859 |
+
|
| 860 |
+
dc_fields_namevalue_assignments = "\n".join(dc_fields_namevalue_assignments)
|
| 861 |
+
dc_fields_namevalue_assignments = TreeFragment(dc_fields_namevalue_assignments,
|
| 862 |
+
level="c_class",
|
| 863 |
+
pipeline=[NormalizeTree(None)])
|
| 864 |
+
dc_fields_namevalue_assignments = dc_fields_namevalue_assignments.substitute(placeholders)
|
| 865 |
+
|
| 866 |
+
return (variables_assignment_stats
|
| 867 |
+
+ [dataclass_fields_assignment]
|
| 868 |
+
+ dc_fields_namevalue_assignments.stats)
|
venv/lib/python3.10/site-packages/Cython/Compiler/DebugFlags.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Can be enabled at the command line with --debug-xxx.
|
| 2 |
+
|
| 3 |
+
debug_disposal_code = 0
|
| 4 |
+
debug_temp_alloc = 0
|
| 5 |
+
debug_coercion = 0
|
| 6 |
+
|
| 7 |
+
# Write comments into the C code that show where temporary variables
|
| 8 |
+
# are allocated and released.
|
| 9 |
+
debug_temp_code_comments = 0
|
| 10 |
+
|
| 11 |
+
# Write a call trace of the code generation phase into the C code.
|
| 12 |
+
debug_trace_code_generation = 0
|
| 13 |
+
|
| 14 |
+
# Do not replace exceptions with user-friendly error messages.
|
| 15 |
+
debug_no_exception_intercept = 0
|
| 16 |
+
|
| 17 |
+
# Print a message each time a new stage in the pipeline is entered.
|
| 18 |
+
debug_verbose_pipeline = 0
|
| 19 |
+
|
| 20 |
+
# Print a message each time an Entry type is assigned.
|
| 21 |
+
debug_verbose_entry_types = False
|
| 22 |
+
|
| 23 |
+
# Raise an exception when an error is encountered.
|
| 24 |
+
debug_exception_on_error = 0
|
venv/lib/python3.10/site-packages/Cython/Compiler/Errors.py
ADDED
|
@@ -0,0 +1,295 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Errors
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
any_string_type = (bytes, str)
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
from contextlib import contextmanager
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
from threading import local as _threadlocal
|
| 12 |
+
except ImportError:
|
| 13 |
+
class _threadlocal: pass
|
| 14 |
+
|
| 15 |
+
threadlocal = _threadlocal()
|
| 16 |
+
|
| 17 |
+
from ..Utils import open_new_file
|
| 18 |
+
from . import DebugFlags
|
| 19 |
+
from . import Options
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class PyrexError(Exception):
|
| 23 |
+
pass
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class PyrexWarning(Exception):
|
| 27 |
+
pass
|
| 28 |
+
|
| 29 |
+
class CannotSpecialize(PyrexError):
|
| 30 |
+
pass
|
| 31 |
+
|
| 32 |
+
def context(position):
|
| 33 |
+
source = position[0]
|
| 34 |
+
assert not (isinstance(source, any_string_type)), (
|
| 35 |
+
"Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
|
| 36 |
+
try:
|
| 37 |
+
F = source.get_lines()
|
| 38 |
+
except UnicodeDecodeError:
|
| 39 |
+
# file has an encoding problem
|
| 40 |
+
s = "[unprintable code]\n"
|
| 41 |
+
else:
|
| 42 |
+
s = ''.join(F[max(0, position[1]-6):position[1]])
|
| 43 |
+
s = '...\n%s%s^\n' % (s, ' '*(position[2]))
|
| 44 |
+
s = '%s\n%s%s\n' % ('-'*60, s, '-'*60)
|
| 45 |
+
return s
|
| 46 |
+
|
| 47 |
+
def format_position(position):
|
| 48 |
+
if position:
|
| 49 |
+
return "%s:%d:%d: " % (position[0].get_error_description(),
|
| 50 |
+
position[1], position[2])
|
| 51 |
+
return ''
|
| 52 |
+
|
| 53 |
+
def format_error(message, position):
|
| 54 |
+
if position:
|
| 55 |
+
pos_str = format_position(position)
|
| 56 |
+
cont = context(position)
|
| 57 |
+
message = '\nError compiling Cython file:\n%s\n%s%s' % (cont, pos_str, message or '')
|
| 58 |
+
return message
|
| 59 |
+
|
| 60 |
+
class CompileError(PyrexError):
|
| 61 |
+
|
| 62 |
+
def __init__(self, position = None, message = ""):
|
| 63 |
+
self.position = position
|
| 64 |
+
self.message_only = message
|
| 65 |
+
self.formatted_message = format_error(message, position)
|
| 66 |
+
self.reported = False
|
| 67 |
+
Exception.__init__(self, self.formatted_message)
|
| 68 |
+
# Python Exception subclass pickling is broken,
|
| 69 |
+
# see https://bugs.python.org/issue1692335
|
| 70 |
+
self.args = (position, message)
|
| 71 |
+
|
| 72 |
+
def __str__(self):
|
| 73 |
+
return self.formatted_message
|
| 74 |
+
|
| 75 |
+
class CompileWarning(PyrexWarning):
|
| 76 |
+
|
| 77 |
+
def __init__(self, position = None, message = ""):
|
| 78 |
+
self.position = position
|
| 79 |
+
Exception.__init__(self, format_position(position) + message)
|
| 80 |
+
|
| 81 |
+
class InternalError(Exception):
|
| 82 |
+
# If this is ever raised, there is a bug in the compiler.
|
| 83 |
+
|
| 84 |
+
def __init__(self, message):
|
| 85 |
+
self.message_only = message
|
| 86 |
+
Exception.__init__(self, "Internal compiler error: %s"
|
| 87 |
+
% message)
|
| 88 |
+
|
| 89 |
+
class AbortError(Exception):
|
| 90 |
+
# Throw this to stop the compilation immediately.
|
| 91 |
+
|
| 92 |
+
def __init__(self, message):
|
| 93 |
+
self.message_only = message
|
| 94 |
+
Exception.__init__(self, "Abort error: %s" % message)
|
| 95 |
+
|
| 96 |
+
class CompilerCrash(CompileError):
|
| 97 |
+
# raised when an unexpected exception occurs in a transform
|
| 98 |
+
def __init__(self, pos, context, message, cause, stacktrace=None):
|
| 99 |
+
if message:
|
| 100 |
+
message = '\n' + message
|
| 101 |
+
else:
|
| 102 |
+
message = '\n'
|
| 103 |
+
self.message_only = message
|
| 104 |
+
if context:
|
| 105 |
+
message = "Compiler crash in %s%s" % (context, message)
|
| 106 |
+
if stacktrace:
|
| 107 |
+
import traceback
|
| 108 |
+
message += (
|
| 109 |
+
'\n\nCompiler crash traceback from this point on:\n' +
|
| 110 |
+
''.join(traceback.format_tb(stacktrace)))
|
| 111 |
+
if cause:
|
| 112 |
+
if not stacktrace:
|
| 113 |
+
message += '\n'
|
| 114 |
+
message += '%s: %s' % (cause.__class__.__name__, cause)
|
| 115 |
+
CompileError.__init__(self, pos, message)
|
| 116 |
+
# Python Exception subclass pickling is broken,
|
| 117 |
+
# see https://bugs.python.org/issue1692335
|
| 118 |
+
self.args = (pos, context, message, cause, stacktrace)
|
| 119 |
+
|
| 120 |
+
class NoElementTreeInstalledException(PyrexError):
|
| 121 |
+
"""raised when the user enabled options.gdb_debug but no ElementTree
|
| 122 |
+
implementation was found
|
| 123 |
+
"""
|
| 124 |
+
|
| 125 |
+
def open_listing_file(path, echo_to_stderr=True):
|
| 126 |
+
# Begin a new error listing. If path is None, no file
|
| 127 |
+
# is opened, the error counter is just reset.
|
| 128 |
+
if path is not None:
|
| 129 |
+
threadlocal.cython_errors_listing_file = open_new_file(path)
|
| 130 |
+
else:
|
| 131 |
+
threadlocal.cython_errors_listing_file = None
|
| 132 |
+
if echo_to_stderr:
|
| 133 |
+
threadlocal.cython_errors_echo_file = sys.stderr
|
| 134 |
+
else:
|
| 135 |
+
threadlocal.cython_errors_echo_file = None
|
| 136 |
+
threadlocal.cython_errors_count = 0
|
| 137 |
+
|
| 138 |
+
def close_listing_file():
|
| 139 |
+
if threadlocal.cython_errors_listing_file:
|
| 140 |
+
threadlocal.cython_errors_listing_file.close()
|
| 141 |
+
threadlocal.cython_errors_listing_file = None
|
| 142 |
+
|
| 143 |
+
def report_error(err, use_stack=True):
|
| 144 |
+
error_stack = threadlocal.cython_errors_stack
|
| 145 |
+
if error_stack and use_stack:
|
| 146 |
+
error_stack[-1].append(err)
|
| 147 |
+
else:
|
| 148 |
+
# See Main.py for why dual reporting occurs. Quick fix for now.
|
| 149 |
+
if err.reported: return
|
| 150 |
+
err.reported = True
|
| 151 |
+
try: line = "%s\n" % err
|
| 152 |
+
except UnicodeEncodeError:
|
| 153 |
+
# Python <= 2.5 does this for non-ASCII Unicode exceptions
|
| 154 |
+
line = format_error(getattr(err, 'message_only', "[unprintable exception message]"),
|
| 155 |
+
getattr(err, 'position', None)) + '\n'
|
| 156 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 157 |
+
if listing_file:
|
| 158 |
+
try: listing_file.write(line)
|
| 159 |
+
except UnicodeEncodeError:
|
| 160 |
+
listing_file.write(line.encode('ASCII', 'replace'))
|
| 161 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 162 |
+
if echo_file:
|
| 163 |
+
try: echo_file.write(line)
|
| 164 |
+
except UnicodeEncodeError:
|
| 165 |
+
echo_file.write(line.encode('ASCII', 'replace'))
|
| 166 |
+
threadlocal.cython_errors_count += 1
|
| 167 |
+
if Options.fast_fail:
|
| 168 |
+
raise AbortError("fatal errors")
|
| 169 |
+
|
| 170 |
+
def error(position, message):
|
| 171 |
+
#print("Errors.error:", repr(position), repr(message)) ###
|
| 172 |
+
if position is None:
|
| 173 |
+
raise InternalError(message)
|
| 174 |
+
err = CompileError(position, message)
|
| 175 |
+
if DebugFlags.debug_exception_on_error: raise Exception(err) # debug
|
| 176 |
+
report_error(err)
|
| 177 |
+
return err
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
LEVEL = 1 # warn about all errors level 1 or higher
|
| 181 |
+
|
| 182 |
+
def _write_file_encode(file, line):
|
| 183 |
+
try:
|
| 184 |
+
file.write(line)
|
| 185 |
+
except UnicodeEncodeError:
|
| 186 |
+
file.write(line.encode('ascii', 'replace'))
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
def performance_hint(position, message, env):
|
| 190 |
+
if not env.directives['show_performance_hints']:
|
| 191 |
+
return
|
| 192 |
+
warn = CompileWarning(position, message)
|
| 193 |
+
line = "performance hint: %s\n" % warn
|
| 194 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 195 |
+
if listing_file:
|
| 196 |
+
_write_file_encode(listing_file, line)
|
| 197 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 198 |
+
if echo_file:
|
| 199 |
+
_write_file_encode(echo_file, line)
|
| 200 |
+
return warn
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
def message(position, message, level=1):
|
| 204 |
+
if level < LEVEL:
|
| 205 |
+
return
|
| 206 |
+
warn = CompileWarning(position, message)
|
| 207 |
+
line = "note: %s\n" % warn
|
| 208 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 209 |
+
if listing_file:
|
| 210 |
+
_write_file_encode(listing_file, line)
|
| 211 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 212 |
+
if echo_file:
|
| 213 |
+
_write_file_encode(echo_file, line)
|
| 214 |
+
return warn
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
def warning(position, message, level=0):
|
| 218 |
+
if level < LEVEL:
|
| 219 |
+
return
|
| 220 |
+
if Options.warning_errors and position:
|
| 221 |
+
return error(position, message)
|
| 222 |
+
warn = CompileWarning(position, message)
|
| 223 |
+
line = "warning: %s\n" % warn
|
| 224 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 225 |
+
if listing_file:
|
| 226 |
+
_write_file_encode(listing_file, line)
|
| 227 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 228 |
+
if echo_file:
|
| 229 |
+
_write_file_encode(echo_file, line)
|
| 230 |
+
return warn
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def warn_once(position, message, level=0):
|
| 234 |
+
if level < LEVEL:
|
| 235 |
+
return
|
| 236 |
+
warn_once_seen = threadlocal.cython_errors_warn_once_seen
|
| 237 |
+
if message in warn_once_seen:
|
| 238 |
+
return
|
| 239 |
+
warn = CompileWarning(position, message)
|
| 240 |
+
line = "warning: %s\n" % warn
|
| 241 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 242 |
+
if listing_file:
|
| 243 |
+
_write_file_encode(listing_file, line)
|
| 244 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 245 |
+
if echo_file:
|
| 246 |
+
_write_file_encode(echo_file, line)
|
| 247 |
+
warn_once_seen.add(message)
|
| 248 |
+
return warn
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
# These functions can be used to momentarily suppress errors.
|
| 252 |
+
|
| 253 |
+
def hold_errors():
|
| 254 |
+
errors = []
|
| 255 |
+
threadlocal.cython_errors_stack.append(errors)
|
| 256 |
+
return errors
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def release_errors(ignore=False):
|
| 260 |
+
held_errors = threadlocal.cython_errors_stack.pop()
|
| 261 |
+
if not ignore:
|
| 262 |
+
for err in held_errors:
|
| 263 |
+
report_error(err)
|
| 264 |
+
|
| 265 |
+
|
| 266 |
+
def held_errors():
|
| 267 |
+
return threadlocal.cython_errors_stack[-1]
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
# same as context manager:
|
| 271 |
+
|
| 272 |
+
@contextmanager
|
| 273 |
+
def local_errors(ignore=False):
|
| 274 |
+
errors = hold_errors()
|
| 275 |
+
try:
|
| 276 |
+
yield errors
|
| 277 |
+
finally:
|
| 278 |
+
release_errors(ignore=ignore)
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
# Keep all global state in thread local storage to support parallel cythonisation in distutils.
|
| 282 |
+
|
| 283 |
+
def init_thread():
    """Initialise the error-handling state for the current thread.

    All error state lives in thread-local storage so that parallel
    cythonisation (e.g. under distutils) keeps independent state per thread.
    """
    initial_state = (
        ("cython_errors_count", 0),
        ("cython_errors_listing_file", None),
        ("cython_errors_echo_file", None),
        ("cython_errors_warn_once_seen", set()),
        ("cython_errors_stack", []),
    )
    for attr_name, value in initial_state:
        setattr(threadlocal, attr_name, value)
|
| 289 |
+
|
| 290 |
+
def reset():
    """Clear per-thread warn-once memory and drop any held-error lists."""
    threadlocal.cython_errors_warn_once_seen.clear()
    threadlocal.cython_errors_stack.clear()
|
| 293 |
+
|
| 294 |
+
def get_errors_count():
    """Return the number of errors reported so far on this thread."""
    count = threadlocal.cython_errors_count
    return count
|
venv/lib/python3.10/site-packages/Cython/Compiler/ExprNodes.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
venv/lib/python3.10/site-packages/Cython/Compiler/FlowControl.pxd
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cimport cython
|
| 2 |
+
|
| 3 |
+
from .Visitor cimport CythonTransform, TreeVisitor
|
| 4 |
+
|
| 5 |
+
cdef class ControlBlock:
    # One basic block of the control-flow graph.
    cdef public set children    # successor blocks
    cdef public set parents     # predecessor blocks
    cdef public set positions   # source positions covered by this block
    cdef public list stats      # statements/assignments, in execution order
    cdef public dict gen        # presumably entry -> generated assignment; confirm in FlowControl.py
    cdef public set bounded

    # Big integer bitsets
    cdef public object i_input
    cdef public object i_output
    cdef public object i_gen
    cdef public object i_kill
    cdef public object i_state

    cpdef bint empty(self)
    cpdef detach(self)
    cpdef add_child(self, block)
|
| 23 |
+
|
| 24 |
+
cdef class ExitBlock(ControlBlock):
    # Terminal block of the graph; overrides empty() (see FlowControl.py).
    cpdef bint empty(self)
|
| 26 |
+
|
| 27 |
+
cdef class NameAssignment:
    # Record of a single assignment (or deletion) to a name.
    cdef public bint is_arg        # assignment comes from a function argument
    cdef public bint is_deletion   # this records a `del`, not a binding
    cdef public object lhs
    cdef public object rhs
    cdef public object entry       # symbol-table entry being assigned
    cdef public object pos         # source position of the assignment
    cdef public set refs
    cdef public object bit         # bitset bit assigned to this record
    cdef public object inferred_type
    cdef public object rhs_scope
|
| 38 |
+
|
| 39 |
+
cdef class AssignmentList:
    # Group of assignments sharing a bitset position/mask.
    cdef public object bit
    cdef public object mask
    cdef public list stats
|
| 43 |
+
|
| 44 |
+
cdef class AssignmentCollector(TreeVisitor):
    # Tree visitor that accumulates assignments into a list.
    cdef list assignments
|
| 46 |
+
|
| 47 |
+
@cython.final
cdef class ControlFlow:
    # Control-flow graph plus the bookkeeping used for dataflow analysis.
    # Declarations only; implementations live in FlowControl.py.
    cdef public set blocks       # all basic blocks in the graph
    cdef public set entries      # tracked symbol-table entries
    cdef public list loops       # stack of enclosing loop descriptors
    cdef public list exceptions  # stack of enclosing exception handlers

    cdef public ControlBlock entry_point
    cdef public ExitBlock exit_point
    cdef public ControlBlock block   # block currently being built

    cdef public dict assmts

    cdef public Py_ssize_t in_try_block  # nesting depth of try blocks

    cpdef newblock(self, ControlBlock parent=*)
    cpdef nextblock(self, ControlBlock parent=*)
    cpdef bint is_tracked(self, entry)
    cpdef bint is_statically_assigned(self, entry)
    cpdef mark_position(self, node)
    cpdef mark_assignment(self, lhs, rhs, entry, rhs_scope=*)
    cpdef mark_argument(self, lhs, rhs, entry)
    cpdef mark_deletion(self, node, entry)
    cpdef mark_reference(self, node, entry)
    cpdef normalize(self)
    cpdef initialize(self)
    cpdef set map_one(self, istate, entry)
    cdef reaching_definitions(self)
|
| 75 |
+
|
| 76 |
+
cdef class Uninitialized:
    # Sentinel type: value not yet initialised (see FlowControl.py).
    pass
|
| 78 |
+
|
| 79 |
+
cdef class Unknown:
    # Sentinel type: value state cannot be determined (see FlowControl.py).
    pass
|
| 81 |
+
|
| 82 |
+
cdef class MessageCollection:
    # Deduplicated set of diagnostic messages.
    cdef set messages
|
| 84 |
+
|
| 85 |
+
@cython.final
cdef class ControlFlowAnalysis(CythonTransform):
    # Transform that builds ControlFlow graphs and runs the analysis.
    cdef object gv_ctx            # graphviz output context — confirm in FlowControl.py
    cdef object constant_folder
    cdef set reductions
    cdef list stack  # a stack of (env, flow) tuples
    cdef object env               # current scope being analysed
    cdef ControlFlow flow         # graph for the current scope
    cdef object object_expr
    cdef bint in_inplace_assignment

    cpdef mark_assignment(self, lhs, rhs=*, rhs_scope=*)
    cpdef mark_position(self, node)
|