Upload folder using huggingface_hub
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/BuildExecutable.py +170 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Cythonize.py +255 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Dependencies.py +1380 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Distutils.py +1 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Inline.py +367 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/IpythonMagic.py +572 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestCyCache.py +121 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestCythonizeArgsParser.py +482 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestDependencies.py +142 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestInline.py +112 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestIpythonMagic.py +295 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestRecythonize.py +212 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestStripLiterals.py +56 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/__init__.py +1 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/__init__.py +14 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/CodeWriter.py +820 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/AnalysedTreeTransforms.py +99 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Annotate.py +341 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/AutoDocTransforms.py +318 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Buffer.py +749 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Builtin.py +644 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/CmdLine.py +251 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Code.cp39-win_amd64.pyd +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Code.pxd +131 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Code.py +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/CodeGeneration.py +35 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/CythonScope.py +181 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Dataclass.py +839 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/DebugFlags.py +21 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Errors.py +300 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/ExprNodes.py +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FlowControl.cp39-win_amd64.pyd +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FlowControl.pxd +111 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FlowControl.py +1383 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FusedNode.cp39-win_amd64.pyd +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FusedNode.py +1015 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Future.py +16 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Interpreter.py +64 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Lexicon.py +342 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Main.py +789 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/MemoryView.py +863 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/ModuleNode.py +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Naming.py +198 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Nodes.py +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Optimize.py +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Options.py +799 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/ParseTreeTransforms.pxd +84 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/ParseTreeTransforms.py +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Parsing.cp39-win_amd64.pyd +0 -0
- .eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Parsing.pxd +205 -0
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/BuildExecutable.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Compile a Python script into an executable that embeds CPython.
|
| 3 |
+
Requires CPython to be built as a shared library ('libpythonX.Y').
|
| 4 |
+
|
| 5 |
+
Basic usage:
|
| 6 |
+
|
| 7 |
+
python -m Cython.Build.BuildExecutable [ARGS] somefile.py
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from __future__ import absolute_import
|
| 11 |
+
|
| 12 |
+
DEBUG = True
|
| 13 |
+
|
| 14 |
+
import sys
|
| 15 |
+
import os
|
| 16 |
+
if sys.version_info < (3, 9):
    # Before CPython 3.8.7, the stdlib 'sysconfig' module was unreliable for
    # these queries, so fall back to distutils and emulate the small part of
    # the 'sysconfig' API that this module uses.
    from distutils import sysconfig as _sysconfig

    class sysconfig(object):
        """Minimal stand-in exposing the subset of `sysconfig` used here."""

        @staticmethod
        def get_path(name):
            assert name == 'include'
            return _sysconfig.get_python_inc()

        get_config_var = staticmethod(_sysconfig.get_config_var)
else:
    # sysconfig can be trusted from cpython >= 3.8.7
    import sysconfig


def get_config_var(name, default=''):
    """Return the build-time configuration value *name*, or *default* when unset."""
    value = sysconfig.get_config_var(name)
    return value if value else default
# Build configuration discovered from the running interpreter.
INCDIR = sysconfig.get_path('include')          # C header directory (Python.h)
LIBDIR1 = get_config_var('LIBDIR')              # primary library directory
LIBDIR2 = get_config_var('LIBPL')               # secondary (config) library directory
PYLIB = get_config_var('LIBRARY')               # static library file name
PYLIB_DYN = get_config_var('LDLIBRARY')         # shared library file name
if PYLIB_DYN == PYLIB:
    # no shared library
    PYLIB_DYN = ''
else:
    PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0]  # 'lib(XYZ).so' -> XYZ

# Compiler/linker commands and flags; environment variables override or
# extend the values recorded at interpreter build time.
CC = get_config_var('CC', os.environ.get('CC', ''))
CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '')
LINKCC = get_config_var('LINKCC', os.environ.get('LINKCC', CC))
LINKFORSHARED = get_config_var('LINKFORSHARED')
LIBS = get_config_var('LIBS')
SYSLIBS = get_config_var('SYSLIBS')
EXE_EXT = sysconfig.get_config_var('EXE')       # executable suffix ('' or '.exe')
+
def _debug(msg, *args):
    """Write a %-formatted message to stderr when DEBUG is enabled."""
    if not DEBUG:
        return
    text = msg % args if args else msg
    sys.stderr.write(text + '\n')
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def dump_config():
    """Print the discovered build configuration to stderr (via _debug)."""
    for label, value in (
            ('INCDIR', INCDIR),
            ('LIBDIR1', LIBDIR1),
            ('LIBDIR2', LIBDIR2),
            ('PYLIB', PYLIB),
            ('PYLIB_DYN', PYLIB_DYN),
            ('CC', CC),
            ('CFLAGS', CFLAGS),
            ('LINKCC', LINKCC),
            ('LINKFORSHARED', LINKFORSHARED),
            ('LIBS', LIBS),
            ('SYSLIBS', SYSLIBS),
            ('EXE_EXT', EXE_EXT)):
        _debug('%s: %s', label, value)
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _parse_args(args):
|
| 78 |
+
cy_args = []
|
| 79 |
+
last_arg = None
|
| 80 |
+
for i, arg in enumerate(args):
|
| 81 |
+
if arg.startswith('-'):
|
| 82 |
+
cy_args.append(arg)
|
| 83 |
+
elif last_arg in ('-X', '--directive'):
|
| 84 |
+
cy_args.append(arg)
|
| 85 |
+
else:
|
| 86 |
+
input_file = arg
|
| 87 |
+
args = args[i+1:]
|
| 88 |
+
break
|
| 89 |
+
last_arg = arg
|
| 90 |
+
else:
|
| 91 |
+
raise ValueError('no input file provided')
|
| 92 |
+
|
| 93 |
+
return input_file, cy_args, args
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def runcmd(cmd, shell=True):
    """Run *cmd* (a list of arguments), exiting the process on failure."""
    command_line = ' '.join(cmd)
    _debug(command_line)
    if shell:
        # the shell variant expects a single command string, not a list
        cmd = command_line

    import subprocess
    status = subprocess.call(cmd, shell=shell)
    if status:
        sys.exit(status)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def clink(basename):
    # Link '<basename>.o' into an executable, against the Python runtime:
    # prefer the shared library when one exists, otherwise fall back to the
    # static archive, and append the extra libraries/flags CPython was
    # linked with.
    runcmd([LINKCC, '-o', basename + EXE_EXT, basename+'.o', '-L'+LIBDIR1, '-L'+LIBDIR2]
           + [PYLIB_DYN and ('-l'+PYLIB_DYN) or os.path.join(LIBDIR1, PYLIB)]
           + LIBS.split() + SYSLIBS.split() + LINKFORSHARED.split())
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def ccompile(basename):
    # Compile the generated '<basename>.c' into '<basename>.o' with the
    # interpreter's C compiler, include path and CFLAGS.
    runcmd([CC, '-c', '-o', basename+'.o', basename+'.c', '-I' + INCDIR] + CFLAGS.split())
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def cycompile(input_file, options=()):
    """Run Cython on *input_file* with '--embed', exiting on compile errors."""
    from ..Compiler import Version, CmdLine, Main
    # '--embed' makes Cython generate a main() function in the C output.
    options, sources = CmdLine.parse_command_line(list(options or ()) + ['--embed', input_file])
    _debug('Using Cython %s to compile %s', Version.version, input_file)
    result = Main.compile(sources, options)
    if result.num_errors > 0:
        sys.exit(1)
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def exec_file(program_name, args=()):
    """Run the built executable with *args*, exiting on a non-zero status."""
    command = [os.path.abspath(program_name)]
    command.extend(args)
    runcmd(command, shell=False)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def build(input_file, compiler_args=(), force=False):
    """
    Build an executable program from a Cython module.

    Returns the name of the executable file.
    """
    basename = os.path.splitext(input_file)[0]
    exe_file = basename + EXE_EXT
    # On platforms with no executable suffix the output would overwrite the
    # input; refuse rather than destroy the source.
    if not force and os.path.abspath(exe_file) == os.path.abspath(input_file):
        raise ValueError("Input and output file names are the same, refusing to overwrite")
    # Simple mtime check: skip the rebuild when the executable is newer.
    if (not force and os.path.exists(exe_file) and os.path.exists(input_file)
            and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
        _debug("File is up to date, not regenerating %s", exe_file)
        return exe_file
    # Pipeline: .py/.pyx -> .c (Cython), .c -> .o (C compiler), .o -> exe (linker).
    cycompile(input_file, compiler_args)
    ccompile(basename)
    clink(basename)
    return exe_file
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def build_and_run(args):
    """
    Build an executable program from a Cython module and run it.

    Arguments after the module name will be passed verbatimly to the program.
    """
    program_name, args = _build(args)
    exec_file(program_name, args)
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
def _build(args):
    # Split the command line, build the executable, and return its name
    # together with the remaining (program) arguments.
    input_file, cy_args, args = _parse_args(args)
    program_name = build(input_file, cy_args)
    return program_name, args
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
if __name__ == '__main__':
    # Build only; use build_and_run() to also execute the result.
    _build(sys.argv[1:])
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Cythonize.py
ADDED
|
@@ -0,0 +1,255 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import, print_function
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import shutil
|
| 5 |
+
import tempfile
|
| 6 |
+
|
| 7 |
+
from .Dependencies import cythonize, extended_iglob
|
| 8 |
+
from ..Utils import is_package_dir
|
| 9 |
+
from ..Compiler import Options
|
| 10 |
+
|
| 11 |
+
try:
    import multiprocessing
    # Oversubscribe slightly: compile jobs are partly I/O bound.
    parallel_compiles = int(multiprocessing.cpu_count() * 1.5)
except ImportError:
    # No multiprocessing available: disable parallel builds.
    multiprocessing = None
    parallel_compiles = 0
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class _FakePool(object):
|
| 20 |
+
def map_async(self, func, args):
|
| 21 |
+
try:
|
| 22 |
+
from itertools import imap
|
| 23 |
+
except ImportError:
|
| 24 |
+
imap=map
|
| 25 |
+
for _ in imap(func, args):
|
| 26 |
+
pass
|
| 27 |
+
|
| 28 |
+
def close(self):
|
| 29 |
+
pass
|
| 30 |
+
|
| 31 |
+
def terminate(self):
|
| 32 |
+
pass
|
| 33 |
+
|
| 34 |
+
def join(self):
|
| 35 |
+
pass
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def find_package_base(path):
    """Walk upwards from *path* out of the enclosing package.

    Returns (base_dir, package_path): base_dir is the first ancestor that is
    not itself a package directory, package_path the '/'-joined trail of
    package components below it.
    """
    base_dir, package_path = os.path.split(path)
    while is_package_dir(base_dir):
        base_dir, package_name = os.path.split(base_dir)
        package_path = package_name + '/' + package_path
    return base_dir, package_path
|
| 44 |
+
|
| 45 |
+
def cython_compile(path_pattern, options):
    # Expand the glob pattern and compile every matching file or directory.
    all_paths = map(os.path.abspath, extended_iglob(path_pattern))
    _cython_compile_files(all_paths, options)
|
| 48 |
+
|
| 49 |
+
def _cython_compile_files(all_paths, options):
    # Cythonize (and optionally build) the given absolute paths.  A shared
    # worker pool is created lazily on first use and reused across paths;
    # it is terminated on error and drained on success.
    pool = None
    try:
        for path in all_paths:
            if options.build_inplace:
                # Find the directory that hosts the outermost package, so
                # that in-place builds land next to their sources.
                base_dir = path
                while not os.path.isdir(base_dir) or is_package_dir(base_dir):
                    base_dir = os.path.dirname(base_dir)
            else:
                base_dir = None

            if os.path.isdir(path):
                # recursively compiling a package
                paths = [os.path.join(path, '**', '*.{py,pyx}')]
            else:
                # assume it's a file(-like thing)
                paths = [path]

            ext_modules = cythonize(
                paths,
                nthreads=options.parallel,
                exclude_failures=options.keep_going,
                exclude=options.excludes,
                compiler_directives=options.directives,
                compile_time_env=options.compile_time_env,
                force=options.force,
                quiet=options.quiet,
                depfile=options.depfile,
                language=options.language,
                **options.options)

            if ext_modules and options.build:
                if len(ext_modules) > 1 and options.parallel > 1:
                    if pool is None:
                        try:
                            pool = multiprocessing.Pool(options.parallel)
                        except OSError:
                            # e.g. no os.fork() available: degrade to serial
                            pool = _FakePool()
                    pool.map_async(run_distutils, [
                        (base_dir, [ext]) for ext in ext_modules])
                else:
                    run_distutils((base_dir, ext_modules))
    except:
        if pool is not None:
            pool.terminate()
        raise
    else:
        if pool is not None:
            pool.close()
            pool.join()
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def run_distutils(args):
    """Build the given extension modules in place via distutils/setuptools.

    *args* is a (base_dir, ext_modules) tuple; when base_dir is set it
    becomes the working directory and hosts a temporary build directory
    that is removed afterwards.
    """
    try:
        from distutils.core import setup
    except ImportError:
        # Python 3.12+ removed distutils; setuptools provides a replacement.
        try:
            from setuptools import setup
        except ImportError:
            raise ImportError("'distutils' is not available. Please install 'setuptools' for binary builds.")

    base_dir, ext_modules = args
    script_args = ['build_ext', '-i']
    cwd = os.getcwd()
    temp_dir = None
    try:
        if base_dir:
            os.chdir(base_dir)
            temp_dir = tempfile.mkdtemp(dir=base_dir)
            script_args.extend(['--build-temp', temp_dir])
        setup(
            script_name='setup.py',
            script_args=script_args,
            ext_modules=ext_modules,
        )
    finally:
        if base_dir:
            # Restore the working directory before removing the temp dir.
            os.chdir(cwd)
            if temp_dir and os.path.isdir(temp_dir):
                shutil.rmtree(temp_dir)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def create_args_parser():
    """Create the argument parser for the 'cythonize' command line tool."""
    from argparse import ArgumentParser, RawDescriptionHelpFormatter
    from ..Compiler.CmdLine import ParseDirectivesAction, ParseOptionsAction, ParseCompileTimeEnvAction

    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        epilog="""\
Environment variables:
  CYTHON_FORCE_REGEN: if set to 1, forces cythonize to regenerate the output files regardless
  of modification times and changes.
Environment variables accepted by setuptools are supported to configure the C compiler and build:
  https://setuptools.pypa.io/en/latest/userguide/ext_modules.html#compiler-and-linker-options"""
    )

    parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
                        dest='directives', default={}, type=str,
                        action=ParseDirectivesAction,
                        help='set a compiler directive')
    parser.add_argument('-E', '--compile-time-env', metavar='NAME=VALUE,...',
                        dest='compile_time_env', default={}, type=str,
                        action=ParseCompileTimeEnvAction,
                        help='set a compile time environment variable')
    parser.add_argument('-s', '--option', metavar='NAME=VALUE',
                        dest='options', default={}, type=str,
                        action=ParseOptionsAction,
                        help='set a cythonize option')
    parser.add_argument('-2', dest='language_level', action='store_const', const=2, default=None,
                        help='use Python 2 syntax mode by default')
    parser.add_argument('-3', dest='language_level', action='store_const', const=3,
                        help='use Python 3 syntax mode by default')
    parser.add_argument('--3str', dest='language_level', action='store_const', const='3str',
                        help='use Python 3 syntax mode by default')
    parser.add_argument('-+', '--cplus', dest='language', action='store_const', const='c++', default=None,
                        help='Compile as C++ rather than C')
    parser.add_argument('-a', '--annotate', action='store_const', const='default', dest='annotate',
                        help='Produce a colorized HTML version of the source.')
    parser.add_argument('--annotate-fullc', action='store_const', const='fullc', dest='annotate',
                        help='Produce a colorized HTML version of the source '
                             'which includes entire generated C/C++-code.')
    parser.add_argument('-x', '--exclude', metavar='PATTERN', dest='excludes',
                        action='append', default=[],
                        help='exclude certain file patterns from the compilation')

    parser.add_argument('-b', '--build', dest='build', action='store_true', default=None,
                        help='build extension modules using distutils/setuptools')
    parser.add_argument('-i', '--inplace', dest='build_inplace', action='store_true', default=None,
                        help='build extension modules in place using distutils/setuptools (implies -b)')
    # BUG FIX: the original read "... % parallel_compiles or 1", which parses
    # as "(fmt % parallel_compiles) or 1" because '%' binds tighter than 'or';
    # the formatted string is always truthy, so the intended fallback to 1
    # (when multiprocessing is unavailable and parallel_compiles == 0) never
    # applied and the help text showed "default: 0".
    parser.add_argument('-j', '--parallel', dest='parallel', metavar='N',
                        type=int, default=parallel_compiles,
                        help='run builds in N parallel jobs (default: %d)' %
                             (parallel_compiles or 1))
    parser.add_argument('-f', '--force', dest='force', action='store_true', default=None,
                        help='force recompilation')
    parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', default=None,
                        help='be less verbose during compilation')

    parser.add_argument('--lenient', dest='lenient', action='store_true', default=None,
                        help='increase Python compatibility by ignoring some compile time errors')
    parser.add_argument('-k', '--keep-going', dest='keep_going', action='store_true', default=None,
                        help='compile as much as possible, ignore compilation failures')
    parser.add_argument('--no-docstrings', dest='no_docstrings', action='store_true', default=None,
                        help='strip docstrings')
    parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
    parser.add_argument('sources', nargs='*')
    return parser
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def parse_args_raw(parser, args):
    """Parse *args*, recovering interspersed positional arguments.

    argparse leaves positionals that follow an option in the 'unknown' list;
    fold those back into the sources.  Any unknown option is an error.
    Returns (options, sources) with 'sources' removed from the namespace.
    """
    options, unknown = parser.parse_known_args(args)
    sources = options.sources
    for extra in unknown:
        if not extra.startswith('-'):
            sources.append(extra)
        else:
            parser.error("unknown option " + extra)
    del options.sources
    return (options, sources)
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
def parse_args(args):
    """Parse the cythonize command line and apply global option side effects.

    Returns (options, source_args).  NOTE: mutates the module-global
    compiler ``Options`` for --lenient, --annotate and --no-docstrings.
    """
    parser = create_args_parser()
    options, args = parse_args_raw(parser, args)

    if not args:
        parser.error("no source files provided")
    if options.build_inplace:
        # -i implies -b
        options.build = True
    if multiprocessing is None:
        options.parallel = 0
    if options.language_level:
        assert options.language_level in (2, 3, '3str')
        options.options['language_level'] = options.language_level

    if options.lenient:
        # increase Python compatibility by ignoring compile time errors
        Options.error_on_unknown_names = False
        Options.error_on_uninitialized = False

    if options.annotate:
        Options.annotate = options.annotate

    if options.no_docstrings:
        Options.docstrings = False

    return options, args
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
def main(args=None):
    """Entry point for the 'cythonize' command line tool."""
    options, patterns = parse_args(args)

    source_files = []
    for pattern in patterns:
        matches = [os.path.abspath(match) for match in extended_iglob(pattern)]
        if not matches:
            import sys
            message = "{}: No such file or directory: '{}'".format(sys.argv[0], pattern)
            print(message, file=sys.stderr)
            sys.exit(1)
        source_files.extend(matches)
    _cython_compile_files(source_files, options)
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
if __name__ == '__main__':
    # Allow running as 'python -m Cython.Build.Cythonize'.
    main()
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Dependencies.py
ADDED
|
@@ -0,0 +1,1380 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import, print_function
|
| 2 |
+
|
| 3 |
+
import cython
|
| 4 |
+
from .. import __version__
|
| 5 |
+
|
| 6 |
+
import collections
|
| 7 |
+
import contextlib
|
| 8 |
+
import hashlib
|
| 9 |
+
import os
|
| 10 |
+
import shutil
|
| 11 |
+
import subprocess
|
| 12 |
+
import re, sys, time
|
| 13 |
+
from glob import iglob
|
| 14 |
+
from io import open as io_open
|
| 15 |
+
from os.path import relpath as _relpath
|
| 16 |
+
import zipfile
|
| 17 |
+
|
| 18 |
+
try:
|
| 19 |
+
from collections.abc import Iterable
|
| 20 |
+
except ImportError:
|
| 21 |
+
from collections import Iterable
|
| 22 |
+
|
| 23 |
+
try:
|
| 24 |
+
import gzip
|
| 25 |
+
gzip_open = gzip.open
|
| 26 |
+
gzip_ext = '.gz'
|
| 27 |
+
except ImportError:
|
| 28 |
+
gzip_open = open
|
| 29 |
+
gzip_ext = ''
|
| 30 |
+
|
| 31 |
+
try:
|
| 32 |
+
import zlib
|
| 33 |
+
zipfile_compression_mode = zipfile.ZIP_DEFLATED
|
| 34 |
+
except ImportError:
|
| 35 |
+
zipfile_compression_mode = zipfile.ZIP_STORED
|
| 36 |
+
|
| 37 |
+
try:
|
| 38 |
+
import pythran
|
| 39 |
+
except:
|
| 40 |
+
pythran = None
|
| 41 |
+
|
| 42 |
+
from .. import Utils
|
| 43 |
+
from ..Utils import (cached_function, cached_method, path_exists,
|
| 44 |
+
safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, write_depfile)
|
| 45 |
+
from ..Compiler import Errors
|
| 46 |
+
from ..Compiler.Main import Context
|
| 47 |
+
from ..Compiler.Options import (CompilationOptions, default_options,
|
| 48 |
+
get_directive_defaults)
|
| 49 |
+
|
| 50 |
+
# Cached wrappers around frequently-called path helpers; dependency scanning
# hits the same paths repeatedly, so memoization pays off.
join_path = cached_function(os.path.join)
copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
safe_makedirs_once = cached_function(safe_makedirs)
|
| 53 |
+
|
| 54 |
+
if sys.version_info[0] < 3:
    # stupid Py2 distutils enforces str type in list of sources
    _fs_encoding = sys.getfilesystemencoding()
    if _fs_encoding is None:
        _fs_encoding = sys.getdefaultencoding()
    def encode_filename_in_py2(filename):
        # Encode unicode file names to the filesystem encoding for distutils.
        if not isinstance(filename, bytes):
            return filename.encode(_fs_encoding)
        return filename
else:
    def encode_filename_in_py2(filename):
        # No-op on Py3, where distutils accepts str filenames directly.
        return filename
    basestring = str
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def _make_relative(file_paths, base=None):
|
| 70 |
+
if not base:
|
| 71 |
+
base = os.getcwd()
|
| 72 |
+
if base[-1] != os.path.sep:
|
| 73 |
+
base += os.path.sep
|
| 74 |
+
return [_relpath(path, base) if path.startswith(base) else path
|
| 75 |
+
for path in file_paths]
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def extended_iglob(pattern):
    """Yield paths matching *pattern*, extending glob.iglob with two features:
    '{a,b,...}' alternation groups and recursive '**/' directory matching.
    """
    if '{' in pattern:
        m = re.match('(.*){([^}]+)}(.*)', pattern)
        if m:
            before, switch, after = m.groups()
            # Expand every alternative of the {a,b,...} group recursively.
            for case in switch.split(','):
                for path in extended_iglob(before + case + after):
                    yield path
            return

    # We always accept '/' and also '\' on Windows,
    # because '/' is generally common for relative paths.
    if '**/' in pattern or os.sep == '\\' and '**\\' in pattern:
        seen = set()
        first, rest = re.split(r'\*\*[%s]' % ('/\\\\' if os.sep == '\\' else '/'), pattern, 1)
        if first:
            first = iglob(first + os.sep)
        else:
            first = ['']
        # 'seen' deduplicates: the same file can be reached both directly
        # ('**' matching zero directories) and via the one-level-deeper
        # recursion below.
        for root in first:
            for path in extended_iglob(join_path(root, rest)):
                if path not in seen:
                    seen.add(path)
                    yield path
            for path in extended_iglob(join_path(root, '*', '**', rest)):
                if path not in seen:
                    seen.add(path)
                    yield path
    else:
        for path in iglob(pattern):
            yield path
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def nonempty(it, error_msg="expected non-empty iterator"):
    """Yield every item of *it*; raise ValueError(error_msg) if *it* was empty."""
    produced_any = False
    for item in it:
        produced_any = True
        yield item
    if not produced_any:
        raise ValueError(error_msg)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
@cached_function
def file_hash(filename):
    """Return a SHA-1 hex digest covering both the file's (normalized) path
    and its contents.
    """
    path = os.path.normpath(filename)
    # Length-prefix the path and hash it too, so that renaming a file changes
    # its hash even when the content is identical.
    prefix = ('%d:%s' % (len(path), path)).encode("UTF-8")
    m = hashlib.sha1(prefix)
    with open(path, 'rb') as f:
        # Read in bounded chunks so large files do not blow up memory.
        data = f.read(65000)
        while data:
            m.update(data)
            data = f.read(65000)
    return m.hexdigest()
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def update_pythran_extension(ext):
    """Extend the distutils Extension *ext* in place with Pythran's required
    build settings (include dirs, compile/link flags, macros, C++ language).

    Raises RuntimeError when Pythran is not installed.
    """
    if pythran is None:
        raise RuntimeError("You first need to install Pythran to use the np_pythran directive.")
    try:
        pythran_ext = pythran.config.make_extension(python=True)
    except TypeError:  # older pythran version only
        pythran_ext = pythran.config.make_extension()

    ext.include_dirs.extend(pythran_ext['include_dirs'])
    ext.extra_compile_args.extend(pythran_ext['extra_compile_args'])
    ext.extra_link_args.extend(pythran_ext['extra_link_args'])
    ext.define_macros.extend(pythran_ext['define_macros'])
    ext.undef_macros.extend(pythran_ext['undef_macros'])
    ext.library_dirs.extend(pythran_ext['library_dirs'])
    ext.libraries.extend(pythran_ext['libraries'])
    ext.language = 'c++'

    # These options are not compatible with the way normal Cython extensions work
    for bad_option in ["-fwhole-program", "-fvisibility=hidden"]:
        try:
            ext.extra_compile_args.remove(bad_option)
        except ValueError:
            pass
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def parse_list(s):
    """
    >>> parse_list("")
    []
    >>> parse_list("a")
    ['a']
    >>> parse_list("a b c")
    ['a', 'b', 'c']
    >>> parse_list("[a, b, c]")
    ['a', 'b', 'c']
    >>> parse_list('a " " b')
    ['a', ' ', 'b']
    >>> parse_list('[a, ",a", "a,", ",", ]')
    ['a', ',a', 'a,', ',']
    """
    # Bracketed lists are comma-separated; bare lists are space-separated.
    if len(s) >= 2 and s[0] == '[' and s[-1] == ']':
        delimiter = ','
        s = s[1:-1]
    else:
        delimiter = ' '
    # Replace string literals with labels first, so delimiters inside quotes
    # cannot split an item.
    s, literals = strip_string_literals(s)

    def unquote(item):
        item = item.strip()
        if item[0] in "'\"":
            # Quoted item: look the original text up via its label.
            return literals[item[1:-1]]
        return item

    return [unquote(item) for item in s.split(delimiter) if item.strip()]
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
# Sentinels describing how a distutils setting propagates from a dependency
# (e.g. a cimported .pxd) into the depending module:
#   transitive_str  - single string, inherited unless already set
#   transitive_list - list, merged without duplicates
#   bool_or         - boolean, OR-ed together
transitive_str = object()
transitive_list = object()
bool_or = object()

# The "# distutils: <key>=<value>" settings recognized in source file
# headers, mapped to their value type / propagation behaviour.
distutils_settings = {
    'name': str,
    'sources': list,
    'define_macros': list,
    'undef_macros': list,
    'libraries': transitive_list,
    'library_dirs': transitive_list,
    'runtime_library_dirs': transitive_list,
    'include_dirs': transitive_list,
    'extra_objects': list,
    'extra_compile_args': transitive_list,
    'extra_link_args': transitive_list,
    'export_symbols': list,
    'depends': transitive_list,
    'language': transitive_str,
    'np_pythran': bool_or
}
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def _legacy_strtobool(val):
|
| 212 |
+
# Used to be "distutils.util.strtobool", adapted for deprecation warnings.
|
| 213 |
+
if val == "True":
|
| 214 |
+
return True
|
| 215 |
+
elif val == "False":
|
| 216 |
+
return False
|
| 217 |
+
|
| 218 |
+
import warnings
|
| 219 |
+
warnings.warn("The 'np_python' option requires 'True' or 'False'", category=DeprecationWarning)
|
| 220 |
+
val = val.lower()
|
| 221 |
+
if val in ('y', 'yes', 't', 'true', 'on', '1'):
|
| 222 |
+
return True
|
| 223 |
+
elif val in ('n', 'no', 'f', 'false', 'off', '0'):
|
| 224 |
+
return False
|
| 225 |
+
else:
|
| 226 |
+
raise ValueError("invalid truth value %r" % (val,))
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
def line_iter(source):
    """Iterate over the lines of *source*.

    *source* may be a string (split on '\\n', trailing newlines stripped)
    or any iterable of lines (e.g. an open file), which is passed through.
    """
    if not isinstance(source, basestring):
        # Already line-iterable - yield as-is.
        for line in source:
            yield line
        return
    start = 0
    while True:
        end = source.find('\n', start)
        if end < 0:
            # Final (possibly empty) line without a trailing newline.
            yield source[start:]
            return
        yield source[start:end]
        start = end + 1
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
class DistutilsInfo(object):
    """Distutils build settings for one source file, parsed from its leading
    "# distutils: key=value" comments (or copied from an Extension object),
    with support for merging in the settings of its dependencies.
    """

    def __init__(self, source=None, exn=None):
        # Maps keys of 'distutils_settings' to their parsed values.
        self.values = {}
        if source is not None:
            # Only the initial comment block is scanned; the first
            # non-comment line terminates the search.
            for line in line_iter(source):
                line = line.lstrip()
                if not line:
                    continue
                if line[0] != '#':
                    break
                line = line[1:].lstrip()
                kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None)
                if kind is not None:
                    key, _, value = [s.strip() for s in line[len(kind):].partition('=')]
                    type = distutils_settings.get(key, None)
                    # "cython:" directives that are not distutils settings are
                    # handled elsewhere; skip them here.
                    if line.startswith("cython:") and type is None: continue
                    if type in (list, transitive_list):
                        value = parse_list(value)
                        if key == 'define_macros':
                            # distutils expects (name, value-or-None) tuples.
                            value = [tuple(macro.split('=', 1))
                                     if '=' in macro else (macro, None)
                                     for macro in value]
                    if type is bool_or:
                        value = _legacy_strtobool(value)
                    self.values[key] = value
        elif exn is not None:
            # Seed from an existing Extension-like object instead.
            for key in distutils_settings:
                if key in ('name', 'sources','np_pythran'):
                    continue
                value = getattr(exn, key, None)
                if value:
                    self.values[key] = value

    def merge(self, other):
        """Fold the transitive settings of *other* into self (in place)."""
        if other is None:
            return self
        for key, value in other.values.items():
            type = distutils_settings[key]
            if type is transitive_str and key not in self.values:
                self.values[key] = value
            elif type is transitive_list:
                if key in self.values:
                    # Change a *copy* of the list (Trac #845)
                    all = self.values[key][:]
                    for v in value:
                        if v not in all:
                            all.append(v)
                    value = all
                self.values[key] = value
            elif type is bool_or:
                self.values[key] = self.values.get(key, False) | value
        return self

    def subs(self, aliases):
        """Return a copy with values looked up in the *aliases* mapping."""
        if aliases is None:
            return self
        resolved = DistutilsInfo()
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                new_value_list = []
                for v in value:
                    if v in aliases:
                        v = aliases[v]
                    if isinstance(v, list):
                        # One alias may expand to several values.
                        new_value_list += v
                    else:
                        new_value_list.append(v)
                value = new_value_list
            else:
                if value in aliases:
                    value = aliases[value]
            resolved.values[key] = value
        return resolved

    def apply(self, extension):
        """Apply the collected settings to a distutils Extension (in place)."""
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                # Append to, rather than replace, the extension's own lists.
                value = getattr(extension, key) + list(value)
            setattr(extension, key, value)
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t,
               single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t,
               hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t,
               k=cython.Py_ssize_t, counter=cython.Py_ssize_t, quote_len=cython.Py_ssize_t)
def strip_string_literals(code, prefix='__Pyx_L'):
    """
    Normalizes every string literal to be of the form '__Pyx_Lxxx',
    returning the normalized code and a mapping of labels to
    string literals.
    """
    new_code = []
    literals = {}
    counter = 0
    start = q = 0
    in_quote = False
    hash_mark = single_q = double_q = -1
    code_len = len(code)
    quote_type = None
    quote_len = -1

    while True:
        # Refresh the cached positions of the next '#', "'" and '"' past q.
        if hash_mark < q:
            hash_mark = code.find('#', q)
        if single_q < q:
            single_q = code.find("'", q)
        if double_q < q:
            double_q = code.find('"', q)
        # q becomes the position of the nearest quote character (either kind).
        q = min(single_q, double_q)
        if q == -1:
            q = max(single_q, double_q)

        # We're done.
        if q == -1 and hash_mark == -1:
            new_code.append(code[start:])
            break

        # Try to close the quote.
        elif in_quote:
            if code[q-1] == u'\\':
                # Count preceding backslashes; an odd number escapes this
                # quote character, so it cannot close the literal.
                k = 2
                while q >= k and code[q-k] == u'\\':
                    k += 1
                if k % 2 == 0:
                    q += 1
                    continue
            if code[q] == quote_type and (
                    quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])):
                counter += 1
                label = "%s%s_" % (prefix, counter)
                # Store the literal's body and emit quote + label + quote.
                literals[label] = code[start+quote_len:q]
                full_quote = code[q:q+quote_len]
                new_code.append(full_quote)
                new_code.append(label)
                new_code.append(full_quote)
                q += quote_len
                in_quote = False
                start = q
            else:
                q += 1

        # Process comment.
        elif -1 != hash_mark and (hash_mark < q or q == -1):
            new_code.append(code[start:hash_mark+1])
            end = code.find('\n', hash_mark)
            counter += 1
            label = "%s%s_" % (prefix, counter)
            if end == -1:
                end_or_none = None
            else:
                end_or_none = end
            # The comment text (after '#') is replaced by a label as well.
            literals[label] = code[hash_mark+1:end_or_none]
            new_code.append(label)
            if end == -1:
                break
            start = q = end

        # Open the quote.
        else:
            # Three identical quote chars start a triple-quoted literal.
            if code_len >= q+3 and (code[q] == code[q+1] == code[q+2]):
                quote_len = 3
            else:
                quote_len = 1
            in_quote = True
            quote_type = code[q]
            new_code.append(code[start:q])
            start = q
            q += quote_len

    return "".join(new_code), literals
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
# We need to allow spaces to allow for conditional compilation like
# IF ...:
#     cimport ...
# Matches, per alternative: "from X cimport", "cimport X[, Y...]",
# "cdef extern from '<file>'", and "include '<file>'".
dependency_regex = re.compile(r"(?:^\s*from +([0-9a-zA-Z_.]+) +cimport)|"
                              r"(?:^\s*cimport +([0-9a-zA-Z_.]+(?: *, *[0-9a-zA-Z_.]+)*))|"
                              r"(?:^\s*cdef +extern +from +['\"]([^'\"]+)['\"])|"
                              r"(?:^\s*include +['\"]([^'\"]+)['\"])", re.M)
# Matches the name list following "from X cimport": either a parenthesized
# (possibly multi-line) list or a bare single-line list.
dependency_after_from_regex = re.compile(
    r"(?:^\s+\(([0-9a-zA-Z_., ]*)\)[#\n])|"
    r"(?:^\s+([0-9a-zA-Z_., ]*)[#\n])",
    re.M)
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
def normalize_existing(base_path, rel_paths):
    """Resolve *rel_paths* against the directory of *base_path*.

    Deduplicates the paths and delegates to the cached normalize_existing0().
    """
    unique_paths = tuple(set(rel_paths))
    return normalize_existing0(os.path.dirname(base_path), unique_paths)
|
| 435 |
+
|
| 436 |
+
|
| 437 |
+
@cached_function
def normalize_existing0(base_dir, rel_paths):
    """Normalize each relative path in *rel_paths* against *base_dir*.

    A relative path is replaced by the normalized ``os.path.join(base_dir,
    rel)`` whenever that file exists; absolute or unresolvable paths pass
    through unchanged.

    Returns ``(normalized, needed_base)`` where ``needed_base`` is
    ``base_dir`` if at least one path was resolved against it, and ``None``
    otherwise (e.g. when all paths were already absolute).
    """
    normalized = []
    needed_base = None
    for rel in rel_paths:
        candidate = None if os.path.isabs(rel) else join_path(base_dir, rel)
        if candidate is not None and path_exists(candidate):
            needed_base = base_dir
            normalized.append(os.path.normpath(candidate))
        else:
            normalized.append(rel)
    return (normalized, needed_base)
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
def resolve_depends(depends, include_dirs):
    """Resolve each dependency name against *include_dirs*, dropping any
    that cannot be found (including system-style "<...>" dependencies).
    """
    include_dirs = tuple(include_dirs)
    candidates = (resolve_depend(dep, include_dirs) for dep in depends)
    return [path for path in candidates if path is not None]
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
@cached_function
def resolve_depend(depend, include_dirs):
    """Find the file *depend* in *include_dirs*.

    Returns the normalized path of the first match, or None when the
    dependency is a system-style "<...>" name or cannot be found.
    """
    # startswith/endswith instead of depend[0]/depend[-1]: avoids an
    # IndexError when an empty dependency name slips through.
    if depend.startswith('<') and depend.endswith('>'):
        return None
    for dir in include_dirs:
        path = join_path(dir, depend)
        if path_exists(path):
            return os.path.normpath(path)
    return None
|
| 484 |
+
|
| 485 |
+
|
| 486 |
+
@cached_function
def package(filename):
    """Return the package path of *filename* as a tuple of package names.

    Walks up the directory tree for as long as each parent directory is a
    package (contains an __init__ file).
    """
    dir = os.path.dirname(os.path.abspath(str(filename)))
    # 'dir != filename' guards against looping at the filesystem root,
    # where dirname() returns its input unchanged.
    if dir != filename and is_package_dir(dir):
        return package(dir) + (os.path.basename(dir),)
    else:
        return ()
|
| 493 |
+
|
| 494 |
+
|
| 495 |
+
@cached_function
def fully_qualified_name(filename):
    """Return the dotted module name of *filename*, including its packages."""
    module_name = os.path.splitext(os.path.basename(filename))[0]
    return '.'.join(package(filename) + (module_name,))
|
| 499 |
+
|
| 500 |
+
|
| 501 |
+
@cached_function
def parse_dependencies(source_filename):
    """Scan a Cython source file for its compile-time dependencies.

    Returns ``(cimports, includes, externs, distutils_info)``: the cimported
    module names, the textually included file names, the files named in
    "cdef extern from" blocks, and the DistutilsInfo parsed from the file's
    header comments.
    """
    # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    with Utils.open_source_file(source_filename, error_handling='ignore') as fh:
        source = fh.read()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    # Fold line continuations and tabs away so the regexes below see
    # single-line statements.
    source = source.replace('\\\n', ' ').replace('\t', ' ')

    # TODO: pure mode
    cimports = []
    includes = []
    externs = []
    for m in dependency_regex.finditer(source):
        cimport_from, cimport_list, extern, include = m.groups()
        if cimport_from:
            cimports.append(cimport_from)
            # Also record "package.name" for each name imported from it.
            m_after_from = dependency_after_from_regex.search(source, pos=m.end())
            if m_after_from:
                multiline, one_line = m_after_from.groups()
                subimports = multiline or one_line
                cimports.extend("{0}.{1}".format(cimport_from, s.strip())
                                for s in subimports.split(','))

        elif cimport_list:
            cimports.extend(x.strip() for x in cimport_list.split(","))
        elif extern:
            # File names were replaced by labels; map back to the literal text.
            externs.append(literals[extern])
        else:
            includes.append(literals[include])
    return cimports, includes, externs, distutils_info
|
| 534 |
+
|
| 535 |
+
|
| 536 |
+
class DependencyTree(object):
    """Resolves and caches the transitive dependencies (cimports, textual
    includes and "cdef extern from" files) of Cython sources within a
    compilation Context.
    """

    def __init__(self, context, quiet=False):
        self.context = context
        self.quiet = quiet
        # Per-(extract, merge) memo dictionaries for transitive_merge().
        self._transitive_cache = {}

    def parse_dependencies(self, source_filename):
        """Return the direct dependencies of *source_filename* (cached)."""
        if path_exists(source_filename):
            # Normalize so the module-level cache is hit consistently.
            source_filename = os.path.normpath(source_filename)
        return parse_dependencies(source_filename)

    @cached_method
    def included_files(self, filename):
        """Return the set of files textually included by *filename*,
        transitively.
        """
        # This is messy because included files are textually included, resolving
        # cimports (but not includes) relative to the including file.
        all = set()
        for include in self.parse_dependencies(filename)[1]:
            include_path = join_path(os.path.dirname(filename), include)
            if not path_exists(include_path):
                include_path = self.context.find_include_file(include, source_file_path=filename)
            if include_path:
                if '.' + os.path.sep in include_path:
                    include_path = os.path.normpath(include_path)
                all.add(include_path)
                all.update(self.included_files(include_path))
            elif not self.quiet:
                # Bug fix: the arguments were swapped - it is the *include*
                # that could not be located, referenced from *filename*.
                print(u"Unable to locate '%s' referenced from '%s'" % (include, filename))
        return all

    @cached_method
    def cimports_externs_incdirs(self, filename):
        """Return (cimports, externs, incdirs) for *filename*, folding in the
        contributions of all textually included files.
        """
        # This is really ugly. Nested cimports are resolved with respect to the
        # includer, but includes are resolved with respect to the includee.
        cimports, includes, externs = self.parse_dependencies(filename)[:3]
        cimports = set(cimports)
        externs = set(externs)
        incdirs = set()
        for include in self.included_files(filename):
            included_cimports, included_externs, included_incdirs = self.cimports_externs_incdirs(include)
            cimports.update(included_cimports)
            externs.update(included_externs)
            incdirs.update(included_incdirs)
        externs, incdir = normalize_existing(filename, externs)
        if incdir:
            incdirs.add(incdir)
        return tuple(cimports), externs, incdirs

    def cimports(self, filename):
        """Return the tuple of all (transitively) cimported module names."""
        return self.cimports_externs_incdirs(filename)[0]

    def package(self, filename):
        return package(filename)

    def fully_qualified_name(self, filename):
        return fully_qualified_name(filename)

    @cached_method
    def find_pxd(self, module, filename=None):
        """Locate the .pxd file for *module*, resolving relative imports
        against *filename*. Returns None when it cannot be found.
        """
        is_relative = module[0] == '.'
        if is_relative and not filename:
            raise NotImplementedError("New relative imports.")
        if filename is not None:
            module_path = module.split('.')
            if is_relative:
                module_path.pop(0)  # just explicitly relative
            package_path = list(self.package(filename))
            # Each leading empty component (extra dot) climbs one package.
            while module_path and not module_path[0]:
                try:
                    package_path.pop()
                except IndexError:
                    return None  # FIXME: error?
                module_path.pop(0)
            relative = '.'.join(package_path + module_path)
            pxd = self.context.find_pxd_file(relative, source_file_path=filename)
            if pxd:
                return pxd
        if is_relative:
            return None  # FIXME: error?
        return self.context.find_pxd_file(module, source_file_path=filename)

    @cached_method
    def cimported_files(self, filename):
        """Return the tuple of .pxd files *filename* depends on via cimports
        (including its own matching .pxd, if any).
        """
        filename_root, filename_ext = os.path.splitext(filename)
        if filename_ext in ('.pyx', '.py') and path_exists(filename_root + '.pxd'):
            pxd_list = [filename_root + '.pxd']
        else:
            pxd_list = []
        # Cimports generates all possible combinations package.module
        # when imported as from package cimport module.
        for module in self.cimports(filename):
            if module[:7] == 'cython.' or module == 'cython':
                continue
            pxd_file = self.find_pxd(module, filename)
            if pxd_file is not None:
                pxd_list.append(pxd_file)
        return tuple(pxd_list)

    @cached_method
    def immediate_dependencies(self, filename):
        """Return the set of files *filename* directly depends on, including
        the file itself.
        """
        # Restored from a corrupted line: the dependency set is seeded with
        # the file itself.
        all_deps = {filename}
        all_deps.update(self.cimported_files(filename))
        all_deps.update(self.included_files(filename))
        return all_deps

    def all_dependencies(self, filename):
        """Return the transitive closure of the file's dependencies."""
        return self.transitive_merge(filename, self.immediate_dependencies, set.union)

    @cached_method
    def timestamp(self, filename):
        return os.path.getmtime(filename)

    def extract_timestamp(self, filename):
        return self.timestamp(filename), filename

    def newest_dependency(self, filename):
        """Return (mtime, path) of the most recently modified dependency."""
        return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)])

    def transitive_fingerprint(self, filename, module, compilation_options):
        r"""
        Return a fingerprint of a cython file that is about to be cythonized.

        Fingerprints are looked up in future compilations. If the fingerprint
        is found, the cythonization can be skipped. The fingerprint must
        incorporate everything that has an influence on the generated code.
        """
        try:
            m = hashlib.sha1(__version__.encode('UTF-8'))
            m.update(file_hash(filename).encode('UTF-8'))
            for x in sorted(self.all_dependencies(filename)):
                if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'):
                    m.update(file_hash(x).encode('UTF-8'))
            # Include the module attributes that change the compilation result
            # in the fingerprint. We do not iterate over module.__dict__ and
            # include almost everything here as users might extend Extension
            # with arbitrary (random) attributes that would lead to cache
            # misses.
            m.update(str((
                module.language,
                getattr(module, 'py_limited_api', False),
                getattr(module, 'np_pythran', False)
            )).encode('UTF-8'))

            m.update(compilation_options.get_fingerprint().encode('UTF-8'))
            return m.hexdigest()
        except IOError:
            return None

    def distutils_info0(self, filename):
        """Build the DistutilsInfo for a single file, adding its externs as
        'depends' and their directories as 'include_dirs'.
        """
        info = self.parse_dependencies(filename)[3]
        kwds = info.values
        cimports, externs, incdirs = self.cimports_externs_incdirs(filename)
        basedir = os.getcwd()
        # Add dependencies on "cdef extern from ..." files
        if externs:
            externs = _make_relative(externs, basedir)
            if 'depends' in kwds:
                kwds['depends'] = list(set(kwds['depends']).union(externs))
            else:
                kwds['depends'] = list(externs)
        # Add include_dirs to ensure that the C compiler will find the
        # "cdef extern from ..." files
        if incdirs:
            include_dirs = list(kwds.get('include_dirs', []))
            for inc in _make_relative(incdirs, basedir):
                if inc not in include_dirs:
                    include_dirs.append(inc)
            kwds['include_dirs'] = include_dirs
        return info

    def distutils_info(self, filename, aliases=None, base=None):
        """Return the merged DistutilsInfo of the file and all dependencies."""
        return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge)
                .subs(aliases)
                .merge(base))

    def transitive_merge(self, node, extract, merge):
        """Merge extract() results over the dependency graph, with a cache
        keyed on the (extract, merge) pair.
        """
        try:
            seen = self._transitive_cache[extract, merge]
        except KeyError:
            seen = self._transitive_cache[extract, merge] = {}
        return self.transitive_merge_helper(
            node, extract, merge, seen, {}, self.cimported_files)[0]

    def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing):
        # Depth-first traversal. 'stack' records the nodes currently being
        # visited so dependency cycles are detected; results are only cached
        # for nodes that are not part of an unfinished cycle.
        if node in seen:
            return seen[node], None
        deps = extract(node)
        if node in stack:
            return deps, node
        try:
            stack[node] = len(stack)
            loop = None
            for next in outgoing(node):
                sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing)
                if sub_loop is not None:
                    # Keep the loop head that sits deepest on the stack.
                    if loop is not None and stack[loop] < stack[sub_loop]:
                        pass
                    else:
                        loop = sub_loop
                deps = merge(deps, sub_deps)
            if loop == node:
                loop = None
            if loop is None:
                seen[node] = deps
            return deps, loop
        finally:
            del stack[node]
|
| 743 |
+
|
| 744 |
+
|
| 745 |
+
# Lazily-created singleton DependencyTree shared by all cythonize() calls.
_dep_tree = None

def create_dependency_tree(ctx=None, quiet=False):
    """Return the global DependencyTree, creating it on first use.

    A default Context is built when none is supplied; later calls reuse the
    first tree regardless of the arguments passed.
    """
    global _dep_tree
    if _dep_tree is None:
        if ctx is None:
            ctx = Context(["."], get_directive_defaults(),
                          options=CompilationOptions(default_options))
        _dep_tree = DependencyTree(ctx, quiet=quiet)
    return _dep_tree
|
| 755 |
+
|
| 756 |
+
|
| 757 |
+
# If this changes, change also docs/src/reference/compilation.rst
# which mentions this function
def default_create_extension(template, kwds):
    """Instantiate an extension of the same class as *template* from *kwds*.

    Resolves any 'depends' entries against the include directories first.
    Returns ``(extension, metadata)`` in the shape expected of a
    user-provided create_extension() function.
    """
    if 'depends' in kwds:
        include_dirs = kwds.get('include_dirs', []) + ["."]
        resolved_depends = resolve_depends(kwds['depends'], include_dirs)
        kwds['depends'] = sorted(set(resolved_depends + template.depends))

    extension_class = template.__class__
    ext = extension_class(**kwds)
    metadata = dict(distutils=kwds, module_name=kwds['name'])
    return (ext, metadata)
|
| 769 |
+
|
| 770 |
+
|
| 771 |
+
# This may be useful for advanced users?
|
| 772 |
+
def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
|
| 773 |
+
exclude_failures=False):
|
| 774 |
+
if language is not None:
|
| 775 |
+
print('Warning: passing language={0!r} to cythonize() is deprecated. '
|
| 776 |
+
'Instead, put "# distutils: language={0}" in your .pyx or .pxd file(s)'.format(language))
|
| 777 |
+
if exclude is None:
|
| 778 |
+
exclude = []
|
| 779 |
+
if patterns is None:
|
| 780 |
+
return [], {}
|
| 781 |
+
elif isinstance(patterns, basestring) or not isinstance(patterns, Iterable):
|
| 782 |
+
patterns = [patterns]
|
| 783 |
+
|
| 784 |
+
from distutils.extension import Extension
|
| 785 |
+
if 'setuptools' in sys.modules:
|
| 786 |
+
# Support setuptools Extension instances as well.
|
| 787 |
+
extension_classes = (
|
| 788 |
+
Extension, # should normally be the same as 'setuptools.extension._Extension'
|
| 789 |
+
sys.modules['setuptools.extension']._Extension,
|
| 790 |
+
sys.modules['setuptools'].Extension,
|
| 791 |
+
)
|
| 792 |
+
else:
|
| 793 |
+
extension_classes = (Extension,)
|
| 794 |
+
|
| 795 |
+
explicit_modules = {m.name for m in patterns if isinstance(m, extension_classes)}
|
| 796 |
+
deps = create_dependency_tree(ctx, quiet=quiet)
|
| 797 |
+
|
| 798 |
+
to_exclude = set()
|
| 799 |
+
if not isinstance(exclude, list):
|
| 800 |
+
exclude = [exclude]
|
| 801 |
+
for pattern in exclude:
|
| 802 |
+
to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))
|
| 803 |
+
|
| 804 |
+
module_list = []
|
| 805 |
+
module_metadata = {}
|
| 806 |
+
|
| 807 |
+
# if no create_extension() function is defined, use a simple
|
| 808 |
+
# default function.
|
| 809 |
+
create_extension = ctx.options.create_extension or default_create_extension
|
| 810 |
+
|
| 811 |
+
seen = set()
|
| 812 |
+
for pattern in patterns:
|
| 813 |
+
if not isinstance(pattern, extension_classes):
|
| 814 |
+
pattern = encode_filename_in_py2(pattern)
|
| 815 |
+
if isinstance(pattern, str):
|
| 816 |
+
filepattern = pattern
|
| 817 |
+
template = Extension(pattern, []) # Fake Extension without sources
|
| 818 |
+
name = '*'
|
| 819 |
+
base = None
|
| 820 |
+
ext_language = language
|
| 821 |
+
elif isinstance(pattern, extension_classes):
|
| 822 |
+
cython_sources = [s for s in pattern.sources
|
| 823 |
+
if os.path.splitext(s)[1] in ('.py', '.pyx')]
|
| 824 |
+
if cython_sources:
|
| 825 |
+
filepattern = cython_sources[0]
|
| 826 |
+
if len(cython_sources) > 1:
|
| 827 |
+
print(u"Warning: Multiple cython sources found for extension '%s': %s\n"
|
| 828 |
+
u"See https://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
|
| 829 |
+
u"for sharing declarations among Cython files." % (pattern.name, cython_sources))
|
| 830 |
+
else:
|
| 831 |
+
# ignore non-cython modules
|
| 832 |
+
module_list.append(pattern)
|
| 833 |
+
continue
|
| 834 |
+
template = pattern
|
| 835 |
+
name = template.name
|
| 836 |
+
base = DistutilsInfo(exn=template)
|
| 837 |
+
ext_language = None # do not override whatever the Extension says
|
| 838 |
+
else:
|
| 839 |
+
msg = str("pattern is not of type str nor subclass of Extension (%s)"
|
| 840 |
+
" but of type %s and class %s" % (repr(Extension),
|
| 841 |
+
type(pattern),
|
| 842 |
+
pattern.__class__))
|
| 843 |
+
raise TypeError(msg)
|
| 844 |
+
|
| 845 |
+
for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern):
|
| 846 |
+
if os.path.abspath(file) in to_exclude:
|
| 847 |
+
continue
|
| 848 |
+
module_name = deps.fully_qualified_name(file)
|
| 849 |
+
if '*' in name:
|
| 850 |
+
if module_name in explicit_modules:
|
| 851 |
+
continue
|
| 852 |
+
elif name:
|
| 853 |
+
module_name = name
|
| 854 |
+
|
| 855 |
+
Utils.raise_error_if_module_name_forbidden(module_name)
|
| 856 |
+
|
| 857 |
+
if module_name not in seen:
|
| 858 |
+
try:
|
| 859 |
+
kwds = deps.distutils_info(file, aliases, base).values
|
| 860 |
+
except Exception:
|
| 861 |
+
if exclude_failures:
|
| 862 |
+
continue
|
| 863 |
+
raise
|
| 864 |
+
if base is not None:
|
| 865 |
+
for key, value in base.values.items():
|
| 866 |
+
if key not in kwds:
|
| 867 |
+
kwds[key] = value
|
| 868 |
+
|
| 869 |
+
kwds['name'] = module_name
|
| 870 |
+
|
| 871 |
+
sources = [file] + [m for m in template.sources if m != filepattern]
|
| 872 |
+
if 'sources' in kwds:
|
| 873 |
+
# allow users to add .c files etc.
|
| 874 |
+
for source in kwds['sources']:
|
| 875 |
+
source = encode_filename_in_py2(source)
|
| 876 |
+
if source not in sources:
|
| 877 |
+
sources.append(source)
|
| 878 |
+
kwds['sources'] = sources
|
| 879 |
+
|
| 880 |
+
if ext_language and 'language' not in kwds:
|
| 881 |
+
kwds['language'] = ext_language
|
| 882 |
+
|
| 883 |
+
np_pythran = kwds.pop('np_pythran', False)
|
| 884 |
+
|
| 885 |
+
# Create the new extension
|
| 886 |
+
m, metadata = create_extension(template, kwds)
|
| 887 |
+
m.np_pythran = np_pythran or getattr(m, 'np_pythran', False)
|
| 888 |
+
if m.np_pythran:
|
| 889 |
+
update_pythran_extension(m)
|
| 890 |
+
module_list.append(m)
|
| 891 |
+
|
| 892 |
+
# Store metadata (this will be written as JSON in the
|
| 893 |
+
# generated C file but otherwise has no purpose)
|
| 894 |
+
module_metadata[module_name] = metadata
|
| 895 |
+
|
| 896 |
+
if file not in m.sources:
|
| 897 |
+
# Old setuptools unconditionally replaces .pyx with .c/.cpp
|
| 898 |
+
target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c')
|
| 899 |
+
try:
|
| 900 |
+
m.sources.remove(target_file)
|
| 901 |
+
except ValueError:
|
| 902 |
+
# never seen this in the wild, but probably better to warn about this unexpected case
|
| 903 |
+
print(u"Warning: Cython source file not found in sources list, adding %s" % file)
|
| 904 |
+
m.sources.insert(0, file)
|
| 905 |
+
seen.add(name)
|
| 906 |
+
return module_list, module_metadata
|
| 907 |
+
|
| 908 |
+
|
| 909 |
+
# This is the user-exposed entry point.
|
| 910 |
+
def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=None, language=None,
|
| 911 |
+
exclude_failures=False, show_all_warnings=False, **options):
|
| 912 |
+
"""
|
| 913 |
+
Compile a set of source modules into C/C++ files and return a list of distutils
|
| 914 |
+
Extension objects for them.
|
| 915 |
+
|
| 916 |
+
:param module_list: As module list, pass either a glob pattern, a list of glob
|
| 917 |
+
patterns or a list of Extension objects. The latter
|
| 918 |
+
allows you to configure the extensions separately
|
| 919 |
+
through the normal distutils options.
|
| 920 |
+
You can also pass Extension objects that have
|
| 921 |
+
glob patterns as their sources. Then, cythonize
|
| 922 |
+
will resolve the pattern and create a
|
| 923 |
+
copy of the Extension for every matching file.
|
| 924 |
+
|
| 925 |
+
:param exclude: When passing glob patterns as ``module_list``, you can exclude certain
|
| 926 |
+
module names explicitly by passing them into the ``exclude`` option.
|
| 927 |
+
|
| 928 |
+
:param nthreads: The number of concurrent builds for parallel compilation
|
| 929 |
+
(requires the ``multiprocessing`` module).
|
| 930 |
+
|
| 931 |
+
:param aliases: If you want to use compiler directives like ``# distutils: ...`` but
|
| 932 |
+
can only know at compile time (when running the ``setup.py``) which values
|
| 933 |
+
to use, you can use aliases and pass a dictionary mapping those aliases
|
| 934 |
+
to Python strings when calling :func:`cythonize`. As an example, say you
|
| 935 |
+
want to use the compiler
|
| 936 |
+
directive ``# distutils: include_dirs = ../static_libs/include/``
|
| 937 |
+
but this path isn't always fixed and you want to find it when running
|
| 938 |
+
the ``setup.py``. You can then do ``# distutils: include_dirs = MY_HEADERS``,
|
| 939 |
+
find the value of ``MY_HEADERS`` in the ``setup.py``, put it in a python
|
| 940 |
+
variable called ``foo`` as a string, and then call
|
| 941 |
+
``cythonize(..., aliases={'MY_HEADERS': foo})``.
|
| 942 |
+
|
| 943 |
+
:param quiet: If True, Cython won't print error, warning, or status messages during the
|
| 944 |
+
compilation.
|
| 945 |
+
|
| 946 |
+
:param force: Forces the recompilation of the Cython modules, even if the timestamps
|
| 947 |
+
don't indicate that a recompilation is necessary.
|
| 948 |
+
|
| 949 |
+
:param language: To globally enable C++ mode, you can pass ``language='c++'``. Otherwise, this
|
| 950 |
+
will be determined at a per-file level based on compiler directives. This
|
| 951 |
+
affects only modules found based on file names. Extension instances passed
|
| 952 |
+
into :func:`cythonize` will not be changed. It is recommended to rather
|
| 953 |
+
use the compiler directive ``# distutils: language = c++`` than this option.
|
| 954 |
+
|
| 955 |
+
:param exclude_failures: For a broad 'try to compile' mode that ignores compilation
|
| 956 |
+
failures and simply excludes the failed extensions,
|
| 957 |
+
pass ``exclude_failures=True``. Note that this only
|
| 958 |
+
really makes sense for compiling ``.py`` files which can also
|
| 959 |
+
be used without compilation.
|
| 960 |
+
|
| 961 |
+
:param show_all_warnings: By default, not all Cython warnings are printed.
|
| 962 |
+
Set to true to show all warnings.
|
| 963 |
+
|
| 964 |
+
:param annotate: If ``True``, will produce a HTML file for each of the ``.pyx`` or ``.py``
|
| 965 |
+
files compiled. The HTML file gives an indication
|
| 966 |
+
of how much Python interaction there is in
|
| 967 |
+
each of the source code lines, compared to plain C code.
|
| 968 |
+
It also allows you to see the C/C++ code
|
| 969 |
+
generated for each line of Cython code. This report is invaluable when
|
| 970 |
+
optimizing a function for speed,
|
| 971 |
+
and for determining when to :ref:`release the GIL <nogil>`:
|
| 972 |
+
in general, a ``nogil`` block may contain only "white" code.
|
| 973 |
+
See examples in :ref:`determining_where_to_add_types` or
|
| 974 |
+
:ref:`primes`.
|
| 975 |
+
|
| 976 |
+
|
| 977 |
+
:param annotate-fullc: If ``True`` will produce a colorized HTML version of
|
| 978 |
+
the source which includes entire generated C/C++-code.
|
| 979 |
+
|
| 980 |
+
|
| 981 |
+
:param compiler_directives: Allow to set compiler directives in the ``setup.py`` like this:
|
| 982 |
+
``compiler_directives={'embedsignature': True}``.
|
| 983 |
+
See :ref:`compiler-directives`.
|
| 984 |
+
|
| 985 |
+
:param depfile: produce depfiles for the sources if True.
|
| 986 |
+
"""
|
| 987 |
+
if exclude is None:
|
| 988 |
+
exclude = []
|
| 989 |
+
if 'include_path' not in options:
|
| 990 |
+
options['include_path'] = ['.']
|
| 991 |
+
if 'common_utility_include_dir' in options:
|
| 992 |
+
safe_makedirs(options['common_utility_include_dir'])
|
| 993 |
+
|
| 994 |
+
depfile = options.pop('depfile', None)
|
| 995 |
+
|
| 996 |
+
if pythran is None:
|
| 997 |
+
pythran_options = None
|
| 998 |
+
else:
|
| 999 |
+
pythran_options = CompilationOptions(**options)
|
| 1000 |
+
pythran_options.cplus = True
|
| 1001 |
+
pythran_options.np_pythran = True
|
| 1002 |
+
|
| 1003 |
+
if force is None:
|
| 1004 |
+
force = os.environ.get("CYTHON_FORCE_REGEN") == "1" # allow global overrides for build systems
|
| 1005 |
+
|
| 1006 |
+
c_options = CompilationOptions(**options)
|
| 1007 |
+
cpp_options = CompilationOptions(**options); cpp_options.cplus = True
|
| 1008 |
+
ctx = Context.from_options(c_options)
|
| 1009 |
+
options = c_options
|
| 1010 |
+
module_list, module_metadata = create_extension_list(
|
| 1011 |
+
module_list,
|
| 1012 |
+
exclude=exclude,
|
| 1013 |
+
ctx=ctx,
|
| 1014 |
+
quiet=quiet,
|
| 1015 |
+
exclude_failures=exclude_failures,
|
| 1016 |
+
language=language,
|
| 1017 |
+
aliases=aliases)
|
| 1018 |
+
|
| 1019 |
+
fix_windows_unicode_modules(module_list)
|
| 1020 |
+
|
| 1021 |
+
deps = create_dependency_tree(ctx, quiet=quiet)
|
| 1022 |
+
build_dir = getattr(options, 'build_dir', None)
|
| 1023 |
+
|
| 1024 |
+
def copy_to_build_dir(filepath, root=os.getcwd()):
|
| 1025 |
+
filepath_abs = os.path.abspath(filepath)
|
| 1026 |
+
if os.path.isabs(filepath):
|
| 1027 |
+
filepath = filepath_abs
|
| 1028 |
+
if filepath_abs.startswith(root):
|
| 1029 |
+
# distutil extension depends are relative to cwd
|
| 1030 |
+
mod_dir = join_path(build_dir,
|
| 1031 |
+
os.path.dirname(_relpath(filepath, root)))
|
| 1032 |
+
copy_once_if_newer(filepath_abs, mod_dir)
|
| 1033 |
+
|
| 1034 |
+
modules_by_cfile = collections.defaultdict(list)
|
| 1035 |
+
to_compile = []
|
| 1036 |
+
for m in module_list:
|
| 1037 |
+
if build_dir:
|
| 1038 |
+
for dep in m.depends:
|
| 1039 |
+
copy_to_build_dir(dep)
|
| 1040 |
+
|
| 1041 |
+
cy_sources = [
|
| 1042 |
+
source for source in m.sources
|
| 1043 |
+
if os.path.splitext(source)[1] in ('.pyx', '.py')]
|
| 1044 |
+
if len(cy_sources) == 1:
|
| 1045 |
+
# normal "special" case: believe the Extension module name to allow user overrides
|
| 1046 |
+
full_module_name = m.name
|
| 1047 |
+
else:
|
| 1048 |
+
# infer FQMN from source files
|
| 1049 |
+
full_module_name = None
|
| 1050 |
+
|
| 1051 |
+
new_sources = []
|
| 1052 |
+
for source in m.sources:
|
| 1053 |
+
base, ext = os.path.splitext(source)
|
| 1054 |
+
if ext in ('.pyx', '.py'):
|
| 1055 |
+
if m.np_pythran:
|
| 1056 |
+
c_file = base + '.cpp'
|
| 1057 |
+
options = pythran_options
|
| 1058 |
+
elif m.language == 'c++':
|
| 1059 |
+
c_file = base + '.cpp'
|
| 1060 |
+
options = cpp_options
|
| 1061 |
+
else:
|
| 1062 |
+
c_file = base + '.c'
|
| 1063 |
+
options = c_options
|
| 1064 |
+
|
| 1065 |
+
# setup for out of place build directory if enabled
|
| 1066 |
+
if build_dir:
|
| 1067 |
+
if os.path.isabs(c_file):
|
| 1068 |
+
c_file = os.path.splitdrive(c_file)[1]
|
| 1069 |
+
c_file = c_file.split(os.sep, 1)[1]
|
| 1070 |
+
c_file = os.path.join(build_dir, c_file)
|
| 1071 |
+
dir = os.path.dirname(c_file)
|
| 1072 |
+
safe_makedirs_once(dir)
|
| 1073 |
+
|
| 1074 |
+
# write out the depfile, if requested
|
| 1075 |
+
if depfile:
|
| 1076 |
+
dependencies = deps.all_dependencies(source)
|
| 1077 |
+
write_depfile(c_file, source, dependencies)
|
| 1078 |
+
|
| 1079 |
+
# Missing files and those generated by other Cython versions should always be recreated.
|
| 1080 |
+
if Utils.file_generated_by_this_cython(c_file):
|
| 1081 |
+
c_timestamp = os.path.getmtime(c_file)
|
| 1082 |
+
else:
|
| 1083 |
+
c_timestamp = -1
|
| 1084 |
+
|
| 1085 |
+
# Priority goes first to modified files, second to direct
|
| 1086 |
+
# dependents, and finally to indirect dependents.
|
| 1087 |
+
if c_timestamp < deps.timestamp(source):
|
| 1088 |
+
dep_timestamp, dep = deps.timestamp(source), source
|
| 1089 |
+
priority = 0
|
| 1090 |
+
else:
|
| 1091 |
+
dep_timestamp, dep = deps.newest_dependency(source)
|
| 1092 |
+
priority = 2 - (dep in deps.immediate_dependencies(source))
|
| 1093 |
+
if force or c_timestamp < dep_timestamp:
|
| 1094 |
+
if not quiet and not force:
|
| 1095 |
+
if source == dep:
|
| 1096 |
+
print(u"Compiling %s because it changed." % Utils.decode_filename(source))
|
| 1097 |
+
else:
|
| 1098 |
+
print(u"Compiling %s because it depends on %s." % (
|
| 1099 |
+
Utils.decode_filename(source),
|
| 1100 |
+
Utils.decode_filename(dep),
|
| 1101 |
+
))
|
| 1102 |
+
if not force and options.cache:
|
| 1103 |
+
fingerprint = deps.transitive_fingerprint(source, m, options)
|
| 1104 |
+
else:
|
| 1105 |
+
fingerprint = None
|
| 1106 |
+
to_compile.append((
|
| 1107 |
+
priority, source, c_file, fingerprint, quiet,
|
| 1108 |
+
options, not exclude_failures, module_metadata.get(m.name),
|
| 1109 |
+
full_module_name, show_all_warnings))
|
| 1110 |
+
new_sources.append(c_file)
|
| 1111 |
+
modules_by_cfile[c_file].append(m)
|
| 1112 |
+
else:
|
| 1113 |
+
new_sources.append(source)
|
| 1114 |
+
if build_dir:
|
| 1115 |
+
copy_to_build_dir(source)
|
| 1116 |
+
m.sources = new_sources
|
| 1117 |
+
|
| 1118 |
+
if options.cache:
|
| 1119 |
+
if not os.path.exists(options.cache):
|
| 1120 |
+
os.makedirs(options.cache)
|
| 1121 |
+
to_compile.sort()
|
| 1122 |
+
# Drop "priority" component of "to_compile" entries and add a
|
| 1123 |
+
# simple progress indicator.
|
| 1124 |
+
N = len(to_compile)
|
| 1125 |
+
progress_fmt = "[{0:%d}/{1}] " % len(str(N))
|
| 1126 |
+
for i in range(N):
|
| 1127 |
+
progress = progress_fmt.format(i+1, N)
|
| 1128 |
+
to_compile[i] = to_compile[i][1:] + (progress,)
|
| 1129 |
+
|
| 1130 |
+
if N <= 1:
|
| 1131 |
+
nthreads = 0
|
| 1132 |
+
if nthreads:
|
| 1133 |
+
import multiprocessing
|
| 1134 |
+
pool = multiprocessing.Pool(
|
| 1135 |
+
nthreads, initializer=_init_multiprocessing_helper)
|
| 1136 |
+
# This is a bit more involved than it should be, because KeyboardInterrupts
|
| 1137 |
+
# break the multiprocessing workers when using a normal pool.map().
|
| 1138 |
+
# See, for example:
|
| 1139 |
+
# https://noswap.com/blog/python-multiprocessing-keyboardinterrupt
|
| 1140 |
+
try:
|
| 1141 |
+
result = pool.map_async(cythonize_one_helper, to_compile, chunksize=1)
|
| 1142 |
+
pool.close()
|
| 1143 |
+
while not result.ready():
|
| 1144 |
+
try:
|
| 1145 |
+
result.get(99999) # seconds
|
| 1146 |
+
except multiprocessing.TimeoutError:
|
| 1147 |
+
pass
|
| 1148 |
+
except KeyboardInterrupt:
|
| 1149 |
+
pool.terminate()
|
| 1150 |
+
raise
|
| 1151 |
+
pool.join()
|
| 1152 |
+
else:
|
| 1153 |
+
for args in to_compile:
|
| 1154 |
+
cythonize_one(*args)
|
| 1155 |
+
|
| 1156 |
+
if exclude_failures:
|
| 1157 |
+
failed_modules = set()
|
| 1158 |
+
for c_file, modules in modules_by_cfile.items():
|
| 1159 |
+
if not os.path.exists(c_file):
|
| 1160 |
+
failed_modules.update(modules)
|
| 1161 |
+
elif os.path.getsize(c_file) < 200:
|
| 1162 |
+
f = io_open(c_file, 'r', encoding='iso8859-1')
|
| 1163 |
+
try:
|
| 1164 |
+
if f.read(len('#error ')) == '#error ':
|
| 1165 |
+
# dead compilation result
|
| 1166 |
+
failed_modules.update(modules)
|
| 1167 |
+
finally:
|
| 1168 |
+
f.close()
|
| 1169 |
+
if failed_modules:
|
| 1170 |
+
for module in failed_modules:
|
| 1171 |
+
module_list.remove(module)
|
| 1172 |
+
print(u"Failed compilations: %s" % ', '.join(sorted([
|
| 1173 |
+
module.name for module in failed_modules])))
|
| 1174 |
+
|
| 1175 |
+
if options.cache:
|
| 1176 |
+
cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100))
|
| 1177 |
+
# cythonize() is often followed by the (non-Python-buffered)
|
| 1178 |
+
# compiler output, flush now to avoid interleaving output.
|
| 1179 |
+
sys.stdout.flush()
|
| 1180 |
+
return module_list
|
| 1181 |
+
|
| 1182 |
+
|
| 1183 |
+
def fix_windows_unicode_modules(module_list):
|
| 1184 |
+
# Hack around a distutils 3.[5678] bug on Windows for unicode module names.
|
| 1185 |
+
# https://bugs.python.org/issue39432
|
| 1186 |
+
if sys.platform != "win32":
|
| 1187 |
+
return
|
| 1188 |
+
if sys.version_info < (3, 5) or sys.version_info >= (3, 8, 2):
|
| 1189 |
+
return
|
| 1190 |
+
|
| 1191 |
+
def make_filtered_list(ignored_symbol, old_entries):
|
| 1192 |
+
class FilteredExportSymbols(list):
|
| 1193 |
+
# export_symbols for unicode filename cause link errors on Windows
|
| 1194 |
+
# Cython doesn't need them (it already defines PyInit with the correct linkage)
|
| 1195 |
+
# so use this class as a temporary fix to stop them from being generated
|
| 1196 |
+
def __contains__(self, val):
|
| 1197 |
+
# so distutils doesn't "helpfully" add PyInit_<name>
|
| 1198 |
+
return val == ignored_symbol or list.__contains__(self, val)
|
| 1199 |
+
|
| 1200 |
+
filtered_list = FilteredExportSymbols(old_entries)
|
| 1201 |
+
if old_entries:
|
| 1202 |
+
filtered_list.extend(name for name in old_entries if name != ignored_symbol)
|
| 1203 |
+
return filtered_list
|
| 1204 |
+
|
| 1205 |
+
for m in module_list:
|
| 1206 |
+
# TODO: use m.name.isascii() in Py3.7+
|
| 1207 |
+
try:
|
| 1208 |
+
m.name.encode("ascii")
|
| 1209 |
+
continue
|
| 1210 |
+
except UnicodeEncodeError:
|
| 1211 |
+
pass
|
| 1212 |
+
m.export_symbols = make_filtered_list(
|
| 1213 |
+
"PyInit_" + m.name.rsplit(".", 1)[-1],
|
| 1214 |
+
m.export_symbols,
|
| 1215 |
+
)
|
| 1216 |
+
|
| 1217 |
+
|
| 1218 |
+
if os.environ.get('XML_RESULTS'):
|
| 1219 |
+
compile_result_dir = os.environ['XML_RESULTS']
|
| 1220 |
+
def record_results(func):
|
| 1221 |
+
def with_record(*args):
|
| 1222 |
+
t = time.time()
|
| 1223 |
+
success = True
|
| 1224 |
+
try:
|
| 1225 |
+
try:
|
| 1226 |
+
func(*args)
|
| 1227 |
+
except:
|
| 1228 |
+
success = False
|
| 1229 |
+
finally:
|
| 1230 |
+
t = time.time() - t
|
| 1231 |
+
module = fully_qualified_name(args[0])
|
| 1232 |
+
name = "cythonize." + module
|
| 1233 |
+
failures = 1 - success
|
| 1234 |
+
if success:
|
| 1235 |
+
failure_item = ""
|
| 1236 |
+
else:
|
| 1237 |
+
failure_item = "failure"
|
| 1238 |
+
output = open(os.path.join(compile_result_dir, name + ".xml"), "w")
|
| 1239 |
+
output.write("""
|
| 1240 |
+
<?xml version="1.0" ?>
|
| 1241 |
+
<testsuite name="%(name)s" errors="0" failures="%(failures)s" tests="1" time="%(t)s">
|
| 1242 |
+
<testcase classname="%(name)s" name="cythonize">
|
| 1243 |
+
%(failure_item)s
|
| 1244 |
+
</testcase>
|
| 1245 |
+
</testsuite>
|
| 1246 |
+
""".strip() % locals())
|
| 1247 |
+
output.close()
|
| 1248 |
+
return with_record
|
| 1249 |
+
else:
|
| 1250 |
+
def record_results(func):
|
| 1251 |
+
return func
|
| 1252 |
+
|
| 1253 |
+
|
| 1254 |
+
# TODO: Share context? Issue: pyx processing leaks into pxd module
|
| 1255 |
+
@record_results
|
| 1256 |
+
def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
|
| 1257 |
+
raise_on_failure=True, embedded_metadata=None,
|
| 1258 |
+
full_module_name=None, show_all_warnings=False,
|
| 1259 |
+
progress=""):
|
| 1260 |
+
from ..Compiler.Main import compile_single, default_options
|
| 1261 |
+
from ..Compiler.Errors import CompileError, PyrexError
|
| 1262 |
+
|
| 1263 |
+
if fingerprint:
|
| 1264 |
+
if not os.path.exists(options.cache):
|
| 1265 |
+
safe_makedirs(options.cache)
|
| 1266 |
+
# Cython-generated c files are highly compressible.
|
| 1267 |
+
# (E.g. a compression ratio of about 10 for Sage).
|
| 1268 |
+
fingerprint_file_base = join_path(
|
| 1269 |
+
options.cache, "%s-%s" % (os.path.basename(c_file), fingerprint))
|
| 1270 |
+
gz_fingerprint_file = fingerprint_file_base + gzip_ext
|
| 1271 |
+
zip_fingerprint_file = fingerprint_file_base + '.zip'
|
| 1272 |
+
if os.path.exists(gz_fingerprint_file) or os.path.exists(zip_fingerprint_file):
|
| 1273 |
+
if not quiet:
|
| 1274 |
+
print(u"%sFound compiled %s in cache" % (progress, pyx_file))
|
| 1275 |
+
if os.path.exists(gz_fingerprint_file):
|
| 1276 |
+
os.utime(gz_fingerprint_file, None)
|
| 1277 |
+
with contextlib.closing(gzip_open(gz_fingerprint_file, 'rb')) as g:
|
| 1278 |
+
with contextlib.closing(open(c_file, 'wb')) as f:
|
| 1279 |
+
shutil.copyfileobj(g, f)
|
| 1280 |
+
else:
|
| 1281 |
+
os.utime(zip_fingerprint_file, None)
|
| 1282 |
+
dirname = os.path.dirname(c_file)
|
| 1283 |
+
with contextlib.closing(zipfile.ZipFile(zip_fingerprint_file)) as z:
|
| 1284 |
+
for artifact in z.namelist():
|
| 1285 |
+
z.extract(artifact, os.path.join(dirname, artifact))
|
| 1286 |
+
return
|
| 1287 |
+
if not quiet:
|
| 1288 |
+
print(u"%sCythonizing %s" % (progress, Utils.decode_filename(pyx_file)))
|
| 1289 |
+
if options is None:
|
| 1290 |
+
options = CompilationOptions(default_options)
|
| 1291 |
+
options.output_file = c_file
|
| 1292 |
+
options.embedded_metadata = embedded_metadata
|
| 1293 |
+
|
| 1294 |
+
old_warning_level = Errors.LEVEL
|
| 1295 |
+
if show_all_warnings:
|
| 1296 |
+
Errors.LEVEL = 0
|
| 1297 |
+
|
| 1298 |
+
any_failures = 0
|
| 1299 |
+
try:
|
| 1300 |
+
result = compile_single(pyx_file, options, full_module_name=full_module_name)
|
| 1301 |
+
if result.num_errors > 0:
|
| 1302 |
+
any_failures = 1
|
| 1303 |
+
except (EnvironmentError, PyrexError) as e:
|
| 1304 |
+
sys.stderr.write('%s\n' % e)
|
| 1305 |
+
any_failures = 1
|
| 1306 |
+
# XXX
|
| 1307 |
+
import traceback
|
| 1308 |
+
traceback.print_exc()
|
| 1309 |
+
except Exception:
|
| 1310 |
+
if raise_on_failure:
|
| 1311 |
+
raise
|
| 1312 |
+
import traceback
|
| 1313 |
+
traceback.print_exc()
|
| 1314 |
+
any_failures = 1
|
| 1315 |
+
finally:
|
| 1316 |
+
if show_all_warnings:
|
| 1317 |
+
Errors.LEVEL = old_warning_level
|
| 1318 |
+
|
| 1319 |
+
if any_failures:
|
| 1320 |
+
if raise_on_failure:
|
| 1321 |
+
raise CompileError(None, pyx_file)
|
| 1322 |
+
elif os.path.exists(c_file):
|
| 1323 |
+
os.remove(c_file)
|
| 1324 |
+
elif fingerprint:
|
| 1325 |
+
artifacts = list(filter(None, [
|
| 1326 |
+
getattr(result, attr, None)
|
| 1327 |
+
for attr in ('c_file', 'h_file', 'api_file', 'i_file')]))
|
| 1328 |
+
if len(artifacts) == 1:
|
| 1329 |
+
fingerprint_file = gz_fingerprint_file
|
| 1330 |
+
with contextlib.closing(open(c_file, 'rb')) as f:
|
| 1331 |
+
with contextlib.closing(gzip_open(fingerprint_file + '.tmp', 'wb')) as g:
|
| 1332 |
+
shutil.copyfileobj(f, g)
|
| 1333 |
+
else:
|
| 1334 |
+
fingerprint_file = zip_fingerprint_file
|
| 1335 |
+
with contextlib.closing(zipfile.ZipFile(
|
| 1336 |
+
fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
|
| 1337 |
+
for artifact in artifacts:
|
| 1338 |
+
zip.write(artifact, os.path.basename(artifact))
|
| 1339 |
+
os.rename(fingerprint_file + '.tmp', fingerprint_file)
|
| 1340 |
+
|
| 1341 |
+
|
| 1342 |
+
def cythonize_one_helper(m):
|
| 1343 |
+
import traceback
|
| 1344 |
+
try:
|
| 1345 |
+
return cythonize_one(*m)
|
| 1346 |
+
except Exception:
|
| 1347 |
+
traceback.print_exc()
|
| 1348 |
+
raise
|
| 1349 |
+
|
| 1350 |
+
|
| 1351 |
+
def _init_multiprocessing_helper():
|
| 1352 |
+
# KeyboardInterrupt kills workers, so don't let them get it
|
| 1353 |
+
import signal
|
| 1354 |
+
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
| 1355 |
+
|
| 1356 |
+
|
| 1357 |
+
def cleanup_cache(cache, target_size, ratio=.85):
|
| 1358 |
+
try:
|
| 1359 |
+
p = subprocess.Popen(['du', '-s', '-k', os.path.abspath(cache)], stdout=subprocess.PIPE)
|
| 1360 |
+
stdout, _ = p.communicate()
|
| 1361 |
+
res = p.wait()
|
| 1362 |
+
if res == 0:
|
| 1363 |
+
total_size = 1024 * int(stdout.strip().split()[0])
|
| 1364 |
+
if total_size < target_size:
|
| 1365 |
+
return
|
| 1366 |
+
except (OSError, ValueError):
|
| 1367 |
+
pass
|
| 1368 |
+
total_size = 0
|
| 1369 |
+
all = []
|
| 1370 |
+
for file in os.listdir(cache):
|
| 1371 |
+
path = join_path(cache, file)
|
| 1372 |
+
s = os.stat(path)
|
| 1373 |
+
total_size += s.st_size
|
| 1374 |
+
all.append((s.st_atime, s.st_size, path))
|
| 1375 |
+
if total_size > target_size:
|
| 1376 |
+
for time, size, file in reversed(sorted(all)):
|
| 1377 |
+
os.unlink(file)
|
| 1378 |
+
total_size -= size
|
| 1379 |
+
if total_size < target_size * ratio:
|
| 1380 |
+
break
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Distutils.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from Cython.Distutils.build_ext import build_ext
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Inline.py
ADDED
|
@@ -0,0 +1,367 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import hashlib
|
| 4 |
+
import inspect
|
| 5 |
+
import os
|
| 6 |
+
import re
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
from distutils.core import Distribution, Extension
|
| 10 |
+
from distutils.command.build_ext import build_ext
|
| 11 |
+
|
| 12 |
+
import Cython
|
| 13 |
+
from ..Compiler.Main import Context
|
| 14 |
+
from ..Compiler.Options import (default_options, CompilationOptions,
|
| 15 |
+
get_directive_defaults)
|
| 16 |
+
|
| 17 |
+
from ..Compiler.Visitor import CythonTransform, EnvTransform
|
| 18 |
+
from ..Compiler.ParseTreeTransforms import SkipDeclarations
|
| 19 |
+
from ..Compiler.TreeFragment import parse_from_strings
|
| 20 |
+
from ..Compiler.StringEncoding import _unicode
|
| 21 |
+
from .Dependencies import strip_string_literals, cythonize, cached_function
|
| 22 |
+
from ..Compiler import Pipeline
|
| 23 |
+
from ..Utils import get_cython_cache_dir
|
| 24 |
+
import cython as cython_module
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
IS_PY3 = sys.version_info >= (3,)
|
| 28 |
+
|
| 29 |
+
# A utility function to convert user-supplied ASCII strings to unicode.
|
| 30 |
+
if not IS_PY3:
|
| 31 |
+
def to_unicode(s):
|
| 32 |
+
if isinstance(s, bytes):
|
| 33 |
+
return s.decode('ascii')
|
| 34 |
+
else:
|
| 35 |
+
return s
|
| 36 |
+
else:
|
| 37 |
+
to_unicode = lambda x: x
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
if sys.version_info < (3, 5):
|
| 41 |
+
import imp
|
| 42 |
+
def load_dynamic(name, module_path):
|
| 43 |
+
return imp.load_dynamic(name, module_path)
|
| 44 |
+
else:
|
| 45 |
+
import importlib.util
|
| 46 |
+
from importlib.machinery import ExtensionFileLoader
|
| 47 |
+
|
| 48 |
+
def load_dynamic(name, path):
|
| 49 |
+
spec = importlib.util.spec_from_file_location(name, loader=ExtensionFileLoader(name, path))
|
| 50 |
+
module = importlib.util.module_from_spec(spec)
|
| 51 |
+
spec.loader.exec_module(module)
|
| 52 |
+
return module
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class UnboundSymbols(EnvTransform, SkipDeclarations):
|
| 56 |
+
def __init__(self):
|
| 57 |
+
super(EnvTransform, self).__init__(context=None)
|
| 58 |
+
self.unbound = set()
|
| 59 |
+
def visit_NameNode(self, node):
|
| 60 |
+
if not self.current_env().lookup(node.name):
|
| 61 |
+
self.unbound.add(node.name)
|
| 62 |
+
return node
|
| 63 |
+
def __call__(self, node):
|
| 64 |
+
super(UnboundSymbols, self).__call__(node)
|
| 65 |
+
return self.unbound
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
@cached_function
|
| 69 |
+
def unbound_symbols(code, context=None):
|
| 70 |
+
code = to_unicode(code)
|
| 71 |
+
if context is None:
|
| 72 |
+
context = Context([], get_directive_defaults(),
|
| 73 |
+
options=CompilationOptions(default_options))
|
| 74 |
+
from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
|
| 75 |
+
tree = parse_from_strings('(tree fragment)', code)
|
| 76 |
+
for phase in Pipeline.create_pipeline(context, 'pyx'):
|
| 77 |
+
if phase is None:
|
| 78 |
+
continue
|
| 79 |
+
tree = phase(tree)
|
| 80 |
+
if isinstance(phase, AnalyseDeclarationsTransform):
|
| 81 |
+
break
|
| 82 |
+
try:
|
| 83 |
+
import builtins
|
| 84 |
+
except ImportError:
|
| 85 |
+
import __builtin__ as builtins
|
| 86 |
+
return tuple(UnboundSymbols()(tree) - set(dir(builtins)))
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def unsafe_type(arg, context=None):
|
| 90 |
+
py_type = type(arg)
|
| 91 |
+
if py_type is int:
|
| 92 |
+
return 'long'
|
| 93 |
+
else:
|
| 94 |
+
return safe_type(arg, context)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def safe_type(arg, context=None):
|
| 98 |
+
py_type = type(arg)
|
| 99 |
+
if py_type in (list, tuple, dict, str):
|
| 100 |
+
return py_type.__name__
|
| 101 |
+
elif py_type is complex:
|
| 102 |
+
return 'double complex'
|
| 103 |
+
elif py_type is float:
|
| 104 |
+
return 'double'
|
| 105 |
+
elif py_type is bool:
|
| 106 |
+
return 'bint'
|
| 107 |
+
elif 'numpy' in sys.modules and isinstance(arg, sys.modules['numpy'].ndarray):
|
| 108 |
+
return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim)
|
| 109 |
+
else:
|
| 110 |
+
for base_type in py_type.__mro__:
|
| 111 |
+
if base_type.__module__ in ('__builtin__', 'builtins'):
|
| 112 |
+
return 'object'
|
| 113 |
+
module = context.find_module(base_type.__module__, need_pxd=False)
|
| 114 |
+
if module:
|
| 115 |
+
entry = module.lookup(base_type.__name__)
|
| 116 |
+
if entry.is_type:
|
| 117 |
+
return '%s.%s' % (base_type.__module__, base_type.__name__)
|
| 118 |
+
return 'object'
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def _get_build_extension():
    """Create a finalized distutils ``build_ext`` command object.

    The user's distutils configuration files (e.g. setup.cfg) are parsed
    first so the build honours any configured compiler settings.
    """
    dist = Distribution()
    dist.parse_config_files(dist.find_config_files())
    cmd = build_ext(dist)
    cmd.finalize_options()
    return cmd
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
@cached_function
def _create_context(cython_include_dirs):
    """Build (and memoize, via @cached_function) a compiler Context."""
    options = CompilationOptions(default_options)
    return Context(
        list(cython_include_dirs),
        get_directive_defaults(),
        options=options,
    )
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
# Session cache for cython_inline(): maps source code -> its unbound symbols,
# and (code, arg_sigs, key_hash) -> the compiled module's __invoke function.
_cython_inline_cache = {}
# Default compilation context, searching only the current directory.
_cython_inline_default_context = _create_context(('.',))
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None):
|
| 146 |
+
for symbol in unbound_symbols:
|
| 147 |
+
if symbol not in kwds:
|
| 148 |
+
if locals is None or globals is None:
|
| 149 |
+
calling_frame = inspect.currentframe().f_back.f_back.f_back
|
| 150 |
+
if locals is None:
|
| 151 |
+
locals = calling_frame.f_locals
|
| 152 |
+
if globals is None:
|
| 153 |
+
globals = calling_frame.f_globals
|
| 154 |
+
if symbol in locals:
|
| 155 |
+
kwds[symbol] = locals[symbol]
|
| 156 |
+
elif symbol in globals:
|
| 157 |
+
kwds[symbol] = globals[symbol]
|
| 158 |
+
else:
|
| 159 |
+
print("Couldn't find %r" % symbol)
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def _inline_key(orig_code, arg_sigs, language_level):
    """Hash everything that determines the generated module: source code,
    argument signature, interpreter, language level and Cython version."""
    key = (orig_code, arg_sigs, sys.version_info, sys.executable,
           language_level, Cython.__version__)
    return hashlib.sha1(_unicode(key).encode('utf-8')).hexdigest()
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def cython_inline(code, get_type=unsafe_type,
                  lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
                  cython_include_dirs=None, cython_compiler_directives=None,
                  force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds):
    """Compile *code* as the body of a Cython function and execute it.

    Unbound names in the code are resolved from **kwds, or from the
    caller's locals/globals when not supplied.  The generated extension
    module is cached on disk (under *lib_dir*) and in-process, keyed by
    the source code, argument type signature, interpreter and Cython
    version, so repeated calls with the same code and argument types are
    cheap.  Returns the value computed by the inlined code.
    """
    if get_type is None:
        get_type = lambda x: 'object'
    ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context

    cython_compiler_directives = dict(cython_compiler_directives) if cython_compiler_directives else {}
    if language_level is None and 'language_level' not in cython_compiler_directives:
        language_level = '3str'
    if language_level is not None:
        cython_compiler_directives['language_level'] = language_level

    key_hash = None

    # Fast path if this has been called in this session.
    _unbound_symbols = _cython_inline_cache.get(code)
    if _unbound_symbols is not None:
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
        args = sorted(kwds.items())
        arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args])
        key_hash = _inline_key(code, arg_sigs, language_level)
        invoke = _cython_inline_cache.get((code, arg_sigs, key_hash))
        if invoke is not None:
            arg_list = [arg[1] for arg in args]
            return invoke(*arg_list)

    orig_code = code
    code = to_unicode(code)
    code, literals = strip_string_literals(code)
    code = strip_common_indent(code)
    # Resolve the caller's namespaces two frames up (caller of cython_inline).
    if locals is None:
        locals = inspect.currentframe().f_back.f_back.f_locals
    if globals is None:
        globals = inspect.currentframe().f_back.f_back.f_globals
    try:
        _cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code)
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
    except AssertionError:
        if not quiet:
            # Parsing from strings not fully supported (e.g. cimports).
            print("Could not parse code as a string (to extract unbound symbols).")

    # Arguments that are the Cython shadow module become cimports instead.
    cimports = []
    for name, arg in list(kwds.items()):
        if arg is cython_module:
            cimports.append('\ncimport cython as %s' % name)
            del kwds[name]
    arg_names = sorted(kwds)
    arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
    if key_hash is None:
        key_hash = _inline_key(orig_code, arg_sigs, language_level)
    module_name = "_cython_inline_" + key_hash

    if module_name in sys.modules:
        module = sys.modules[module_name]

    else:
        build_extension = None
        if cython_inline.so_ext is None:
            # Figure out and cache current extension suffix
            build_extension = _get_build_extension()
            cython_inline.so_ext = build_extension.get_ext_filename('')

        lib_dir = os.path.abspath(lib_dir)
        module_path = os.path.join(lib_dir, module_name + cython_inline.so_ext)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)
        if force or not os.path.isfile(module_path):
            cflags = []
            define_macros = []
            c_include_dirs = []
            # Qualified argument types imply a cimport of their package.
            qualified = re.compile(r'([.\w]+)[.]')
            for type, _ in arg_sigs:
                m = qualified.match(type)
                if m:
                    cimports.append('\ncimport %s' % m.groups()[0])
                    # one special case
                    if m.groups()[0] == 'numpy':
                        import numpy
                        c_include_dirs.append(numpy.get_include())
                        define_macros.append(("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION"))
                        # cflags.append('-Wno-unused')
            module_body, func_body = extract_func_code(code)
            params = ', '.join(['%s %s' % a for a in arg_sigs])
            # The inlined code becomes the body of __invoke(); returning
            # locals() lets the wrapper extract the computed result.
            module_code = """
%(module_body)s
%(cimports)s
def __invoke(%(params)s):
%(func_body)s
    return locals()
""" % {'cimports': '\n'.join(cimports),
       'module_body': module_body,
       'params': params,
       'func_body': func_body }
            # Re-insert the string literals stripped out before parsing.
            for key, value in literals.items():
                module_code = module_code.replace(key, value)
            pyx_file = os.path.join(lib_dir, module_name + '.pyx')
            fh = open(pyx_file, 'w')
            try:
                fh.write(module_code)
            finally:
                fh.close()
            extension = Extension(
                name=module_name,
                sources=[pyx_file],
                include_dirs=c_include_dirs or None,
                extra_compile_args=cflags or None,
                define_macros=define_macros or None,
            )
            if build_extension is None:
                build_extension = _get_build_extension()
            build_extension.extensions = cythonize(
                [extension],
                include_path=cython_include_dirs or ['.'],
                compiler_directives=cython_compiler_directives,
                quiet=quiet)
            build_extension.build_temp = os.path.dirname(pyx_file)
            build_extension.build_lib = lib_dir
            build_extension.run()

        # On recent Windows/Python, DLL dependencies next to the module must
        # be made loadable explicitly.
        if sys.platform == 'win32' and sys.version_info >= (3, 8):
            with os.add_dll_directory(os.path.abspath(lib_dir)):
                module = load_dynamic(module_name, module_path)
        else:
            module = load_dynamic(module_name, module_path)

    _cython_inline_cache[orig_code, arg_sigs, key_hash] = module.__invoke
    arg_list = [kwds[arg] for arg in arg_names]
    return module.__invoke(*arg_list)
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
# Cached platform extension-module suffix used by cython_inline() above.
# Starts as None and is filled in on the first cython_inline() invocation.
cython_inline.so_ext = None
|
| 305 |
+
|
| 306 |
+
# Finds the first non-space character of a line (None for all-space lines).
_find_non_space = re.compile('[^ ]').search


def strip_common_indent(code):
    """Remove the smallest common leading indentation from all code lines.

    Blank lines and comment-only lines are ignored both when measuring
    the common indent and when stripping, so out-dented comments are
    left untouched rather than mangled.
    """
    min_indent = None
    lines = code.splitlines()
    # First pass: find the smallest indentation of any real code line.
    for line in lines:
        match = _find_non_space(line)
        if not match:
            continue  # blank
        indent = match.start()
        if line[indent] == '#':
            continue  # comment
        if min_indent is None or min_indent > indent:
            min_indent = indent
    # Second pass: dedent every non-blank, non-comment line.
    for ix, line in enumerate(lines):
        match = _find_non_space(line)
        # BUG FIX: the old code tested line[indent:indent+1] == '#' with a
        # stale 'indent' left over from the first loop, so comments that were
        # indented less than the last code line could be wrongly dedented.
        # Use this line's own first non-space column instead.
        if not match or line[match.start()] == '#':
            continue
        lines[ix] = line[min_indent:]
    return '\n'.join(lines)
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
# Matches top-level lines that must stay at module scope in the generated
# .pyx file (cdef extern/class blocks, cimports, star imports).
module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))')


def extract_func_code(code):
    """Split inline source into (module-level code, indented function body).

    Top-level lines matching ``module_statement`` (and their indented
    continuation lines) go to module scope; everything else becomes the
    body of the generated function, indented by four spaces.
    """
    module = []
    function = []
    bucket = function
    code = code.replace('\t', ' ')
    for line in code.split('\n'):
        if not line.startswith(' '):
            # A new top-level line decides where following lines belong.
            bucket = module if module_statement.match(line) else function
        bucket.append(line)
    module_code = '\n'.join(module)
    func_body = '    ' + '\n    '.join(function)
    return module_code, func_body
|
| 344 |
+
|
| 345 |
+
|
| 346 |
+
def get_body(source):
    """Return the body of a function or lambda *source* string.

    For a lambda, the expression after the colon is turned into a
    ``return`` statement so it can be compiled as a function body.
    """
    ix = source.index(':')
    # BUG FIX: the old check was `source[:5] == 'lambda'`, comparing a
    # 5-character slice against a 6-character string -- always False, so
    # the lambda branch was unreachable.
    if source.startswith('lambda'):
        return "return %s" % source[ix+1:]
    else:
        return source[ix+1:]
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
# Lots to be done here... It would be especially cool if compiled functions
|
| 355 |
+
# could invoke each other quickly.
|
| 356 |
+
# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):
    """Wrap a Python function so each call compiles and runs its body with Cython."""

    def __init__(self, f):
        self._f = f
        # Extract the function's body once; it is recompiled per call
        # through cython_inline()'s own caching.
        self._body = get_body(inspect.getsource(f))

    def __call__(self, *args, **kwds):
        bound_args = inspect.getcallargs(self._f, *args, **kwds)
        if IS_PY3:
            namespace = self._f.__globals__
        else:
            namespace = self._f.func_globals
        return cython_inline(self._body, locals=namespace, globals=namespace, **bound_args)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/IpythonMagic.py
ADDED
|
@@ -0,0 +1,572 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
=====================
|
| 4 |
+
Cython related magics
|
| 5 |
+
=====================
|
| 6 |
+
|
| 7 |
+
Magic command interface for interactive work with Cython
|
| 8 |
+
|
| 9 |
+
.. note::
|
| 10 |
+
|
| 11 |
+
The ``Cython`` package needs to be installed separately. It
|
| 12 |
+
can be obtained using ``easy_install`` or ``pip``.
|
| 13 |
+
|
| 14 |
+
Usage
|
| 15 |
+
=====
|
| 16 |
+
|
| 17 |
+
To enable the magics below, execute ``%load_ext cython``.
|
| 18 |
+
|
| 19 |
+
``%%cython``
|
| 20 |
+
|
| 21 |
+
{CYTHON_DOC}
|
| 22 |
+
|
| 23 |
+
``%%cython_inline``
|
| 24 |
+
|
| 25 |
+
{CYTHON_INLINE_DOC}
|
| 26 |
+
|
| 27 |
+
``%%cython_pyximport``
|
| 28 |
+
|
| 29 |
+
{CYTHON_PYXIMPORT_DOC}
|
| 30 |
+
|
| 31 |
+
Author:
|
| 32 |
+
* Brian Granger
|
| 33 |
+
|
| 34 |
+
Code moved from IPython and adapted by:
|
| 35 |
+
* Martín Gaitán
|
| 36 |
+
|
| 37 |
+
Parts of this code were taken from Cython.inline.
|
| 38 |
+
"""
|
| 39 |
+
#-----------------------------------------------------------------------------
|
| 40 |
+
# Copyright (C) 2010-2011, IPython Development Team.
|
| 41 |
+
#
|
| 42 |
+
# Distributed under the terms of the Modified BSD License.
|
| 43 |
+
#
|
| 44 |
+
# The full license is in the file ipython-COPYING.rst, distributed with this software.
|
| 45 |
+
#-----------------------------------------------------------------------------
|
| 46 |
+
|
| 47 |
+
from __future__ import absolute_import, print_function
|
| 48 |
+
|
| 49 |
+
import io
|
| 50 |
+
import os
|
| 51 |
+
import re
|
| 52 |
+
import sys
|
| 53 |
+
import time
|
| 54 |
+
import copy
|
| 55 |
+
import distutils.log
|
| 56 |
+
import textwrap
|
| 57 |
+
|
| 58 |
+
IO_ENCODING = sys.getfilesystemencoding()
|
| 59 |
+
IS_PY2 = sys.version_info[0] < 3
|
| 60 |
+
|
| 61 |
+
import hashlib
|
| 62 |
+
from distutils.core import Distribution, Extension
|
| 63 |
+
from distutils.command.build_ext import build_ext
|
| 64 |
+
|
| 65 |
+
from IPython.core import display
|
| 66 |
+
from IPython.core import magic_arguments
|
| 67 |
+
from IPython.core.magic import Magics, magics_class, cell_magic
|
| 68 |
+
try:
|
| 69 |
+
from IPython.paths import get_ipython_cache_dir
|
| 70 |
+
except ImportError:
|
| 71 |
+
# older IPython version
|
| 72 |
+
from IPython.utils.path import get_ipython_cache_dir
|
| 73 |
+
from IPython.utils.text import dedent
|
| 74 |
+
|
| 75 |
+
from ..Shadow import __version__ as cython_version
|
| 76 |
+
from ..Compiler.Errors import CompileError
|
| 77 |
+
from .Inline import cython_inline, load_dynamic
|
| 78 |
+
from .Dependencies import cythonize
|
| 79 |
+
from ..Utils import captured_fd, print_captured
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
# Per-compiler flag sets for profile guided optimisation: 'gen' flags build
# the instrumented binary, 'use' flags rebuild using the collected profile.
# '{TEMPDIR}' is substituted with the profile directory at build time.
PGO_CONFIG = {
    'gcc': {
        'gen': ['-fprofile-generate', '-fprofile-dir={TEMPDIR}'],
        'use': ['-fprofile-use', '-fprofile-correction', '-fprofile-dir={TEMPDIR}'],
    },
    # blind copy from 'configure' script in CPython 3.7
    'icc': {
        'gen': ['-prof-gen'],
        'use': ['-prof-use'],
    }
}
# mingw32 uses the gcc toolchain, so the same flags apply.
PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc']
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
if IS_PY2:
    def encode_fs(name):
        """Encode a path to filesystem bytes (Python 2 distutils wants bytes)."""
        if isinstance(name, bytes):
            return name
        return name.encode(IO_ENCODING)
else:
    def encode_fs(name):
        """On Python 3 paths are passed through unchanged."""
        return name
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
@magics_class
|
| 105 |
+
class CythonMagics(Magics):
|
| 106 |
+
|
| 107 |
+
def __init__(self, shell):
|
| 108 |
+
super(CythonMagics, self).__init__(shell)
|
| 109 |
+
self._reloads = {}
|
| 110 |
+
self._code_cache = {}
|
| 111 |
+
self._pyximport_installed = False
|
| 112 |
+
|
| 113 |
+
def _import_all(self, module):
|
| 114 |
+
mdict = module.__dict__
|
| 115 |
+
if '__all__' in mdict:
|
| 116 |
+
keys = mdict['__all__']
|
| 117 |
+
else:
|
| 118 |
+
keys = [k for k in mdict if not k.startswith('_')]
|
| 119 |
+
|
| 120 |
+
for k in keys:
|
| 121 |
+
try:
|
| 122 |
+
self.shell.push({k: mdict[k]})
|
| 123 |
+
except KeyError:
|
| 124 |
+
msg = "'module' object has no attribute '%s'" % k
|
| 125 |
+
raise AttributeError(msg)
|
| 126 |
+
|
| 127 |
+
    @cell_magic
    def cython_inline(self, line, cell):
        """Compile and run a Cython code cell using Cython.inline.

        This magic simply passes the body of the cell to Cython.inline
        and returns the result. If the variables `a` and `b` are defined
        in the user's namespace, here is a simple example that returns
        their sum::

            %%cython_inline
            return a+b

        For most purposes, we recommend the usage of the `%%cython` magic.
        """
        # NOTE(review): 'locs' is taken from user_global_ns and 'globs' from
        # user_ns, which looks swapped relative to the names -- kept as-is;
        # verify against upstream before changing.
        locs = self.shell.user_global_ns
        globs = self.shell.user_ns
        return cython_inline(cell, locals=locs, globals=globs)
|
| 144 |
+
|
| 145 |
+
    @cell_magic
    def cython_pyximport(self, line, cell):
        """Compile and import a Cython code cell using pyximport.

        The contents of the cell are written to a `.pyx` file in the current
        working directory, which is then imported using `pyximport`. This
        magic requires a module name to be passed::

            %%cython_pyximport modulename
            def f(x):
                return 2.0*x

        The compiled module is then imported and all of its symbols are
        injected into the user's namespace. For most purposes, we recommend
        the usage of the `%%cython` magic.
        """
        module_name = line.strip()
        if not module_name:
            raise ValueError('module name must be given')
        fname = module_name + '.pyx'
        with io.open(fname, 'w', encoding='utf-8') as f:
            f.write(cell)
        # Install pyximport lazily, at most once per session.
        if 'pyximport' not in sys.modules or not self._pyximport_installed:
            import pyximport
            pyximport.install()
            self._pyximport_installed = True
        if module_name in self._reloads:
            module = self._reloads[module_name]
            # Note: reloading extension modules is not actually supported
            # (requires PEP-489 reinitialisation support).
            # Don't know why this should ever have worked as it reads here.
            # All we really need to do is to update the globals below.
            #reload(module)
        else:
            __import__(module_name)
            module = sys.modules[module_name]
            self._reloads[module_name] = module
        # Export the (re)imported module's names to the user namespace.
        self._import_all(module)
|
| 183 |
+
|
| 184 |
+
    @magic_arguments.magic_arguments()
    @magic_arguments.argument(
        '-a', '--annotate', action='store_const', const='default', dest='annotate',
        help="Produce a colorized HTML version of the source."
    )
    @magic_arguments.argument(
        '--annotate-fullc', action='store_const', const='fullc', dest='annotate',
        help="Produce a colorized HTML version of the source "
             "which includes entire generated C/C++-code."
    )
    @magic_arguments.argument(
        '-+', '--cplus', action='store_true', default=False,
        help="Output a C++ rather than C file."
    )
    @magic_arguments.argument(
        '-3', dest='language_level', action='store_const', const=3, default=None,
        help="Select Python 3 syntax."
    )
    @magic_arguments.argument(
        '-2', dest='language_level', action='store_const', const=2, default=None,
        help="Select Python 2 syntax."
    )
    @magic_arguments.argument(
        '-f', '--force', action='store_true', default=False,
        help="Force the compilation of a new module, even if the source has been "
             "previously compiled."
    )
    @magic_arguments.argument(
        '-c', '--compile-args', action='append', default=[],
        help="Extra flags to pass to compiler via the `extra_compile_args` "
             "Extension flag (can be specified multiple times)."
    )
    @magic_arguments.argument(
        '--link-args', action='append', default=[],
        help="Extra flags to pass to linker via the `extra_link_args` "
             "Extension flag (can be specified multiple times)."
    )
    @magic_arguments.argument(
        '-l', '--lib', action='append', default=[],
        help="Add a library to link the extension against (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-n', '--name',
        help="Specify a name for the Cython module."
    )
    @magic_arguments.argument(
        '-L', dest='library_dirs', metavar='dir', action='append', default=[],
        help="Add a path to the list of library directories (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-I', '--include', action='append', default=[],
        help="Add a path to the list of include directories (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '-S', '--src', action='append', default=[],
        help="Add a path to the list of src files (can be specified "
             "multiple times)."
    )
    @magic_arguments.argument(
        '--pgo', dest='pgo', action='store_true', default=False,
        help=("Enable profile guided optimisation in the C compiler. "
              "Compiles the cell twice and executes it in between to generate a runtime profile.")
    )
    @magic_arguments.argument(
        '--verbose', dest='quiet', action='store_false', default=True,
        help=("Print debug information like generated .c/.cpp file location "
              "and exact gcc/g++ command invoked.")
    )
    @cell_magic
    def cython(self, line, cell):
        """Compile and import everything from a Cython code cell.

        The contents of the cell are written to a `.pyx` file in the
        directory `IPYTHONDIR/cython` using a filename with the hash of the
        code. This file is then cythonized and compiled. The resulting module
        is imported and all of its symbols are injected into the user's
        namespace. The usage is similar to that of `%%cython_pyximport` but
        you don't have to pass a module name::

            %%cython
            def f(x):
                return 2.0*x

        To compile OpenMP codes, pass the required `--compile-args`
        and `--link-args`. For example with gcc::

            %%cython --compile-args=-fopenmp --link-args=-fopenmp
            ...

        To enable profile guided optimisation, pass the ``--pgo`` option.
        Note that the cell itself needs to take care of establishing a suitable
        profile when executed. This can be done by implementing the functions to
        optimise, and then calling them directly in the same cell on some realistic
        training data like this::

            %%cython --pgo
            def critical_function(data):
                for item in data:
                    ...

            # execute function several times to build profile
            from somewhere import some_typical_data
            for _ in range(100):
                critical_function(some_typical_data)

        In Python 3.5 and later, you can distinguish between the profile and
        non-profile runs as follows::

            if "_pgo_" in __name__:
                ... # execute critical code here
        """
        args = magic_arguments.parse_argstring(self.cython, line)
        code = cell if cell.endswith('\n') else cell + '\n'
        lib_dir = os.path.join(get_ipython_cache_dir(), 'cython')
        # The cache key covers everything that affects the compiled result.
        key = (code, line, sys.version_info, sys.executable, cython_version)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)

        if args.pgo:
            key += ('pgo',)
        if args.force:
            # Force a new module name by adding the current time to the
            # key which is hashed to determine the module name.
            key += (time.time(),)

        if args.name:
            module_name = str(args.name)  # no-op in Py3
        else:
            module_name = "_cython_magic_" + hashlib.sha1(str(key).encode('utf-8')).hexdigest()
        html_file = os.path.join(lib_dir, module_name + '.html')
        module_path = os.path.join(lib_dir, module_name + self.so_ext)

        have_module = os.path.isfile(module_path)
        need_cythonize = args.pgo or not have_module

        if args.annotate:
            # Annotation requested but HTML missing: must recompile.
            if not os.path.isfile(html_file):
                need_cythonize = True

        extension = None
        if need_cythonize:
            extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
            if extensions is None:
                # Compilation failed and printed error message
                return None
            assert len(extensions) == 1
            extension = extensions[0]
            self._code_cache[key] = module_name

        if args.pgo:
            # First build + run with instrumentation to collect a profile.
            self._profile_pgo_wrapper(extension, lib_dir)

        def print_compiler_output(stdout, stderr, where):
            # On windows, errors are printed to stdout, we redirect both to sys.stderr.
            print_captured(stdout, where, u"Content of stdout:\n")
            print_captured(stderr, where, u"Content of stderr:\n")

        get_stderr = get_stdout = None
        try:
            with captured_fd(1) as get_stdout:
                with captured_fd(2) as get_stderr:
                    self._build_extension(
                        extension, lib_dir, pgo_step_name='use' if args.pgo else None, quiet=args.quiet)
        except (distutils.errors.CompileError, distutils.errors.LinkError):
            # Build failed, print error message from compiler/linker
            print_compiler_output(get_stdout(), get_stderr(), sys.stderr)
            return None

        # Build seems ok, but we might still want to show any warnings that occurred
        print_compiler_output(get_stdout(), get_stderr(), sys.stdout)

        module = load_dynamic(module_name, module_path)
        self._import_all(module)

        if args.annotate:
            try:
                with io.open(html_file, encoding='utf-8') as f:
                    annotated_html = f.read()
            except IOError as e:
                # File could not be opened. Most likely the user has a version
                # of Cython before 0.15.1 (when `cythonize` learned the
                # `force` keyword argument) and has already compiled this
                # exact source without annotation.
                print('Cython completed successfully but the annotated '
                      'source could not be read.', file=sys.stderr)
                print(e, file=sys.stderr)
            else:
                return display.HTML(self.clean_annotated_html(annotated_html))
|
| 376 |
+
|
| 377 |
+
    def _profile_pgo_wrapper(self, extension, lib_dir):
        """
        Generate a .c file for a separate extension module that calls the
        module init function of the original module. This makes sure that the
        PGO profiler sees the correct .o file of the final module, but it still
        allows us to import the module under a different name for profiling,
        before recompiling it into the PGO optimised module. Overwriting and
        reimporting the same shared library is not portable.
        """
        extension = copy.copy(extension)  # shallow copy, do not modify sources in place!
        module_name = extension.name
        pgo_module_name = '_pgo_' + module_name
        pgo_wrapper_c_file = os.path.join(lib_dir, pgo_module_name + '.c')
        # The wrapper C module simply forwards its init to the real module,
        # registering the real module under the '_pgo_' name as well.
        with io.open(pgo_wrapper_c_file, 'w', encoding='utf-8') as f:
            f.write(textwrap.dedent(u"""
            #include "Python.h"
            #if PY_MAJOR_VERSION < 3
            extern PyMODINIT_FUNC init%(module_name)s(void);
            PyMODINIT_FUNC init%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC init%(pgo_module_name)s(void) {
                PyObject *sys_modules;
                init%(module_name)s(); if (PyErr_Occurred()) return;
                sys_modules = PyImport_GetModuleDict(); /* borrowed, no exception, "never" fails */
                if (sys_modules) {
                    PyObject *module = PyDict_GetItemString(sys_modules, "%(module_name)s"); if (!module) return;
                    PyDict_SetItemString(sys_modules, "%(pgo_module_name)s", module);
                    Py_DECREF(module);
                }
            }
            #else
            extern PyMODINIT_FUNC PyInit_%(module_name)s(void);
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void) {
                return PyInit_%(module_name)s();
            }
            #endif
            """ % {'module_name': module_name, 'pgo_module_name': pgo_module_name}))

        extension.sources = extension.sources + [pgo_wrapper_c_file]  # do not modify in place!
        extension.name = pgo_module_name

        # Build the instrumented ('gen') variant.
        self._build_extension(extension, lib_dir, pgo_step_name='gen')

        # import and execute module code to generate profile
        so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
        load_dynamic(pgo_module_name, so_module_path)
|
| 423 |
+
|
| 424 |
+
def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
|
| 425 |
+
pyx_file = os.path.join(lib_dir, module_name + '.pyx')
|
| 426 |
+
pyx_file = encode_fs(pyx_file)
|
| 427 |
+
|
| 428 |
+
c_include_dirs = args.include
|
| 429 |
+
c_src_files = list(map(str, args.src))
|
| 430 |
+
if 'numpy' in code:
|
| 431 |
+
import numpy
|
| 432 |
+
c_include_dirs.append(numpy.get_include())
|
| 433 |
+
with io.open(pyx_file, 'w', encoding='utf-8') as f:
|
| 434 |
+
f.write(code)
|
| 435 |
+
extension = Extension(
|
| 436 |
+
name=module_name,
|
| 437 |
+
sources=[pyx_file] + c_src_files,
|
| 438 |
+
include_dirs=c_include_dirs,
|
| 439 |
+
library_dirs=args.library_dirs,
|
| 440 |
+
extra_compile_args=args.compile_args,
|
| 441 |
+
extra_link_args=args.link_args,
|
| 442 |
+
libraries=args.lib,
|
| 443 |
+
language='c++' if args.cplus else 'c',
|
| 444 |
+
)
|
| 445 |
+
try:
|
| 446 |
+
opts = dict(
|
| 447 |
+
quiet=quiet,
|
| 448 |
+
annotate=args.annotate,
|
| 449 |
+
force=True,
|
| 450 |
+
language_level=min(3, sys.version_info[0]),
|
| 451 |
+
)
|
| 452 |
+
if args.language_level is not None:
|
| 453 |
+
assert args.language_level in (2, 3)
|
| 454 |
+
opts['language_level'] = args.language_level
|
| 455 |
+
return cythonize([extension], **opts)
|
| 456 |
+
except CompileError:
|
| 457 |
+
return None
|
| 458 |
+
|
| 459 |
+
def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True):
|
| 460 |
+
build_extension = self._get_build_extension(
|
| 461 |
+
extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name)
|
| 462 |
+
old_threshold = None
|
| 463 |
+
try:
|
| 464 |
+
if not quiet:
|
| 465 |
+
old_threshold = distutils.log.set_threshold(distutils.log.DEBUG)
|
| 466 |
+
build_extension.run()
|
| 467 |
+
finally:
|
| 468 |
+
if not quiet and old_threshold is not None:
|
| 469 |
+
distutils.log.set_threshold(old_threshold)
|
| 470 |
+
|
| 471 |
+
def _add_pgo_flags(self, build_extension, step_name, temp_dir):
|
| 472 |
+
compiler_type = build_extension.compiler.compiler_type
|
| 473 |
+
if compiler_type == 'unix':
|
| 474 |
+
compiler_cmd = build_extension.compiler.compiler_so
|
| 475 |
+
# TODO: we could try to call "[cmd] --version" for better insights
|
| 476 |
+
if not compiler_cmd:
|
| 477 |
+
pass
|
| 478 |
+
elif 'clang' in compiler_cmd or 'clang' in compiler_cmd[0]:
|
| 479 |
+
compiler_type = 'clang'
|
| 480 |
+
elif 'icc' in compiler_cmd or 'icc' in compiler_cmd[0]:
|
| 481 |
+
compiler_type = 'icc'
|
| 482 |
+
elif 'gcc' in compiler_cmd or 'gcc' in compiler_cmd[0]:
|
| 483 |
+
compiler_type = 'gcc'
|
| 484 |
+
elif 'g++' in compiler_cmd or 'g++' in compiler_cmd[0]:
|
| 485 |
+
compiler_type = 'gcc'
|
| 486 |
+
config = PGO_CONFIG.get(compiler_type)
|
| 487 |
+
orig_flags = []
|
| 488 |
+
if config and step_name in config:
|
| 489 |
+
flags = [f.format(TEMPDIR=temp_dir) for f in config[step_name]]
|
| 490 |
+
for extension in build_extension.extensions:
|
| 491 |
+
orig_flags.append((extension.extra_compile_args, extension.extra_link_args))
|
| 492 |
+
extension.extra_compile_args = extension.extra_compile_args + flags
|
| 493 |
+
extension.extra_link_args = extension.extra_link_args + flags
|
| 494 |
+
else:
|
| 495 |
+
print("No PGO %s configuration known for C compiler type '%s'" % (step_name, compiler_type),
|
| 496 |
+
file=sys.stderr)
|
| 497 |
+
return orig_flags
|
| 498 |
+
|
| 499 |
+
@property
|
| 500 |
+
def so_ext(self):
|
| 501 |
+
"""The extension suffix for compiled modules."""
|
| 502 |
+
try:
|
| 503 |
+
return self._so_ext
|
| 504 |
+
except AttributeError:
|
| 505 |
+
self._so_ext = self._get_build_extension().get_ext_filename('')
|
| 506 |
+
return self._so_ext
|
| 507 |
+
|
| 508 |
+
def _clear_distutils_mkpath_cache(self):
|
| 509 |
+
"""clear distutils mkpath cache
|
| 510 |
+
|
| 511 |
+
prevents distutils from skipping re-creation of dirs that have been removed
|
| 512 |
+
"""
|
| 513 |
+
try:
|
| 514 |
+
from distutils.dir_util import _path_created
|
| 515 |
+
except ImportError:
|
| 516 |
+
pass
|
| 517 |
+
else:
|
| 518 |
+
_path_created.clear()
|
| 519 |
+
|
| 520 |
+
def _get_build_extension(self, extension=None, lib_dir=None, temp_dir=None,
|
| 521 |
+
pgo_step_name=None, _build_ext=build_ext):
|
| 522 |
+
self._clear_distutils_mkpath_cache()
|
| 523 |
+
dist = Distribution()
|
| 524 |
+
config_files = dist.find_config_files()
|
| 525 |
+
try:
|
| 526 |
+
config_files.remove('setup.cfg')
|
| 527 |
+
except ValueError:
|
| 528 |
+
pass
|
| 529 |
+
dist.parse_config_files(config_files)
|
| 530 |
+
|
| 531 |
+
if not temp_dir:
|
| 532 |
+
temp_dir = lib_dir
|
| 533 |
+
add_pgo_flags = self._add_pgo_flags
|
| 534 |
+
|
| 535 |
+
if pgo_step_name:
|
| 536 |
+
base_build_ext = _build_ext
|
| 537 |
+
class _build_ext(_build_ext):
|
| 538 |
+
def build_extensions(self):
|
| 539 |
+
add_pgo_flags(self, pgo_step_name, temp_dir)
|
| 540 |
+
base_build_ext.build_extensions(self)
|
| 541 |
+
|
| 542 |
+
build_extension = _build_ext(dist)
|
| 543 |
+
build_extension.finalize_options()
|
| 544 |
+
if temp_dir:
|
| 545 |
+
temp_dir = encode_fs(temp_dir)
|
| 546 |
+
build_extension.build_temp = temp_dir
|
| 547 |
+
if lib_dir:
|
| 548 |
+
lib_dir = encode_fs(lib_dir)
|
| 549 |
+
build_extension.build_lib = lib_dir
|
| 550 |
+
if extension is not None:
|
| 551 |
+
build_extension.extensions = [extension]
|
| 552 |
+
return build_extension
|
| 553 |
+
|
| 554 |
+
@staticmethod
|
| 555 |
+
def clean_annotated_html(html):
|
| 556 |
+
"""Clean up the annotated HTML source.
|
| 557 |
+
|
| 558 |
+
Strips the link to the generated C or C++ file, which we do not
|
| 559 |
+
present to the user.
|
| 560 |
+
"""
|
| 561 |
+
r = re.compile('<p>Raw output: <a href="(.*)">(.*)</a>')
|
| 562 |
+
html = '\n'.join(l for l in html.splitlines() if not r.match(l))
|
| 563 |
+
return html
|
| 564 |
+
|
| 565 |
+
__doc__ = __doc__.format(
|
| 566 |
+
# rST doesn't see the -+ flag as part of an option list, so we
|
| 567 |
+
# hide it from the module-level docstring.
|
| 568 |
+
CYTHON_DOC=dedent(CythonMagics.cython.__doc__\
|
| 569 |
+
.replace('-+, --cplus', '--cplus ')),
|
| 570 |
+
CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__),
|
| 571 |
+
CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__),
|
| 572 |
+
)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestCyCache.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import difflib
|
| 2 |
+
import glob
|
| 3 |
+
import gzip
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
import tempfile
|
| 7 |
+
import unittest
|
| 8 |
+
|
| 9 |
+
import Cython.Build.Dependencies
|
| 10 |
+
import Cython.Utils
|
| 11 |
+
from Cython.TestUtils import CythonTest
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class TestCyCache(CythonTest):
|
| 15 |
+
|
| 16 |
+
def setUp(self):
|
| 17 |
+
CythonTest.setUp(self)
|
| 18 |
+
self.temp_dir = tempfile.mkdtemp(
|
| 19 |
+
prefix='cycache-test',
|
| 20 |
+
dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None)
|
| 21 |
+
self.src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 22 |
+
self.cache_dir = tempfile.mkdtemp(prefix='cache', dir=self.temp_dir)
|
| 23 |
+
|
| 24 |
+
def cache_files(self, file_glob):
|
| 25 |
+
return glob.glob(os.path.join(self.cache_dir, file_glob))
|
| 26 |
+
|
| 27 |
+
def fresh_cythonize(self, *args, **kwargs):
|
| 28 |
+
Cython.Utils.clear_function_caches()
|
| 29 |
+
Cython.Build.Dependencies._dep_tree = None # discard method caches
|
| 30 |
+
Cython.Build.Dependencies.cythonize(*args, **kwargs)
|
| 31 |
+
|
| 32 |
+
def test_cycache_switch(self):
|
| 33 |
+
content1 = 'value = 1\n'
|
| 34 |
+
content2 = 'value = 2\n'
|
| 35 |
+
a_pyx = os.path.join(self.src_dir, 'a.pyx')
|
| 36 |
+
a_c = a_pyx[:-4] + '.c'
|
| 37 |
+
|
| 38 |
+
with open(a_pyx, 'w') as f:
|
| 39 |
+
f.write(content1)
|
| 40 |
+
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
|
| 41 |
+
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
|
| 42 |
+
self.assertEqual(1, len(self.cache_files('a.c*')))
|
| 43 |
+
with open(a_c) as f:
|
| 44 |
+
a_contents1 = f.read()
|
| 45 |
+
os.unlink(a_c)
|
| 46 |
+
|
| 47 |
+
with open(a_pyx, 'w') as f:
|
| 48 |
+
f.write(content2)
|
| 49 |
+
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
|
| 50 |
+
with open(a_c) as f:
|
| 51 |
+
a_contents2 = f.read()
|
| 52 |
+
os.unlink(a_c)
|
| 53 |
+
|
| 54 |
+
self.assertNotEqual(a_contents1, a_contents2, 'C file not changed!')
|
| 55 |
+
self.assertEqual(2, len(self.cache_files('a.c*')))
|
| 56 |
+
|
| 57 |
+
with open(a_pyx, 'w') as f:
|
| 58 |
+
f.write(content1)
|
| 59 |
+
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
|
| 60 |
+
self.assertEqual(2, len(self.cache_files('a.c*')))
|
| 61 |
+
with open(a_c) as f:
|
| 62 |
+
a_contents = f.read()
|
| 63 |
+
self.assertEqual(
|
| 64 |
+
a_contents, a_contents1,
|
| 65 |
+
msg='\n'.join(list(difflib.unified_diff(
|
| 66 |
+
a_contents.split('\n'), a_contents1.split('\n')))[:10]))
|
| 67 |
+
|
| 68 |
+
@unittest.skipIf(sys.version_info[:2] == (3, 12) and sys.platform == "win32",
|
| 69 |
+
"This test is mysteriously broken on Windows on the CI only "
|
| 70 |
+
"(https://github.com/cython/cython/issues/5825)")
|
| 71 |
+
def test_cycache_uses_cache(self):
|
| 72 |
+
a_pyx = os.path.join(self.src_dir, 'a.pyx')
|
| 73 |
+
a_c = a_pyx[:-4] + '.c'
|
| 74 |
+
with open(a_pyx, 'w') as f:
|
| 75 |
+
f.write('pass')
|
| 76 |
+
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
|
| 77 |
+
a_cache = os.path.join(self.cache_dir, os.listdir(self.cache_dir)[0])
|
| 78 |
+
gzip.GzipFile(a_cache, 'wb').write('fake stuff'.encode('ascii'))
|
| 79 |
+
os.unlink(a_c)
|
| 80 |
+
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
|
| 81 |
+
with open(a_c) as f:
|
| 82 |
+
a_contents = f.read()
|
| 83 |
+
self.assertEqual(a_contents, 'fake stuff',
|
| 84 |
+
'Unexpected contents: %s...' % a_contents[:100])
|
| 85 |
+
|
| 86 |
+
def test_multi_file_output(self):
|
| 87 |
+
a_pyx = os.path.join(self.src_dir, 'a.pyx')
|
| 88 |
+
a_c = a_pyx[:-4] + '.c'
|
| 89 |
+
a_h = a_pyx[:-4] + '.h'
|
| 90 |
+
a_api_h = a_pyx[:-4] + '_api.h'
|
| 91 |
+
with open(a_pyx, 'w') as f:
|
| 92 |
+
f.write('cdef public api int foo(int x): return x\n')
|
| 93 |
+
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
|
| 94 |
+
expected = [a_c, a_h, a_api_h]
|
| 95 |
+
for output in expected:
|
| 96 |
+
self.assertTrue(os.path.exists(output), output)
|
| 97 |
+
os.unlink(output)
|
| 98 |
+
self.fresh_cythonize(a_pyx, cache=self.cache_dir)
|
| 99 |
+
for output in expected:
|
| 100 |
+
self.assertTrue(os.path.exists(output), output)
|
| 101 |
+
|
| 102 |
+
def test_options_invalidation(self):
|
| 103 |
+
hash_pyx = os.path.join(self.src_dir, 'options.pyx')
|
| 104 |
+
hash_c = hash_pyx[:-len('.pyx')] + '.c'
|
| 105 |
+
|
| 106 |
+
with open(hash_pyx, 'w') as f:
|
| 107 |
+
f.write('pass')
|
| 108 |
+
self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False)
|
| 109 |
+
self.assertEqual(1, len(self.cache_files('options.c*')))
|
| 110 |
+
|
| 111 |
+
os.unlink(hash_c)
|
| 112 |
+
self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=True)
|
| 113 |
+
self.assertEqual(2, len(self.cache_files('options.c*')))
|
| 114 |
+
|
| 115 |
+
os.unlink(hash_c)
|
| 116 |
+
self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=False)
|
| 117 |
+
self.assertEqual(2, len(self.cache_files('options.c*')))
|
| 118 |
+
|
| 119 |
+
os.unlink(hash_c)
|
| 120 |
+
self.fresh_cythonize(hash_pyx, cache=self.cache_dir, cplus=False, show_version=True)
|
| 121 |
+
self.assertEqual(2, len(self.cache_files('options.c*')))
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestCythonizeArgsParser.py
ADDED
|
@@ -0,0 +1,482 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from Cython.Build.Cythonize import (
|
| 2 |
+
create_args_parser, parse_args_raw, parse_args,
|
| 3 |
+
parallel_compiles
|
| 4 |
+
)
|
| 5 |
+
|
| 6 |
+
from Cython.Compiler import Options
|
| 7 |
+
from Cython.Compiler.Tests.Utils import backup_Options, restore_Options, check_global_options
|
| 8 |
+
|
| 9 |
+
from unittest import TestCase
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
try:
|
| 13 |
+
from StringIO import StringIO
|
| 14 |
+
except ImportError:
|
| 15 |
+
from io import StringIO # doesn't accept 'str' in Py2
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class TestCythonizeArgsParser(TestCase):
|
| 19 |
+
|
| 20 |
+
def setUp(self):
|
| 21 |
+
TestCase.setUp(self)
|
| 22 |
+
self.parse_args = lambda x, parser=create_args_parser() : parse_args_raw(parser, x)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def are_default(self, options, skip):
|
| 26 |
+
# empty containers
|
| 27 |
+
empty_containers = ['directives', 'compile_time_env', 'options', 'excludes']
|
| 28 |
+
are_none = ['language_level', 'annotate', 'build', 'build_inplace', 'force', 'quiet', 'lenient', 'keep_going', 'no_docstrings']
|
| 29 |
+
for opt_name in empty_containers:
|
| 30 |
+
if len(getattr(options, opt_name))!=0 and (opt_name not in skip):
|
| 31 |
+
self.assertEqual(opt_name,"", msg="For option "+opt_name)
|
| 32 |
+
return False
|
| 33 |
+
for opt_name in are_none:
|
| 34 |
+
if (getattr(options, opt_name) is not None) and (opt_name not in skip):
|
| 35 |
+
self.assertEqual(opt_name,"", msg="For option "+opt_name)
|
| 36 |
+
return False
|
| 37 |
+
if options.parallel!=parallel_compiles and ('parallel' not in skip):
|
| 38 |
+
return False
|
| 39 |
+
return True
|
| 40 |
+
|
| 41 |
+
# testing directives:
|
| 42 |
+
def test_directive_short(self):
|
| 43 |
+
options, args = self.parse_args(['-X', 'cdivision=True'])
|
| 44 |
+
self.assertFalse(args)
|
| 45 |
+
self.assertTrue(self.are_default(options, ['directives']))
|
| 46 |
+
self.assertEqual(options.directives['cdivision'], True)
|
| 47 |
+
|
| 48 |
+
def test_directive_long(self):
|
| 49 |
+
options, args = self.parse_args(['--directive', 'cdivision=True'])
|
| 50 |
+
self.assertFalse(args)
|
| 51 |
+
self.assertTrue(self.are_default(options, ['directives']))
|
| 52 |
+
self.assertEqual(options.directives['cdivision'], True)
|
| 53 |
+
|
| 54 |
+
def test_directive_multiple(self):
|
| 55 |
+
options, args = self.parse_args(['-X', 'cdivision=True', '-X', 'c_string_type=bytes'])
|
| 56 |
+
self.assertFalse(args)
|
| 57 |
+
self.assertTrue(self.are_default(options, ['directives']))
|
| 58 |
+
self.assertEqual(options.directives['cdivision'], True)
|
| 59 |
+
self.assertEqual(options.directives['c_string_type'], 'bytes')
|
| 60 |
+
|
| 61 |
+
def test_directive_multiple_v2(self):
|
| 62 |
+
options, args = self.parse_args(['-X', 'cdivision=True,c_string_type=bytes'])
|
| 63 |
+
self.assertFalse(args)
|
| 64 |
+
self.assertTrue(self.are_default(options, ['directives']))
|
| 65 |
+
self.assertEqual(options.directives['cdivision'], True)
|
| 66 |
+
self.assertEqual(options.directives['c_string_type'], 'bytes')
|
| 67 |
+
|
| 68 |
+
def test_directive_value_yes(self):
|
| 69 |
+
options, args = self.parse_args(['-X', 'cdivision=YeS'])
|
| 70 |
+
self.assertFalse(args)
|
| 71 |
+
self.assertTrue(self.are_default(options, ['directives']))
|
| 72 |
+
self.assertEqual(options.directives['cdivision'], True)
|
| 73 |
+
|
| 74 |
+
def test_directive_value_no(self):
|
| 75 |
+
options, args = self.parse_args(['-X', 'cdivision=no'])
|
| 76 |
+
self.assertFalse(args)
|
| 77 |
+
self.assertTrue(self.are_default(options, ['directives']))
|
| 78 |
+
self.assertEqual(options.directives['cdivision'], False)
|
| 79 |
+
|
| 80 |
+
def test_directive_value_invalid(self):
|
| 81 |
+
with self.assertRaises(ValueError) as context:
|
| 82 |
+
options, args = self.parse_args(['-X', 'cdivision=sadfasd'])
|
| 83 |
+
|
| 84 |
+
def test_directive_key_invalid(self):
|
| 85 |
+
with self.assertRaises(ValueError) as context:
|
| 86 |
+
options, args = self.parse_args(['-X', 'abracadabra'])
|
| 87 |
+
|
| 88 |
+
def test_directive_no_value(self):
|
| 89 |
+
with self.assertRaises(ValueError) as context:
|
| 90 |
+
options, args = self.parse_args(['-X', 'cdivision'])
|
| 91 |
+
|
| 92 |
+
def test_directives_types(self):
|
| 93 |
+
directives = {
|
| 94 |
+
'auto_pickle': True,
|
| 95 |
+
'c_string_type': 'bytearray',
|
| 96 |
+
'c_string_type': 'bytes',
|
| 97 |
+
'c_string_type': 'str',
|
| 98 |
+
'c_string_type': 'bytearray',
|
| 99 |
+
'c_string_type': 'unicode',
|
| 100 |
+
'c_string_encoding' : 'ascii',
|
| 101 |
+
'language_level' : 2,
|
| 102 |
+
'language_level' : 3,
|
| 103 |
+
'language_level' : '3str',
|
| 104 |
+
'set_initial_path' : 'my_initial_path',
|
| 105 |
+
}
|
| 106 |
+
for key, value in directives.items():
|
| 107 |
+
cmd = '{key}={value}'.format(key=key, value=str(value))
|
| 108 |
+
options, args = self.parse_args(['-X', cmd])
|
| 109 |
+
self.assertFalse(args)
|
| 110 |
+
self.assertTrue(self.are_default(options, ['directives']), msg = "Error for option: "+cmd)
|
| 111 |
+
self.assertEqual(options.directives[key], value, msg = "Error for option: "+cmd)
|
| 112 |
+
|
| 113 |
+
def test_directives_wrong(self):
|
| 114 |
+
directives = {
|
| 115 |
+
'auto_pickle': 42, # for bool type
|
| 116 |
+
'auto_pickle': 'NONONO', # for bool type
|
| 117 |
+
'c_string_type': 'bites',
|
| 118 |
+
#'c_string_encoding' : 'a',
|
| 119 |
+
#'language_level' : 4,
|
| 120 |
+
}
|
| 121 |
+
for key, value in directives.items():
|
| 122 |
+
cmd = '{key}={value}'.format(key=key, value=str(value))
|
| 123 |
+
with self.assertRaises(ValueError, msg = "Error for option: "+cmd) as context:
|
| 124 |
+
options, args = self.parse_args(['-X', cmd])
|
| 125 |
+
|
| 126 |
+
def test_compile_time_env_short(self):
|
| 127 |
+
options, args = self.parse_args(['-E', 'MYSIZE=10'])
|
| 128 |
+
self.assertFalse(args)
|
| 129 |
+
self.assertTrue(self.are_default(options, ['compile_time_env']))
|
| 130 |
+
self.assertEqual(options.compile_time_env['MYSIZE'], 10)
|
| 131 |
+
|
| 132 |
+
def test_compile_time_env_long(self):
|
| 133 |
+
options, args = self.parse_args(['--compile-time-env', 'MYSIZE=10'])
|
| 134 |
+
self.assertFalse(args)
|
| 135 |
+
self.assertTrue(self.are_default(options, ['compile_time_env']))
|
| 136 |
+
self.assertEqual(options.compile_time_env['MYSIZE'], 10)
|
| 137 |
+
|
| 138 |
+
def test_compile_time_env_multiple(self):
|
| 139 |
+
options, args = self.parse_args(['-E', 'MYSIZE=10', '-E', 'ARRSIZE=11'])
|
| 140 |
+
self.assertFalse(args)
|
| 141 |
+
self.assertTrue(self.are_default(options, ['compile_time_env']))
|
| 142 |
+
self.assertEqual(options.compile_time_env['MYSIZE'], 10)
|
| 143 |
+
self.assertEqual(options.compile_time_env['ARRSIZE'], 11)
|
| 144 |
+
|
| 145 |
+
def test_compile_time_env_multiple_v2(self):
|
| 146 |
+
options, args = self.parse_args(['-E', 'MYSIZE=10,ARRSIZE=11'])
|
| 147 |
+
self.assertFalse(args)
|
| 148 |
+
self.assertTrue(self.are_default(options, ['compile_time_env']))
|
| 149 |
+
self.assertEqual(options.compile_time_env['MYSIZE'], 10)
|
| 150 |
+
self.assertEqual(options.compile_time_env['ARRSIZE'], 11)
|
| 151 |
+
|
| 152 |
+
#testing options
|
| 153 |
+
def test_option_short(self):
|
| 154 |
+
options, args = self.parse_args(['-s', 'docstrings=True'])
|
| 155 |
+
self.assertFalse(args)
|
| 156 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 157 |
+
self.assertEqual(options.options['docstrings'], True)
|
| 158 |
+
|
| 159 |
+
def test_option_long(self):
|
| 160 |
+
options, args = self.parse_args(['--option', 'docstrings=True'])
|
| 161 |
+
self.assertFalse(args)
|
| 162 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 163 |
+
self.assertEqual(options.options['docstrings'], True)
|
| 164 |
+
|
| 165 |
+
def test_option_multiple(self):
|
| 166 |
+
options, args = self.parse_args(['-s', 'docstrings=True', '-s', 'buffer_max_dims=8'])
|
| 167 |
+
self.assertFalse(args)
|
| 168 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 169 |
+
self.assertEqual(options.options['docstrings'], True)
|
| 170 |
+
self.assertEqual(options.options['buffer_max_dims'], True) # really?
|
| 171 |
+
|
| 172 |
+
def test_option_multiple_v2(self):
|
| 173 |
+
options, args = self.parse_args(['-s', 'docstrings=True,buffer_max_dims=8'])
|
| 174 |
+
self.assertFalse(args)
|
| 175 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 176 |
+
self.assertEqual(options.options['docstrings'], True)
|
| 177 |
+
self.assertEqual(options.options['buffer_max_dims'], True) # really?
|
| 178 |
+
|
| 179 |
+
def test_option_value_yes(self):
|
| 180 |
+
options, args = self.parse_args(['-s', 'docstrings=YeS'])
|
| 181 |
+
self.assertFalse(args)
|
| 182 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 183 |
+
self.assertEqual(options.options['docstrings'], True)
|
| 184 |
+
|
| 185 |
+
def test_option_value_4242(self):
|
| 186 |
+
options, args = self.parse_args(['-s', 'docstrings=4242'])
|
| 187 |
+
self.assertFalse(args)
|
| 188 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 189 |
+
self.assertEqual(options.options['docstrings'], True)
|
| 190 |
+
|
| 191 |
+
def test_option_value_0(self):
|
| 192 |
+
options, args = self.parse_args(['-s', 'docstrings=0'])
|
| 193 |
+
self.assertFalse(args)
|
| 194 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 195 |
+
self.assertEqual(options.options['docstrings'], False)
|
| 196 |
+
|
| 197 |
+
def test_option_value_emptystr(self):
|
| 198 |
+
options, args = self.parse_args(['-s', 'docstrings='])
|
| 199 |
+
self.assertFalse(args)
|
| 200 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 201 |
+
self.assertEqual(options.options['docstrings'], True)
|
| 202 |
+
|
| 203 |
+
def test_option_value_a_str(self):
|
| 204 |
+
options, args = self.parse_args(['-s', 'docstrings=BB'])
|
| 205 |
+
self.assertFalse(args)
|
| 206 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 207 |
+
self.assertEqual(options.options['docstrings'], True)
|
| 208 |
+
|
| 209 |
+
def test_option_value_no(self):
|
| 210 |
+
options, args = self.parse_args(['-s', 'docstrings=nO'])
|
| 211 |
+
self.assertFalse(args)
|
| 212 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 213 |
+
self.assertEqual(options.options['docstrings'], False)
|
| 214 |
+
|
| 215 |
+
def test_option_no_value(self):
|
| 216 |
+
options, args = self.parse_args(['-s', 'docstrings'])
|
| 217 |
+
self.assertFalse(args)
|
| 218 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 219 |
+
self.assertEqual(options.options['docstrings'], True)
|
| 220 |
+
|
| 221 |
+
def test_option_any_key(self):
|
| 222 |
+
options, args = self.parse_args(['-s', 'abracadabra'])
|
| 223 |
+
self.assertFalse(args)
|
| 224 |
+
self.assertTrue(self.are_default(options, ['options']))
|
| 225 |
+
self.assertEqual(options.options['abracadabra'], True)
|
| 226 |
+
|
| 227 |
+
def test_language_level_2(self):
|
| 228 |
+
options, args = self.parse_args(['-2'])
|
| 229 |
+
self.assertFalse(args)
|
| 230 |
+
self.assertTrue(self.are_default(options, ['language_level']))
|
| 231 |
+
self.assertEqual(options.language_level, 2)
|
| 232 |
+
|
| 233 |
+
def test_language_level_3(self):
|
| 234 |
+
options, args = self.parse_args(['-3'])
|
| 235 |
+
self.assertFalse(args)
|
| 236 |
+
self.assertTrue(self.are_default(options, ['language_level']))
|
| 237 |
+
self.assertEqual(options.language_level, 3)
|
| 238 |
+
|
| 239 |
+
def test_language_level_3str(self):
|
| 240 |
+
options, args = self.parse_args(['--3str'])
|
| 241 |
+
self.assertFalse(args)
|
| 242 |
+
self.assertTrue(self.are_default(options, ['language_level']))
|
| 243 |
+
self.assertEqual(options.language_level, '3str')
|
| 244 |
+
|
| 245 |
+
def test_annotate_short(self):
|
| 246 |
+
options, args = self.parse_args(['-a'])
|
| 247 |
+
self.assertFalse(args)
|
| 248 |
+
self.assertTrue(self.are_default(options, ['annotate']))
|
| 249 |
+
self.assertEqual(options.annotate, 'default')
|
| 250 |
+
|
| 251 |
+
def test_annotate_long(self):
|
| 252 |
+
options, args = self.parse_args(['--annotate'])
|
| 253 |
+
self.assertFalse(args)
|
| 254 |
+
self.assertTrue(self.are_default(options, ['annotate']))
|
| 255 |
+
self.assertEqual(options.annotate, 'default')
|
| 256 |
+
|
| 257 |
+
def test_annotate_fullc(self):
|
| 258 |
+
options, args = self.parse_args(['--annotate-fullc'])
|
| 259 |
+
self.assertFalse(args)
|
| 260 |
+
self.assertTrue(self.are_default(options, ['annotate']))
|
| 261 |
+
self.assertEqual(options.annotate, 'fullc')
|
| 262 |
+
|
| 263 |
+
def test_annotate_and_positional(self):
|
| 264 |
+
options, args = self.parse_args(['-a', 'foo.pyx'])
|
| 265 |
+
self.assertEqual(args, ['foo.pyx'])
|
| 266 |
+
self.assertTrue(self.are_default(options, ['annotate']))
|
| 267 |
+
self.assertEqual(options.annotate, 'default')
|
| 268 |
+
|
| 269 |
+
def test_annotate_and_optional(self):
|
| 270 |
+
options, args = self.parse_args(['-a', '--3str'])
|
| 271 |
+
self.assertFalse(args)
|
| 272 |
+
self.assertTrue(self.are_default(options, ['annotate', 'language_level']))
|
| 273 |
+
self.assertEqual(options.annotate, 'default')
|
| 274 |
+
self.assertEqual(options.language_level, '3str')
|
| 275 |
+
|
| 276 |
+
def test_exclude_short(self):
|
| 277 |
+
options, args = self.parse_args(['-x', '*.pyx'])
|
| 278 |
+
self.assertFalse(args)
|
| 279 |
+
self.assertTrue(self.are_default(options, ['excludes']))
|
| 280 |
+
self.assertTrue('*.pyx' in options.excludes)
|
| 281 |
+
|
| 282 |
+
def test_exclude_long(self):
|
| 283 |
+
options, args = self.parse_args(['--exclude', '*.pyx'])
|
| 284 |
+
self.assertFalse(args)
|
| 285 |
+
self.assertTrue(self.are_default(options, ['excludes']))
|
| 286 |
+
self.assertTrue('*.pyx' in options.excludes)
|
| 287 |
+
|
| 288 |
+
def test_exclude_multiple(self):
|
| 289 |
+
options, args = self.parse_args(['--exclude', '*.pyx', '--exclude', '*.py', ])
|
| 290 |
+
self.assertFalse(args)
|
| 291 |
+
self.assertTrue(self.are_default(options, ['excludes']))
|
| 292 |
+
self.assertEqual(options.excludes, ['*.pyx', '*.py'])
|
| 293 |
+
|
| 294 |
+
def test_build_short(self):
|
| 295 |
+
options, args = self.parse_args(['-b'])
|
| 296 |
+
self.assertFalse(args)
|
| 297 |
+
self.assertTrue(self.are_default(options, ['build']))
|
| 298 |
+
self.assertEqual(options.build, True)
|
| 299 |
+
|
| 300 |
+
def test_build_long(self):
|
| 301 |
+
options, args = self.parse_args(['--build'])
|
| 302 |
+
self.assertFalse(args)
|
| 303 |
+
self.assertTrue(self.are_default(options, ['build']))
|
| 304 |
+
self.assertEqual(options.build, True)
|
| 305 |
+
|
| 306 |
+
def test_inplace_short(self):
|
| 307 |
+
options, args = self.parse_args(['-i'])
|
| 308 |
+
self.assertFalse(args)
|
| 309 |
+
self.assertTrue(self.are_default(options, ['build_inplace']))
|
| 310 |
+
self.assertEqual(options.build_inplace, True)
|
| 311 |
+
|
| 312 |
+
def test_inplace_long(self):
|
| 313 |
+
options, args = self.parse_args(['--inplace'])
|
| 314 |
+
self.assertFalse(args)
|
| 315 |
+
self.assertTrue(self.are_default(options, ['build_inplace']))
|
| 316 |
+
self.assertEqual(options.build_inplace, True)
|
| 317 |
+
|
| 318 |
+
def test_parallel_short(self):
|
| 319 |
+
options, args = self.parse_args(['-j', '42'])
|
| 320 |
+
self.assertFalse(args)
|
| 321 |
+
self.assertTrue(self.are_default(options, ['parallel']))
|
| 322 |
+
self.assertEqual(options.parallel, 42)
|
| 323 |
+
|
| 324 |
+
def test_parallel_long(self):
|
| 325 |
+
options, args = self.parse_args(['--parallel', '42'])
|
| 326 |
+
self.assertFalse(args)
|
| 327 |
+
self.assertTrue(self.are_default(options, ['parallel']))
|
| 328 |
+
self.assertEqual(options.parallel, 42)
|
| 329 |
+
|
| 330 |
+
def test_force_short(self):
|
| 331 |
+
options, args = self.parse_args(['-f'])
|
| 332 |
+
self.assertFalse(args)
|
| 333 |
+
self.assertTrue(self.are_default(options, ['force']))
|
| 334 |
+
self.assertEqual(options.force, True)
|
| 335 |
+
|
| 336 |
+
def test_force_long(self):
|
| 337 |
+
options, args = self.parse_args(['--force'])
|
| 338 |
+
self.assertFalse(args)
|
| 339 |
+
self.assertTrue(self.are_default(options, ['force']))
|
| 340 |
+
self.assertEqual(options.force, True)
|
| 341 |
+
|
| 342 |
+
def test_quite_short(self):
|
| 343 |
+
options, args = self.parse_args(['-q'])
|
| 344 |
+
self.assertFalse(args)
|
| 345 |
+
self.assertTrue(self.are_default(options, ['quiet']))
|
| 346 |
+
self.assertEqual(options.quiet, True)
|
| 347 |
+
|
| 348 |
+
def test_quite_long(self):
|
| 349 |
+
options, args = self.parse_args(['--quiet'])
|
| 350 |
+
self.assertFalse(args)
|
| 351 |
+
self.assertTrue(self.are_default(options, ['quiet']))
|
| 352 |
+
self.assertEqual(options.quiet, True)
|
| 353 |
+
|
| 354 |
+
def test_lenient_long(self):
|
| 355 |
+
options, args = self.parse_args(['--lenient'])
|
| 356 |
+
self.assertTrue(self.are_default(options, ['lenient']))
|
| 357 |
+
self.assertFalse(args)
|
| 358 |
+
self.assertEqual(options.lenient, True)
|
| 359 |
+
|
| 360 |
+
def test_keep_going_short(self):
|
| 361 |
+
options, args = self.parse_args(['-k'])
|
| 362 |
+
self.assertFalse(args)
|
| 363 |
+
self.assertTrue(self.are_default(options, ['keep_going']))
|
| 364 |
+
self.assertEqual(options.keep_going, True)
|
| 365 |
+
|
| 366 |
+
def test_keep_going_long(self):
|
| 367 |
+
options, args = self.parse_args(['--keep-going'])
|
| 368 |
+
self.assertFalse(args)
|
| 369 |
+
self.assertTrue(self.are_default(options, ['keep_going']))
|
| 370 |
+
self.assertEqual(options.keep_going, True)
|
| 371 |
+
|
| 372 |
+
def test_no_docstrings_long(self):
|
| 373 |
+
options, args = self.parse_args(['--no-docstrings'])
|
| 374 |
+
self.assertFalse(args)
|
| 375 |
+
self.assertTrue(self.are_default(options, ['no_docstrings']))
|
| 376 |
+
self.assertEqual(options.no_docstrings, True)
|
| 377 |
+
|
| 378 |
+
def test_file_name(self):
|
| 379 |
+
options, args = self.parse_args(['file1.pyx', 'file2.pyx'])
|
| 380 |
+
self.assertEqual(len(args), 2)
|
| 381 |
+
self.assertEqual(args[0], 'file1.pyx')
|
| 382 |
+
self.assertEqual(args[1], 'file2.pyx')
|
| 383 |
+
self.assertTrue(self.are_default(options, []))
|
| 384 |
+
|
| 385 |
+
def test_option_first(self):
|
| 386 |
+
options, args = self.parse_args(['-i', 'file.pyx'])
|
| 387 |
+
self.assertEqual(args, ['file.pyx'])
|
| 388 |
+
self.assertEqual(options.build_inplace, True)
|
| 389 |
+
self.assertTrue(self.are_default(options, ['build_inplace']))
|
| 390 |
+
|
| 391 |
+
def test_file_inbetween(self):
|
| 392 |
+
options, args = self.parse_args(['-i', 'file.pyx', '-a'])
|
| 393 |
+
self.assertEqual(args, ['file.pyx'])
|
| 394 |
+
self.assertEqual(options.build_inplace, True)
|
| 395 |
+
self.assertEqual(options.annotate, 'default')
|
| 396 |
+
self.assertTrue(self.are_default(options, ['build_inplace', 'annotate']))
|
| 397 |
+
|
| 398 |
+
def test_option_trailing(self):
|
| 399 |
+
options, args = self.parse_args(['file.pyx', '-i'])
|
| 400 |
+
self.assertEqual(args, ['file.pyx'])
|
| 401 |
+
self.assertEqual(options.build_inplace, True)
|
| 402 |
+
self.assertTrue(self.are_default(options, ['build_inplace']))
|
| 403 |
+
|
| 404 |
+
def test_interspersed_positional(self):
|
| 405 |
+
options, sources = self.parse_args([
|
| 406 |
+
'file1.pyx', '-a',
|
| 407 |
+
'file2.pyx'
|
| 408 |
+
])
|
| 409 |
+
self.assertEqual(sources, ['file1.pyx', 'file2.pyx'])
|
| 410 |
+
self.assertEqual(options.annotate, 'default')
|
| 411 |
+
self.assertTrue(self.are_default(options, ['annotate']))
|
| 412 |
+
|
| 413 |
+
def test_interspersed_positional2(self):
|
| 414 |
+
options, sources = self.parse_args([
|
| 415 |
+
'file1.pyx', '-a',
|
| 416 |
+
'file2.pyx', '-a', 'file3.pyx'
|
| 417 |
+
])
|
| 418 |
+
self.assertEqual(sources, ['file1.pyx', 'file2.pyx', 'file3.pyx'])
|
| 419 |
+
self.assertEqual(options.annotate, 'default')
|
| 420 |
+
self.assertTrue(self.are_default(options, ['annotate']))
|
| 421 |
+
|
| 422 |
+
def test_interspersed_positional3(self):
|
| 423 |
+
options, sources = self.parse_args([
|
| 424 |
+
'-f', 'f1', 'f2', '-a',
|
| 425 |
+
'f3', 'f4', '-a', 'f5'
|
| 426 |
+
])
|
| 427 |
+
self.assertEqual(sources, ['f1', 'f2', 'f3', 'f4', 'f5'])
|
| 428 |
+
self.assertEqual(options.annotate, 'default')
|
| 429 |
+
self.assertEqual(options.force, True)
|
| 430 |
+
self.assertTrue(self.are_default(options, ['annotate', 'force']))
|
| 431 |
+
|
| 432 |
+
def test_wrong_option(self):
|
| 433 |
+
old_stderr = sys.stderr
|
| 434 |
+
stderr = sys.stderr = StringIO()
|
| 435 |
+
try:
|
| 436 |
+
self.assertRaises(SystemExit, self.parse_args,
|
| 437 |
+
['--unknown-option']
|
| 438 |
+
)
|
| 439 |
+
finally:
|
| 440 |
+
sys.stderr = old_stderr
|
| 441 |
+
self.assertTrue(stderr.getvalue())
|
| 442 |
+
|
| 443 |
+
|
| 444 |
+
class TestParseArgs(TestCase):
|
| 445 |
+
def setUp(self):
|
| 446 |
+
self._options_backup = backup_Options()
|
| 447 |
+
|
| 448 |
+
def tearDown(self):
|
| 449 |
+
restore_Options(self._options_backup)
|
| 450 |
+
|
| 451 |
+
def check_default_global_options(self, white_list=[]):
|
| 452 |
+
self.assertEqual(check_global_options(self._options_backup, white_list), "")
|
| 453 |
+
|
| 454 |
+
def test_build_set_for_inplace(self):
|
| 455 |
+
options, args = parse_args(['foo.pyx', '-i'])
|
| 456 |
+
self.assertEqual(options.build, True)
|
| 457 |
+
self.check_default_global_options()
|
| 458 |
+
|
| 459 |
+
def test_lenient(self):
|
| 460 |
+
options, sources = parse_args(['foo.pyx', '--lenient'])
|
| 461 |
+
self.assertEqual(sources, ['foo.pyx'])
|
| 462 |
+
self.assertEqual(Options.error_on_unknown_names, False)
|
| 463 |
+
self.assertEqual(Options.error_on_uninitialized, False)
|
| 464 |
+
self.check_default_global_options(['error_on_unknown_names', 'error_on_uninitialized'])
|
| 465 |
+
|
| 466 |
+
def test_annotate(self):
|
| 467 |
+
options, sources = parse_args(['foo.pyx', '--annotate'])
|
| 468 |
+
self.assertEqual(sources, ['foo.pyx'])
|
| 469 |
+
self.assertEqual(Options.annotate, 'default')
|
| 470 |
+
self.check_default_global_options(['annotate'])
|
| 471 |
+
|
| 472 |
+
def test_annotate_fullc(self):
|
| 473 |
+
options, sources = parse_args(['foo.pyx', '--annotate-fullc'])
|
| 474 |
+
self.assertEqual(sources, ['foo.pyx'])
|
| 475 |
+
self.assertEqual(Options.annotate, 'fullc')
|
| 476 |
+
self.check_default_global_options(['annotate'])
|
| 477 |
+
|
| 478 |
+
def test_no_docstrings(self):
|
| 479 |
+
options, sources = parse_args(['foo.pyx', '--no-docstrings'])
|
| 480 |
+
self.assertEqual(sources, ['foo.pyx'])
|
| 481 |
+
self.assertEqual(Options.docstrings, False)
|
| 482 |
+
self.check_default_global_options(['docstrings'])
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestDependencies.py
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import os.path
|
| 3 |
+
import sys
|
| 4 |
+
import tempfile
|
| 5 |
+
import unittest
|
| 6 |
+
from io import open
|
| 7 |
+
from os.path import join as pjoin
|
| 8 |
+
|
| 9 |
+
from ..Dependencies import extended_iglob
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
@contextlib.contextmanager
|
| 13 |
+
def writable_file(dir_path, filename):
|
| 14 |
+
with open(pjoin(dir_path, filename), "w", encoding="utf8") as f:
|
| 15 |
+
yield f
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class TestGlobbing(unittest.TestCase):
|
| 19 |
+
@classmethod
|
| 20 |
+
def setUpClass(cls):
|
| 21 |
+
cls._orig_dir = os.getcwd()
|
| 22 |
+
if sys.version_info[0] < 3:
|
| 23 |
+
temp_path = cls._tmpdir = tempfile.mkdtemp()
|
| 24 |
+
else:
|
| 25 |
+
cls._tmpdir = tempfile.TemporaryDirectory()
|
| 26 |
+
temp_path = cls._tmpdir.name
|
| 27 |
+
os.chdir(temp_path)
|
| 28 |
+
|
| 29 |
+
for dir1 in "abcd":
|
| 30 |
+
for dir1x in [dir1, dir1 + 'x']:
|
| 31 |
+
for dir2 in "xyz":
|
| 32 |
+
dir_path = pjoin(dir1x, dir2)
|
| 33 |
+
os.makedirs(dir_path)
|
| 34 |
+
with writable_file(dir_path, "file2_pyx.pyx") as f:
|
| 35 |
+
f.write(u'""" PYX """')
|
| 36 |
+
with writable_file(dir_path, "file2_py.py") as f:
|
| 37 |
+
f.write(u'""" PY """')
|
| 38 |
+
|
| 39 |
+
with writable_file(dir1x, "file1_pyx.pyx") as f:
|
| 40 |
+
f.write(u'""" PYX """')
|
| 41 |
+
with writable_file(dir1x, "file1_py.py") as f:
|
| 42 |
+
f.write(u'""" PY """')
|
| 43 |
+
|
| 44 |
+
@classmethod
|
| 45 |
+
def tearDownClass(cls):
|
| 46 |
+
os.chdir(cls._orig_dir)
|
| 47 |
+
if sys.version_info[0] < 3:
|
| 48 |
+
import shutil
|
| 49 |
+
shutil.rmtree(cls._tmpdir)
|
| 50 |
+
else:
|
| 51 |
+
cls._tmpdir.cleanup()
|
| 52 |
+
|
| 53 |
+
def files_equal(self, pattern, expected_files):
|
| 54 |
+
expected_files = sorted(expected_files)
|
| 55 |
+
# It's the users's choice whether '/' will appear on Windows.
|
| 56 |
+
matched_files = sorted(path.replace('/', os.sep) for path in extended_iglob(pattern))
|
| 57 |
+
self.assertListEqual(matched_files, expected_files) # /
|
| 58 |
+
|
| 59 |
+
# Special case for Windows: also support '\' in patterns.
|
| 60 |
+
if os.sep == '\\' and '/' in pattern:
|
| 61 |
+
matched_files = sorted(extended_iglob(pattern.replace('/', '\\')))
|
| 62 |
+
self.assertListEqual(matched_files, expected_files) # \
|
| 63 |
+
|
| 64 |
+
def test_extended_iglob_simple(self):
|
| 65 |
+
ax_files = [pjoin("a", "x", "file2_pyx.pyx"), pjoin("a", "x", "file2_py.py")]
|
| 66 |
+
self.files_equal("a/x/*", ax_files)
|
| 67 |
+
self.files_equal("a/x/*.c12", [])
|
| 68 |
+
self.files_equal("a/x/*.{py,pyx,c12}", ax_files)
|
| 69 |
+
self.files_equal("a/x/*.{py,pyx}", ax_files)
|
| 70 |
+
self.files_equal("a/x/*.{pyx}", ax_files[:1])
|
| 71 |
+
self.files_equal("a/x/*.pyx", ax_files[:1])
|
| 72 |
+
self.files_equal("a/x/*.{py}", ax_files[1:])
|
| 73 |
+
self.files_equal("a/x/*.py", ax_files[1:])
|
| 74 |
+
|
| 75 |
+
def test_extended_iglob_simple_star(self):
|
| 76 |
+
for basedir in "ad":
|
| 77 |
+
files = [
|
| 78 |
+
pjoin(basedir, dirname, filename)
|
| 79 |
+
for dirname in "xyz"
|
| 80 |
+
for filename in ["file2_pyx.pyx", "file2_py.py"]
|
| 81 |
+
]
|
| 82 |
+
self.files_equal(basedir + "/*/*", files)
|
| 83 |
+
self.files_equal(basedir + "/*/*.c12", [])
|
| 84 |
+
self.files_equal(basedir + "/*/*.{py,pyx,c12}", files)
|
| 85 |
+
self.files_equal(basedir + "/*/*.{py,pyx}", files)
|
| 86 |
+
self.files_equal(basedir + "/*/*.{pyx}", files[::2])
|
| 87 |
+
self.files_equal(basedir + "/*/*.pyx", files[::2])
|
| 88 |
+
self.files_equal(basedir + "/*/*.{py}", files[1::2])
|
| 89 |
+
self.files_equal(basedir + "/*/*.py", files[1::2])
|
| 90 |
+
|
| 91 |
+
for subdir in "xy*":
|
| 92 |
+
files = [
|
| 93 |
+
pjoin(basedir, dirname, filename)
|
| 94 |
+
for dirname in "xyz"
|
| 95 |
+
if subdir in ('*', dirname)
|
| 96 |
+
for filename in ["file2_pyx.pyx", "file2_py.py"]
|
| 97 |
+
]
|
| 98 |
+
path = basedir + '/' + subdir + '/'
|
| 99 |
+
self.files_equal(path + "*", files)
|
| 100 |
+
self.files_equal(path + "*.{py,pyx}", files)
|
| 101 |
+
self.files_equal(path + "*.{pyx}", files[::2])
|
| 102 |
+
self.files_equal(path + "*.pyx", files[::2])
|
| 103 |
+
self.files_equal(path + "*.{py}", files[1::2])
|
| 104 |
+
self.files_equal(path + "*.py", files[1::2])
|
| 105 |
+
|
| 106 |
+
def test_extended_iglob_double_star(self):
|
| 107 |
+
basedirs = os.listdir(".")
|
| 108 |
+
files = [
|
| 109 |
+
pjoin(basedir, dirname, filename)
|
| 110 |
+
for basedir in basedirs
|
| 111 |
+
for dirname in "xyz"
|
| 112 |
+
for filename in ["file2_pyx.pyx", "file2_py.py"]
|
| 113 |
+
]
|
| 114 |
+
all_files = [
|
| 115 |
+
pjoin(basedir, filename)
|
| 116 |
+
for basedir in basedirs
|
| 117 |
+
for filename in ["file1_pyx.pyx", "file1_py.py"]
|
| 118 |
+
] + files
|
| 119 |
+
self.files_equal("*/*/*", files)
|
| 120 |
+
self.files_equal("*/*/**/*", files)
|
| 121 |
+
self.files_equal("*/**/*.*", all_files)
|
| 122 |
+
self.files_equal("**/*.*", all_files)
|
| 123 |
+
self.files_equal("*/**/*.c12", [])
|
| 124 |
+
self.files_equal("**/*.c12", [])
|
| 125 |
+
self.files_equal("*/*/*.{py,pyx,c12}", files)
|
| 126 |
+
self.files_equal("*/*/**/*.{py,pyx,c12}", files)
|
| 127 |
+
self.files_equal("*/**/*/*.{py,pyx,c12}", files)
|
| 128 |
+
self.files_equal("**/*/*/*.{py,pyx,c12}", files)
|
| 129 |
+
self.files_equal("**/*.{py,pyx,c12}", all_files)
|
| 130 |
+
self.files_equal("*/*/*.{py,pyx}", files)
|
| 131 |
+
self.files_equal("**/*/*/*.{py,pyx}", files)
|
| 132 |
+
self.files_equal("*/**/*/*.{py,pyx}", files)
|
| 133 |
+
self.files_equal("**/*.{py,pyx}", all_files)
|
| 134 |
+
self.files_equal("*/*/*.{pyx}", files[::2])
|
| 135 |
+
self.files_equal("**/*.{pyx}", all_files[::2])
|
| 136 |
+
self.files_equal("*/**/*/*.pyx", files[::2])
|
| 137 |
+
self.files_equal("*/*/*.pyx", files[::2])
|
| 138 |
+
self.files_equal("**/*.pyx", all_files[::2])
|
| 139 |
+
self.files_equal("*/*/*.{py}", files[1::2])
|
| 140 |
+
self.files_equal("**/*.{py}", all_files[1::2])
|
| 141 |
+
self.files_equal("*/*/*.py", files[1::2])
|
| 142 |
+
self.files_equal("**/*.py", all_files[1::2])
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestInline.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import tempfile
|
| 3 |
+
import unittest
|
| 4 |
+
from Cython.Shadow import inline
|
| 5 |
+
from Cython.Build.Inline import safe_type
|
| 6 |
+
from Cython.TestUtils import CythonTest
|
| 7 |
+
|
| 8 |
+
try:
|
| 9 |
+
import numpy
|
| 10 |
+
has_numpy = True
|
| 11 |
+
except:
|
| 12 |
+
has_numpy = False
|
| 13 |
+
|
| 14 |
+
test_kwds = dict(force=True, quiet=True)
|
| 15 |
+
|
| 16 |
+
global_value = 100
|
| 17 |
+
|
| 18 |
+
class TestInline(CythonTest):
|
| 19 |
+
def setUp(self):
|
| 20 |
+
CythonTest.setUp(self)
|
| 21 |
+
self._call_kwds = dict(test_kwds)
|
| 22 |
+
if os.path.isdir('TEST_TMP'):
|
| 23 |
+
lib_dir = os.path.join('TEST_TMP','inline')
|
| 24 |
+
else:
|
| 25 |
+
lib_dir = tempfile.mkdtemp(prefix='cython_inline_')
|
| 26 |
+
self._call_kwds['lib_dir'] = lib_dir
|
| 27 |
+
|
| 28 |
+
def test_simple(self):
|
| 29 |
+
self.assertEqual(inline("return 1+2", **self._call_kwds), 3)
|
| 30 |
+
|
| 31 |
+
def test_types(self):
|
| 32 |
+
self.assertEqual(inline("""
|
| 33 |
+
cimport cython
|
| 34 |
+
return cython.typeof(a), cython.typeof(b)
|
| 35 |
+
""", a=1.0, b=[], **self._call_kwds), ('double', 'list object'))
|
| 36 |
+
|
| 37 |
+
def test_locals(self):
|
| 38 |
+
a = 1
|
| 39 |
+
b = 2
|
| 40 |
+
self.assertEqual(inline("return a+b", **self._call_kwds), 3)
|
| 41 |
+
|
| 42 |
+
def test_globals(self):
|
| 43 |
+
self.assertEqual(inline("return global_value + 1", **self._call_kwds), global_value + 1)
|
| 44 |
+
|
| 45 |
+
def test_no_return(self):
|
| 46 |
+
self.assertEqual(inline("""
|
| 47 |
+
a = 1
|
| 48 |
+
cdef double b = 2
|
| 49 |
+
cdef c = []
|
| 50 |
+
""", **self._call_kwds), dict(a=1, b=2.0, c=[]))
|
| 51 |
+
|
| 52 |
+
def test_def_node(self):
|
| 53 |
+
foo = inline("def foo(x): return x * x", **self._call_kwds)['foo']
|
| 54 |
+
self.assertEqual(foo(7), 49)
|
| 55 |
+
|
| 56 |
+
def test_class_ref(self):
|
| 57 |
+
class Type(object):
|
| 58 |
+
pass
|
| 59 |
+
tp = inline("Type")['Type']
|
| 60 |
+
self.assertEqual(tp, Type)
|
| 61 |
+
|
| 62 |
+
def test_pure(self):
|
| 63 |
+
import cython as cy
|
| 64 |
+
b = inline("""
|
| 65 |
+
b = cy.declare(float, a)
|
| 66 |
+
c = cy.declare(cy.pointer(cy.float), &b)
|
| 67 |
+
return b
|
| 68 |
+
""", a=3, **self._call_kwds)
|
| 69 |
+
self.assertEqual(type(b), float)
|
| 70 |
+
|
| 71 |
+
def test_compiler_directives(self):
|
| 72 |
+
self.assertEqual(
|
| 73 |
+
inline('return sum(x)',
|
| 74 |
+
x=[1, 2, 3],
|
| 75 |
+
cython_compiler_directives={'boundscheck': False}),
|
| 76 |
+
6
|
| 77 |
+
)
|
| 78 |
+
|
| 79 |
+
def test_lang_version(self):
|
| 80 |
+
# GH-3419. Caching for inline code didn't always respect compiler directives.
|
| 81 |
+
inline_divcode = "def f(int a, int b): return a/b"
|
| 82 |
+
self.assertEqual(
|
| 83 |
+
inline(inline_divcode, language_level=2)['f'](5,2),
|
| 84 |
+
2
|
| 85 |
+
)
|
| 86 |
+
self.assertEqual(
|
| 87 |
+
inline(inline_divcode, language_level=3)['f'](5,2),
|
| 88 |
+
2.5
|
| 89 |
+
)
|
| 90 |
+
self.assertEqual(
|
| 91 |
+
inline(inline_divcode, language_level=2)['f'](5,2),
|
| 92 |
+
2
|
| 93 |
+
)
|
| 94 |
+
|
| 95 |
+
def test_repeated_use(self):
|
| 96 |
+
inline_mulcode = "def f(int a, int b): return a * b"
|
| 97 |
+
self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)
|
| 98 |
+
self.assertEqual(inline(inline_mulcode)['f'](5, 3), 15)
|
| 99 |
+
self.assertEqual(inline(inline_mulcode)['f'](6, 2), 12)
|
| 100 |
+
self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)
|
| 101 |
+
|
| 102 |
+
f = inline(inline_mulcode)['f']
|
| 103 |
+
self.assertEqual(f(5, 2), 10)
|
| 104 |
+
self.assertEqual(f(5, 3), 15)
|
| 105 |
+
|
| 106 |
+
@unittest.skipIf(not has_numpy, "NumPy is not available")
|
| 107 |
+
def test_numpy(self):
|
| 108 |
+
import numpy
|
| 109 |
+
a = numpy.ndarray((10, 20))
|
| 110 |
+
a[0,0] = 10
|
| 111 |
+
self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
|
| 112 |
+
self.assertEqual(inline("return a[0,0]", a=a, **self._call_kwds), 10.0)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestIpythonMagic.py
ADDED
|
@@ -0,0 +1,295 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# tag: ipython
|
| 3 |
+
|
| 4 |
+
"""Tests for the Cython magics extension."""
|
| 5 |
+
|
| 6 |
+
from __future__ import absolute_import
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
import io
|
| 10 |
+
import sys
|
| 11 |
+
from contextlib import contextmanager
|
| 12 |
+
from unittest import skipIf
|
| 13 |
+
|
| 14 |
+
from Cython.Build import IpythonMagic
|
| 15 |
+
from Cython.TestUtils import CythonTest
|
| 16 |
+
from Cython.Compiler.Annotate import AnnotationCCodeWriter
|
| 17 |
+
|
| 18 |
+
try:
|
| 19 |
+
import IPython.testing.globalipapp
|
| 20 |
+
except ImportError:
|
| 21 |
+
# Disable tests and fake helpers for initialisation below.
|
| 22 |
+
def skip_if_not_installed(_):
|
| 23 |
+
return None
|
| 24 |
+
else:
|
| 25 |
+
def skip_if_not_installed(c):
|
| 26 |
+
return c
|
| 27 |
+
|
| 28 |
+
# not using IPython's decorators here because they depend on "nose"
|
| 29 |
+
skip_win32 = skipIf(sys.platform == 'win32', "Skip on Windows")
|
| 30 |
+
skip_py27 = skipIf(sys.version_info[:2] == (2,7), "Disabled in Py2.7")
|
| 31 |
+
|
| 32 |
+
try:
|
| 33 |
+
# disable IPython history thread before it gets started to avoid having to clean it up
|
| 34 |
+
from IPython.core.history import HistoryManager
|
| 35 |
+
HistoryManager.enabled = False
|
| 36 |
+
except ImportError:
|
| 37 |
+
pass
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@contextmanager
|
| 41 |
+
def capture_output():
|
| 42 |
+
backup = sys.stdout, sys.stderr
|
| 43 |
+
try:
|
| 44 |
+
replacement = [
|
| 45 |
+
io.TextIOWrapper(io.BytesIO(), encoding=sys.stdout.encoding),
|
| 46 |
+
io.TextIOWrapper(io.BytesIO(), encoding=sys.stderr.encoding),
|
| 47 |
+
]
|
| 48 |
+
sys.stdout, sys.stderr = replacement
|
| 49 |
+
output = []
|
| 50 |
+
yield output
|
| 51 |
+
finally:
|
| 52 |
+
sys.stdout, sys.stderr = backup
|
| 53 |
+
for wrapper in replacement:
|
| 54 |
+
wrapper.seek(0) # rewind
|
| 55 |
+
output.append(wrapper.read())
|
| 56 |
+
wrapper.close()
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
code = u"""\
|
| 60 |
+
def f(x):
|
| 61 |
+
return 2*x
|
| 62 |
+
"""
|
| 63 |
+
|
| 64 |
+
cython3_code = u"""\
|
| 65 |
+
def f(int x):
|
| 66 |
+
return 2 / x
|
| 67 |
+
|
| 68 |
+
def call(x):
|
| 69 |
+
return f(*(x,))
|
| 70 |
+
"""
|
| 71 |
+
|
| 72 |
+
pgo_cython3_code = cython3_code + u"""\
|
| 73 |
+
def main():
|
| 74 |
+
for _ in range(100): call(5)
|
| 75 |
+
main()
|
| 76 |
+
"""
|
| 77 |
+
|
| 78 |
+
compile_error_code = u'''\
|
| 79 |
+
cdef extern from *:
|
| 80 |
+
"""
|
| 81 |
+
xxx a=1;
|
| 82 |
+
"""
|
| 83 |
+
int a;
|
| 84 |
+
def doit():
|
| 85 |
+
return a
|
| 86 |
+
'''
|
| 87 |
+
|
| 88 |
+
compile_warning_code = u'''\
|
| 89 |
+
cdef extern from *:
|
| 90 |
+
"""
|
| 91 |
+
#pragma message ( "CWarning" )
|
| 92 |
+
int a = 42;
|
| 93 |
+
"""
|
| 94 |
+
int a;
|
| 95 |
+
def doit():
|
| 96 |
+
return a
|
| 97 |
+
'''
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
@skip_if_not_installed
|
| 101 |
+
class TestIPythonMagic(CythonTest):
|
| 102 |
+
|
| 103 |
+
@classmethod
|
| 104 |
+
def setUpClass(cls):
|
| 105 |
+
CythonTest.setUpClass()
|
| 106 |
+
cls._ip = IPython.testing.globalipapp.get_ipython()
|
| 107 |
+
|
| 108 |
+
def setUp(self):
|
| 109 |
+
CythonTest.setUp(self)
|
| 110 |
+
self._ip.extension_manager.load_extension('cython')
|
| 111 |
+
|
| 112 |
+
def test_cython_inline(self):
|
| 113 |
+
ip = self._ip
|
| 114 |
+
ip.ex('a=10; b=20')
|
| 115 |
+
result = ip.run_cell_magic('cython_inline', '', 'return a+b')
|
| 116 |
+
self.assertEqual(result, 30)
|
| 117 |
+
|
| 118 |
+
@skip_win32
|
| 119 |
+
def test_cython_pyximport(self):
|
| 120 |
+
ip = self._ip
|
| 121 |
+
module_name = '_test_cython_pyximport'
|
| 122 |
+
ip.run_cell_magic('cython_pyximport', module_name, code)
|
| 123 |
+
ip.ex('g = f(10)')
|
| 124 |
+
self.assertEqual(ip.user_ns['g'], 20.0)
|
| 125 |
+
ip.run_cell_magic('cython_pyximport', module_name, code)
|
| 126 |
+
ip.ex('h = f(-10)')
|
| 127 |
+
self.assertEqual(ip.user_ns['h'], -20.0)
|
| 128 |
+
try:
|
| 129 |
+
os.remove(module_name + '.pyx')
|
| 130 |
+
except OSError:
|
| 131 |
+
pass
|
| 132 |
+
|
| 133 |
+
def test_cython(self):
|
| 134 |
+
ip = self._ip
|
| 135 |
+
ip.run_cell_magic('cython', '', code)
|
| 136 |
+
ip.ex('g = f(10)')
|
| 137 |
+
self.assertEqual(ip.user_ns['g'], 20.0)
|
| 138 |
+
|
| 139 |
+
def test_cython_name(self):
|
| 140 |
+
# The Cython module named 'mymodule' defines the function f.
|
| 141 |
+
ip = self._ip
|
| 142 |
+
ip.run_cell_magic('cython', '--name=mymodule', code)
|
| 143 |
+
# This module can now be imported in the interactive namespace.
|
| 144 |
+
ip.ex('import mymodule; g = mymodule.f(10)')
|
| 145 |
+
self.assertEqual(ip.user_ns['g'], 20.0)
|
| 146 |
+
|
| 147 |
+
def test_cython_language_level(self):
|
| 148 |
+
# The Cython cell defines the functions f() and call().
|
| 149 |
+
ip = self._ip
|
| 150 |
+
ip.run_cell_magic('cython', '', cython3_code)
|
| 151 |
+
ip.ex('g = f(10); h = call(10)')
|
| 152 |
+
if sys.version_info[0] < 3:
|
| 153 |
+
self.assertEqual(ip.user_ns['g'], 2 // 10)
|
| 154 |
+
self.assertEqual(ip.user_ns['h'], 2 // 10)
|
| 155 |
+
else:
|
| 156 |
+
self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
|
| 157 |
+
self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)
|
| 158 |
+
|
| 159 |
+
def test_cython3(self):
|
| 160 |
+
# The Cython cell defines the functions f() and call().
|
| 161 |
+
ip = self._ip
|
| 162 |
+
ip.run_cell_magic('cython', '-3', cython3_code)
|
| 163 |
+
ip.ex('g = f(10); h = call(10)')
|
| 164 |
+
self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
|
| 165 |
+
self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)
|
| 166 |
+
|
| 167 |
+
def test_cython2(self):
|
| 168 |
+
# The Cython cell defines the functions f() and call().
|
| 169 |
+
ip = self._ip
|
| 170 |
+
ip.run_cell_magic('cython', '-2', cython3_code)
|
| 171 |
+
ip.ex('g = f(10); h = call(10)')
|
| 172 |
+
self.assertEqual(ip.user_ns['g'], 2 // 10)
|
| 173 |
+
self.assertEqual(ip.user_ns['h'], 2 // 10)
|
| 174 |
+
|
| 175 |
+
def test_cython_compile_error_shown(self):
|
| 176 |
+
ip = self._ip
|
| 177 |
+
with capture_output() as out:
|
| 178 |
+
ip.run_cell_magic('cython', '-3', compile_error_code)
|
| 179 |
+
captured_out, captured_err = out
|
| 180 |
+
|
| 181 |
+
# it could be that c-level output is captured by distutil-extension
|
| 182 |
+
# (and not by us) and is printed to stdout:
|
| 183 |
+
captured_all = captured_out + "\n" + captured_err
|
| 184 |
+
self.assertTrue("error" in captured_all, msg="error in " + captured_all)
|
| 185 |
+
|
| 186 |
+
def test_cython_link_error_shown(self):
|
| 187 |
+
ip = self._ip
|
| 188 |
+
with capture_output() as out:
|
| 189 |
+
ip.run_cell_magic('cython', '-3 -l=xxxxxxxx', code)
|
| 190 |
+
captured_out, captured_err = out
|
| 191 |
+
|
| 192 |
+
# it could be that c-level output is captured by distutil-extension
|
| 193 |
+
# (and not by us) and is printed to stdout:
|
| 194 |
+
captured_all = captured_out + "\n!" + captured_err
|
| 195 |
+
self.assertTrue("error" in captured_all, msg="error in " + captured_all)
|
| 196 |
+
|
| 197 |
+
def test_cython_warning_shown(self):
|
| 198 |
+
ip = self._ip
|
| 199 |
+
with capture_output() as out:
|
| 200 |
+
# force rebuild, otherwise no warning as after the first success
|
| 201 |
+
# no build step is performed
|
| 202 |
+
ip.run_cell_magic('cython', '-3 -f', compile_warning_code)
|
| 203 |
+
captured_out, captured_err = out
|
| 204 |
+
|
| 205 |
+
# check that warning was printed to stdout even if build hasn't failed
|
| 206 |
+
self.assertTrue("CWarning" in captured_out)
|
| 207 |
+
|
| 208 |
+
@skip_py27 # Not strictly broken in Py2.7 but currently fails in CI due to C compiler issues.
|
| 209 |
+
@skip_win32
|
| 210 |
+
def test_cython3_pgo(self):
|
| 211 |
+
# The Cython cell defines the functions f() and call().
|
| 212 |
+
ip = self._ip
|
| 213 |
+
ip.run_cell_magic('cython', '-3 --pgo', pgo_cython3_code)
|
| 214 |
+
ip.ex('g = f(10); h = call(10); main()')
|
| 215 |
+
self.assertEqual(ip.user_ns['g'], 2.0 / 10.0)
|
| 216 |
+
self.assertEqual(ip.user_ns['h'], 2.0 / 10.0)
|
| 217 |
+
|
| 218 |
+
@skip_win32
|
| 219 |
+
def test_extlibs(self):
|
| 220 |
+
ip = self._ip
|
| 221 |
+
code = u"""
|
| 222 |
+
from libc.math cimport sin
|
| 223 |
+
x = sin(0.0)
|
| 224 |
+
"""
|
| 225 |
+
ip.user_ns['x'] = 1
|
| 226 |
+
ip.run_cell_magic('cython', '-l m', code)
|
| 227 |
+
self.assertEqual(ip.user_ns['x'], 0)
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
def test_cython_verbose(self):
|
| 231 |
+
ip = self._ip
|
| 232 |
+
ip.run_cell_magic('cython', '--verbose', code)
|
| 233 |
+
ip.ex('g = f(10)')
|
| 234 |
+
self.assertEqual(ip.user_ns['g'], 20.0)
|
| 235 |
+
|
| 236 |
+
def test_cython_verbose_thresholds(self):
|
| 237 |
+
@contextmanager
|
| 238 |
+
def mock_distutils():
|
| 239 |
+
class MockLog:
|
| 240 |
+
DEBUG = 1
|
| 241 |
+
INFO = 2
|
| 242 |
+
thresholds = [INFO]
|
| 243 |
+
|
| 244 |
+
def set_threshold(self, val):
|
| 245 |
+
self.thresholds.append(val)
|
| 246 |
+
return self.thresholds[-2]
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
new_log = MockLog()
|
| 250 |
+
old_log = IpythonMagic.distutils.log
|
| 251 |
+
try:
|
| 252 |
+
IpythonMagic.distutils.log = new_log
|
| 253 |
+
yield new_log
|
| 254 |
+
finally:
|
| 255 |
+
IpythonMagic.distutils.log = old_log
|
| 256 |
+
|
| 257 |
+
ip = self._ip
|
| 258 |
+
with mock_distutils() as verbose_log:
|
| 259 |
+
ip.run_cell_magic('cython', '--verbose', code)
|
| 260 |
+
ip.ex('g = f(10)')
|
| 261 |
+
self.assertEqual(ip.user_ns['g'], 20.0)
|
| 262 |
+
self.assertEqual([verbose_log.INFO, verbose_log.DEBUG, verbose_log.INFO],
|
| 263 |
+
verbose_log.thresholds)
|
| 264 |
+
|
| 265 |
+
with mock_distutils() as normal_log:
|
| 266 |
+
ip.run_cell_magic('cython', '', code)
|
| 267 |
+
ip.ex('g = f(10)')
|
| 268 |
+
self.assertEqual(ip.user_ns['g'], 20.0)
|
| 269 |
+
self.assertEqual([normal_log.INFO], normal_log.thresholds)
|
| 270 |
+
|
| 271 |
+
def test_cython_no_annotate(self):
|
| 272 |
+
ip = self._ip
|
| 273 |
+
html = ip.run_cell_magic('cython', '', code)
|
| 274 |
+
self.assertTrue(html is None)
|
| 275 |
+
|
| 276 |
+
def test_cython_annotate(self):
|
| 277 |
+
ip = self._ip
|
| 278 |
+
html = ip.run_cell_magic('cython', '--annotate', code)
|
| 279 |
+
# somewhat brittle way to differentiate between annotated htmls
|
| 280 |
+
# with/without complete source code:
|
| 281 |
+
self.assertTrue(AnnotationCCodeWriter.COMPLETE_CODE_TITLE not in html.data)
|
| 282 |
+
|
| 283 |
+
def test_cython_annotate_default(self):
|
| 284 |
+
ip = self._ip
|
| 285 |
+
html = ip.run_cell_magic('cython', '-a', code)
|
| 286 |
+
# somewhat brittle way to differentiate between annotated htmls
|
| 287 |
+
# with/without complete source code:
|
| 288 |
+
self.assertTrue(AnnotationCCodeWriter.COMPLETE_CODE_TITLE not in html.data)
|
| 289 |
+
|
| 290 |
+
def test_cython_annotate_complete_c_code(self):
|
| 291 |
+
ip = self._ip
|
| 292 |
+
html = ip.run_cell_magic('cython', '--annotate-fullc', code)
|
| 293 |
+
# somewhat brittle way to differentiate between annotated htmls
|
| 294 |
+
# with/without complete source code:
|
| 295 |
+
self.assertTrue(AnnotationCCodeWriter.COMPLETE_CODE_TITLE in html.data)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestRecythonize.py
ADDED
|
@@ -0,0 +1,212 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import shutil
|
| 2 |
+
import os
|
| 3 |
+
import tempfile
|
| 4 |
+
import time
|
| 5 |
+
|
| 6 |
+
import Cython.Build.Dependencies
|
| 7 |
+
import Cython.Utils
|
| 8 |
+
from Cython.TestUtils import CythonTest
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def fresh_cythonize(*args, **kwargs):
|
| 12 |
+
Cython.Utils.clear_function_caches()
|
| 13 |
+
Cython.Build.Dependencies._dep_tree = None # discard method caches
|
| 14 |
+
Cython.Build.Dependencies.cythonize(*args, **kwargs)
|
| 15 |
+
|
| 16 |
+
class TestRecythonize(CythonTest):
|
| 17 |
+
|
| 18 |
+
def setUp(self):
|
| 19 |
+
CythonTest.setUp(self)
|
| 20 |
+
self.temp_dir = (
|
| 21 |
+
tempfile.mkdtemp(
|
| 22 |
+
prefix='recythonize-test',
|
| 23 |
+
dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None
|
| 24 |
+
)
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
def tearDown(self):
|
| 28 |
+
CythonTest.tearDown(self)
|
| 29 |
+
shutil.rmtree(self.temp_dir)
|
| 30 |
+
|
| 31 |
+
def test_recythonize_pyx_on_pxd_change(self):
|
| 32 |
+
|
| 33 |
+
src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 34 |
+
|
| 35 |
+
a_pxd = os.path.join(src_dir, 'a.pxd')
|
| 36 |
+
a_pyx = os.path.join(src_dir, 'a.pyx')
|
| 37 |
+
a_c = os.path.join(src_dir, 'a.c')
|
| 38 |
+
dep_tree = Cython.Build.Dependencies.create_dependency_tree()
|
| 39 |
+
|
| 40 |
+
with open(a_pxd, 'w') as f:
|
| 41 |
+
f.write('cdef int value\n')
|
| 42 |
+
|
| 43 |
+
with open(a_pyx, 'w') as f:
|
| 44 |
+
f.write('value = 1\n')
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# The dependencies for "a.pyx" are "a.pxd" and "a.pyx".
|
| 48 |
+
self.assertEqual({a_pxd, a_pyx}, dep_tree.all_dependencies(a_pyx))
|
| 49 |
+
|
| 50 |
+
# Cythonize to create a.c
|
| 51 |
+
fresh_cythonize(a_pyx)
|
| 52 |
+
|
| 53 |
+
# Sleep to address coarse time-stamp precision.
|
| 54 |
+
time.sleep(1)
|
| 55 |
+
|
| 56 |
+
with open(a_c) as f:
|
| 57 |
+
a_c_contents1 = f.read()
|
| 58 |
+
|
| 59 |
+
with open(a_pxd, 'w') as f:
|
| 60 |
+
f.write('cdef double value\n')
|
| 61 |
+
|
| 62 |
+
fresh_cythonize(a_pyx)
|
| 63 |
+
|
| 64 |
+
with open(a_c) as f:
|
| 65 |
+
a_c_contents2 = f.read()
|
| 66 |
+
|
| 67 |
+
self.assertTrue("__pyx_v_1a_value = 1;" in a_c_contents1)
|
| 68 |
+
self.assertFalse("__pyx_v_1a_value = 1;" in a_c_contents2)
|
| 69 |
+
self.assertTrue("__pyx_v_1a_value = 1.0;" in a_c_contents2)
|
| 70 |
+
self.assertFalse("__pyx_v_1a_value = 1.0;" in a_c_contents1)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def test_recythonize_py_on_pxd_change(self):
|
| 74 |
+
|
| 75 |
+
src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 76 |
+
|
| 77 |
+
a_pxd = os.path.join(src_dir, 'a.pxd')
|
| 78 |
+
a_py = os.path.join(src_dir, 'a.py')
|
| 79 |
+
a_c = os.path.join(src_dir, 'a.c')
|
| 80 |
+
dep_tree = Cython.Build.Dependencies.create_dependency_tree()
|
| 81 |
+
|
| 82 |
+
with open(a_pxd, 'w') as f:
|
| 83 |
+
f.write('cdef int value\n')
|
| 84 |
+
|
| 85 |
+
with open(a_py, 'w') as f:
|
| 86 |
+
f.write('value = 1\n')
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
# The dependencies for "a.py" are "a.pxd" and "a.py".
|
| 90 |
+
self.assertEqual({a_pxd, a_py}, dep_tree.all_dependencies(a_py))
|
| 91 |
+
|
| 92 |
+
# Cythonize to create a.c
|
| 93 |
+
fresh_cythonize(a_py)
|
| 94 |
+
|
| 95 |
+
# Sleep to address coarse time-stamp precision.
|
| 96 |
+
time.sleep(1)
|
| 97 |
+
|
| 98 |
+
with open(a_c) as f:
|
| 99 |
+
a_c_contents1 = f.read()
|
| 100 |
+
|
| 101 |
+
with open(a_pxd, 'w') as f:
|
| 102 |
+
f.write('cdef double value\n')
|
| 103 |
+
|
| 104 |
+
fresh_cythonize(a_py)
|
| 105 |
+
|
| 106 |
+
with open(a_c) as f:
|
| 107 |
+
a_c_contents2 = f.read()
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
self.assertTrue("__pyx_v_1a_value = 1;" in a_c_contents1)
|
| 111 |
+
self.assertFalse("__pyx_v_1a_value = 1;" in a_c_contents2)
|
| 112 |
+
self.assertTrue("__pyx_v_1a_value = 1.0;" in a_c_contents2)
|
| 113 |
+
self.assertFalse("__pyx_v_1a_value = 1.0;" in a_c_contents1)
|
| 114 |
+
|
| 115 |
+
def test_recythonize_pyx_on_dep_pxd_change(self):
|
| 116 |
+
src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 117 |
+
|
| 118 |
+
a_pxd = os.path.join(src_dir, 'a.pxd')
|
| 119 |
+
a_pyx = os.path.join(src_dir, 'a.pyx')
|
| 120 |
+
b_pyx = os.path.join(src_dir, 'b.pyx')
|
| 121 |
+
b_c = os.path.join(src_dir, 'b.c')
|
| 122 |
+
dep_tree = Cython.Build.Dependencies.create_dependency_tree()
|
| 123 |
+
|
| 124 |
+
with open(a_pxd, 'w') as f:
|
| 125 |
+
f.write('cdef int value\n')
|
| 126 |
+
|
| 127 |
+
with open(a_pyx, 'w') as f:
|
| 128 |
+
f.write('value = 1\n')
|
| 129 |
+
|
| 130 |
+
with open(b_pyx, 'w') as f:
|
| 131 |
+
f.write('cimport a\n' + 'a.value = 2\n')
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
# The dependencies for "b.pyx" are "a.pxd" and "b.pyx".
|
| 135 |
+
self.assertEqual({a_pxd, b_pyx}, dep_tree.all_dependencies(b_pyx))
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
# Cythonize to create b.c
|
| 139 |
+
fresh_cythonize([a_pyx, b_pyx])
|
| 140 |
+
|
| 141 |
+
# Sleep to address coarse time-stamp precision.
|
| 142 |
+
time.sleep(1)
|
| 143 |
+
|
| 144 |
+
with open(b_c) as f:
|
| 145 |
+
b_c_contents1 = f.read()
|
| 146 |
+
|
| 147 |
+
with open(a_pxd, 'w') as f:
|
| 148 |
+
f.write('cdef double value\n')
|
| 149 |
+
|
| 150 |
+
fresh_cythonize([a_pyx, b_pyx])
|
| 151 |
+
|
| 152 |
+
with open(b_c) as f:
|
| 153 |
+
b_c_contents2 = f.read()
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
self.assertTrue("__pyx_v_1a_value = 2;" in b_c_contents1)
|
| 158 |
+
self.assertFalse("__pyx_v_1a_value = 2;" in b_c_contents2)
|
| 159 |
+
self.assertTrue("__pyx_v_1a_value = 2.0;" in b_c_contents2)
|
| 160 |
+
self.assertFalse("__pyx_v_1a_value = 2.0;" in b_c_contents1)
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def test_recythonize_py_on_dep_pxd_change(self):
|
| 165 |
+
|
| 166 |
+
src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 167 |
+
|
| 168 |
+
a_pxd = os.path.join(src_dir, 'a.pxd')
|
| 169 |
+
a_pyx = os.path.join(src_dir, 'a.pyx')
|
| 170 |
+
b_pxd = os.path.join(src_dir, 'b.pxd')
|
| 171 |
+
b_py = os.path.join(src_dir, 'b.py')
|
| 172 |
+
b_c = os.path.join(src_dir, 'b.c')
|
| 173 |
+
dep_tree = Cython.Build.Dependencies.create_dependency_tree()
|
| 174 |
+
|
| 175 |
+
with open(a_pxd, 'w') as f:
|
| 176 |
+
f.write('cdef int value\n')
|
| 177 |
+
|
| 178 |
+
with open(a_pyx, 'w') as f:
|
| 179 |
+
f.write('value = 1\n')
|
| 180 |
+
|
| 181 |
+
with open(b_pxd, 'w') as f:
|
| 182 |
+
f.write('cimport a\n')
|
| 183 |
+
|
| 184 |
+
with open(b_py, 'w') as f:
|
| 185 |
+
f.write('a.value = 2\n')
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
# The dependencies for b.py are "a.pxd", "b.pxd" and "b.py".
|
| 189 |
+
self.assertEqual({a_pxd, b_pxd, b_py}, dep_tree.all_dependencies(b_py))
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
# Cythonize to create b.c
|
| 193 |
+
fresh_cythonize([a_pyx, b_py])
|
| 194 |
+
|
| 195 |
+
# Sleep to address coarse time-stamp precision.
|
| 196 |
+
time.sleep(1)
|
| 197 |
+
|
| 198 |
+
with open(b_c) as f:
|
| 199 |
+
b_c_contents1 = f.read()
|
| 200 |
+
|
| 201 |
+
with open(a_pxd, 'w') as f:
|
| 202 |
+
f.write('cdef double value\n')
|
| 203 |
+
|
| 204 |
+
fresh_cythonize([a_pyx, b_py])
|
| 205 |
+
|
| 206 |
+
with open(b_c) as f:
|
| 207 |
+
b_c_contents2 = f.read()
|
| 208 |
+
|
| 209 |
+
self.assertTrue("__pyx_v_1a_value = 2;" in b_c_contents1)
|
| 210 |
+
self.assertFalse("__pyx_v_1a_value = 2;" in b_c_contents2)
|
| 211 |
+
self.assertTrue("__pyx_v_1a_value = 2.0;" in b_c_contents2)
|
| 212 |
+
self.assertFalse("__pyx_v_1a_value = 2.0;" in b_c_contents1)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/TestStripLiterals.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from Cython.Build.Dependencies import strip_string_literals
|
| 2 |
+
|
| 3 |
+
from Cython.TestUtils import CythonTest
|
| 4 |
+
|
| 5 |
+
class TestStripLiterals(CythonTest):
|
| 6 |
+
|
| 7 |
+
def t(self, before, expected):
|
| 8 |
+
actual, literals = strip_string_literals(before, prefix="_L")
|
| 9 |
+
self.assertEqual(expected, actual)
|
| 10 |
+
for key, value in literals.items():
|
| 11 |
+
actual = actual.replace(key, value)
|
| 12 |
+
self.assertEqual(before, actual)
|
| 13 |
+
|
| 14 |
+
def test_empty(self):
|
| 15 |
+
self.t("", "")
|
| 16 |
+
|
| 17 |
+
def test_single_quote(self):
|
| 18 |
+
self.t("'x'", "'_L1_'")
|
| 19 |
+
|
| 20 |
+
def test_double_quote(self):
|
| 21 |
+
self.t('"x"', '"_L1_"')
|
| 22 |
+
|
| 23 |
+
def test_nested_quotes(self):
|
| 24 |
+
self.t(""" '"' "'" """, """ '_L1_' "_L2_" """)
|
| 25 |
+
|
| 26 |
+
def test_triple_quote(self):
|
| 27 |
+
self.t(" '''a\n''' ", " '''_L1_''' ")
|
| 28 |
+
|
| 29 |
+
def test_backslash(self):
|
| 30 |
+
self.t(r"'a\'b'", "'_L1_'")
|
| 31 |
+
self.t(r"'a\\'", "'_L1_'")
|
| 32 |
+
self.t(r"'a\\\'b'", "'_L1_'")
|
| 33 |
+
|
| 34 |
+
def test_unicode(self):
|
| 35 |
+
self.t("u'abc'", "u'_L1_'")
|
| 36 |
+
|
| 37 |
+
def test_raw(self):
|
| 38 |
+
self.t(r"r'abc\\'", "r'_L1_'")
|
| 39 |
+
|
| 40 |
+
def test_raw_unicode(self):
|
| 41 |
+
self.t(r"ru'abc\\'", "ru'_L1_'")
|
| 42 |
+
|
| 43 |
+
def test_comment(self):
|
| 44 |
+
self.t("abc # foo", "abc #_L1_")
|
| 45 |
+
|
| 46 |
+
def test_comment_and_quote(self):
|
| 47 |
+
self.t("abc # 'x'", "abc #_L1_")
|
| 48 |
+
self.t("'abc#'", "'_L1_'")
|
| 49 |
+
|
| 50 |
+
def test_include(self):
|
| 51 |
+
self.t("include 'a.pxi' # something here",
|
| 52 |
+
"include '_L1_' #_L2_")
|
| 53 |
+
|
| 54 |
+
def test_extern(self):
|
| 55 |
+
self.t("cdef extern from 'a.h': # comment",
|
| 56 |
+
"cdef extern from '_L1_': #_L2_")
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/Tests/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# empty file
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Build/__init__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .Dependencies import cythonize
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
if sys.version_info < (3, 7):
|
| 5 |
+
from .Distutils import build_ext
|
| 6 |
+
del sys
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def __getattr__(name):
|
| 10 |
+
if name == 'build_ext':
|
| 11 |
+
# Lazy import, fails if distutils is not available (in Python 3.12+).
|
| 12 |
+
from .Distutils import build_ext
|
| 13 |
+
return build_ext
|
| 14 |
+
raise AttributeError("module '%s' has no attribute '%s'" % (__name__, name))
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/CodeWriter.py
ADDED
|
@@ -0,0 +1,820 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Serializes a Cython code tree to Cython code. This is primarily useful for
|
| 3 |
+
debugging and testing purposes.
|
| 4 |
+
The output is in a strict format, no whitespace or comments from the input
|
| 5 |
+
is preserved (and it could not be as it is not present in the code tree).
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from __future__ import absolute_import, print_function
|
| 9 |
+
|
| 10 |
+
from .Compiler.Visitor import TreeVisitor
|
| 11 |
+
from .Compiler.ExprNodes import *
|
| 12 |
+
from .Compiler.Nodes import CSimpleBaseTypeNode
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class LinesResult(object):
|
| 16 |
+
def __init__(self):
|
| 17 |
+
self.lines = []
|
| 18 |
+
self.s = u""
|
| 19 |
+
|
| 20 |
+
def put(self, s):
|
| 21 |
+
self.s += s
|
| 22 |
+
|
| 23 |
+
def newline(self):
|
| 24 |
+
self.lines.append(self.s)
|
| 25 |
+
self.s = u""
|
| 26 |
+
|
| 27 |
+
def putline(self, s):
|
| 28 |
+
self.put(s)
|
| 29 |
+
self.newline()
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class DeclarationWriter(TreeVisitor):
|
| 33 |
+
"""
|
| 34 |
+
A Cython code writer that is limited to declarations nodes.
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
indent_string = u" "
|
| 38 |
+
|
| 39 |
+
def __init__(self, result=None):
|
| 40 |
+
super(DeclarationWriter, self).__init__()
|
| 41 |
+
if result is None:
|
| 42 |
+
result = LinesResult()
|
| 43 |
+
self.result = result
|
| 44 |
+
self.numindents = 0
|
| 45 |
+
self.tempnames = {}
|
| 46 |
+
self.tempblockindex = 0
|
| 47 |
+
|
| 48 |
+
def write(self, tree):
|
| 49 |
+
self.visit(tree)
|
| 50 |
+
return self.result
|
| 51 |
+
|
| 52 |
+
def indent(self):
|
| 53 |
+
self.numindents += 1
|
| 54 |
+
|
| 55 |
+
def dedent(self):
|
| 56 |
+
self.numindents -= 1
|
| 57 |
+
|
| 58 |
+
def startline(self, s=u""):
|
| 59 |
+
self.result.put(self.indent_string * self.numindents + s)
|
| 60 |
+
|
| 61 |
+
def put(self, s):
|
| 62 |
+
self.result.put(s)
|
| 63 |
+
|
| 64 |
+
def putline(self, s):
|
| 65 |
+
self.result.putline(self.indent_string * self.numindents + s)
|
| 66 |
+
|
| 67 |
+
def endline(self, s=u""):
|
| 68 |
+
self.result.putline(s)
|
| 69 |
+
|
| 70 |
+
def line(self, s):
|
| 71 |
+
self.startline(s)
|
| 72 |
+
self.endline()
|
| 73 |
+
|
| 74 |
+
def comma_separated_list(self, items, output_rhs=False):
|
| 75 |
+
if len(items) > 0:
|
| 76 |
+
for item in items[:-1]:
|
| 77 |
+
self.visit(item)
|
| 78 |
+
if output_rhs and item.default is not None:
|
| 79 |
+
self.put(u" = ")
|
| 80 |
+
self.visit(item.default)
|
| 81 |
+
self.put(u", ")
|
| 82 |
+
self.visit(items[-1])
|
| 83 |
+
if output_rhs and items[-1].default is not None:
|
| 84 |
+
self.put(u" = ")
|
| 85 |
+
self.visit(items[-1].default)
|
| 86 |
+
|
| 87 |
+
def _visit_indented(self, node):
|
| 88 |
+
self.indent()
|
| 89 |
+
self.visit(node)
|
| 90 |
+
self.dedent()
|
| 91 |
+
|
| 92 |
+
def visit_Node(self, node):
|
| 93 |
+
raise AssertionError("Node not handled by serializer: %r" % node)
|
| 94 |
+
|
| 95 |
+
def visit_ModuleNode(self, node):
|
| 96 |
+
self.visitchildren(node)
|
| 97 |
+
|
| 98 |
+
def visit_StatListNode(self, node):
|
| 99 |
+
self.visitchildren(node)
|
| 100 |
+
|
| 101 |
+
def visit_CDefExternNode(self, node):
|
| 102 |
+
if node.include_file is None:
|
| 103 |
+
file = u'*'
|
| 104 |
+
else:
|
| 105 |
+
file = u'"%s"' % node.include_file
|
| 106 |
+
self.putline(u"cdef extern from %s:" % file)
|
| 107 |
+
self._visit_indented(node.body)
|
| 108 |
+
|
| 109 |
+
def visit_CPtrDeclaratorNode(self, node):
|
| 110 |
+
self.put('*')
|
| 111 |
+
self.visit(node.base)
|
| 112 |
+
|
| 113 |
+
def visit_CReferenceDeclaratorNode(self, node):
|
| 114 |
+
self.put('&')
|
| 115 |
+
self.visit(node.base)
|
| 116 |
+
|
| 117 |
+
def visit_CArrayDeclaratorNode(self, node):
|
| 118 |
+
self.visit(node.base)
|
| 119 |
+
self.put(u'[')
|
| 120 |
+
if node.dimension is not None:
|
| 121 |
+
self.visit(node.dimension)
|
| 122 |
+
self.put(u']')
|
| 123 |
+
|
| 124 |
+
def visit_CFuncDeclaratorNode(self, node):
|
| 125 |
+
# TODO: except, gil, etc.
|
| 126 |
+
self.visit(node.base)
|
| 127 |
+
self.put(u'(')
|
| 128 |
+
self.comma_separated_list(node.args)
|
| 129 |
+
self.endline(u')')
|
| 130 |
+
|
| 131 |
+
def visit_CNameDeclaratorNode(self, node):
|
| 132 |
+
self.put(node.name)
|
| 133 |
+
|
| 134 |
+
def visit_CSimpleBaseTypeNode(self, node):
|
| 135 |
+
# See Parsing.p_sign_and_longness
|
| 136 |
+
if node.is_basic_c_type:
|
| 137 |
+
self.put(("unsigned ", "", "signed ")[node.signed])
|
| 138 |
+
if node.longness < 0:
|
| 139 |
+
self.put("short " * -node.longness)
|
| 140 |
+
elif node.longness > 0:
|
| 141 |
+
self.put("long " * node.longness)
|
| 142 |
+
if node.name is not None:
|
| 143 |
+
self.put(node.name)
|
| 144 |
+
|
| 145 |
+
def visit_CComplexBaseTypeNode(self, node):
|
| 146 |
+
self.visit(node.base_type)
|
| 147 |
+
self.visit(node.declarator)
|
| 148 |
+
|
| 149 |
+
def visit_CNestedBaseTypeNode(self, node):
|
| 150 |
+
self.visit(node.base_type)
|
| 151 |
+
self.put(u'.')
|
| 152 |
+
self.put(node.name)
|
| 153 |
+
|
| 154 |
+
def visit_TemplatedTypeNode(self, node):
|
| 155 |
+
self.visit(node.base_type_node)
|
| 156 |
+
self.put(u'[')
|
| 157 |
+
self.comma_separated_list(node.positional_args + node.keyword_args.key_value_pairs)
|
| 158 |
+
self.put(u']')
|
| 159 |
+
|
| 160 |
+
def visit_CVarDefNode(self, node):
|
| 161 |
+
self.startline(u"cdef ")
|
| 162 |
+
self.visit(node.base_type)
|
| 163 |
+
self.put(u" ")
|
| 164 |
+
self.comma_separated_list(node.declarators, output_rhs=True)
|
| 165 |
+
self.endline()
|
| 166 |
+
|
| 167 |
+
def _visit_container_node(self, node, decl, extras, attributes):
|
| 168 |
+
# TODO: visibility
|
| 169 |
+
self.startline(decl)
|
| 170 |
+
if node.name:
|
| 171 |
+
self.put(u' ')
|
| 172 |
+
self.put(node.name)
|
| 173 |
+
if node.cname is not None:
|
| 174 |
+
self.put(u' "%s"' % node.cname)
|
| 175 |
+
if extras:
|
| 176 |
+
self.put(extras)
|
| 177 |
+
self.endline(':')
|
| 178 |
+
self.indent()
|
| 179 |
+
if not attributes:
|
| 180 |
+
self.putline('pass')
|
| 181 |
+
else:
|
| 182 |
+
for attribute in attributes:
|
| 183 |
+
self.visit(attribute)
|
| 184 |
+
self.dedent()
|
| 185 |
+
|
| 186 |
+
def visit_CStructOrUnionDefNode(self, node):
|
| 187 |
+
if node.typedef_flag:
|
| 188 |
+
decl = u'ctypedef '
|
| 189 |
+
else:
|
| 190 |
+
decl = u'cdef '
|
| 191 |
+
if node.visibility == 'public':
|
| 192 |
+
decl += u'public '
|
| 193 |
+
if node.packed:
|
| 194 |
+
decl += u'packed '
|
| 195 |
+
decl += node.kind
|
| 196 |
+
self._visit_container_node(node, decl, None, node.attributes)
|
| 197 |
+
|
| 198 |
+
def visit_CppClassNode(self, node):
|
| 199 |
+
extras = ""
|
| 200 |
+
if node.templates:
|
| 201 |
+
extras = u"[%s]" % ", ".join(node.templates)
|
| 202 |
+
if node.base_classes:
|
| 203 |
+
extras += "(%s)" % ", ".join(node.base_classes)
|
| 204 |
+
self._visit_container_node(node, u"cdef cppclass", extras, node.attributes)
|
| 205 |
+
|
| 206 |
+
def visit_CEnumDefNode(self, node):
|
| 207 |
+
self._visit_container_node(node, u"cdef enum", None, node.items)
|
| 208 |
+
|
| 209 |
+
def visit_CEnumDefItemNode(self, node):
|
| 210 |
+
self.startline(node.name)
|
| 211 |
+
if node.cname:
|
| 212 |
+
self.put(u' "%s"' % node.cname)
|
| 213 |
+
if node.value:
|
| 214 |
+
self.put(u" = ")
|
| 215 |
+
self.visit(node.value)
|
| 216 |
+
self.endline()
|
| 217 |
+
|
| 218 |
+
def visit_CClassDefNode(self, node):
|
| 219 |
+
assert not node.module_name
|
| 220 |
+
if node.decorators:
|
| 221 |
+
for decorator in node.decorators:
|
| 222 |
+
self.visit(decorator)
|
| 223 |
+
self.startline(u"cdef class ")
|
| 224 |
+
self.put(node.class_name)
|
| 225 |
+
if node.base_class_name:
|
| 226 |
+
self.put(u"(")
|
| 227 |
+
if node.base_class_module:
|
| 228 |
+
self.put(node.base_class_module)
|
| 229 |
+
self.put(u".")
|
| 230 |
+
self.put(node.base_class_name)
|
| 231 |
+
self.put(u")")
|
| 232 |
+
self.endline(u":")
|
| 233 |
+
self._visit_indented(node.body)
|
| 234 |
+
|
| 235 |
+
def visit_CTypeDefNode(self, node):
|
| 236 |
+
self.startline(u"ctypedef ")
|
| 237 |
+
self.visit(node.base_type)
|
| 238 |
+
self.put(u" ")
|
| 239 |
+
self.visit(node.declarator)
|
| 240 |
+
self.endline()
|
| 241 |
+
|
| 242 |
+
def visit_FuncDefNode(self, node):
|
| 243 |
+
# TODO: support cdef + cpdef functions
|
| 244 |
+
self.startline(u"def %s(" % node.name)
|
| 245 |
+
self.comma_separated_list(node.args)
|
| 246 |
+
self.endline(u"):")
|
| 247 |
+
self._visit_indented(node.body)
|
| 248 |
+
|
| 249 |
+
def visit_CFuncDefNode(self, node):
|
| 250 |
+
self.startline(u'cpdef ' if node.overridable else u'cdef ')
|
| 251 |
+
if node.modifiers:
|
| 252 |
+
self.put(' '.join(node.modifiers))
|
| 253 |
+
self.put(' ')
|
| 254 |
+
if node.visibility != 'private':
|
| 255 |
+
self.put(node.visibility)
|
| 256 |
+
self.put(u' ')
|
| 257 |
+
if node.api:
|
| 258 |
+
self.put(u'api ')
|
| 259 |
+
|
| 260 |
+
if node.base_type:
|
| 261 |
+
self.visit(node.base_type)
|
| 262 |
+
if node.base_type.name is not None:
|
| 263 |
+
self.put(u' ')
|
| 264 |
+
|
| 265 |
+
# visit the CFuncDeclaratorNode, but put a `:` at the end of line
|
| 266 |
+
self.visit(node.declarator.base)
|
| 267 |
+
self.put(u'(')
|
| 268 |
+
self.comma_separated_list(node.declarator.args)
|
| 269 |
+
self.endline(u'):')
|
| 270 |
+
|
| 271 |
+
self._visit_indented(node.body)
|
| 272 |
+
|
| 273 |
+
def visit_CArgDeclNode(self, node):
|
| 274 |
+
# For "CSimpleBaseTypeNode", the variable type may have been parsed as type.
|
| 275 |
+
# For other node types, the "name" is always None.
|
| 276 |
+
if not isinstance(node.base_type, CSimpleBaseTypeNode) or \
|
| 277 |
+
node.base_type.name is not None:
|
| 278 |
+
self.visit(node.base_type)
|
| 279 |
+
|
| 280 |
+
# If we printed something for "node.base_type", we may need to print an extra ' '.
|
| 281 |
+
#
|
| 282 |
+
# Special case: if "node.declarator" is a "CNameDeclaratorNode",
|
| 283 |
+
# its "name" might be an empty string, for example, for "cdef f(x)".
|
| 284 |
+
if node.declarator.declared_name():
|
| 285 |
+
self.put(u" ")
|
| 286 |
+
self.visit(node.declarator)
|
| 287 |
+
if node.default is not None:
|
| 288 |
+
self.put(u" = ")
|
| 289 |
+
self.visit(node.default)
|
| 290 |
+
|
| 291 |
+
def visit_CImportStatNode(self, node):
|
| 292 |
+
self.startline(u"cimport ")
|
| 293 |
+
self.put(node.module_name)
|
| 294 |
+
if node.as_name:
|
| 295 |
+
self.put(u" as ")
|
| 296 |
+
self.put(node.as_name)
|
| 297 |
+
self.endline()
|
| 298 |
+
|
| 299 |
+
def visit_FromCImportStatNode(self, node):
|
| 300 |
+
self.startline(u"from ")
|
| 301 |
+
self.put(node.module_name)
|
| 302 |
+
self.put(u" cimport ")
|
| 303 |
+
first = True
|
| 304 |
+
for pos, name, as_name, kind in node.imported_names:
|
| 305 |
+
assert kind is None
|
| 306 |
+
if first:
|
| 307 |
+
first = False
|
| 308 |
+
else:
|
| 309 |
+
self.put(u", ")
|
| 310 |
+
self.put(name)
|
| 311 |
+
if as_name:
|
| 312 |
+
self.put(u" as ")
|
| 313 |
+
self.put(as_name)
|
| 314 |
+
self.endline()
|
| 315 |
+
|
| 316 |
+
def visit_NameNode(self, node):
|
| 317 |
+
self.put(node.name)
|
| 318 |
+
|
| 319 |
+
def visit_DecoratorNode(self, node):
|
| 320 |
+
self.startline("@")
|
| 321 |
+
self.visit(node.decorator)
|
| 322 |
+
self.endline()
|
| 323 |
+
|
| 324 |
+
def visit_PassStatNode(self, node):
|
| 325 |
+
self.startline(u"pass")
|
| 326 |
+
self.endline()
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
class StatementWriter(DeclarationWriter):
|
| 330 |
+
"""
|
| 331 |
+
A Cython code writer for most language statement features.
|
| 332 |
+
"""
|
| 333 |
+
|
| 334 |
+
def visit_SingleAssignmentNode(self, node):
|
| 335 |
+
self.startline()
|
| 336 |
+
self.visit(node.lhs)
|
| 337 |
+
self.put(u" = ")
|
| 338 |
+
self.visit(node.rhs)
|
| 339 |
+
self.endline()
|
| 340 |
+
|
| 341 |
+
def visit_CascadedAssignmentNode(self, node):
|
| 342 |
+
self.startline()
|
| 343 |
+
for lhs in node.lhs_list:
|
| 344 |
+
self.visit(lhs)
|
| 345 |
+
self.put(u" = ")
|
| 346 |
+
self.visit(node.rhs)
|
| 347 |
+
self.endline()
|
| 348 |
+
|
| 349 |
+
def visit_PrintStatNode(self, node):
|
| 350 |
+
self.startline(u"print ")
|
| 351 |
+
self.comma_separated_list(node.arg_tuple.args)
|
| 352 |
+
if not node.append_newline:
|
| 353 |
+
self.put(u",")
|
| 354 |
+
self.endline()
|
| 355 |
+
|
| 356 |
+
def visit_ForInStatNode(self, node):
|
| 357 |
+
self.startline(u"for ")
|
| 358 |
+
if node.target.is_sequence_constructor:
|
| 359 |
+
self.comma_separated_list(node.target.args)
|
| 360 |
+
else:
|
| 361 |
+
self.visit(node.target)
|
| 362 |
+
self.put(u" in ")
|
| 363 |
+
self.visit(node.iterator.sequence)
|
| 364 |
+
self.endline(u":")
|
| 365 |
+
self._visit_indented(node.body)
|
| 366 |
+
if node.else_clause is not None:
|
| 367 |
+
self.line(u"else:")
|
| 368 |
+
self._visit_indented(node.else_clause)
|
| 369 |
+
|
| 370 |
+
def visit_IfStatNode(self, node):
|
| 371 |
+
# The IfClauseNode is handled directly without a separate match
|
| 372 |
+
# for clariy.
|
| 373 |
+
self.startline(u"if ")
|
| 374 |
+
self.visit(node.if_clauses[0].condition)
|
| 375 |
+
self.endline(":")
|
| 376 |
+
self._visit_indented(node.if_clauses[0].body)
|
| 377 |
+
for clause in node.if_clauses[1:]:
|
| 378 |
+
self.startline("elif ")
|
| 379 |
+
self.visit(clause.condition)
|
| 380 |
+
self.endline(":")
|
| 381 |
+
self._visit_indented(clause.body)
|
| 382 |
+
if node.else_clause is not None:
|
| 383 |
+
self.line("else:")
|
| 384 |
+
self._visit_indented(node.else_clause)
|
| 385 |
+
|
| 386 |
+
def visit_WhileStatNode(self, node):
|
| 387 |
+
self.startline(u"while ")
|
| 388 |
+
self.visit(node.condition)
|
| 389 |
+
self.endline(u":")
|
| 390 |
+
self._visit_indented(node.body)
|
| 391 |
+
if node.else_clause is not None:
|
| 392 |
+
self.line("else:")
|
| 393 |
+
self._visit_indented(node.else_clause)
|
| 394 |
+
|
| 395 |
+
def visit_ContinueStatNode(self, node):
|
| 396 |
+
self.line(u"continue")
|
| 397 |
+
|
| 398 |
+
def visit_BreakStatNode(self, node):
|
| 399 |
+
self.line(u"break")
|
| 400 |
+
|
| 401 |
+
def visit_SequenceNode(self, node):
|
| 402 |
+
self.comma_separated_list(node.args) # Might need to discover whether we need () around tuples...hmm...
|
| 403 |
+
|
| 404 |
+
def visit_ExprStatNode(self, node):
|
| 405 |
+
self.startline()
|
| 406 |
+
self.visit(node.expr)
|
| 407 |
+
self.endline()
|
| 408 |
+
|
| 409 |
+
def visit_InPlaceAssignmentNode(self, node):
|
| 410 |
+
self.startline()
|
| 411 |
+
self.visit(node.lhs)
|
| 412 |
+
self.put(u" %s= " % node.operator)
|
| 413 |
+
self.visit(node.rhs)
|
| 414 |
+
self.endline()
|
| 415 |
+
|
| 416 |
+
def visit_WithStatNode(self, node):
|
| 417 |
+
self.startline()
|
| 418 |
+
self.put(u"with ")
|
| 419 |
+
self.visit(node.manager)
|
| 420 |
+
if node.target is not None:
|
| 421 |
+
self.put(u" as ")
|
| 422 |
+
self.visit(node.target)
|
| 423 |
+
self.endline(u":")
|
| 424 |
+
self._visit_indented(node.body)
|
| 425 |
+
|
| 426 |
+
def visit_TryFinallyStatNode(self, node):
|
| 427 |
+
self.line(u"try:")
|
| 428 |
+
self._visit_indented(node.body)
|
| 429 |
+
self.line(u"finally:")
|
| 430 |
+
self._visit_indented(node.finally_clause)
|
| 431 |
+
|
| 432 |
+
def visit_TryExceptStatNode(self, node):
|
| 433 |
+
self.line(u"try:")
|
| 434 |
+
self._visit_indented(node.body)
|
| 435 |
+
for x in node.except_clauses:
|
| 436 |
+
self.visit(x)
|
| 437 |
+
if node.else_clause is not None:
|
| 438 |
+
self.visit(node.else_clause)
|
| 439 |
+
|
| 440 |
+
def visit_ExceptClauseNode(self, node):
|
| 441 |
+
self.startline(u"except")
|
| 442 |
+
if node.pattern is not None:
|
| 443 |
+
self.put(u" ")
|
| 444 |
+
self.visit(node.pattern)
|
| 445 |
+
if node.target is not None:
|
| 446 |
+
self.put(u", ")
|
| 447 |
+
self.visit(node.target)
|
| 448 |
+
self.endline(":")
|
| 449 |
+
self._visit_indented(node.body)
|
| 450 |
+
|
| 451 |
+
def visit_ReturnStatNode(self, node):
|
| 452 |
+
self.startline("return")
|
| 453 |
+
if node.value is not None:
|
| 454 |
+
self.put(u" ")
|
| 455 |
+
self.visit(node.value)
|
| 456 |
+
self.endline()
|
| 457 |
+
|
| 458 |
+
def visit_ReraiseStatNode(self, node):
|
| 459 |
+
self.line("raise")
|
| 460 |
+
|
| 461 |
+
def visit_ImportNode(self, node):
|
| 462 |
+
self.put(u"(import %s)" % node.module_name.value)
|
| 463 |
+
|
| 464 |
+
def visit_TempsBlockNode(self, node):
    """
    Assign printable names to the block's temporaries, then visit the body.

    Temporaries are output like '$1_1', where the first number is an index
    of the TempsBlockNode and the second number is an index of the
    temporary which that block allocates.
    """
    # enumerate() replaces the original manual "idx = 0 / idx += 1" counter.
    for idx, handle in enumerate(node.temps):
        self.tempnames[handle] = "$%d_%d" % (self.tempblockindex, idx)
    self.tempblockindex += 1
    self.visit(node.body)
| 477 |
+
def visit_TempRefNode(self, node):
    """Emit the name previously assigned to this temporary's handle."""
    self.put(self.tempnames[node.handle])
| 481 |
+
class ExpressionWriter(TreeVisitor):
    """
    A Cython code writer that is intentionally limited to expressions.

    The serialized expression accumulates in ``self.result``; operator
    precedence is tracked on a stack so parentheses are inserted only
    where needed.
    """

    def __init__(self, result=None):
        super(ExpressionWriter, self).__init__()
        if result is None:
            result = u""
        self.result = result
        # Precedence stack; 0 is the "no enclosing operator" baseline.
        self.precedence = [0]

    def write(self, tree):
        """Serialize *tree* and return the accumulated text."""
        self.visit(tree)
        return self.result

    def put(self, s):
        # Append raw text to the output.
        self.result += s

    def remove(self, s):
        # Strip a trailing *s* (used to drop a dangling ", ").
        if self.result.endswith(s):
            self.result = self.result[:-len(s)]

    def comma_separated_list(self, items):
        """Emit *items* separated by ", " (nothing for an empty list)."""
        if len(items) > 0:
            for item in items[:-1]:
                self.visit(item)
                self.put(u", ")
            self.visit(items[-1])

    def visit_Node(self, node):
        # Fallback: every expression node must have an explicit handler.
        raise AssertionError("Node not handled by serializer: %r" % node)

    # --- literals -------------------------------------------------------

    def visit_IntNode(self, node):
        self.put(node.value)

    def visit_FloatNode(self, node):
        self.put(node.value)

    def visit_NoneNode(self, node):
        self.put(u"None")

    def visit_NameNode(self, node):
        self.put(node.name)

    def visit_EllipsisNode(self, node):
        self.put(u"...")

    def visit_BoolNode(self, node):
        self.put(str(node.value))

    def visit_ConstNode(self, node):
        self.put(str(node.value))

    def visit_ImagNode(self, node):
        self.put(node.value)
        self.put(u"j")

    def emit_string(self, node, prefix=u""):
        """Emit a string literal with *prefix*, normalizing away any
        'u'/'b' prefix that repr() may have added."""
        repr_val = repr(node.value)
        if repr_val[0] in 'ub':
            repr_val = repr_val[1:]
        self.put(u"%s%s" % (prefix, repr_val))

    def visit_BytesNode(self, node):
        self.emit_string(node, u"b")

    def visit_StringNode(self, node):
        self.emit_string(node)

    def visit_UnicodeNode(self, node):
        self.emit_string(node, u"u")

    # --- containers -----------------------------------------------------

    def emit_sequence(self, node, parens=(u"", u"")):
        """Emit the node's subexpressions as a comma list inside *parens*."""
        open_paren, close_paren = parens
        items = node.subexpr_nodes()
        self.put(open_paren)
        self.comma_separated_list(items)
        self.put(close_paren)

    def visit_ListNode(self, node):
        self.emit_sequence(node, u"[]")

    def visit_TupleNode(self, node):
        self.emit_sequence(node, u"()")

    def visit_SetNode(self, node):
        if len(node.subexpr_nodes()) > 0:
            self.emit_sequence(node, u"{}")
        else:
            # "{}" would be a dict literal, so spell the empty set explicitly.
            self.put(u"set()")

    def visit_DictNode(self, node):
        self.emit_sequence(node, u"{}")

    def visit_DictItemNode(self, node):
        self.visit(node.key)
        self.put(u": ")
        self.visit(node.value)

    # --- operators ------------------------------------------------------

    unop_precedence = {
        'not': 3, '!': 3,
        '+': 11, '-': 11, '~': 11,
    }
    binop_precedence = {
        'or': 1,
        'and': 2,
        # unary: 'not': 3, '!': 3,
        'in': 4, 'not_in': 4, 'is': 4, 'is_not': 4, '<': 4, '<=': 4, '>': 4, '>=': 4, '!=': 4, '==': 4,
        '|': 5,
        '^': 6,
        '&': 7,
        '<<': 8, '>>': 8,
        '+': 9, '-': 9,
        '*': 10, '@': 10, '/': 10, '//': 10, '%': 10,
        # unary: '+': 11, '-': 11, '~': 11
        '**': 12,
    }

    def operator_enter(self, new_prec):
        """Open a parenthesis if the new operator binds more loosely than
        the enclosing one, and push its precedence."""
        old_prec = self.precedence[-1]
        if old_prec > new_prec:
            self.put(u"(")
        self.precedence.append(new_prec)

    def operator_exit(self):
        """Pop the operator precedence, closing the parenthesis opened by
        the matching operator_enter()."""
        old_prec, new_prec = self.precedence[-2:]
        if old_prec > new_prec:
            self.put(u")")
        self.precedence.pop()

    def visit_NotNode(self, node):
        op = 'not'
        prec = self.unop_precedence[op]
        self.operator_enter(prec)
        self.put(u"not ")
        self.visit(node.operand)
        self.operator_exit()

    def visit_UnopNode(self, node):
        op = node.operator
        prec = self.unop_precedence[op]
        self.operator_enter(prec)
        self.put(u"%s" % node.operator)
        self.visit(node.operand)
        self.operator_exit()

    def visit_BinopNode(self, node):
        op = node.operator
        # Unknown operators fall back to precedence 0 (always parenthesized
        # relative to any enclosing operator).
        prec = self.binop_precedence.get(op, 0)
        self.operator_enter(prec)
        self.visit(node.operand1)
        # Internal spellings like 'not_in'/'is_not' use '_' for the space.
        self.put(u" %s " % op.replace('_', ' '))
        self.visit(node.operand2)
        self.operator_exit()

    def visit_BoolBinopNode(self, node):
        self.visit_BinopNode(node)

    def visit_PrimaryCmpNode(self, node):
        self.visit_BinopNode(node)

    # --- subscripts and slices -----------------------------------------

    def visit_IndexNode(self, node):
        self.visit(node.base)
        self.put(u"[")
        if isinstance(node.index, TupleNode):
            if node.index.subexpr_nodes():
                # x[a, b] - emit the tuple without its parentheses.
                self.emit_sequence(node.index)
            else:
                # x[()] - empty tuple index needs explicit parens.
                self.put(u"()")
        else:
            self.visit(node.index)
        self.put(u"]")

    def visit_SliceIndexNode(self, node):
        self.visit(node.base)
        self.put(u"[")
        if node.start:
            self.visit(node.start)
        self.put(u":")
        if node.stop:
            self.visit(node.stop)
        if node.slice:
            self.put(u":")
            self.visit(node.slice)
        self.put(u"]")

    def visit_SliceNode(self, node):
        # A bare slice object, e.g. inside an extended subscript.
        if not node.start.is_none:
            self.visit(node.start)
        self.put(u":")
        if not node.stop.is_none:
            self.visit(node.stop)
        if not node.step.is_none:
            self.put(u":")
            self.visit(node.step)

    def visit_CondExprNode(self, node):
        self.visit(node.true_val)
        self.put(u" if ")
        self.visit(node.test)
        self.put(u" else ")
        self.visit(node.false_val)

    def visit_AttributeNode(self, node):
        self.visit(node.obj)
        self.put(u".%s" % node.attribute)

    # --- calls ----------------------------------------------------------

    def visit_SimpleCallNode(self, node):
        self.visit(node.function)
        self.put(u"(")
        self.comma_separated_list(node.args)
        self.put(")")

    def emit_pos_args(self, node):
        """Emit positional arguments (flattening AddNode concatenations and
        TupleNodes, and rendering AsTupleNode as '*expr'), each followed
        by ', '."""
        if node is None:
            return
        if isinstance(node, AddNode):
            self.emit_pos_args(node.operand1)
            self.emit_pos_args(node.operand2)
        elif isinstance(node, TupleNode):
            for expr in node.subexpr_nodes():
                self.visit(expr)
                self.put(u", ")
        elif isinstance(node, AsTupleNode):
            self.put("*")
            self.visit(node.arg)
            self.put(u", ")
        else:
            self.visit(node)
            self.put(u", ")

    def emit_kwd_args(self, node):
        """Emit keyword arguments (flattening MergedDictNode, rendering
        DictNode items as 'key=value' and anything else as '**expr'),
        each followed by ', '."""
        if node is None:
            return
        if isinstance(node, MergedDictNode):
            for expr in node.subexpr_nodes():
                self.emit_kwd_args(expr)
        elif isinstance(node, DictNode):
            for expr in node.subexpr_nodes():
                self.put(u"%s=" % expr.key.value)
                self.visit(expr.value)
                self.put(u", ")
        else:
            self.put(u"**")
            self.visit(node)
            self.put(u", ")

    def visit_GeneralCallNode(self, node):
        self.visit(node.function)
        self.put(u"(")
        self.emit_pos_args(node.positional_args)
        self.emit_kwd_args(node.keyword_args)
        # Drop the trailing ", " left by the arg emitters.
        self.remove(u", ")
        self.put(")")

    # --- comprehensions -------------------------------------------------

    def emit_comprehension(self, body, target,
                           sequence, condition,
                           parens=(u"", u"")):
        """Emit '<body> for <target> in <sequence> [if <condition>]'
        wrapped in *parens*."""
        open_paren, close_paren = parens
        self.put(open_paren)
        self.visit(body)
        self.put(u" for ")
        self.visit(target)
        self.put(u" in ")
        self.visit(sequence)
        if condition:
            self.put(u" if ")
            self.visit(condition)
        self.put(close_paren)

    def visit_ComprehensionAppendNode(self, node):
        self.visit(node.expr)

    def visit_DictComprehensionAppendNode(self, node):
        self.visit(node.key_expr)
        self.put(u": ")
        self.visit(node.value_expr)

    def visit_ComprehensionNode(self, node):
        tpmap = {'list': u"[]", 'dict': u"{}", 'set': u"{}"}
        parens = tpmap[node.type.py_type_name()]
        body = node.loop.body
        target = node.loop.target
        sequence = node.loop.iterator.sequence
        condition = None
        if hasattr(body, 'if_clauses'):
            # type(body) is Nodes.IfStatNode
            condition = body.if_clauses[0].condition
            body = body.if_clauses[0].body
        self.emit_comprehension(body, target, sequence, condition, parens)

    def visit_GeneratorExpressionNode(self, node):
        body = node.loop.body
        target = node.loop.target
        sequence = node.loop.iterator.sequence
        condition = None
        if hasattr(body, 'if_clauses'):
            # type(body) is Nodes.IfStatNode
            condition = body.if_clauses[0].condition
            body = body.if_clauses[0].body.expr.arg
        elif hasattr(body, 'expr'):
            # type(body) is Nodes.ExprStatNode
            body = body.expr.arg
        self.emit_comprehension(body, target, sequence, condition, u"()")
|
| 788 |
+
class PxdWriter(DeclarationWriter, ExpressionWriter):
    """
    A Cython code writer for everything supported in pxd files.
    (currently unused)
    """

    def __call__(self, node):
        # Serialize the tree and print it, then pass the node through
        # unchanged so this can be used as a pipeline stage.
        print(u'\n'.join(self.write(node).lines))
        return node

    def visit_CFuncDefNode(self, node):
        """Emit the declaration line of a cdef/cpdef function."""
        if node.overridable:
            self.startline(u'cpdef ')
        else:
            self.startline(u'cdef ')
        if node.modifiers:
            self.put(' '.join(node.modifiers))
            self.put(' ')
        if node.visibility != 'private':
            self.put(node.visibility)
            self.put(u' ')
        if node.api:
            self.put(u'api ')
        self.visit(node.declarator)

    def visit_StatNode(self, node):
        # Statements are not part of pxd files; ignore them.
        pass
+
|
| 816 |
+
|
| 817 |
+
class CodeWriter(StatementWriter, ExpressionWriter):
    """
    A complete Cython code writer, combining statement and expression
    serialization.
    """
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/AnalysedTreeTransforms.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
from .Visitor import ScopeTrackingTransform
|
| 4 |
+
from .Nodes import StatListNode, SingleAssignmentNode, CFuncDefNode, DefNode
|
| 5 |
+
from .ExprNodes import DictNode, DictItemNode, NameNode, UnicodeNode
|
| 6 |
+
from .PyrexTypes import py_object_type
|
| 7 |
+
from .StringEncoding import EncodedString
|
| 8 |
+
from . import Symtab
|
| 9 |
+
|
| 10 |
+
class AutoTestDictTransform(ScopeTrackingTransform):
    """Handles the ``autotestdict`` directive: collects docstrings that
    contain doctests into a module-level ``__test__`` dict."""

    # Special methods whose docstrings are never collected for cclass scopes.
    excludelist = ['__cinit__', '__dealloc__', '__richcmp__',
                   '__nonzero__', '__bool__',
                   '__len__', '__contains__']

    def visit_ModuleNode(self, node):
        """Set up the ``__test__`` dict assignment and traverse the module."""
        if node.is_pxd:
            return node
        self.scope_type = 'module'
        self.scope_node = node

        if not self.current_directives['autotestdict']:
            return node
        self.all_docstrings = self.current_directives['autotestdict.all']
        self.cdef_docstrings = self.all_docstrings or self.current_directives['autotestdict.cdef']

        assert isinstance(node.body, StatListNode)

        # First see if __test__ is already created
        if u'__test__' in node.scope.entries:
            # Do nothing
            return node

        pos = node.pos

        self.tests = []
        self.testspos = node.pos

        test_dict_entry = node.scope.declare_var(EncodedString(u'__test__'),
                                                 py_object_type,
                                                 pos,
                                                 visibility='public')
        create_test_dict_assignment = SingleAssignmentNode(pos,
            lhs=NameNode(pos, name=EncodedString(u'__test__'),
                         entry=test_dict_entry),
            rhs=DictNode(pos, key_value_pairs=self.tests))
        # Children are visited first so self.tests is populated before the
        # assignment is appended.
        self.visitchildren(node)
        node.body.stats.append(create_test_dict_assignment)
        return node

    def add_test(self, testpos, path, doctest):
        """Record one docstring under '<path> (line <n>)' in the test dict."""
        pos = self.testspos
        keystr = u'%s (line %d)' % (path, testpos[1])
        key = UnicodeNode(pos, value=EncodedString(keystr))
        value = UnicodeNode(pos, value=doctest)
        self.tests.append(DictItemNode(pos, key=key, value=value))

    def visit_ExprNode(self, node):
        # expressions cannot contain functions and lambda expressions
        # do not have a docstring
        return node

    def visit_FuncDefNode(self, node):
        """Collect the function's docstring if it qualifies under the
        active autotestdict settings."""
        if not node.doc or (isinstance(node, DefNode) and node.fused_py_func):
            return node
        if not self.cdef_docstrings:
            if isinstance(node, CFuncDefNode) and not node.py_func:
                return node
        if not self.all_docstrings and '>>>' not in node.doc:
            return node

        pos = self.testspos
        if self.scope_type == 'module':
            path = node.entry.name
        elif self.scope_type in ('pyclass', 'cclass'):
            if isinstance(node, CFuncDefNode):
                if node.py_func is not None:
                    name = node.py_func.name
                else:
                    name = node.entry.name
            else:
                name = node.name
            if self.scope_type == 'cclass' and name in self.excludelist:
                return node
            if self.scope_type == 'pyclass':
                class_name = self.scope_node.name
            else:
                class_name = self.scope_node.class_name
            if isinstance(node.entry.scope, Symtab.PropertyScope):
                # Property accessors get a three-part path:
                # <class>.<property>.<accessor>.
                # (removed unused local 'property_method_name' - it was
                # assigned here but never read)
                path = "%s.%s.%s" % (class_name, node.entry.scope.name,
                                     node.entry.name)
            else:
                path = "%s.%s" % (class_name, node.entry.name)
        else:
            assert False
        self.add_test(node.pos, path, node.doc)
        return node
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Annotate.py
ADDED
|
@@ -0,0 +1,341 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Note: Work in progress
|
| 2 |
+
|
| 3 |
+
from __future__ import absolute_import
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import os.path
|
| 7 |
+
import re
|
| 8 |
+
import codecs
|
| 9 |
+
import textwrap
|
| 10 |
+
from datetime import datetime
|
| 11 |
+
from functools import partial
|
| 12 |
+
from collections import defaultdict
|
| 13 |
+
from xml.sax.saxutils import escape as html_escape
|
| 14 |
+
try:
|
| 15 |
+
from StringIO import StringIO
|
| 16 |
+
except ImportError:
|
| 17 |
+
from io import StringIO # does not support writing 'str' in Py2
|
| 18 |
+
|
| 19 |
+
from . import Version
|
| 20 |
+
from .Code import CCodeWriter
|
| 21 |
+
from .. import Utils
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class AnnotationCCodeWriter(CCodeWriter):
    """C code writer that additionally records, per original source line,
    the generated C code and scopes, and can render an HTML annotation
    report from them."""

    # also used as marker for detection of complete code emission in tests
    COMPLETE_CODE_TITLE = "Complete cythonized code"

    def __init__(self, create_from=None, buffer=None, copy_formatting=True, show_entire_c_code=False, source_desc=None):
        CCodeWriter.__init__(self, create_from, buffer, copy_formatting=copy_formatting)
        self.show_entire_c_code = show_entire_c_code
        if create_from is None:
            self.annotation_buffer = StringIO()
            self.last_annotated_pos = None
            # annotations[filename][line] -> [(column, AnnotationItem)*]
            self.annotations = defaultdict(partial(defaultdict, list))
            # code[filename][line] -> str
            self.code = defaultdict(partial(defaultdict, str))
            # scopes[filename][line] -> set(scopes)
            self.scopes = defaultdict(partial(defaultdict, set))
        else:
            # When creating an insertion point, keep references to the same database
            self.annotation_buffer = create_from.annotation_buffer
            self.annotations = create_from.annotations
            self.code = create_from.code
            self.scopes = create_from.scopes
            self.last_annotated_pos = create_from.last_annotated_pos

    def create_new(self, create_from, buffer, copy_formatting):
        return AnnotationCCodeWriter(create_from, buffer, copy_formatting)

    def _write_to_buffer(self, s):
        # Mirror every write into the annotation buffer so it can later be
        # attributed to the current source position.
        self.buffer.write(s)
        self.annotation_buffer.write(s)

    def mark_pos(self, pos, trace=True):
        """Flush accumulated C code to the previous position's record and
        start accumulating for *pos*."""
        if pos is not None:
            CCodeWriter.mark_pos(self, pos, trace)
            if self.funcstate and self.funcstate.scope:
                # lambdas and genexprs can result in multiple scopes per line => keep them in a set
                self.scopes[pos[0].filename][pos[1]].add(self.funcstate.scope)
        if self.last_annotated_pos:
            source_desc, line, _ = self.last_annotated_pos
            pos_code = self.code[source_desc.filename]
            pos_code[line] += self.annotation_buffer.getvalue()
        self.annotation_buffer = StringIO()
        self.last_annotated_pos = pos

    def annotate(self, pos, item):
        self.annotations[pos[0].filename][pos[1]].append((pos[2], item))

    def _css(self):
        """css template will later allow to choose a colormap"""
        css = [self._css_template]
        for i in range(255):
            color = u"FFFF%02x" % int(255.0 // (1.0 + i/10.0))
            css.append('.cython.score-%d {background-color: #%s;}' % (i, color))
        try:
            from pygments.formatters import HtmlFormatter
        except ImportError:
            pass
        else:
            css.append(HtmlFormatter().get_style_defs('.cython'))
        return '\n'.join(css)

    _css_template = textwrap.dedent("""
        body.cython { font-family: courier; font-size: 12; }

        .cython.tag { }
        .cython.line { color: #000000; margin: 0em }
        .cython.code { font-size: 9; color: #444444; display: none; margin: 0px 0px 0px 8px; border-left: 8px none; }

        .cython.line .run { background-color: #B0FFB0; }
        .cython.line .mis { background-color: #FFB0B0; }
        .cython.code.run { border-left: 8px solid #B0FFB0; }
        .cython.code.mis { border-left: 8px solid #FFB0B0; }

        .cython.code .py_c_api { color: red; }
        .cython.code .py_macro_api { color: #FF7000; }
        .cython.code .pyx_c_api { color: #FF3000; }
        .cython.code .pyx_macro_api { color: #FF7000; }
        .cython.code .refnanny { color: #FFA000; }
        .cython.code .trace { color: #FFA000; }
        .cython.code .error_goto { color: #FFA000; }

        .cython.code .coerce { color: #008000; border: 1px dotted #008000 }
        .cython.code .py_attr { color: #FF0000; font-weight: bold; }
        .cython.code .c_attr { color: #0000FF; }
        .cython.code .py_call { color: #FF0000; font-weight: bold; }
        .cython.code .c_call { color: #0000FF; }
    """)

    # on-click toggle function to show/hide C source code
    _onclick_attr = ' onclick="{0}"'.format((
        "(function(s){"
        "    s.display = s.display === 'block' ? 'none' : 'block'"
        "})(this.nextElementSibling.style)"
    ).replace(' ', '')  # poor dev's JS minification
    )

    def save_annotation(self, source_filename, target_filename, coverage_xml=None):
        """Write the HTML annotation report next to *target_filename*."""
        with Utils.open_source_file(source_filename) as f:
            code = f.read()
        generated_code = self.code.get(source_filename, {})
        c_file = Utils.decode_filename(os.path.basename(target_filename))
        html_filename = os.path.splitext(target_filename)[0] + ".html"

        with codecs.open(html_filename, "w", encoding="UTF-8") as out_buffer:
            out_buffer.write(self._save_annotation(code, generated_code, c_file, source_filename, coverage_xml))

    def _save_annotation_header(self, c_file, source_filename, coverage_timestamp=None):
        coverage_info = ''
        if coverage_timestamp:
            coverage_info = u' with coverage data from {timestamp}'.format(
                timestamp=datetime.fromtimestamp(int(coverage_timestamp) // 1000))

        # NOTE(review): the 'filename' format kwarg below appears unused by
        # the template text as seen here - confirm whether the <title> was
        # meant to interpolate {filename}.
        outlist = [
            textwrap.dedent(u'''\
            <!DOCTYPE html>
            <!-- Generated by Cython {watermark} -->
            <html>
            <head>
                <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
                <title>Cython: (unknown)</title>
                <style type="text/css">
                {css}
                </style>
            </head>
            <body class="cython">
            <p><span style="border-bottom: solid 1px grey;">Generated by Cython {watermark}</span>{more_info}</p>
            <p>
                <span style="background-color: #FFFF00">Yellow lines</span> hint at Python interaction.<br />
                Click on a line that starts with a "<code>+</code>" to see the C code that Cython generated for it.
            </p>
            ''').format(css=self._css(), watermark=Version.watermark,
                        filename=os.path.basename(source_filename) if source_filename else '',
                        more_info=coverage_info)
        ]
        if c_file:
            outlist.append(u'<p>Raw output: <a href="%s">%s</a></p>\n' % (c_file, c_file))
        return outlist

    def _save_annotation_footer(self):
        return (u'</body></html>\n',)

    def _save_annotation(self, code, generated_code, c_file=None, source_filename=None, coverage_xml=None):
        """
        lines : original cython source code split by lines
        generated_code : generated c code keyed by line number in original file
        target filename : name of the file in which to store the generated html
        c_file : filename in which the c_code has been written
        """
        if coverage_xml is not None and source_filename:
            coverage_timestamp = coverage_xml.get('timestamp', '').strip()
            covered_lines = self._get_line_coverage(coverage_xml, source_filename)
        else:
            coverage_timestamp = covered_lines = None
        annotation_items = dict(self.annotations[source_filename])
        scopes = dict(self.scopes[source_filename])

        outlist = []
        outlist.extend(self._save_annotation_header(c_file, source_filename, coverage_timestamp))
        outlist.extend(self._save_annotation_body(code, generated_code, annotation_items, scopes, covered_lines))
        outlist.extend(self._save_annotation_footer())
        return ''.join(outlist)

    def _get_line_coverage(self, coverage_xml, source_filename):
        """Map line number -> hit count for *source_filename* from a
        Cobertura-style coverage XML tree, or None if not found."""
        coverage_data = None
        for entry in coverage_xml.iterfind('.//class'):
            if not entry.get('filename'):
                continue
            if (entry.get('filename') == source_filename or
                    os.path.abspath(entry.get('filename')) == source_filename):
                coverage_data = entry
                break
            elif source_filename.endswith(entry.get('filename')):
                coverage_data = entry  # but we might still find a better match...
        if coverage_data is None:
            return None
        return dict(
            (int(line.get('number')), int(line.get('hits')))
            for line in coverage_data.iterfind('lines/line')
        )

    def _htmlify_code(self, code, language):
        """Highlight *code* with Pygments if available; otherwise escape it."""
        try:
            from pygments import highlight
            from pygments.lexers import CythonLexer, CppLexer
            from pygments.formatters import HtmlFormatter
        except ImportError:
            # no Pygments, just escape the code
            return html_escape(code)

        if language == "cython":
            lexer = CythonLexer(stripnl=False, stripall=False)
        elif language == "c/cpp":
            lexer = CppLexer(stripnl=False, stripall=False)
        else:
            # unknown language, use fallback
            return html_escape(code)
        html_code = highlight(
            code, lexer,
            HtmlFormatter(nowrap=True))
        return html_code

    def _save_annotation_body(self, cython_code, generated_code, annotation_items, scopes, covered_lines=None):
        """Render one <pre> per source line, with the matching C code in a
        collapsible sibling <pre>, scored by Python C-API usage."""
        outlist = [u'<div class="cython">']
        pos_comment_marker = u'/* \N{HORIZONTAL ELLIPSIS} */\n'
        # Factory for a fresh per-line counter dict of API-call categories.
        new_calls_map = dict(
            (name, 0) for name in
            'refnanny trace py_macro_api py_c_api pyx_macro_api pyx_c_api error_goto'.split()
        ).copy

        self.mark_pos(None)

        def annotate(match):
            group_name = match.lastgroup
            calls[group_name] += 1
            return u"<span class='%s'>%s</span>" % (
                group_name, match.group(group_name))

        lines = self._htmlify_code(cython_code, "cython").splitlines()
        lineno_width = len(str(len(lines)))
        if not covered_lines:
            covered_lines = None

        for k, line in enumerate(lines, 1):
            try:
                c_code = generated_code[k]
            except KeyError:
                c_code = ''
            else:
                c_code = _replace_pos_comment(pos_comment_marker, c_code)
                if c_code.startswith(pos_comment_marker):
                    c_code = c_code[len(pos_comment_marker):]
                c_code = html_escape(c_code)

            calls = new_calls_map()
            c_code = _parse_code(annotate, c_code)
            # Heuristic "Python interaction" score used for the line color.
            score = (5 * calls['py_c_api'] + 2 * calls['pyx_c_api'] +
                     calls['py_macro_api'] + calls['pyx_macro_api'])

            if c_code:
                onclick = self._onclick_attr
                expandsymbol = '+'
            else:
                onclick = ''
                expandsymbol = ' '

            covered = ''
            if covered_lines is not None and k in covered_lines:
                hits = covered_lines[k]
                if hits is not None:
                    covered = 'run' if hits else 'mis'

            outlist.append(
                u'<pre class="cython line score-{score}"{onclick}>'
                # generate line number with expand symbol in front,
                # and the right number of digit
                u'{expandsymbol}<span class="{covered}">{line:0{lineno_width}d}</span>: {code}</pre>\n'.format(
                    score=score,
                    expandsymbol=expandsymbol,
                    covered=covered,
                    lineno_width=lineno_width,
                    line=k,
                    code=line.rstrip(),
                    onclick=onclick,
                ))
            if c_code:
                outlist.append(u"<pre class='cython code score-{score} {covered}'>{code}</pre>".format(
                    score=score, covered=covered, code=c_code))
        outlist.append(u"</div>")

        # now the whole c-code if needed:
        if self.show_entire_c_code:
            outlist.append(u'<p><div class="cython">')
            onclick_title = u"<pre class='cython line'{onclick}>+ {title}</pre>\n"
            outlist.append(onclick_title.format(
                onclick=self._onclick_attr,
                title=AnnotationCCodeWriter.COMPLETE_CODE_TITLE,
            ))
            complete_code_as_html = self._htmlify_code(self.buffer.getvalue(), "c/cpp")
            outlist.append(u"<pre class='cython code'>{code}</pre>".format(code=complete_code_as_html))
            outlist.append(u"</div></p>")

        return outlist
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
_parse_code = re.compile((
|
| 310 |
+
br'(?P<refnanny>__Pyx_X?(?:GOT|GIVE)REF|__Pyx_RefNanny[A-Za-z]+)|'
|
| 311 |
+
br'(?P<trace>__Pyx_Trace[A-Za-z]+)|'
|
| 312 |
+
br'(?:'
|
| 313 |
+
br'(?P<pyx_macro_api>__Pyx_[A-Z][A-Z_]+)|'
|
| 314 |
+
br'(?P<pyx_c_api>(?:__Pyx_[A-Z][a-z_][A-Za-z_]*)|__pyx_convert_[A-Za-z_]*)|'
|
| 315 |
+
br'(?P<py_macro_api>Py[A-Z][a-z]+_[A-Z][A-Z_]+)|'
|
| 316 |
+
br'(?P<py_c_api>Py[A-Z][a-z]+_[A-Z][a-z][A-Za-z_]*)'
|
| 317 |
+
br')(?=\()|' # look-ahead to exclude subsequent '(' from replacement
|
| 318 |
+
br'(?P<error_goto>(?:(?<=;) *if [^;]* +)?__PYX_ERR\([^)]+\))'
|
| 319 |
+
).decode('ascii')).sub
|
| 320 |
+
|
| 321 |
+
|
| 322 |
+
_replace_pos_comment = re.compile(
|
| 323 |
+
# this matches what Cython generates as code line marker comment
|
| 324 |
+
br'^\s*/\*(?:(?:[^*]|\*[^/])*\n)+\s*\*/\s*\n'.decode('ascii'),
|
| 325 |
+
re.M
|
| 326 |
+
).sub
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
class AnnotationItem(object):
|
| 330 |
+
|
| 331 |
+
def __init__(self, style, text, tag="", size=0):
|
| 332 |
+
self.style = style
|
| 333 |
+
self.text = text
|
| 334 |
+
self.tag = tag
|
| 335 |
+
self.size = size
|
| 336 |
+
|
| 337 |
+
def start(self):
|
| 338 |
+
return u"<span class='cython tag %s' title='%s'>%s" % (self.style, self.text, self.tag)
|
| 339 |
+
|
| 340 |
+
def end(self):
|
| 341 |
+
return self.size, u"</span>"
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/AutoDocTransforms.py
ADDED
|
@@ -0,0 +1,318 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import, print_function
|
| 2 |
+
|
| 3 |
+
from .Visitor import CythonTransform
|
| 4 |
+
from .StringEncoding import EncodedString
|
| 5 |
+
from . import Options
|
| 6 |
+
from . import PyrexTypes
|
| 7 |
+
from ..CodeWriter import ExpressionWriter
|
| 8 |
+
from .Errors import warning
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class AnnotationWriter(ExpressionWriter):
|
| 12 |
+
"""
|
| 13 |
+
A Cython code writer for Python expressions in argument/variable annotations.
|
| 14 |
+
"""
|
| 15 |
+
def __init__(self, description=None):
|
| 16 |
+
"""description is optional. If specified it is used in
|
| 17 |
+
warning messages for the nodes that don't convert to string properly.
|
| 18 |
+
If not specified then no messages are generated.
|
| 19 |
+
"""
|
| 20 |
+
ExpressionWriter.__init__(self)
|
| 21 |
+
self.description = description
|
| 22 |
+
self.incomplete = False
|
| 23 |
+
|
| 24 |
+
def visit_Node(self, node):
|
| 25 |
+
self.put(u"<???>")
|
| 26 |
+
self.incomplete = True
|
| 27 |
+
if self.description:
|
| 28 |
+
warning(node.pos,
|
| 29 |
+
"Failed to convert code to string representation in {0}".format(
|
| 30 |
+
self.description), level=1)
|
| 31 |
+
|
| 32 |
+
def visit_LambdaNode(self, node):
|
| 33 |
+
# XXX Should we do better?
|
| 34 |
+
self.put("<lambda>")
|
| 35 |
+
self.incomplete = True
|
| 36 |
+
if self.description:
|
| 37 |
+
warning(node.pos,
|
| 38 |
+
"Failed to convert lambda to string representation in {0}".format(
|
| 39 |
+
self.description), level=1)
|
| 40 |
+
|
| 41 |
+
def visit_UnicodeNode(self, node):
|
| 42 |
+
# Discard Unicode prefix in annotations. Any tool looking at them
|
| 43 |
+
# would probably expect Py3 string semantics.
|
| 44 |
+
self.emit_string(node, "")
|
| 45 |
+
|
| 46 |
+
def visit_AnnotationNode(self, node):
|
| 47 |
+
self.put(node.string.unicode_value)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class EmbedSignature(CythonTransform):
|
| 51 |
+
|
| 52 |
+
def __init__(self, context):
|
| 53 |
+
super(EmbedSignature, self).__init__(context)
|
| 54 |
+
self.class_name = None
|
| 55 |
+
self.class_node = None
|
| 56 |
+
|
| 57 |
+
def _fmt_expr(self, node):
|
| 58 |
+
writer = ExpressionWriter()
|
| 59 |
+
result = writer.write(node)
|
| 60 |
+
# print(type(node).__name__, '-->', result)
|
| 61 |
+
return result
|
| 62 |
+
|
| 63 |
+
def _fmt_annotation(self, node):
|
| 64 |
+
writer = AnnotationWriter()
|
| 65 |
+
result = writer.write(node)
|
| 66 |
+
# print(type(node).__name__, '-->', result)
|
| 67 |
+
return result
|
| 68 |
+
|
| 69 |
+
def _setup_format(self):
|
| 70 |
+
signature_format = self.current_directives['embedsignature.format']
|
| 71 |
+
self.is_format_c = signature_format == 'c'
|
| 72 |
+
self.is_format_python = signature_format == 'python'
|
| 73 |
+
self.is_format_clinic = signature_format == 'clinic'
|
| 74 |
+
|
| 75 |
+
def _fmt_arg(self, arg):
|
| 76 |
+
arg_doc = arg.name
|
| 77 |
+
annotation = None
|
| 78 |
+
defaultval = None
|
| 79 |
+
if arg.is_self_arg:
|
| 80 |
+
if self.is_format_clinic:
|
| 81 |
+
arg_doc = '$self'
|
| 82 |
+
elif arg.is_type_arg:
|
| 83 |
+
if self.is_format_clinic:
|
| 84 |
+
arg_doc = '$type'
|
| 85 |
+
elif self.is_format_c:
|
| 86 |
+
if arg.type is not PyrexTypes.py_object_type:
|
| 87 |
+
arg_doc = arg.type.declaration_code(arg.name, for_display=1)
|
| 88 |
+
elif self.is_format_python:
|
| 89 |
+
if not arg.annotation:
|
| 90 |
+
annotation = self._fmt_type(arg.type)
|
| 91 |
+
if arg.annotation:
|
| 92 |
+
if not self.is_format_clinic:
|
| 93 |
+
annotation = self._fmt_annotation(arg.annotation)
|
| 94 |
+
if arg.default:
|
| 95 |
+
defaultval = self._fmt_expr(arg.default)
|
| 96 |
+
if annotation:
|
| 97 |
+
arg_doc = arg_doc + (': %s' % annotation)
|
| 98 |
+
if defaultval:
|
| 99 |
+
arg_doc = arg_doc + (' = %s' % defaultval)
|
| 100 |
+
elif defaultval:
|
| 101 |
+
arg_doc = arg_doc + ('=%s' % defaultval)
|
| 102 |
+
return arg_doc
|
| 103 |
+
|
| 104 |
+
def _fmt_star_arg(self, arg):
|
| 105 |
+
arg_doc = arg.name
|
| 106 |
+
if arg.annotation:
|
| 107 |
+
if not self.is_format_clinic:
|
| 108 |
+
annotation = self._fmt_annotation(arg.annotation)
|
| 109 |
+
arg_doc = arg_doc + (': %s' % annotation)
|
| 110 |
+
return arg_doc
|
| 111 |
+
|
| 112 |
+
def _fmt_arglist(self, args,
|
| 113 |
+
npoargs=0, npargs=0, pargs=None,
|
| 114 |
+
nkargs=0, kargs=None,
|
| 115 |
+
hide_self=False):
|
| 116 |
+
arglist = []
|
| 117 |
+
for arg in args:
|
| 118 |
+
if not hide_self or not arg.entry.is_self_arg:
|
| 119 |
+
arg_doc = self._fmt_arg(arg)
|
| 120 |
+
arglist.append(arg_doc)
|
| 121 |
+
if pargs:
|
| 122 |
+
arg_doc = self._fmt_star_arg(pargs)
|
| 123 |
+
arglist.insert(npargs + npoargs, '*%s' % arg_doc)
|
| 124 |
+
elif nkargs:
|
| 125 |
+
arglist.insert(npargs + npoargs, '*')
|
| 126 |
+
if npoargs:
|
| 127 |
+
arglist.insert(npoargs, '/')
|
| 128 |
+
if kargs:
|
| 129 |
+
arg_doc = self._fmt_star_arg(kargs)
|
| 130 |
+
arglist.append('**%s' % arg_doc)
|
| 131 |
+
return arglist
|
| 132 |
+
|
| 133 |
+
def _fmt_type(self, type):
|
| 134 |
+
if type is PyrexTypes.py_object_type:
|
| 135 |
+
return None
|
| 136 |
+
elif self.is_format_c:
|
| 137 |
+
code = type.declaration_code("", for_display=1)
|
| 138 |
+
return code
|
| 139 |
+
elif self.is_format_python:
|
| 140 |
+
annotation = None
|
| 141 |
+
if type.is_string:
|
| 142 |
+
annotation = self.current_directives['c_string_type']
|
| 143 |
+
elif type.is_numeric:
|
| 144 |
+
annotation = type.py_type_name()
|
| 145 |
+
if annotation is None:
|
| 146 |
+
code = type.declaration_code('', for_display=1)
|
| 147 |
+
annotation = code.replace(' ', '_').replace('*', 'p')
|
| 148 |
+
return annotation
|
| 149 |
+
return None
|
| 150 |
+
|
| 151 |
+
def _fmt_signature(self, cls_name, func_name, args,
|
| 152 |
+
npoargs=0, npargs=0, pargs=None,
|
| 153 |
+
nkargs=0, kargs=None,
|
| 154 |
+
return_expr=None, return_type=None,
|
| 155 |
+
hide_self=False):
|
| 156 |
+
arglist = self._fmt_arglist(
|
| 157 |
+
args, npoargs, npargs, pargs, nkargs, kargs,
|
| 158 |
+
hide_self=hide_self,
|
| 159 |
+
)
|
| 160 |
+
arglist_doc = ', '.join(arglist)
|
| 161 |
+
func_doc = '%s(%s)' % (func_name, arglist_doc)
|
| 162 |
+
if self.is_format_c and cls_name:
|
| 163 |
+
func_doc = '%s.%s' % (cls_name, func_doc)
|
| 164 |
+
if not self.is_format_clinic:
|
| 165 |
+
ret_doc = None
|
| 166 |
+
if return_expr:
|
| 167 |
+
ret_doc = self._fmt_annotation(return_expr)
|
| 168 |
+
elif return_type:
|
| 169 |
+
ret_doc = self._fmt_type(return_type)
|
| 170 |
+
if ret_doc:
|
| 171 |
+
func_doc = '%s -> %s' % (func_doc, ret_doc)
|
| 172 |
+
return func_doc
|
| 173 |
+
|
| 174 |
+
def _embed_signature(self, signature, node_doc):
|
| 175 |
+
if self.is_format_clinic and self.current_directives['binding']:
|
| 176 |
+
return node_doc
|
| 177 |
+
if node_doc:
|
| 178 |
+
if self.is_format_clinic:
|
| 179 |
+
docfmt = "%s\n--\n\n%s"
|
| 180 |
+
else:
|
| 181 |
+
docfmt = "%s\n%s"
|
| 182 |
+
return docfmt % (signature, node_doc)
|
| 183 |
+
else:
|
| 184 |
+
if self.is_format_clinic:
|
| 185 |
+
docfmt = "%s\n--\n\n"
|
| 186 |
+
else:
|
| 187 |
+
docfmt = "%s"
|
| 188 |
+
return docfmt % signature
|
| 189 |
+
|
| 190 |
+
def __call__(self, node):
|
| 191 |
+
if not Options.docstrings:
|
| 192 |
+
return node
|
| 193 |
+
else:
|
| 194 |
+
return super(EmbedSignature, self).__call__(node)
|
| 195 |
+
|
| 196 |
+
def visit_ClassDefNode(self, node):
|
| 197 |
+
oldname = self.class_name
|
| 198 |
+
oldclass = self.class_node
|
| 199 |
+
self.class_node = node
|
| 200 |
+
try:
|
| 201 |
+
# PyClassDefNode
|
| 202 |
+
self.class_name = node.name
|
| 203 |
+
except AttributeError:
|
| 204 |
+
# CClassDefNode
|
| 205 |
+
self.class_name = node.class_name
|
| 206 |
+
self.visitchildren(node)
|
| 207 |
+
self.class_name = oldname
|
| 208 |
+
self.class_node = oldclass
|
| 209 |
+
return node
|
| 210 |
+
|
| 211 |
+
def visit_LambdaNode(self, node):
|
| 212 |
+
# lambda expressions so not have signature or inner functions
|
| 213 |
+
return node
|
| 214 |
+
|
| 215 |
+
def visit_DefNode(self, node):
|
| 216 |
+
if not self.current_directives['embedsignature']:
|
| 217 |
+
return node
|
| 218 |
+
self._setup_format()
|
| 219 |
+
|
| 220 |
+
is_constructor = False
|
| 221 |
+
hide_self = False
|
| 222 |
+
if node.entry.is_special:
|
| 223 |
+
is_constructor = self.class_node and node.name == '__init__'
|
| 224 |
+
if not is_constructor:
|
| 225 |
+
return node
|
| 226 |
+
class_name = None
|
| 227 |
+
func_name = node.name
|
| 228 |
+
if self.is_format_c:
|
| 229 |
+
func_name = self.class_name
|
| 230 |
+
hide_self = True
|
| 231 |
+
else:
|
| 232 |
+
class_name, func_name = self.class_name, node.name
|
| 233 |
+
|
| 234 |
+
npoargs = getattr(node, 'num_posonly_args', 0)
|
| 235 |
+
nkargs = getattr(node, 'num_kwonly_args', 0)
|
| 236 |
+
npargs = len(node.args) - nkargs - npoargs
|
| 237 |
+
signature = self._fmt_signature(
|
| 238 |
+
class_name, func_name, node.args,
|
| 239 |
+
npoargs, npargs, node.star_arg,
|
| 240 |
+
nkargs, node.starstar_arg,
|
| 241 |
+
return_expr=node.return_type_annotation,
|
| 242 |
+
return_type=None, hide_self=hide_self)
|
| 243 |
+
if signature:
|
| 244 |
+
if is_constructor and self.is_format_c:
|
| 245 |
+
doc_holder = self.class_node.entry.type.scope
|
| 246 |
+
else:
|
| 247 |
+
doc_holder = node.entry
|
| 248 |
+
if doc_holder.doc is not None:
|
| 249 |
+
old_doc = doc_holder.doc
|
| 250 |
+
elif not is_constructor and getattr(node, 'py_func', None) is not None:
|
| 251 |
+
old_doc = node.py_func.entry.doc
|
| 252 |
+
else:
|
| 253 |
+
old_doc = None
|
| 254 |
+
new_doc = self._embed_signature(signature, old_doc)
|
| 255 |
+
doc_holder.doc = EncodedString(new_doc)
|
| 256 |
+
if not is_constructor and getattr(node, 'py_func', None) is not None:
|
| 257 |
+
node.py_func.entry.doc = EncodedString(new_doc)
|
| 258 |
+
return node
|
| 259 |
+
|
| 260 |
+
def visit_CFuncDefNode(self, node):
|
| 261 |
+
if not node.overridable: # not cpdef FOO(...):
|
| 262 |
+
return node
|
| 263 |
+
if not self.current_directives['embedsignature']:
|
| 264 |
+
return node
|
| 265 |
+
self._setup_format()
|
| 266 |
+
|
| 267 |
+
signature = self._fmt_signature(
|
| 268 |
+
self.class_name, node.declarator.base.name,
|
| 269 |
+
node.declarator.args,
|
| 270 |
+
return_type=node.return_type)
|
| 271 |
+
if signature:
|
| 272 |
+
if node.entry.doc is not None:
|
| 273 |
+
old_doc = node.entry.doc
|
| 274 |
+
elif getattr(node, 'py_func', None) is not None:
|
| 275 |
+
old_doc = node.py_func.entry.doc
|
| 276 |
+
else:
|
| 277 |
+
old_doc = None
|
| 278 |
+
new_doc = self._embed_signature(signature, old_doc)
|
| 279 |
+
node.entry.doc = EncodedString(new_doc)
|
| 280 |
+
py_func = getattr(node, 'py_func', None)
|
| 281 |
+
if py_func is not None:
|
| 282 |
+
py_func.entry.doc = EncodedString(new_doc)
|
| 283 |
+
return node
|
| 284 |
+
|
| 285 |
+
def visit_PropertyNode(self, node):
|
| 286 |
+
if not self.current_directives['embedsignature']:
|
| 287 |
+
return node
|
| 288 |
+
self._setup_format()
|
| 289 |
+
|
| 290 |
+
entry = node.entry
|
| 291 |
+
body = node.body
|
| 292 |
+
prop_name = entry.name
|
| 293 |
+
type_name = None
|
| 294 |
+
if entry.visibility == 'public':
|
| 295 |
+
if self.is_format_c:
|
| 296 |
+
# property synthesised from a cdef public attribute
|
| 297 |
+
type_name = entry.type.declaration_code("", for_display=1)
|
| 298 |
+
if not entry.type.is_pyobject:
|
| 299 |
+
type_name = "'%s'" % type_name
|
| 300 |
+
elif entry.type.is_extension_type:
|
| 301 |
+
type_name = entry.type.module_name + '.' + type_name
|
| 302 |
+
elif self.is_format_python:
|
| 303 |
+
type_name = self._fmt_type(entry.type)
|
| 304 |
+
if type_name is None:
|
| 305 |
+
for stat in body.stats:
|
| 306 |
+
if stat.name != '__get__':
|
| 307 |
+
continue
|
| 308 |
+
if self.is_format_c:
|
| 309 |
+
prop_name = '%s.%s' % (self.class_name, prop_name)
|
| 310 |
+
ret_annotation = stat.return_type_annotation
|
| 311 |
+
if ret_annotation:
|
| 312 |
+
type_name = self._fmt_annotation(ret_annotation)
|
| 313 |
+
if type_name is not None :
|
| 314 |
+
signature = '%s: %s' % (prop_name, type_name)
|
| 315 |
+
new_doc = self._embed_signature(signature, entry.doc)
|
| 316 |
+
if not self.is_format_clinic:
|
| 317 |
+
entry.doc = EncodedString(new_doc)
|
| 318 |
+
return node
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Buffer.py
ADDED
|
@@ -0,0 +1,749 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
from .Visitor import CythonTransform
|
| 4 |
+
from .ModuleNode import ModuleNode
|
| 5 |
+
from .Errors import CompileError
|
| 6 |
+
from .UtilityCode import CythonUtilityCode
|
| 7 |
+
from .Code import UtilityCode, TempitaUtilityCode
|
| 8 |
+
|
| 9 |
+
from . import Options
|
| 10 |
+
from . import Interpreter
|
| 11 |
+
from . import PyrexTypes
|
| 12 |
+
from . import Naming
|
| 13 |
+
from . import Symtab
|
| 14 |
+
|
| 15 |
+
def dedent(text, reindent=0):
|
| 16 |
+
from textwrap import dedent
|
| 17 |
+
text = dedent(text)
|
| 18 |
+
if reindent > 0:
|
| 19 |
+
indent = " " * reindent
|
| 20 |
+
text = '\n'.join([indent + x for x in text.split('\n')])
|
| 21 |
+
return text
|
| 22 |
+
|
| 23 |
+
class IntroduceBufferAuxiliaryVars(CythonTransform):
|
| 24 |
+
|
| 25 |
+
#
|
| 26 |
+
# Entry point
|
| 27 |
+
#
|
| 28 |
+
|
| 29 |
+
buffers_exists = False
|
| 30 |
+
using_memoryview = False
|
| 31 |
+
|
| 32 |
+
def __call__(self, node):
|
| 33 |
+
assert isinstance(node, ModuleNode)
|
| 34 |
+
self.max_ndim = 0
|
| 35 |
+
result = super(IntroduceBufferAuxiliaryVars, self).__call__(node)
|
| 36 |
+
if self.buffers_exists:
|
| 37 |
+
use_bufstruct_declare_code(node.scope)
|
| 38 |
+
use_py2_buffer_functions(node.scope)
|
| 39 |
+
|
| 40 |
+
return result
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
#
|
| 44 |
+
# Basic operations for transforms
|
| 45 |
+
#
|
| 46 |
+
def handle_scope(self, node, scope):
|
| 47 |
+
# For all buffers, insert extra variables in the scope.
|
| 48 |
+
# The variables are also accessible from the buffer_info
|
| 49 |
+
# on the buffer entry
|
| 50 |
+
scope_items = scope.entries.items()
|
| 51 |
+
bufvars = [entry for name, entry in scope_items if entry.type.is_buffer]
|
| 52 |
+
if len(bufvars) > 0:
|
| 53 |
+
bufvars.sort(key=lambda entry: entry.name)
|
| 54 |
+
self.buffers_exists = True
|
| 55 |
+
|
| 56 |
+
memviewslicevars = [entry for name, entry in scope_items if entry.type.is_memoryviewslice]
|
| 57 |
+
if len(memviewslicevars) > 0:
|
| 58 |
+
self.buffers_exists = True
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
for (name, entry) in scope_items:
|
| 62 |
+
if name == 'memoryview' and isinstance(entry.utility_code_definition, CythonUtilityCode):
|
| 63 |
+
self.using_memoryview = True
|
| 64 |
+
break
|
| 65 |
+
del scope_items
|
| 66 |
+
|
| 67 |
+
if isinstance(node, ModuleNode) and len(bufvars) > 0:
|
| 68 |
+
# for now...note that pos is wrong
|
| 69 |
+
raise CompileError(node.pos, "Buffer vars not allowed in module scope")
|
| 70 |
+
for entry in bufvars:
|
| 71 |
+
if entry.type.dtype.is_ptr:
|
| 72 |
+
raise CompileError(node.pos, "Buffers with pointer types not yet supported.")
|
| 73 |
+
|
| 74 |
+
name = entry.name
|
| 75 |
+
buftype = entry.type
|
| 76 |
+
if buftype.ndim > Options.buffer_max_dims:
|
| 77 |
+
raise CompileError(node.pos,
|
| 78 |
+
"Buffer ndims exceeds Options.buffer_max_dims = %d" % Options.buffer_max_dims)
|
| 79 |
+
if buftype.ndim > self.max_ndim:
|
| 80 |
+
self.max_ndim = buftype.ndim
|
| 81 |
+
|
| 82 |
+
# Declare auxiliary vars
|
| 83 |
+
def decvar(type, prefix):
|
| 84 |
+
cname = scope.mangle(prefix, name)
|
| 85 |
+
aux_var = scope.declare_var(name=None, cname=cname,
|
| 86 |
+
type=type, pos=node.pos)
|
| 87 |
+
if entry.is_arg:
|
| 88 |
+
aux_var.used = True # otherwise, NameNode will mark whether it is used
|
| 89 |
+
|
| 90 |
+
return aux_var
|
| 91 |
+
|
| 92 |
+
auxvars = ((PyrexTypes.c_pyx_buffer_nd_type, Naming.pybuffernd_prefix),
|
| 93 |
+
(PyrexTypes.c_pyx_buffer_type, Naming.pybufferstruct_prefix))
|
| 94 |
+
pybuffernd, rcbuffer = [decvar(type, prefix) for (type, prefix) in auxvars]
|
| 95 |
+
|
| 96 |
+
entry.buffer_aux = Symtab.BufferAux(pybuffernd, rcbuffer)
|
| 97 |
+
|
| 98 |
+
scope.buffer_entries = bufvars
|
| 99 |
+
self.scope = scope
|
| 100 |
+
|
| 101 |
+
def visit_ModuleNode(self, node):
|
| 102 |
+
self.handle_scope(node, node.scope)
|
| 103 |
+
self.visitchildren(node)
|
| 104 |
+
return node
|
| 105 |
+
|
| 106 |
+
def visit_FuncDefNode(self, node):
|
| 107 |
+
self.handle_scope(node, node.local_scope)
|
| 108 |
+
self.visitchildren(node)
|
| 109 |
+
return node
|
| 110 |
+
|
| 111 |
+
#
|
| 112 |
+
# Analysis
|
| 113 |
+
#
|
| 114 |
+
buffer_options = ("dtype", "ndim", "mode", "negative_indices", "cast") # ordered!
|
| 115 |
+
buffer_defaults = {"ndim": 1, "mode": "full", "negative_indices": True, "cast": False}
|
| 116 |
+
buffer_positional_options_count = 1 # anything beyond this needs keyword argument
|
| 117 |
+
|
| 118 |
+
ERR_BUF_OPTION_UNKNOWN = '"%s" is not a buffer option'
|
| 119 |
+
ERR_BUF_TOO_MANY = 'Too many buffer options'
|
| 120 |
+
ERR_BUF_DUP = '"%s" buffer option already supplied'
|
| 121 |
+
ERR_BUF_MISSING = '"%s" missing'
|
| 122 |
+
ERR_BUF_MODE = 'Only allowed buffer modes are: "c", "fortran", "full", "strided" (as a compile-time string)'
|
| 123 |
+
ERR_BUF_NDIM = 'ndim must be a non-negative integer'
|
| 124 |
+
ERR_BUF_DTYPE = 'dtype must be "object", numeric type or a struct'
|
| 125 |
+
ERR_BUF_BOOL = '"%s" must be a boolean'
|
| 126 |
+
|
| 127 |
+
def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, need_complete=True):
|
| 128 |
+
"""
|
| 129 |
+
Must be called during type analysis, as analyse is called
|
| 130 |
+
on the dtype argument.
|
| 131 |
+
|
| 132 |
+
posargs and dictargs should consist of a list and a dict
|
| 133 |
+
of tuples (value, pos). Defaults should be a dict of values.
|
| 134 |
+
|
| 135 |
+
Returns a dict containing all the options a buffer can have and
|
| 136 |
+
its value (with the positions stripped).
|
| 137 |
+
"""
|
| 138 |
+
if defaults is None:
|
| 139 |
+
defaults = buffer_defaults
|
| 140 |
+
|
| 141 |
+
posargs, dictargs = Interpreter.interpret_compiletime_options(
|
| 142 |
+
posargs, dictargs, type_env=env, type_args=(0, 'dtype'))
|
| 143 |
+
|
| 144 |
+
if len(posargs) > buffer_positional_options_count:
|
| 145 |
+
raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY)
|
| 146 |
+
|
| 147 |
+
options = {}
|
| 148 |
+
for name, (value, pos) in dictargs.items():
|
| 149 |
+
if name not in buffer_options:
|
| 150 |
+
raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
|
| 151 |
+
options[name] = value
|
| 152 |
+
|
| 153 |
+
for name, (value, pos) in zip(buffer_options, posargs):
|
| 154 |
+
if name not in buffer_options:
|
| 155 |
+
raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
|
| 156 |
+
if name in options:
|
| 157 |
+
raise CompileError(pos, ERR_BUF_DUP % name)
|
| 158 |
+
options[name] = value
|
| 159 |
+
|
| 160 |
+
# Check that they are all there and copy defaults
|
| 161 |
+
for name in buffer_options:
|
| 162 |
+
if name not in options:
|
| 163 |
+
try:
|
| 164 |
+
options[name] = defaults[name]
|
| 165 |
+
except KeyError:
|
| 166 |
+
if need_complete:
|
| 167 |
+
raise CompileError(globalpos, ERR_BUF_MISSING % name)
|
| 168 |
+
|
| 169 |
+
dtype = options.get("dtype")
|
| 170 |
+
if dtype and dtype.is_extension_type:
|
| 171 |
+
raise CompileError(globalpos, ERR_BUF_DTYPE)
|
| 172 |
+
|
| 173 |
+
ndim = options.get("ndim")
|
| 174 |
+
if ndim and (not isinstance(ndim, int) or ndim < 0):
|
| 175 |
+
raise CompileError(globalpos, ERR_BUF_NDIM)
|
| 176 |
+
|
| 177 |
+
mode = options.get("mode")
|
| 178 |
+
if mode and not (mode in ('full', 'strided', 'c', 'fortran')):
|
| 179 |
+
raise CompileError(globalpos, ERR_BUF_MODE)
|
| 180 |
+
|
| 181 |
+
def assert_bool(name):
|
| 182 |
+
x = options.get(name)
|
| 183 |
+
if not isinstance(x, bool):
|
| 184 |
+
raise CompileError(globalpos, ERR_BUF_BOOL % name)
|
| 185 |
+
|
| 186 |
+
assert_bool('negative_indices')
|
| 187 |
+
assert_bool('cast')
|
| 188 |
+
|
| 189 |
+
return options
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
#
|
| 193 |
+
# Code generation
|
| 194 |
+
#
|
| 195 |
+
|
| 196 |
+
class BufferEntry(object):
|
| 197 |
+
def __init__(self, entry):
|
| 198 |
+
self.entry = entry
|
| 199 |
+
self.type = entry.type
|
| 200 |
+
self.cname = entry.buffer_aux.buflocal_nd_var.cname
|
| 201 |
+
self.buf_ptr = "%s.rcbuffer->pybuffer.buf" % self.cname
|
| 202 |
+
self.buf_ptr_type = entry.type.buffer_ptr_type
|
| 203 |
+
self.init_attributes()
|
| 204 |
+
|
| 205 |
+
def init_attributes(self):
|
| 206 |
+
self.shape = self.get_buf_shapevars()
|
| 207 |
+
self.strides = self.get_buf_stridevars()
|
| 208 |
+
self.suboffsets = self.get_buf_suboffsetvars()
|
| 209 |
+
|
| 210 |
+
def get_buf_suboffsetvars(self):
|
| 211 |
+
return self._for_all_ndim("%s.diminfo[%d].suboffsets")
|
| 212 |
+
|
| 213 |
+
def get_buf_stridevars(self):
|
| 214 |
+
return self._for_all_ndim("%s.diminfo[%d].strides")
|
| 215 |
+
|
| 216 |
+
def get_buf_shapevars(self):
|
| 217 |
+
return self._for_all_ndim("%s.diminfo[%d].shape")
|
| 218 |
+
|
| 219 |
+
def _for_all_ndim(self, s):
|
| 220 |
+
return [s % (self.cname, i) for i in range(self.type.ndim)]
|
| 221 |
+
|
| 222 |
+
def generate_buffer_lookup_code(self, code, index_cnames):
|
| 223 |
+
# Create buffer lookup and return it
|
| 224 |
+
# This is done via utility macros/inline functions, which vary
|
| 225 |
+
# according to the access mode used.
|
| 226 |
+
params = []
|
| 227 |
+
nd = self.type.ndim
|
| 228 |
+
mode = self.type.mode
|
| 229 |
+
if mode == 'full':
|
| 230 |
+
for i, s, o in zip(index_cnames,
|
| 231 |
+
self.get_buf_stridevars(),
|
| 232 |
+
self.get_buf_suboffsetvars()):
|
| 233 |
+
params.append(i)
|
| 234 |
+
params.append(s)
|
| 235 |
+
params.append(o)
|
| 236 |
+
funcname = "__Pyx_BufPtrFull%dd" % nd
|
| 237 |
+
funcgen = buf_lookup_full_code
|
| 238 |
+
else:
|
| 239 |
+
if mode == 'strided':
|
| 240 |
+
funcname = "__Pyx_BufPtrStrided%dd" % nd
|
| 241 |
+
funcgen = buf_lookup_strided_code
|
| 242 |
+
elif mode == 'c':
|
| 243 |
+
funcname = "__Pyx_BufPtrCContig%dd" % nd
|
| 244 |
+
funcgen = buf_lookup_c_code
|
| 245 |
+
elif mode == 'fortran':
|
| 246 |
+
funcname = "__Pyx_BufPtrFortranContig%dd" % nd
|
| 247 |
+
funcgen = buf_lookup_fortran_code
|
| 248 |
+
else:
|
| 249 |
+
assert False
|
| 250 |
+
for i, s in zip(index_cnames, self.get_buf_stridevars()):
|
| 251 |
+
params.append(i)
|
| 252 |
+
params.append(s)
|
| 253 |
+
|
| 254 |
+
# Make sure the utility code is available
|
| 255 |
+
if funcname not in code.globalstate.utility_codes:
|
| 256 |
+
code.globalstate.utility_codes.add(funcname)
|
| 257 |
+
protocode = code.globalstate['utility_code_proto']
|
| 258 |
+
defcode = code.globalstate['utility_code_def']
|
| 259 |
+
funcgen(protocode, defcode, name=funcname, nd=nd)
|
| 260 |
+
|
| 261 |
+
buf_ptr_type_code = self.buf_ptr_type.empty_declaration_code()
|
| 262 |
+
ptrcode = "%s(%s, %s, %s)" % (funcname, buf_ptr_type_code, self.buf_ptr,
|
| 263 |
+
", ".join(params))
|
| 264 |
+
return ptrcode
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def get_flags(buffer_aux, buffer_type):
|
| 268 |
+
flags = 'PyBUF_FORMAT'
|
| 269 |
+
mode = buffer_type.mode
|
| 270 |
+
if mode == 'full':
|
| 271 |
+
flags += '| PyBUF_INDIRECT'
|
| 272 |
+
elif mode == 'strided':
|
| 273 |
+
flags += '| PyBUF_STRIDES'
|
| 274 |
+
elif mode == 'c':
|
| 275 |
+
flags += '| PyBUF_C_CONTIGUOUS'
|
| 276 |
+
elif mode == 'fortran':
|
| 277 |
+
flags += '| PyBUF_F_CONTIGUOUS'
|
| 278 |
+
else:
|
| 279 |
+
assert False
|
| 280 |
+
if buffer_aux.writable_needed: flags += "| PyBUF_WRITABLE"
|
| 281 |
+
return flags
|
| 282 |
+
|
| 283 |
+
def used_buffer_aux_vars(entry):
|
| 284 |
+
buffer_aux = entry.buffer_aux
|
| 285 |
+
buffer_aux.buflocal_nd_var.used = True
|
| 286 |
+
buffer_aux.rcbuf_var.used = True
|
| 287 |
+
|
| 288 |
+
def put_unpack_buffer_aux_into_scope(buf_entry, code):
|
| 289 |
+
# Generate code to copy the needed struct info into local
|
| 290 |
+
# variables.
|
| 291 |
+
buffer_aux, mode = buf_entry.buffer_aux, buf_entry.type.mode
|
| 292 |
+
pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
|
| 293 |
+
|
| 294 |
+
fldnames = ['strides', 'shape']
|
| 295 |
+
if mode == 'full':
|
| 296 |
+
fldnames.append('suboffsets')
|
| 297 |
+
|
| 298 |
+
ln = []
|
| 299 |
+
for i in range(buf_entry.type.ndim):
|
| 300 |
+
for fldname in fldnames:
|
| 301 |
+
ln.append("%s.diminfo[%d].%s = %s.rcbuffer->pybuffer.%s[%d];" % (
|
| 302 |
+
pybuffernd_struct, i, fldname,
|
| 303 |
+
pybuffernd_struct, fldname, i,
|
| 304 |
+
))
|
| 305 |
+
code.putln(' '.join(ln))
|
| 306 |
+
|
| 307 |
+
def put_init_vars(entry, code):
|
| 308 |
+
bufaux = entry.buffer_aux
|
| 309 |
+
pybuffernd_struct = bufaux.buflocal_nd_var.cname
|
| 310 |
+
pybuffer_struct = bufaux.rcbuf_var.cname
|
| 311 |
+
# init pybuffer_struct
|
| 312 |
+
code.putln("%s.pybuffer.buf = NULL;" % pybuffer_struct)
|
| 313 |
+
code.putln("%s.refcount = 0;" % pybuffer_struct)
|
| 314 |
+
# init the buffer object
|
| 315 |
+
# code.put_init_var_to_py_none(entry)
|
| 316 |
+
# init the pybuffernd_struct
|
| 317 |
+
code.putln("%s.data = NULL;" % pybuffernd_struct)
|
| 318 |
+
code.putln("%s.rcbuffer = &%s;" % (pybuffernd_struct, pybuffer_struct))
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
def put_acquire_arg_buffer(entry, code, pos):
|
| 322 |
+
buffer_aux = entry.buffer_aux
|
| 323 |
+
getbuffer = get_getbuffer_call(code, entry.cname, buffer_aux, entry.type)
|
| 324 |
+
|
| 325 |
+
# Acquire any new buffer
|
| 326 |
+
code.putln("{")
|
| 327 |
+
code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % entry.type.dtype.struct_nesting_depth())
|
| 328 |
+
code.putln(code.error_goto_if("%s == -1" % getbuffer, pos))
|
| 329 |
+
code.putln("}")
|
| 330 |
+
# An exception raised in arg parsing cannot be caught, so no
|
| 331 |
+
# need to care about the buffer then.
|
| 332 |
+
put_unpack_buffer_aux_into_scope(entry, code)
|
| 333 |
+
|
| 334 |
+
|
| 335 |
+
def put_release_buffer_code(code, entry):
|
| 336 |
+
code.globalstate.use_utility_code(acquire_utility_code)
|
| 337 |
+
code.putln("__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);" % entry.buffer_aux.buflocal_nd_var.cname)
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type):
|
| 341 |
+
ndim = buffer_type.ndim
|
| 342 |
+
cast = int(buffer_type.cast)
|
| 343 |
+
flags = get_flags(buffer_aux, buffer_type)
|
| 344 |
+
pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
|
| 345 |
+
|
| 346 |
+
dtype_typeinfo = get_type_information_cname(code, buffer_type.dtype)
|
| 347 |
+
|
| 348 |
+
code.globalstate.use_utility_code(acquire_utility_code)
|
| 349 |
+
return ("__Pyx_GetBufferAndValidate(&%(pybuffernd_struct)s.rcbuffer->pybuffer, "
|
| 350 |
+
"(PyObject*)%(obj_cname)s, &%(dtype_typeinfo)s, %(flags)s, %(ndim)d, "
|
| 351 |
+
"%(cast)d, __pyx_stack)" % locals())
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
|
| 355 |
+
is_initialized, pos, code):
|
| 356 |
+
"""
|
| 357 |
+
Generate code for reassigning a buffer variables. This only deals with getting
|
| 358 |
+
the buffer auxiliary structure and variables set up correctly, the assignment
|
| 359 |
+
itself and refcounting is the responsibility of the caller.
|
| 360 |
+
|
| 361 |
+
However, the assignment operation may throw an exception so that the reassignment
|
| 362 |
+
never happens.
|
| 363 |
+
|
| 364 |
+
Depending on the circumstances there are two possible outcomes:
|
| 365 |
+
- Old buffer released, new acquired, rhs assigned to lhs
|
| 366 |
+
- Old buffer released, new acquired which fails, reaqcuire old lhs buffer
|
| 367 |
+
(which may or may not succeed).
|
| 368 |
+
"""
|
| 369 |
+
|
| 370 |
+
buffer_aux, buffer_type = buf_entry.buffer_aux, buf_entry.type
|
| 371 |
+
pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
|
| 372 |
+
flags = get_flags(buffer_aux, buffer_type)
|
| 373 |
+
|
| 374 |
+
code.putln("{") # Set up necessary stack for getbuffer
|
| 375 |
+
code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth())
|
| 376 |
+
|
| 377 |
+
getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type) # fill in object below
|
| 378 |
+
|
| 379 |
+
if is_initialized:
|
| 380 |
+
# Release any existing buffer
|
| 381 |
+
code.putln('__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);' % pybuffernd_struct)
|
| 382 |
+
# Acquire
|
| 383 |
+
retcode_cname = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
|
| 384 |
+
code.putln("%s = %s;" % (retcode_cname, getbuffer % rhs_cname))
|
| 385 |
+
code.putln('if (%s) {' % (code.unlikely("%s < 0" % retcode_cname)))
|
| 386 |
+
# If acquisition failed, attempt to reacquire the old buffer
|
| 387 |
+
# before raising the exception. A failure of reacquisition
|
| 388 |
+
# will cause the reacquisition exception to be reported, one
|
| 389 |
+
# can consider working around this later.
|
| 390 |
+
exc_temps = tuple(code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=False)
|
| 391 |
+
for _ in range(3))
|
| 392 |
+
code.putln('PyErr_Fetch(&%s, &%s, &%s);' % exc_temps)
|
| 393 |
+
code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % lhs_cname)))
|
| 394 |
+
code.putln('Py_XDECREF(%s); Py_XDECREF(%s); Py_XDECREF(%s);' % exc_temps) # Do not refnanny these!
|
| 395 |
+
code.globalstate.use_utility_code(raise_buffer_fallback_code)
|
| 396 |
+
code.putln('__Pyx_RaiseBufferFallbackError();')
|
| 397 |
+
code.putln('} else {')
|
| 398 |
+
code.putln('PyErr_Restore(%s, %s, %s);' % exc_temps)
|
| 399 |
+
code.putln('}')
|
| 400 |
+
code.putln('%s = %s = %s = 0;' % exc_temps)
|
| 401 |
+
for t in exc_temps:
|
| 402 |
+
code.funcstate.release_temp(t)
|
| 403 |
+
code.putln('}')
|
| 404 |
+
# Unpack indices
|
| 405 |
+
put_unpack_buffer_aux_into_scope(buf_entry, code)
|
| 406 |
+
code.putln(code.error_goto_if_neg(retcode_cname, pos))
|
| 407 |
+
code.funcstate.release_temp(retcode_cname)
|
| 408 |
+
else:
|
| 409 |
+
# Our entry had no previous value, so set to None when acquisition fails.
|
| 410 |
+
# In this case, auxiliary vars should be set up right in initialization to a zero-buffer,
|
| 411 |
+
# so it suffices to set the buf field to NULL.
|
| 412 |
+
code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % rhs_cname)))
|
| 413 |
+
code.putln('%s = %s; __Pyx_INCREF(Py_None); %s.rcbuffer->pybuffer.buf = NULL;' %
|
| 414 |
+
(lhs_cname,
|
| 415 |
+
PyrexTypes.typecast(buffer_type, PyrexTypes.py_object_type, "Py_None"),
|
| 416 |
+
pybuffernd_struct))
|
| 417 |
+
code.putln(code.error_goto(pos))
|
| 418 |
+
code.put('} else {')
|
| 419 |
+
# Unpack indices
|
| 420 |
+
put_unpack_buffer_aux_into_scope(buf_entry, code)
|
| 421 |
+
code.putln('}')
|
| 422 |
+
|
| 423 |
+
code.putln("}") # Release stack
|
| 424 |
+
|
| 425 |
+
|
| 426 |
+
def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives,
|
| 427 |
+
pos, code, negative_indices, in_nogil_context):
|
| 428 |
+
"""
|
| 429 |
+
Generates code to process indices and calculate an offset into
|
| 430 |
+
a buffer. Returns a C string which gives a pointer which can be
|
| 431 |
+
read from or written to at will (it is an expression so caller should
|
| 432 |
+
store it in a temporary if it is used more than once).
|
| 433 |
+
|
| 434 |
+
As the bounds checking can have any number of combinations of unsigned
|
| 435 |
+
arguments, smart optimizations etc. we insert it directly in the function
|
| 436 |
+
body. The lookup however is delegated to a inline function that is instantiated
|
| 437 |
+
once per ndim (lookup with suboffsets tend to get quite complicated).
|
| 438 |
+
|
| 439 |
+
entry is a BufferEntry
|
| 440 |
+
"""
|
| 441 |
+
negative_indices = directives['wraparound'] and negative_indices
|
| 442 |
+
|
| 443 |
+
if directives['boundscheck']:
|
| 444 |
+
# Check bounds and fix negative indices.
|
| 445 |
+
# We allocate a temporary which is initialized to -1, meaning OK (!).
|
| 446 |
+
# If an error occurs, the temp is set to the index dimension the
|
| 447 |
+
# error is occurring at.
|
| 448 |
+
failed_dim_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
|
| 449 |
+
code.putln("%s = -1;" % failed_dim_temp)
|
| 450 |
+
for dim, (signed, cname, shape) in enumerate(zip(index_signeds, index_cnames, entry.get_buf_shapevars())):
|
| 451 |
+
if signed != 0:
|
| 452 |
+
# not unsigned, deal with negative index
|
| 453 |
+
code.putln("if (%s < 0) {" % cname)
|
| 454 |
+
if negative_indices:
|
| 455 |
+
code.putln("%s += %s;" % (cname, shape))
|
| 456 |
+
code.putln("if (%s) %s = %d;" % (
|
| 457 |
+
code.unlikely("%s < 0" % cname),
|
| 458 |
+
failed_dim_temp, dim))
|
| 459 |
+
else:
|
| 460 |
+
code.putln("%s = %d;" % (failed_dim_temp, dim))
|
| 461 |
+
code.put("} else ")
|
| 462 |
+
# check bounds in positive direction
|
| 463 |
+
if signed != 0:
|
| 464 |
+
cast = ""
|
| 465 |
+
else:
|
| 466 |
+
cast = "(size_t)"
|
| 467 |
+
code.putln("if (%s) %s = %d;" % (
|
| 468 |
+
code.unlikely("%s >= %s%s" % (cname, cast, shape)),
|
| 469 |
+
failed_dim_temp, dim))
|
| 470 |
+
|
| 471 |
+
if in_nogil_context:
|
| 472 |
+
code.globalstate.use_utility_code(raise_indexerror_nogil)
|
| 473 |
+
func = '__Pyx_RaiseBufferIndexErrorNogil'
|
| 474 |
+
else:
|
| 475 |
+
code.globalstate.use_utility_code(raise_indexerror_code)
|
| 476 |
+
func = '__Pyx_RaiseBufferIndexError'
|
| 477 |
+
|
| 478 |
+
code.putln("if (%s) {" % code.unlikely("%s != -1" % failed_dim_temp))
|
| 479 |
+
code.putln('%s(%s);' % (func, failed_dim_temp))
|
| 480 |
+
code.putln(code.error_goto(pos))
|
| 481 |
+
code.putln('}')
|
| 482 |
+
code.funcstate.release_temp(failed_dim_temp)
|
| 483 |
+
elif negative_indices:
|
| 484 |
+
# Only fix negative indices.
|
| 485 |
+
for signed, cname, shape in zip(index_signeds, index_cnames, entry.get_buf_shapevars()):
|
| 486 |
+
if signed != 0:
|
| 487 |
+
code.putln("if (%s < 0) %s += %s;" % (cname, cname, shape))
|
| 488 |
+
|
| 489 |
+
return entry.generate_buffer_lookup_code(code, index_cnames)
|
| 490 |
+
|
| 491 |
+
|
| 492 |
+
def use_bufstruct_declare_code(env):
|
| 493 |
+
env.use_utility_code(buffer_struct_declare_code)
|
| 494 |
+
|
| 495 |
+
|
| 496 |
+
def buf_lookup_full_code(proto, defin, name, nd):
|
| 497 |
+
"""
|
| 498 |
+
Generates a buffer lookup function for the right number
|
| 499 |
+
of dimensions. The function gives back a void* at the right location.
|
| 500 |
+
"""
|
| 501 |
+
# _i_ndex, _s_tride, sub_o_ffset
|
| 502 |
+
macroargs = ", ".join(["i%d, s%d, o%d" % (i, i, i) for i in range(nd)])
|
| 503 |
+
proto.putln("#define %s(type, buf, %s) (type)(%s_imp(buf, %s))" % (name, macroargs, name, macroargs))
|
| 504 |
+
|
| 505 |
+
funcargs = ", ".join(["Py_ssize_t i%d, Py_ssize_t s%d, Py_ssize_t o%d" % (i, i, i) for i in range(nd)])
|
| 506 |
+
proto.putln("static CYTHON_INLINE void* %s_imp(void* buf, %s);" % (name, funcargs))
|
| 507 |
+
defin.putln(dedent("""
|
| 508 |
+
static CYTHON_INLINE void* %s_imp(void* buf, %s) {
|
| 509 |
+
char* ptr = (char*)buf;
|
| 510 |
+
""") % (name, funcargs) + "".join([dedent("""\
|
| 511 |
+
ptr += s%d * i%d;
|
| 512 |
+
if (o%d >= 0) ptr = *((char**)ptr) + o%d;
|
| 513 |
+
""") % (i, i, i, i) for i in range(nd)]
|
| 514 |
+
) + "\nreturn ptr;\n}")
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
def buf_lookup_strided_code(proto, defin, name, nd):
|
| 518 |
+
"""
|
| 519 |
+
Generates a buffer lookup function for the right number
|
| 520 |
+
of dimensions. The function gives back a void* at the right location.
|
| 521 |
+
"""
|
| 522 |
+
# _i_ndex, _s_tride
|
| 523 |
+
args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)])
|
| 524 |
+
offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd)])
|
| 525 |
+
proto.putln("#define %s(type, buf, %s) (type)((char*)buf + %s)" % (name, args, offset))
|
| 526 |
+
|
| 527 |
+
|
| 528 |
+
def buf_lookup_c_code(proto, defin, name, nd):
|
| 529 |
+
"""
|
| 530 |
+
Similar to strided lookup, but can assume that the last dimension
|
| 531 |
+
doesn't need a multiplication as long as.
|
| 532 |
+
Still we keep the same signature for now.
|
| 533 |
+
"""
|
| 534 |
+
if nd == 1:
|
| 535 |
+
proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
|
| 536 |
+
else:
|
| 537 |
+
args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)])
|
| 538 |
+
offset = " + ".join(["i%d * s%d" % (i, i) for i in range(nd - 1)])
|
| 539 |
+
proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (name, args, offset, nd - 1))
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
def buf_lookup_fortran_code(proto, defin, name, nd):
|
| 543 |
+
"""
|
| 544 |
+
Like C lookup, but the first index is optimized instead.
|
| 545 |
+
"""
|
| 546 |
+
if nd == 1:
|
| 547 |
+
proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
|
| 548 |
+
else:
|
| 549 |
+
args = ", ".join(["i%d, s%d" % (i, i) for i in range(nd)])
|
| 550 |
+
offset = " + ".join(["i%d * s%d" % (i, i) for i in range(1, nd)])
|
| 551 |
+
proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (name, args, offset, 0))
|
| 552 |
+
|
| 553 |
+
|
| 554 |
+
def use_py2_buffer_functions(env):
|
| 555 |
+
env.use_utility_code(GetAndReleaseBufferUtilityCode())
|
| 556 |
+
|
| 557 |
+
|
| 558 |
+
class GetAndReleaseBufferUtilityCode(object):
|
| 559 |
+
# Emulation of PyObject_GetBuffer and PyBuffer_Release for Python 2.
|
| 560 |
+
# For >= 2.6 we do double mode -- use the new buffer interface on objects
|
| 561 |
+
# which has the right tp_flags set, but emulation otherwise.
|
| 562 |
+
|
| 563 |
+
requires = None
|
| 564 |
+
is_cython_utility = False
|
| 565 |
+
|
| 566 |
+
def __init__(self):
|
| 567 |
+
pass
|
| 568 |
+
|
| 569 |
+
def __eq__(self, other):
|
| 570 |
+
return isinstance(other, GetAndReleaseBufferUtilityCode)
|
| 571 |
+
|
| 572 |
+
def __hash__(self):
|
| 573 |
+
return 24342342
|
| 574 |
+
|
| 575 |
+
def get_tree(self, **kwargs): pass
|
| 576 |
+
|
| 577 |
+
def put_code(self, output):
|
| 578 |
+
code = output['utility_code_def']
|
| 579 |
+
proto_code = output['utility_code_proto']
|
| 580 |
+
env = output.module_node.scope
|
| 581 |
+
cython_scope = env.context.cython_scope
|
| 582 |
+
|
| 583 |
+
# Search all types for __getbuffer__ overloads
|
| 584 |
+
types = []
|
| 585 |
+
visited_scopes = set()
|
| 586 |
+
def find_buffer_types(scope):
|
| 587 |
+
if scope in visited_scopes:
|
| 588 |
+
return
|
| 589 |
+
visited_scopes.add(scope)
|
| 590 |
+
for m in scope.cimported_modules:
|
| 591 |
+
find_buffer_types(m)
|
| 592 |
+
for e in scope.type_entries:
|
| 593 |
+
if isinstance(e.utility_code_definition, CythonUtilityCode):
|
| 594 |
+
continue
|
| 595 |
+
t = e.type
|
| 596 |
+
if t.is_extension_type:
|
| 597 |
+
if scope is cython_scope and not e.used:
|
| 598 |
+
continue
|
| 599 |
+
release = get = None
|
| 600 |
+
for x in t.scope.pyfunc_entries:
|
| 601 |
+
if x.name == u"__getbuffer__": get = x.func_cname
|
| 602 |
+
elif x.name == u"__releasebuffer__": release = x.func_cname
|
| 603 |
+
if get:
|
| 604 |
+
types.append((t.typeptr_cname, get, release))
|
| 605 |
+
|
| 606 |
+
find_buffer_types(env)
|
| 607 |
+
|
| 608 |
+
util_code = TempitaUtilityCode.load(
|
| 609 |
+
"GetAndReleaseBuffer", from_file="Buffer.c",
|
| 610 |
+
context=dict(types=types))
|
| 611 |
+
|
| 612 |
+
proto = util_code.format_code(util_code.proto)
|
| 613 |
+
impl = util_code.format_code(
|
| 614 |
+
util_code.inject_string_constants(util_code.impl, output)[1])
|
| 615 |
+
|
| 616 |
+
proto_code.putln(proto)
|
| 617 |
+
code.putln(impl)
|
| 618 |
+
|
| 619 |
+
|
| 620 |
+
def mangle_dtype_name(dtype):
|
| 621 |
+
# Use prefixes to separate user defined types from builtins
|
| 622 |
+
# (consider "typedef float unsigned_int")
|
| 623 |
+
if dtype.is_pyobject:
|
| 624 |
+
return "object"
|
| 625 |
+
elif dtype.is_ptr:
|
| 626 |
+
return "ptr"
|
| 627 |
+
else:
|
| 628 |
+
if dtype.is_typedef or dtype.is_struct_or_union:
|
| 629 |
+
prefix = "nn_"
|
| 630 |
+
else:
|
| 631 |
+
prefix = ""
|
| 632 |
+
return prefix + dtype.specialization_name()
|
| 633 |
+
|
| 634 |
+
def get_type_information_cname(code, dtype, maxdepth=None):
|
| 635 |
+
"""
|
| 636 |
+
Output the run-time type information (__Pyx_TypeInfo) for given dtype,
|
| 637 |
+
and return the name of the type info struct.
|
| 638 |
+
|
| 639 |
+
Structs with two floats of the same size are encoded as complex numbers.
|
| 640 |
+
One can separate between complex numbers declared as struct or with native
|
| 641 |
+
encoding by inspecting to see if the fields field of the type is
|
| 642 |
+
filled in.
|
| 643 |
+
"""
|
| 644 |
+
namesuffix = mangle_dtype_name(dtype)
|
| 645 |
+
name = "__Pyx_TypeInfo_%s" % namesuffix
|
| 646 |
+
structinfo_name = "__Pyx_StructFields_%s" % namesuffix
|
| 647 |
+
|
| 648 |
+
if dtype.is_error: return "<error>"
|
| 649 |
+
|
| 650 |
+
# It's critical that walking the type info doesn't use more stack
|
| 651 |
+
# depth than dtype.struct_nesting_depth() returns, so use an assertion for this
|
| 652 |
+
if maxdepth is None: maxdepth = dtype.struct_nesting_depth()
|
| 653 |
+
if maxdepth <= 0:
|
| 654 |
+
assert False
|
| 655 |
+
|
| 656 |
+
if name not in code.globalstate.utility_codes:
|
| 657 |
+
code.globalstate.utility_codes.add(name)
|
| 658 |
+
typecode = code.globalstate['typeinfo']
|
| 659 |
+
|
| 660 |
+
arraysizes = []
|
| 661 |
+
if dtype.is_array:
|
| 662 |
+
while dtype.is_array:
|
| 663 |
+
arraysizes.append(dtype.size)
|
| 664 |
+
dtype = dtype.base_type
|
| 665 |
+
|
| 666 |
+
complex_possible = dtype.is_struct_or_union and dtype.can_be_complex()
|
| 667 |
+
|
| 668 |
+
declcode = dtype.empty_declaration_code()
|
| 669 |
+
if dtype.is_simple_buffer_dtype():
|
| 670 |
+
structinfo_name = "NULL"
|
| 671 |
+
elif dtype.is_struct:
|
| 672 |
+
struct_scope = dtype.scope
|
| 673 |
+
if dtype.is_cv_qualified:
|
| 674 |
+
struct_scope = struct_scope.base_type_scope
|
| 675 |
+
# Must pre-call all used types in order not to recurse during utility code writing.
|
| 676 |
+
fields = struct_scope.var_entries
|
| 677 |
+
assert len(fields) > 0
|
| 678 |
+
types = [get_type_information_cname(code, f.type, maxdepth - 1)
|
| 679 |
+
for f in fields]
|
| 680 |
+
typecode.putln("static __Pyx_StructField %s[] = {" % structinfo_name, safe=True)
|
| 681 |
+
|
| 682 |
+
if dtype.is_cv_qualified:
|
| 683 |
+
# roughly speaking, remove "const" from struct_type
|
| 684 |
+
struct_type = dtype.cv_base_type.empty_declaration_code()
|
| 685 |
+
else:
|
| 686 |
+
struct_type = dtype.empty_declaration_code()
|
| 687 |
+
|
| 688 |
+
for f, typeinfo in zip(fields, types):
|
| 689 |
+
typecode.putln(' {&%s, "%s", offsetof(%s, %s)},' %
|
| 690 |
+
(typeinfo, f.name, struct_type, f.cname), safe=True)
|
| 691 |
+
|
| 692 |
+
typecode.putln(' {NULL, NULL, 0}', safe=True)
|
| 693 |
+
typecode.putln("};", safe=True)
|
| 694 |
+
else:
|
| 695 |
+
assert False
|
| 696 |
+
|
| 697 |
+
rep = str(dtype)
|
| 698 |
+
|
| 699 |
+
flags = "0"
|
| 700 |
+
is_unsigned = "0"
|
| 701 |
+
if dtype is PyrexTypes.c_char_type:
|
| 702 |
+
is_unsigned = "__PYX_IS_UNSIGNED(%s)" % declcode
|
| 703 |
+
typegroup = "'H'"
|
| 704 |
+
elif dtype.is_int:
|
| 705 |
+
is_unsigned = "__PYX_IS_UNSIGNED(%s)" % declcode
|
| 706 |
+
typegroup = "%s ? 'U' : 'I'" % is_unsigned
|
| 707 |
+
elif complex_possible or dtype.is_complex:
|
| 708 |
+
typegroup = "'C'"
|
| 709 |
+
elif dtype.is_float:
|
| 710 |
+
typegroup = "'R'"
|
| 711 |
+
elif dtype.is_struct:
|
| 712 |
+
typegroup = "'S'"
|
| 713 |
+
if dtype.packed:
|
| 714 |
+
flags = "__PYX_BUF_FLAGS_PACKED_STRUCT"
|
| 715 |
+
elif dtype.is_pyobject:
|
| 716 |
+
typegroup = "'O'"
|
| 717 |
+
else:
|
| 718 |
+
assert False, dtype
|
| 719 |
+
|
| 720 |
+
typeinfo = ('static __Pyx_TypeInfo %s = '
|
| 721 |
+
'{ "%s", %s, sizeof(%s), { %s }, %s, %s, %s, %s };')
|
| 722 |
+
tup = (name, rep, structinfo_name, declcode,
|
| 723 |
+
', '.join([str(x) for x in arraysizes]) or '0', len(arraysizes),
|
| 724 |
+
typegroup, is_unsigned, flags)
|
| 725 |
+
typecode.putln(typeinfo % tup, safe=True)
|
| 726 |
+
|
| 727 |
+
return name
|
| 728 |
+
|
| 729 |
+
def load_buffer_utility(util_code_name, context=None, **kwargs):
|
| 730 |
+
if context is None:
|
| 731 |
+
return UtilityCode.load(util_code_name, "Buffer.c", **kwargs)
|
| 732 |
+
else:
|
| 733 |
+
return TempitaUtilityCode.load(util_code_name, "Buffer.c", context=context, **kwargs)
|
| 734 |
+
|
| 735 |
+
context = dict(max_dims=Options.buffer_max_dims)
|
| 736 |
+
buffer_struct_declare_code = load_buffer_utility("BufferStructDeclare", context=context)
|
| 737 |
+
buffer_formats_declare_code = load_buffer_utility("BufferFormatStructs")
|
| 738 |
+
|
| 739 |
+
# Utility function to set the right exception
|
| 740 |
+
# The caller should immediately goto_error
|
| 741 |
+
raise_indexerror_code = load_buffer_utility("BufferIndexError")
|
| 742 |
+
raise_indexerror_nogil = load_buffer_utility("BufferIndexErrorNogil")
|
| 743 |
+
raise_buffer_fallback_code = load_buffer_utility("BufferFallbackError")
|
| 744 |
+
|
| 745 |
+
acquire_utility_code = load_buffer_utility("BufferGetAndValidate", context=context)
|
| 746 |
+
buffer_format_check_code = load_buffer_utility("BufferFormatCheck", context=context)
|
| 747 |
+
|
| 748 |
+
# See utility code BufferFormatFromTypeInfo
|
| 749 |
+
_typeinfo_to_format_code = load_buffer_utility("TypeInfoToFormat")
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Builtin.py
ADDED
|
@@ -0,0 +1,644 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Builtin Definitions
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
from __future__ import absolute_import
|
| 6 |
+
|
| 7 |
+
from .StringEncoding import EncodedString
|
| 8 |
+
from .Symtab import BuiltinScope, StructOrUnionScope, ModuleScope, Entry
|
| 9 |
+
from .Code import UtilityCode, TempitaUtilityCode
|
| 10 |
+
from .TypeSlots import Signature
|
| 11 |
+
from . import PyrexTypes
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# C-level implementations of builtin types, functions and methods
|
| 15 |
+
|
| 16 |
+
iter_next_utility_code = UtilityCode.load("IterNext", "ObjectHandling.c")
|
| 17 |
+
getattr_utility_code = UtilityCode.load("GetAttr", "ObjectHandling.c")
|
| 18 |
+
getattr3_utility_code = UtilityCode.load("GetAttr3", "Builtins.c")
|
| 19 |
+
pyexec_utility_code = UtilityCode.load("PyExec", "Builtins.c")
|
| 20 |
+
pyexec_globals_utility_code = UtilityCode.load("PyExecGlobals", "Builtins.c")
|
| 21 |
+
globals_utility_code = UtilityCode.load("Globals", "Builtins.c")
|
| 22 |
+
|
| 23 |
+
builtin_utility_code = {
|
| 24 |
+
'StopAsyncIteration': UtilityCode.load_cached("StopAsyncIteration", "Coroutine.c"),
|
| 25 |
+
}
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
# mapping from builtins to their C-level equivalents
|
| 29 |
+
|
| 30 |
+
class _BuiltinOverride(object):
|
| 31 |
+
def __init__(self, py_name, args, ret_type, cname, py_equiv="*",
|
| 32 |
+
utility_code=None, sig=None, func_type=None,
|
| 33 |
+
is_strict_signature=False, builtin_return_type=None,
|
| 34 |
+
nogil=None):
|
| 35 |
+
self.py_name, self.cname, self.py_equiv = py_name, cname, py_equiv
|
| 36 |
+
self.args, self.ret_type = args, ret_type
|
| 37 |
+
self.func_type, self.sig = func_type, sig
|
| 38 |
+
self.builtin_return_type = builtin_return_type
|
| 39 |
+
self.is_strict_signature = is_strict_signature
|
| 40 |
+
self.utility_code = utility_code
|
| 41 |
+
self.nogil = nogil
|
| 42 |
+
|
| 43 |
+
def build_func_type(self, sig=None, self_arg=None):
|
| 44 |
+
if sig is None:
|
| 45 |
+
sig = Signature(self.args, self.ret_type, nogil=self.nogil)
|
| 46 |
+
sig.exception_check = False # not needed for the current builtins
|
| 47 |
+
func_type = sig.function_type(self_arg)
|
| 48 |
+
if self.is_strict_signature:
|
| 49 |
+
func_type.is_strict_signature = True
|
| 50 |
+
if self.builtin_return_type:
|
| 51 |
+
func_type.return_type = builtin_types[self.builtin_return_type]
|
| 52 |
+
return func_type
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class BuiltinAttribute(object):
|
| 56 |
+
def __init__(self, py_name, cname=None, field_type=None, field_type_name=None):
|
| 57 |
+
self.py_name = py_name
|
| 58 |
+
self.cname = cname or py_name
|
| 59 |
+
self.field_type_name = field_type_name # can't do the lookup before the type is declared!
|
| 60 |
+
self.field_type = field_type
|
| 61 |
+
|
| 62 |
+
def declare_in_type(self, self_type):
|
| 63 |
+
if self.field_type_name is not None:
|
| 64 |
+
# lazy type lookup
|
| 65 |
+
field_type = builtin_scope.lookup(self.field_type_name).type
|
| 66 |
+
else:
|
| 67 |
+
field_type = self.field_type or PyrexTypes.py_object_type
|
| 68 |
+
entry = self_type.scope.declare(self.py_name, self.cname, field_type, None, 'private')
|
| 69 |
+
entry.is_variable = True
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class BuiltinFunction(_BuiltinOverride):
|
| 73 |
+
def declare_in_scope(self, scope):
|
| 74 |
+
func_type, sig = self.func_type, self.sig
|
| 75 |
+
if func_type is None:
|
| 76 |
+
func_type = self.build_func_type(sig)
|
| 77 |
+
scope.declare_builtin_cfunction(self.py_name, func_type, self.cname,
|
| 78 |
+
self.py_equiv, self.utility_code)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class BuiltinMethod(_BuiltinOverride):
|
| 82 |
+
def declare_in_type(self, self_type):
|
| 83 |
+
method_type, sig = self.func_type, self.sig
|
| 84 |
+
if method_type is None:
|
| 85 |
+
# override 'self' type (first argument)
|
| 86 |
+
self_arg = PyrexTypes.CFuncTypeArg("", self_type, None)
|
| 87 |
+
self_arg.not_none = True
|
| 88 |
+
self_arg.accept_builtin_subtypes = True
|
| 89 |
+
method_type = self.build_func_type(sig, self_arg)
|
| 90 |
+
self_type.scope.declare_builtin_cfunction(
|
| 91 |
+
self.py_name, method_type, self.cname, utility_code=self.utility_code)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class BuiltinProperty(object):
    """A C-level property of a builtin type.  Read-only for now."""

    def __init__(self, py_name, property_type, call_cname,
                 exception_value=None, exception_check=None, utility_code=None):
        # Python-visible name and the C getter implementing it.
        self.py_name = py_name
        self.call_cname = call_cname
        # Result type and error-reporting behaviour of the getter.
        self.property_type = property_type
        self.exception_value = exception_value
        self.exception_check = exception_check
        # Optional support code that must be emitted with the getter.
        self.utility_code = utility_code

    def declare_in_type(self, self_type):
        """Register the property on the scope of the builtin type *self_type*."""
        self_type.scope.declare_cproperty(
            self.py_name, self.property_type, self.call_cname,
            exception_value=self.exception_value,
            exception_check=self.exception_check,
            utility_code=self.utility_code,
        )
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
# Table of builtin functions that Cython replaces with direct C calls.
# Each row gives: python name, argument signature string, return code,
# C API function, optional python equivalent, and any utility/support code.
# Commented-out rows are builtins that are not (yet) optimised.
builtin_function_table = [
    # name, args, return, C API func, py equiv = "*"
    BuiltinFunction('abs', "d", "d", "fabs",
                    is_strict_signature=True, nogil=True),
    BuiltinFunction('abs', "f", "f", "fabsf",
                    is_strict_signature=True, nogil=True),
    BuiltinFunction('abs', "i", "i", "abs",
                    is_strict_signature=True, nogil=True),
    BuiltinFunction('abs', "l", "l", "labs",
                    is_strict_signature=True, nogil=True),
    BuiltinFunction('abs', None, None, "__Pyx_abs_longlong",
                    utility_code = UtilityCode.load("abs_longlong", "Builtins.c"),
                    func_type = PyrexTypes.CFuncType(
                        PyrexTypes.c_longlong_type, [
                            PyrexTypes.CFuncTypeArg("arg", PyrexTypes.c_longlong_type, None)
                            ],
                        is_strict_signature = True, nogil=True)),
    ] + list(
        # unsigned integer specialisations of abs() (no-ops at C level)
        BuiltinFunction('abs', None, None, "/*abs_{0}*/".format(t.specialization_name()),
                        func_type = PyrexTypes.CFuncType(
                            t,
                            [PyrexTypes.CFuncTypeArg("arg", t, None)],
                            is_strict_signature = True, nogil=True))
        for t in (PyrexTypes.c_uint_type, PyrexTypes.c_ulong_type, PyrexTypes.c_ulonglong_type)
    ) + list(
        # complex specialisations of abs(), returning the real magnitude type
        BuiltinFunction('abs', None, None, "__Pyx_c_abs{0}".format(t.funcsuffix),
                        func_type = PyrexTypes.CFuncType(
                            t.real_type, [
                                PyrexTypes.CFuncTypeArg("arg", t, None)
                                ],
                            is_strict_signature = True, nogil=True))
        for t in (PyrexTypes.c_float_complex_type,
                  PyrexTypes.c_double_complex_type,
                  PyrexTypes.c_longdouble_complex_type)
    ) + [
    BuiltinFunction('abs', "O", "O", "__Pyx_PyNumber_Absolute",
                    utility_code=UtilityCode.load("py_abs", "Builtins.c")),
    #('all', "", "", ""),
    #('any', "", "", ""),
    #('ascii', "", "", ""),
    #('bin', "", "", ""),
    BuiltinFunction('callable', "O", "b", "__Pyx_PyCallable_Check",
                    utility_code = UtilityCode.load("CallableCheck", "ObjectHandling.c")),
    #('chr', "", "", ""),
    #('cmp', "", "", "", ""), # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result)
    #('compile', "", "", ""), # PyObject* Py_CompileString( char *str, char *filename, int start)
    BuiltinFunction('delattr', "OO", "r", "PyObject_DelAttr"),
    BuiltinFunction('dir', "O", "O", "PyObject_Dir"),
    BuiltinFunction('divmod', "OO", "O", "PyNumber_Divmod"),
    BuiltinFunction('exec', "O", "O", "__Pyx_PyExecGlobals",
                    utility_code = pyexec_globals_utility_code),
    BuiltinFunction('exec', "OO", "O", "__Pyx_PyExec2",
                    utility_code = pyexec_utility_code),
    BuiltinFunction('exec', "OOO", "O", "__Pyx_PyExec3",
                    utility_code = pyexec_utility_code),
    #('eval', "", "", ""),
    #('execfile', "", "", ""),
    #('filter', "", "", ""),
    BuiltinFunction('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr",
                    utility_code=getattr3_utility_code),  # Pyrex legacy
    BuiltinFunction('getattr', "OOO", "O", "__Pyx_GetAttr3",
                    utility_code=getattr3_utility_code),
    BuiltinFunction('getattr', "OO", "O", "__Pyx_GetAttr",
                    utility_code=getattr_utility_code),
    BuiltinFunction('hasattr', "OO", "b", "__Pyx_HasAttr",
                    utility_code = UtilityCode.load("HasAttr", "Builtins.c")),
    BuiltinFunction('hash', "O", "h", "PyObject_Hash"),
    #('hex', "", "", ""),
    #('id', "", "", ""),
    #('input', "", "", ""),
    BuiltinFunction('intern', "O", "O", "__Pyx_Intern",
                    utility_code = UtilityCode.load("Intern", "Builtins.c")),
    BuiltinFunction('isinstance', "OO", "b", "PyObject_IsInstance"),
    BuiltinFunction('issubclass', "OO", "b", "PyObject_IsSubclass"),
    BuiltinFunction('iter', "OO", "O", "PyCallIter_New"),
    BuiltinFunction('iter', "O", "O", "PyObject_GetIter"),
    BuiltinFunction('len', "O", "z", "PyObject_Length"),
    BuiltinFunction('locals', "", "O", "__pyx_locals"),
    #('map', "", "", ""),
    #('max', "", "", ""),
    #('min', "", "", ""),
    BuiltinFunction('next', "O", "O", "__Pyx_PyIter_Next",
                    utility_code = iter_next_utility_code),  # not available in Py2 => implemented here
    BuiltinFunction('next', "OO", "O", "__Pyx_PyIter_Next2",
                    utility_code = iter_next_utility_code),  # not available in Py2 => implemented here
    #('oct', "", "", ""),
    #('open', "ss", "O", "PyFile_FromString"),  # not in Py3
] + [
    # ord() on known 1-character unicode values: a plain C cast
    BuiltinFunction('ord', None, None, "__Pyx_long_cast",
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_long_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)],
                        is_strict_signature=True))
    for c_type in [PyrexTypes.c_py_ucs4_type, PyrexTypes.c_py_unicode_type]
] + [
    # ord() on C char values: a plain C cast
    BuiltinFunction('ord', None, None, "__Pyx_uchar_cast",
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_uchar_type, [PyrexTypes.CFuncTypeArg("c", c_type, None)],
                        is_strict_signature=True))
    for c_type in [PyrexTypes.c_char_type, PyrexTypes.c_schar_type, PyrexTypes.c_uchar_type]
] + [
    # generic ord() fallback for arbitrary Python objects
    BuiltinFunction('ord', None, None, "__Pyx_PyObject_Ord",
                    utility_code=UtilityCode.load_cached("object_ord", "Builtins.c"),
                    func_type=PyrexTypes.CFuncType(
                        PyrexTypes.c_long_type, [
                            PyrexTypes.CFuncTypeArg("c", PyrexTypes.py_object_type, None)
                        ],
                        exception_value="(long)(Py_UCS4)-1")),
    BuiltinFunction('pow', "OOO", "O", "PyNumber_Power"),
    BuiltinFunction('pow', "OO", "O", "__Pyx_PyNumber_Power2",
                    utility_code = UtilityCode.load("pow2", "Builtins.c")),
    #('range', "", "", ""),
    #('raw_input', "", "", ""),
    #('reduce', "", "", ""),
    BuiltinFunction('reload', "O", "O", "PyImport_ReloadModule"),
    BuiltinFunction('repr', "O", "O", "PyObject_Repr"),  # , builtin_return_type='str'),  # add in Cython 3.1
    #('round', "", "", ""),
    BuiltinFunction('setattr', "OOO", "r", "PyObject_SetAttr"),
    #('sum', "", "", ""),
    #('sorted', "", "", ""),
    #('type', "O", "O", "PyObject_Type"),
    BuiltinFunction('unichr', "i", "O", "PyUnicode_FromOrdinal", builtin_return_type='unicode'),
    #('unicode', "", "", ""),
    #('vars', "", "", ""),
    #('zip', "", "", ""),
    # Can't do these easily until we have builtin type entries.
    #('typecheck', "OO", "i", "PyObject_TypeCheck", False),
    #('issubtype', "OO", "i", "PyType_IsSubtype", False),

    # Put in namespace append optimization.
    BuiltinFunction('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"),

    # This is conditionally looked up based on a compiler directive.
    BuiltinFunction('__Pyx_Globals', "", "O", "__Pyx_Globals",
                    utility_code=globals_utility_code),
]
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
# Builtin types
|
| 254 |
+
# bool
|
| 255 |
+
# buffer
|
| 256 |
+
# classmethod
|
| 257 |
+
# dict
|
| 258 |
+
# enumerate
|
| 259 |
+
# file
|
| 260 |
+
# float
|
| 261 |
+
# int
|
| 262 |
+
# list
|
| 263 |
+
# long
|
| 264 |
+
# object
|
| 265 |
+
# property
|
| 266 |
+
# slice
|
| 267 |
+
# staticmethod
|
| 268 |
+
# super
|
| 269 |
+
# str
|
| 270 |
+
# tuple
|
| 271 |
+
# type
|
| 272 |
+
# xrange
|
| 273 |
+
|
| 274 |
+
# Table of builtin types Cython knows about.
# Each row gives: python name, C type object cname, and a list of
# BuiltinMethod / BuiltinAttribute / BuiltinProperty overrides.
builtin_types_table = [

    ("type", "PyType_Type", []),

    # This conflicts with the C++ bool type, and unfortunately
    # C++ is too liberal about PyObject* <-> bool conversions,
    # resulting in unintuitive runtime behavior and segfaults.
    # ("bool", "PyBool_Type", []),

    ("int", "PyInt_Type", []),
    ("long", "PyLong_Type", []),
    ("float", "PyFloat_Type", []),

    ("complex", "PyComplex_Type", [BuiltinAttribute('cval', field_type_name = 'Py_complex'),
                                   BuiltinAttribute('real', 'cval.real', field_type = PyrexTypes.c_double_type),
                                   BuiltinAttribute('imag', 'cval.imag', field_type = PyrexTypes.c_double_type),
                                   ]),

    ("basestring", "PyBaseString_Type", [
        BuiltinMethod("join", "TO", "T", "__Pyx_PyBaseString_Join",
                      utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
        BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                      utility_code=UtilityCode.load("PySequenceMultiply", "ObjectHandling.c")),
        ]),
    ("bytearray", "PyByteArray_Type", [
        BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                      utility_code=UtilityCode.load("PySequenceMultiply", "ObjectHandling.c")),
        ]),
    ("bytes", "PyBytes_Type", [BuiltinMethod("join", "TO", "O", "__Pyx_PyBytes_Join",
                                             utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
                               BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                                             utility_code=UtilityCode.load("PySequenceMultiply", "ObjectHandling.c")),
                               ]),
    ("str", "PyString_Type", [BuiltinMethod("join", "TO", "O", "__Pyx_PyString_Join",
                                            builtin_return_type='basestring',
                                            utility_code=UtilityCode.load("StringJoin", "StringTools.c")),
                              BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                                            utility_code=UtilityCode.load("PySequenceMultiply", "ObjectHandling.c")),
                              ]),
    ("unicode", "PyUnicode_Type", [BuiltinMethod("__contains__", "TO", "b", "PyUnicode_Contains"),
                                   BuiltinMethod("join", "TO", "T", "PyUnicode_Join"),
                                   BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                                                 utility_code=UtilityCode.load("PySequenceMultiply", "ObjectHandling.c")),
                                   ]),

    ("tuple", "PyTuple_Type", [BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                                             utility_code=UtilityCode.load("PySequenceMultiply", "ObjectHandling.c")),
                               ]),

    ("list", "PyList_Type", [BuiltinMethod("insert", "TzO", "r", "PyList_Insert"),
                             BuiltinMethod("reverse", "T", "r", "PyList_Reverse"),
                             BuiltinMethod("append", "TO", "r", "__Pyx_PyList_Append",
                                           utility_code=UtilityCode.load("ListAppend", "Optimize.c")),
                             BuiltinMethod("extend", "TO", "r", "__Pyx_PyList_Extend",
                                           utility_code=UtilityCode.load("ListExtend", "Optimize.c")),
                             BuiltinMethod("__mul__", "Tz", "T", "__Pyx_PySequence_Multiply",
                                           utility_code=UtilityCode.load("PySequenceMultiply", "ObjectHandling.c")),
                             ]),

    ("dict", "PyDict_Type", [BuiltinMethod("__contains__", "TO", "b", "PyDict_Contains"),
                             BuiltinMethod("has_key", "TO", "b", "PyDict_Contains"),
                             BuiltinMethod("items", "T", "O", "__Pyx_PyDict_Items",
                                           utility_code=UtilityCode.load("py_dict_items", "Builtins.c")),
                             BuiltinMethod("keys", "T", "O", "__Pyx_PyDict_Keys",
                                           utility_code=UtilityCode.load("py_dict_keys", "Builtins.c")),
                             BuiltinMethod("values", "T", "O", "__Pyx_PyDict_Values",
                                           utility_code=UtilityCode.load("py_dict_values", "Builtins.c")),
                             BuiltinMethod("iteritems", "T", "O", "__Pyx_PyDict_IterItems",
                                           utility_code=UtilityCode.load("py_dict_iteritems", "Builtins.c")),
                             BuiltinMethod("iterkeys", "T", "O", "__Pyx_PyDict_IterKeys",
                                           utility_code=UtilityCode.load("py_dict_iterkeys", "Builtins.c")),
                             BuiltinMethod("itervalues", "T", "O", "__Pyx_PyDict_IterValues",
                                           utility_code=UtilityCode.load("py_dict_itervalues", "Builtins.c")),
                             BuiltinMethod("viewitems", "T", "O", "__Pyx_PyDict_ViewItems",
                                           utility_code=UtilityCode.load("py_dict_viewitems", "Builtins.c")),
                             BuiltinMethod("viewkeys", "T", "O", "__Pyx_PyDict_ViewKeys",
                                           utility_code=UtilityCode.load("py_dict_viewkeys", "Builtins.c")),
                             BuiltinMethod("viewvalues", "T", "O", "__Pyx_PyDict_ViewValues",
                                           utility_code=UtilityCode.load("py_dict_viewvalues", "Builtins.c")),
                             BuiltinMethod("clear", "T", "r", "__Pyx_PyDict_Clear",
                                           utility_code=UtilityCode.load("py_dict_clear", "Optimize.c")),
                             BuiltinMethod("copy", "T", "T", "PyDict_Copy")]),

    ("slice", "PySlice_Type", [BuiltinAttribute('start'),
                               BuiltinAttribute('stop'),
                               BuiltinAttribute('step'),
                               ]),
    # ("file", "PyFile_Type", []),  # not in Py3

    ("set", "PySet_Type", [BuiltinMethod("clear", "T", "r", "PySet_Clear"),
                           # discard() and remove() have a special treatment for unhashable values
                           BuiltinMethod("discard", "TO", "r", "__Pyx_PySet_Discard",
                                         utility_code=UtilityCode.load("py_set_discard", "Optimize.c")),
                           BuiltinMethod("remove", "TO", "r", "__Pyx_PySet_Remove",
                                         utility_code=UtilityCode.load("py_set_remove", "Optimize.c")),
                           # update is actually variadic (see Github issue #1645)
                           # BuiltinMethod("update", "TO", "r", "__Pyx_PySet_Update",
                           #               utility_code=UtilityCode.load_cached("PySet_Update", "Builtins.c")),
                           BuiltinMethod("add", "TO", "r", "PySet_Add"),
                           BuiltinMethod("pop", "T", "O", "PySet_Pop")]),
    ("frozenset", "PyFrozenSet_Type", []),
    ("Exception", "((PyTypeObject*)PyExc_Exception)[0]", []),
    ("StopAsyncIteration", "((PyTypeObject*)__Pyx_PyExc_StopAsyncIteration)[0]", []),
    ("memoryview", "PyMemoryView_Type", [
        # TODO - format would be nice, but hard to get
        # __len__ can be accessed through a direct lookup of the buffer (but probably in Optimize.c)
        # error checking would ideally be limited api only
        BuiltinProperty("ndim", PyrexTypes.c_int_type, '__Pyx_PyMemoryView_Get_ndim',
                        exception_value="-1", exception_check=True,
                        utility_code=TempitaUtilityCode.load_cached(
                            "memoryview_get_from_buffer", "Builtins.c",
                            context=dict(name="ndim")
                        )
                        ),
        BuiltinProperty("readonly", PyrexTypes.c_bint_type, '__Pyx_PyMemoryView_Get_readonly',
                        exception_value="-1", exception_check=True,
                        utility_code=TempitaUtilityCode.load_cached(
                            "memoryview_get_from_buffer", "Builtins.c",
                            context=dict(name="readonly")
                        )
                        ),
        BuiltinProperty("itemsize", PyrexTypes.c_py_ssize_t_type, '__Pyx_PyMemoryView_Get_itemsize',
                        exception_value="-1", exception_check=True,
                        utility_code=TempitaUtilityCode.load_cached(
                            "memoryview_get_from_buffer", "Builtins.c",
                            context=dict(name="itemsize")
                        )
                        )]
     )
]
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
types_that_construct_their_instance = frozenset({
    # some builtin types do not always return an instance of
    # themselves - these do:
    'type', 'bool', 'long', 'float', 'complex',
    'bytes', 'unicode', 'bytearray',
    'tuple', 'list', 'dict', 'set', 'frozenset',
    # 'str',  # only in Py3.x
    # 'file',  # only in Py2.x
    'memoryview'
})
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
# C structs that are pre-declared in the builtin scope.
# Each row gives: python-visible name, C name, and the struct's fields.
builtin_structs_table = [
    ('Py_buffer', 'Py_buffer',
     [("buf", PyrexTypes.c_void_ptr_type),
      ("obj", PyrexTypes.py_object_type),
      ("len", PyrexTypes.c_py_ssize_t_type),
      ("itemsize", PyrexTypes.c_py_ssize_t_type),
      ("readonly", PyrexTypes.c_bint_type),
      ("ndim", PyrexTypes.c_int_type),
      ("format", PyrexTypes.c_char_ptr_type),
      ("shape", PyrexTypes.c_py_ssize_t_ptr_type),
      ("strides", PyrexTypes.c_py_ssize_t_ptr_type),
      ("suboffsets", PyrexTypes.c_py_ssize_t_ptr_type),
      ("smalltable", PyrexTypes.CArrayType(PyrexTypes.c_py_ssize_t_type, 2)),
      ("internal", PyrexTypes.c_void_ptr_type),
      ]),
    ('Py_complex', 'Py_complex',
     [('real', PyrexTypes.c_double_type),
      ('imag', PyrexTypes.c_double_type),
      ])
]
|
| 438 |
+
|
| 439 |
+
# set up builtin scope

# Singleton scope that holds all builtin declarations; it is populated
# below by init_builtins().
builtin_scope = BuiltinScope()
|
| 442 |
+
|
| 443 |
+
def init_builtin_funcs():
    """Declare every function from builtin_function_table in the builtin scope."""
    for bf in builtin_function_table:
        bf.declare_in_scope(builtin_scope)
|
| 446 |
+
|
| 447 |
+
# Mapping of python type name -> declared builtin type object,
# filled in by init_builtin_types().
builtin_types = {}

def init_builtin_types():
    """Declare all entries from builtin_types_table in the builtin scope."""
    global builtin_types
    for name, cname, methods in builtin_types_table:
        utility = builtin_utility_code.get(name)
        # A few types do not follow the 'Py%sObject' naming convention.
        if name == 'frozenset':
            objstruct_cname = 'PySetObject'
        elif name == 'bytearray':
            objstruct_cname = 'PyByteArrayObject'
        elif name == 'bool':
            objstruct_cname = None
        elif name == 'Exception':
            objstruct_cname = "PyBaseExceptionObject"
        elif name == 'StopAsyncIteration':
            objstruct_cname = "PyBaseExceptionObject"
        else:
            objstruct_cname = 'Py%sObject' % name.capitalize()
        # Container types use specialised type classes that understand
        # their constructor behaviour.
        type_class = PyrexTypes.BuiltinObjectType
        if name in ['dict', 'list', 'set', 'frozenset']:
            type_class = PyrexTypes.BuiltinTypeConstructorObjectType
        elif name == 'tuple':
            type_class = PyrexTypes.PythonTupleTypeConstructor
        the_type = builtin_scope.declare_builtin_type(name, cname, utility, objstruct_cname,
                                                      type_class=type_class)
        builtin_types[name] = the_type
        for method in methods:
            method.declare_in_type(the_type)
|
| 475 |
+
|
| 476 |
+
def init_builtin_structs():
    """Declare the C structs from builtin_structs_table in the builtin scope."""
    for name, cname, attribute_types in builtin_structs_table:
        scope = StructOrUnionScope(name)
        for attribute_name, attribute_type in attribute_types:
            scope.declare_var(attribute_name, attribute_type, None,
                              attribute_name, allow_pyobject=True)
        builtin_scope.declare_struct_or_union(
            name, "struct", scope, 1, None, cname = cname)
|
| 484 |
+
|
| 485 |
+
|
| 486 |
+
def init_builtins():
    """Populate the builtin scope and bind the module-level *_type globals.

    Structs must be declared before types (the 'complex' type refers to the
    'Py_complex' struct), and types before the globals that look them up.
    """
    #Errors.init_thread()  # hopefully not needed - we should not emit warnings ourselves
    init_builtin_structs()
    init_builtin_types()
    init_builtin_funcs()

    # '__debug__' maps to a C call that reflects the assertion setting.
    entry = builtin_scope.declare_var(
        '__debug__', PyrexTypes.c_const_type(PyrexTypes.c_bint_type),
        pos=None, cname='__pyx_assertions_enabled()', is_cdef=True)
    entry.utility_code = UtilityCode.load_cached("AssertionsEnabled", "Exceptions.c")

    global type_type, list_type, tuple_type, dict_type, set_type, frozenset_type, slice_type
    global bytes_type, str_type, unicode_type, basestring_type, bytearray_type
    global float_type, int_type, long_type, bool_type, complex_type
    global memoryview_type, py_buffer_type
    global sequence_types
    type_type = builtin_scope.lookup('type').type
    list_type = builtin_scope.lookup('list').type
    tuple_type = builtin_scope.lookup('tuple').type
    dict_type = builtin_scope.lookup('dict').type
    set_type = builtin_scope.lookup('set').type
    frozenset_type = builtin_scope.lookup('frozenset').type
    slice_type = builtin_scope.lookup('slice').type

    bytes_type = builtin_scope.lookup('bytes').type
    str_type = builtin_scope.lookup('str').type
    unicode_type = builtin_scope.lookup('unicode').type
    basestring_type = builtin_scope.lookup('basestring').type
    bytearray_type = builtin_scope.lookup('bytearray').type
    memoryview_type = builtin_scope.lookup('memoryview').type

    float_type = builtin_scope.lookup('float').type
    int_type = builtin_scope.lookup('int').type
    long_type = builtin_scope.lookup('long').type
    bool_type = builtin_scope.lookup('bool').type
    complex_type = builtin_scope.lookup('complex').type

    sequence_types = (
        list_type,
        tuple_type,
        bytes_type,
        str_type,
        unicode_type,
        basestring_type,
        bytearray_type,
        memoryview_type,
    )

    # Set up type inference links between equivalent Python/C types
    bool_type.equivalent_type = PyrexTypes.c_bint_type
    PyrexTypes.c_bint_type.equivalent_type = bool_type

    float_type.equivalent_type = PyrexTypes.c_double_type
    PyrexTypes.c_double_type.equivalent_type = float_type

    complex_type.equivalent_type = PyrexTypes.c_double_complex_type
    PyrexTypes.c_double_complex_type.equivalent_type = complex_type

    py_buffer_type = builtin_scope.lookup('Py_buffer').type


# Populate the builtin scope at import time.
init_builtins()
|
| 548 |
+
|
| 549 |
+
##############################
|
| 550 |
+
# Support for a few standard library modules that Cython understands (currently typing and dataclasses)
|
| 551 |
+
##############################
|
| 552 |
+
# Cache of module name -> ModuleScope for the handful of standard-library
# modules Cython understands (currently typing, dataclasses, functools).
_known_module_scopes = {}

def get_known_standard_library_module_scope(module_name):
    """Return a (cached) ModuleScope for a known stdlib module, or None."""
    mod = _known_module_scopes.get(module_name)
    if mod:
        return mod

    if module_name == "typing":
        mod = ModuleScope(module_name, None, None)
        # Generic aliases of the builtin container types.
        for name, tp in [
                ('Dict', dict_type),
                ('List', list_type),
                ('Tuple', tuple_type),
                ('Set', set_type),
                ('FrozenSet', frozenset_type),
        ]:
            name = EncodedString(name)
            entry = mod.declare_type(name, tp, pos = None)
            # A plain-object twin entry so the name can also be used as a
            # runtime value, not only as a type annotation.
            var_entry = Entry(name, None, PyrexTypes.py_object_type)
            var_entry.is_pyglobal = True
            var_entry.is_variable = True
            var_entry.scope = mod
            entry.as_variable = var_entry
            entry.known_standard_library_import = "%s.%s" % (module_name, name)

        # Subscriptable special forms handled by the type system itself.
        for name in ['ClassVar', 'Optional']:
            name = EncodedString(name)
            indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("typing."+name))
            entry = mod.declare_type(name, indexed_type, pos = None)
            var_entry = Entry(name, None, PyrexTypes.py_object_type)
            var_entry.is_pyglobal = True
            var_entry.is_variable = True
            var_entry.scope = mod
            entry.as_variable = var_entry
            entry.known_standard_library_import = "%s.%s" % (module_name, name)
        _known_module_scopes[module_name] = mod
    elif module_name == "dataclasses":
        mod = ModuleScope(module_name, None, None)
        indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("dataclasses.InitVar"))
        initvar_string = EncodedString("InitVar")
        entry = mod.declare_type(initvar_string, indexed_type, pos = None)
        var_entry = Entry(initvar_string, None, PyrexTypes.py_object_type)
        var_entry.is_pyglobal = True
        var_entry.scope = mod
        entry.as_variable = var_entry
        entry.known_standard_library_import = "%s.InitVar" % module_name
        for name in ["dataclass", "field"]:
            mod.declare_var(EncodedString(name), PyrexTypes.py_object_type, pos=None)
        _known_module_scopes[module_name] = mod
    elif module_name == "functools":
        mod = ModuleScope(module_name, None, None)
        for name in ["total_ordering"]:
            mod.declare_var(EncodedString(name), PyrexTypes.py_object_type, pos=None)
        _known_module_scopes[module_name] = mod

    return mod
|
| 608 |
+
|
| 609 |
+
|
| 610 |
+
def get_known_standard_library_entry(qualified_name):
    """Resolve a dotted "module.attr" name against the known stdlib
    module scopes.  Returns the matching Entry, or None when the module
    is unknown, the name is nested deeper than one level, or only a bare
    module name was given.
    """
    parts = qualified_name.split(".")
    if len(parts) > 2:
        # for now, we don't know how to deal with any nested modules
        return None

    scope = get_known_standard_library_module_scope(EncodedString(parts[0]))

    # eventually handle more sophisticated multiple lookups if needed
    if scope and len(parts) == 2:
        return scope.lookup_here(parts[1])
    return None
|
| 624 |
+
|
| 625 |
+
|
| 626 |
+
def exprnode_to_known_standard_library_name(node, env):
    """Map an attribute/name expression node to a dotted stdlib name
    (e.g. "typing.Optional") if its root resolves to a known standard
    library import in *env*; otherwise return None.
    """
    qualified_name_parts = []
    known_name = None
    # Walk down the attribute chain, collecting attribute names
    # (innermost first) until we reach the base expression.
    while node.is_attribute:
        qualified_name_parts.append(node.attribute)
        node = node.obj
    if node.is_name:
        entry = env.lookup(node.name)
        if entry and entry.known_standard_library_import:
            if get_known_standard_library_entry(
                    entry.known_standard_library_import):
                # The name itself was imported directly ("from typing import X").
                known_name = entry.known_standard_library_import
            else:
                # The base name is a module ("import typing"); rebuild the
                # dotted path from the module name plus the attribute chain.
                standard_env = get_known_standard_library_module_scope(
                    entry.known_standard_library_import)
                if standard_env:
                    qualified_name_parts.append(standard_env.name)
                    known_name = ".".join(reversed(qualified_name_parts))
    return known_name
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/CmdLine.py
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Cython - Command Line Parsing
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
from __future__ import absolute_import
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
from argparse import ArgumentParser, Action, SUPPRESS
|
| 10 |
+
from . import Options
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
if sys.version_info < (3, 3):
    # TODO: This workaround can be removed in Cython 3.1
    # (Python < 3.3 has no FileNotFoundError; fall back to its base class.)
    FileNotFoundError = IOError
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class ParseDirectivesAction(Action):
    # Parses "-X dir=value[,...]" compiler directives, merging them into any
    # directives already collected on the namespace (or the defaults).
    def __call__(self, parser, namespace, values, option_string=None):
        # Copy first so repeated options accumulate without mutating defaults.
        old_directives = dict(getattr(namespace, self.dest,
                                      Options.get_directive_defaults()))
        directives = Options.parse_directive_list(
            values, relaxed_bool=True, current_settings=old_directives)
        setattr(namespace, self.dest, directives)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class ParseOptionsAction(Action):
    """Accumulate comma-separated "name[=value]" options into a dict on the
    namespace.  Values are interpreted as booleans: anything other than an
    explicit "false"/"f"/"0"/"no" spelling (case-insensitive) counts as True,
    and a bare name with no "=" is True as well.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        # Copy any options parsed so far so repeated flags accumulate.
        parsed = dict(getattr(namespace, self.dest, {}))
        for item in values.split(','):
            name, eq, value = item.partition('=')
            if eq:
                parsed[name] = value.lower() not in ('false', 'f', '0', 'no')
            else:
                parsed[name] = True
        setattr(namespace, self.dest, parsed)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class ParseCompileTimeEnvAction(Action):
    # Parses "-E name=value[,...]" compile-time environment settings,
    # merging them into any settings already collected on the namespace.
    def __call__(self, parser, namespace, values, option_string=None):
        # Copy first so repeated options accumulate.
        old_env = dict(getattr(namespace, self.dest, {}))
        new_env = Options.parse_compile_time_env(values, current_settings=old_env)
        setattr(namespace, self.dest, new_env)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class ActivateAllWarningsAction(Action):
    # Turns on all extra warnings by merging Options.extra_warnings into the
    # compiler directives collected so far.
    def __call__(self, parser, namespace, values, option_string=None):
        # NOTE(review): updates the existing directives dict in place (no
        # copy); later "-X" options still apply on top of these warnings.
        directives = getattr(namespace, 'compiler_directives', {})
        directives.update(Options.extra_warnings)
        namespace.compiler_directives = directives
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class SetLenientAction(Action):
    """Relax error checking: unknown names and uninitialized variables
    become warnings instead of errors.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        for option in ('error_on_unknown_names', 'error_on_uninitialized'):
            setattr(namespace, option, False)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class SetGDBDebugAction(Action):
    """Enable gdb debug-info generation, writing into the current directory."""

    def __call__(self, parser, namespace, values, option_string=None):
        # '--gdb' implies an output directory of '.'; use the
        # '--gdb-outdir' option to pick a different one.
        namespace.gdb_debug, namespace.output_dir = True, os.curdir
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class SetGDBDebugOutputAction(Action):
    """Enable gdb debug-info generation with an explicit output directory."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.gdb_debug, namespace.output_dir = True, values
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class SetAnnotateCoverageAction(Action):
    """Enable source annotation and point it at a coverage XML file."""

    def __call__(self, parser, namespace, values, option_string=None):
        namespace.annotate, namespace.annotate_coverage_xml = True, values
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def create_cython_argparser():
    """Build and return the ArgumentParser for Cython's command line options."""
    description = "Cython (https://cython.org/) is a compiler for code written in the "\
                  "Cython language. Cython is based on Pyrex by Greg Ewing."

    # argument_default=SUPPRESS keeps unset options out of the namespace
    # entirely, so parse_command_line() only copies values the user gave.
    parser = ArgumentParser(description=description, argument_default=SUPPRESS)

    parser.add_argument("-V", "--version", dest='show_version', action='store_const', const=1,
                        help='Display version number of cython compiler')
    parser.add_argument("-l", "--create-listing", dest='use_listing_file', action='store_const', const=1,
                        help='Write error messages to a listing file')
    parser.add_argument("-I", "--include-dir", dest='include_path', action='append',
                        help='Search for include files in named directory '
                             '(multiple include directories are allowed).')
    parser.add_argument("-o", "--output-file", dest='output_file', action='store', type=str,
                        help='Specify name of generated C file')
    # -t and -f share one dest: -f (const=0) overrides an earlier -t (const=1).
    parser.add_argument("-t", "--timestamps", dest='timestamps', action='store_const', const=1,
                        help='Only compile newer source files')
    parser.add_argument("-f", "--force", dest='timestamps', action='store_const', const=0,
                        help='Compile all source files (overrides implied -t)')
    parser.add_argument("-v", "--verbose", dest='verbose', action='count',
                        help='Be verbose, print file names on multiple compilation')
    parser.add_argument("-p", "--embed-positions", dest='embed_pos_in_docstring', action='store_const', const=1,
                        help='If specified, the positions in Cython files of each '
                             'function definition is embedded in its docstring.')
    parser.add_argument("--cleanup", dest='generate_cleanup_code', action='store', type=int,
                        help='Release interned objects on python exit, for memory debugging. '
                             'Level indicates aggressiveness, default 0 releases nothing.')
    parser.add_argument("-w", "--working", dest='working_path', action='store', type=str,
                        help='Sets the working directory for Cython (the directory modules are searched from)')
    parser.add_argument("--gdb", action=SetGDBDebugAction, nargs=0,
                        help='Output debug information for cygdb')
    parser.add_argument("--gdb-outdir", action=SetGDBDebugOutputAction, type=str,
                        help='Specify gdb debug information output directory. Implies --gdb.')
    parser.add_argument("-D", "--no-docstrings", dest='docstrings', action='store_false',
                        help='Strip docstrings from the compiled module.')
    parser.add_argument('-a', '--annotate', action='store_const', const='default', dest='annotate',
                        help='Produce a colorized HTML version of the source.')
    parser.add_argument('--annotate-fullc', action='store_const', const='fullc', dest='annotate',
                        help='Produce a colorized HTML version of the source '
                             'which includes entire generated C/C++-code.')
    parser.add_argument("--annotate-coverage", dest='annotate_coverage_xml', action=SetAnnotateCoverageAction, type=str,
                        help='Annotate and include coverage information from cov.xml.')
    parser.add_argument("--line-directives", dest='emit_linenums', action='store_true',
                        help='Produce #line directives pointing to the .pyx source')
    parser.add_argument("-+", "--cplus", dest='cplus', action='store_const', const=1,
                        help='Output a C++ rather than C file.')
    # '--embed=NAME' is re-handled in parse_command_line_raw(); argparse alone
    # cannot parse the optional-value form correctly.
    parser.add_argument('--embed', action='store_const', const='main',
                        help='Generate a main() function that embeds the Python interpreter. '
                             'Pass --embed=<method_name> for a name other than main().')
    parser.add_argument('-2', dest='language_level', action='store_const', const=2,
                        help='Compile based on Python-2 syntax and code semantics.')
    parser.add_argument('-3', dest='language_level', action='store_const', const=3,
                        help='Compile based on Python-3 syntax and code semantics.')
    parser.add_argument('--3str', dest='language_level', action='store_const', const='3str',
                        help='Compile based on Python-3 syntax and code semantics without '
                             'assuming unicode by default for string literals under Python 2.')
    parser.add_argument("--lenient", action=SetLenientAction, nargs=0,
                        help='Change some compile time errors to runtime errors to '
                             'improve Python compatibility')
    parser.add_argument("--capi-reexport-cincludes", dest='capi_reexport_cincludes', action='store_true',
                        help='Add cincluded headers to any auto-generated header files.')
    parser.add_argument("--fast-fail", dest='fast_fail', action='store_true',
                        help='Abort the compilation on the first error')
    parser.add_argument("-Werror", "--warning-errors", dest='warning_errors', action='store_true',
                        help='Make all warnings into errors')
    parser.add_argument("-Wextra", "--warning-extra", action=ActivateAllWarningsAction, nargs=0,
                        help='Enable extra warnings')

    parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
                        dest='compiler_directives', type=str,
                        action=ParseDirectivesAction,
                        help='Overrides a compiler directive')
    parser.add_argument('-E', '--compile-time-env', metavar='NAME=VALUE,...',
                        dest='compile_time_env', type=str,
                        action=ParseCompileTimeEnvAction,
                        help='Provides compile time env like DEF would do.')
    parser.add_argument("--module-name",
                        dest='module_name', type=str, action='store',
                        help='Fully qualified module name. If not given, is '
                             'deduced from the import path if source file is in '
                             'a package, or equals the filename otherwise.')
    parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
    parser.add_argument('sources', nargs='*', default=[])

    # TODO: add help
    parser.add_argument("-z", "--pre-import", dest='pre_import', action='store', type=str, help=SUPPRESS)
    parser.add_argument("--convert-range", dest='convert_range', action='store_true', help=SUPPRESS)
    parser.add_argument("--no-c-in-traceback", dest='c_line_in_traceback', action='store_false', help=SUPPRESS)
    parser.add_argument("--cimport-from-pyx", dest='cimport_from_pyx', action='store_true', help=SUPPRESS)
    parser.add_argument("--old-style-globals", dest='old_style_globals', action='store_true', help=SUPPRESS)

    # debug stuff:
    # One hidden '--debug-xyz' flag per DebugFlags attribute named 'debug*'.
    from . import DebugFlags
    for name in vars(DebugFlags):
        if name.startswith("debug"):
            option_name = name.replace('_', '-')
            parser.add_argument("--" + option_name, action='store_true', help=SUPPRESS)

    return parser
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
def parse_command_line_raw(parser, args):
    """Run *parser* over *args* and return ``(namespace, source_files)``.

    '--embed'/'--embed=NAME' are separated out beforehand because argparse
    does not parse the optional-value form correctly; unknown positional
    leftovers are treated as additional source files, while unknown options
    abort with a parser error.
    """
    embed_opts = []
    remaining = []
    for arg in args:
        target = embed_opts if arg == '--embed' or arg.startswith('--embed=') else remaining
        target.append(arg)

    arguments, unknown = parser.parse_known_args(remaining)

    sources = arguments.sources
    del arguments.sources

    # Leftovers are either extra source files or genuinely unknown options.
    for extra in unknown:
        if extra.startswith('-'):
            parser.error("unknown option " + extra)
        else:
            sources.append(extra)

    # Apply the embed options last; a bare '--embed' means embed 'main'.
    for opt in embed_opts:
        arguments.embed = 'main' if opt == '--embed' else opt[len('--embed='):]

    return arguments, sources
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def parse_command_line(args):
    """Parse *args* and return ``(CompilationOptions, sources)``.

    Side effects: values whose names match attributes of the Options module
    or DebugFlags are written onto those modules directly; only the rest end
    up on the returned CompilationOptions object.

    Raises FileNotFoundError for a source file that does not exist; calls
    parser.error() (which exits) for inconsistent option combinations.
    """
    parser = create_cython_argparser()
    arguments, sources = parse_command_line_raw(parser, args)

    # Existence check only: the joined path is not written back into
    # `sources`, so relative paths are re-resolved by later stages.
    work_dir = getattr(arguments, 'working_path', '')
    for source in sources:
        if work_dir and not os.path.isabs(source):
            source = os.path.join(work_dir, source)
        if not os.path.exists(source):
            import errno
            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), source)

    # Distribute parsed values: 'debug*' names -> DebugFlags, names known to
    # the Options module -> Options, everything else -> CompilationOptions.
    options = Options.CompilationOptions(Options.default_options)
    for name, value in vars(arguments).items():
        if name.startswith('debug'):
            from . import DebugFlags
            if name in dir(DebugFlags):
                setattr(DebugFlags, name, value)
            else:
                parser.error("Unknown debug flag: %s\n" % name)
        elif hasattr(Options, name):
            setattr(Options, name, value)
        else:
            setattr(options, name, value)

    # Cross-option consistency checks.
    if options.use_listing_file and len(sources) > 1:
        parser.error("cython: Only one source file allowed when using -o\n")
    if len(sources) == 0 and not options.show_version:
        parser.error("cython: Need at least one source file\n")
    if Options.embed and len(sources) > 1:
        parser.error("cython: Only one source file allowed when using --embed\n")
    if options.module_name:
        if options.timestamps:
            parser.error("cython: Cannot use --module-name with --timestamps\n")
        if len(sources) > 1:
            parser.error("cython: Only one source file allowed when using --module-name\n")
    return options, sources
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Code.cp39-win_amd64.pyd
ADDED
|
Binary file (838 kB). View file
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Code.pxd
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# cython: language_level=3

# Declarations (.pxd) for the Cython/Compiler/Code module: typed attribute
# and method signatures used when the module is compiled by Cython itself.

cimport cython
from ..StringIOTree cimport StringIOTree


cdef class UtilityCodeBase(object):
    cpdef format_code(self, code_string, replace_empty_lines=*)


cdef class UtilityCode(UtilityCodeBase):
    cdef public object name
    cdef public object proto
    cdef public object impl
    cdef public object init
    cdef public object cleanup
    cdef public object proto_block
    cdef public object requires
    cdef public dict _cache
    cdef public list specialize_list
    cdef public object file

    cpdef none_or_sub(self, s, context)


cdef class FunctionState:
    cdef public set names_taken
    cdef public object owner
    cdef public object scope

    cdef public object error_label
    cdef public size_t label_counter
    cdef public set labels_used
    cdef public object return_label
    cdef public object continue_label
    cdef public object break_label
    cdef public list yield_labels

    cdef public object return_from_error_cleanup_label # not used in __init__ ?

    cdef public object exc_vars
    cdef public object current_except
    cdef public bint in_try_finally
    cdef public bint can_trace
    cdef public bint gil_owned

    cdef public list temps_allocated
    cdef public dict temps_free
    cdef public dict temps_used_type
    cdef public set zombie_temps
    cdef public size_t temp_counter
    cdef public list collect_temps_stack

    cdef public object closure_temps
    cdef public bint should_declare_error_indicator
    cdef public bint uses_error_indicator
    cdef public bint error_without_exception

    cdef public bint needs_refnanny

    @cython.locals(n=size_t)
    cpdef new_label(self, name=*)
    cpdef tuple get_loop_labels(self)
    cpdef set_loop_labels(self, labels)
    cpdef tuple get_all_labels(self)
    cpdef set_all_labels(self, labels)
    cpdef start_collecting_temps(self)
    cpdef stop_collecting_temps(self)

    cpdef list temps_in_use(self)

cdef class IntConst:
    cdef public object cname
    cdef public object value
    cdef public bint is_long

cdef class PyObjectConst:
    cdef public object cname
    cdef public object type

cdef class StringConst:
    cdef public object cname
    cdef public object text
    cdef public object escaped_value
    cdef public dict py_strings
    cdef public list py_versions

    @cython.locals(intern=bint, is_str=bint, is_unicode=bint)
    cpdef get_py_string_const(self, encoding, identifier=*, is_str=*, py3str_cstring=*)

## cdef class PyStringConst:
##     cdef public object cname
##     cdef public object encoding
##     cdef public bint is_str
##     cdef public bint is_unicode
##     cdef public bint intern

#class GlobalState(object):

#def funccontext_property(name):

cdef class CCodeWriter(object):
    cdef readonly StringIOTree buffer
    cdef readonly list pyclass_stack
    cdef readonly object globalstate
    cdef readonly object funcstate
    cdef object code_config
    cdef object last_pos
    cdef object last_marked_pos
    cdef Py_ssize_t level
    cdef public Py_ssize_t call_level  # debug-only, see Nodes.py
    cdef bint bol

    cpdef write(self, s)
    @cython.final
    cdef _write_lines(self, s)
    cpdef _write_to_buffer(self, s)
    cpdef put(self, code)
    cpdef put_safe(self, code)
    cpdef putln(self, code=*, bint safe=*)
    @cython.final
    cdef increase_indent(self)
    @cython.final
    cdef decrease_indent(self)
    @cython.final
    cdef indent(self)


cdef class PyrexCodeWriter:
    cdef public object f
    cdef public Py_ssize_t level
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Code.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/CodeGeneration.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
from .Visitor import VisitorTransform
|
| 4 |
+
from .Nodes import StatListNode
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class ExtractPxdCode(VisitorTransform):
    """
    Finds nodes in a pxd file that should generate code, and
    returns them in a StatListNode.

    The result is a tuple (StatListNode, ModuleScope), i.e.
    everything that is needed from the pxd after it is processed.

    A purer approach would be to separately compile the pxd code,
    but the result would have to be slightly more sophisticated
    than pure strings (functions + wanted interned strings +
    wanted utility code + wanted cached objects) so for now this
    approach is taken.
    """

    def __call__(self, root):
        # Collect function definitions while walking the tree, then wrap
        # them in a single StatListNode at the root's position.
        self.funcs = []
        self.visitchildren(root)
        return (StatListNode(root.pos, stats=self.funcs), root.scope)

    def visit_FuncDefNode(self, node):
        self.funcs.append(node)
        # Do not visit children, nested funcdefnodes will
        # also be moved by this action...
        return node

    def visit_Node(self, node):
        # Default case: recurse into children, keep the node itself unchanged.
        self.visitchildren(node)
        return node
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/CythonScope.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
from .Symtab import ModuleScope
|
| 4 |
+
from .PyrexTypes import *
|
| 5 |
+
from .UtilityCode import CythonUtilityCode
|
| 6 |
+
from .Errors import error
|
| 7 |
+
from .Scanning import StringSourceDescriptor
|
| 8 |
+
from . import MemoryView
|
| 9 |
+
from .StringEncoding import EncodedString
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class CythonScope(ModuleScope):
    """Module scope for the special ``cython`` module and its submodules.

    Most entries are created lazily by load_cythonscope() on first failed
    lookup; populate_cython_scope() installs only the always-needed ones.
    """
    is_cython_builtin = 1
    # Guards the lazy one-time initialization in load_cythonscope().
    _cythonscope_initialized = False

    def __init__(self, context):
        ModuleScope.__init__(self, u'cython', None, None)
        self.pxd_file_loaded = True
        self.populate_cython_scope()
        # The Main.Context object
        self.context = context

        # Register the fused numeric pseudo-types; cname '<error>' because
        # they must never appear literally in generated C code.
        for fused_type in (cy_integral_type, cy_floating_type, cy_numeric_type):
            entry = self.declare_typedef(fused_type.name,
                                         fused_type,
                                         None,
                                         cname='<error>')
            entry.in_cinclude = True

    def is_cpp(self):
        # Allow C++ utility code in C++ contexts.
        return self.context.cpp

    def lookup_type(self, name):
        # This function should go away when types are all first-level objects.
        type = parse_basic_type(name)
        if type:
            return type

        return super(CythonScope, self).lookup_type(name)

    def lookup(self, name):
        entry = super(CythonScope, self).lookup(name)

        # Lazily populate the scope before giving up on the name.
        if entry is None and not self._cythonscope_initialized:
            self.load_cythonscope()
            entry = super(CythonScope, self).lookup(name)

        return entry

    def find_module(self, module_name, pos):
        # The cython scope has no importable submodules of its own.
        error("cython.%s is not available" % module_name, pos)

    def find_submodule(self, module_name, as_package=False):
        entry = self.entries.get(module_name, None)
        if not entry:
            self.load_cythonscope()
            entry = self.entries.get(module_name, None)

        if entry and entry.as_module:
            return entry.as_module
        else:
            # TODO: fix find_submodule control flow so that we're not
            # expected to create a submodule here (to protect CythonScope's
            # possible immutability). Hack ourselves out of the situation
            # for now.
            raise error((StringSourceDescriptor(u"cython", u""), 0, 0),
                        "cython.%s is not available" % module_name)

    def lookup_qualified_name(self, qname):
        # ExprNode.as_cython_attribute generates qnames and we untangle it here...
        name_path = qname.split(u'.')
        scope = self
        while len(name_path) > 1:
            scope = scope.lookup_here(name_path[0])
            if scope:
                scope = scope.as_module
            del name_path[0]
            if scope is None:
                return None
        else:
            return scope.lookup_here(name_path[0])

    def populate_cython_scope(self):
        # These are used to optimize isinstance in FinalOptimizePhase
        type_object = self.declare_typedef(
            'PyTypeObject',
            base_type = c_void_type,
            pos = None,
            cname = 'PyTypeObject')
        type_object.is_void = True
        type_object_type = type_object.type

        self.declare_cfunction(
            'PyObject_TypeCheck',
            CFuncType(c_bint_type, [CFuncTypeArg("o", py_object_type, None),
                                    CFuncTypeArg("t", c_ptr_type(type_object_type), None)]),
            pos = None,
            defining = 1,
            cname = 'PyObject_TypeCheck')

    def load_cythonscope(self):
        """
        Creates some entries for testing purposes and entries for
        cython.array() and for cython.view.*.
        """
        if self._cythonscope_initialized:
            return

        self._cythonscope_initialized = True
        cython_testscope_utility_code.declare_in_scope(
            self, cython_scope=self)
        cython_test_extclass_utility_code.declare_in_scope(
            self, cython_scope=self)

        #
        # The view sub-scope
        #
        self.viewscope = viewscope = ModuleScope(u'view', self, None)
        self.declare_module('view', viewscope, None).as_module = viewscope
        viewscope.is_cython_builtin = True
        viewscope.pxd_file_loaded = True

        cythonview_testscope_utility_code.declare_in_scope(
            viewscope, cython_scope=self)

        view_utility_scope = MemoryView.view_utility_code.declare_in_scope(
            self.viewscope, cython_scope=self,
            allowlist=MemoryView.view_utility_allowlist)

        # Marks the types as being cython_builtin_type so that they can be
        # extended from without Cython attempting to import cython.view
        ext_types = [ entry.type
                      for entry in view_utility_scope.entries.values()
                      if entry.type.is_extension_type ]
        for ext_type in ext_types:
            ext_type.is_cython_builtin_type = 1

        # self.entries["array"] = view_utility_scope.entries.pop("array")

        # dataclasses scope
        dc_str = EncodedString(u'dataclasses')
        dataclassesscope = ModuleScope(dc_str, self, context=None)
        self.declare_module(dc_str, dataclassesscope, pos=None).as_module = dataclassesscope
        dataclassesscope.is_cython_builtin = True
        dataclassesscope.pxd_file_loaded = True
        # doesn't actually have any contents
|
| 149 |
+
|
| 150 |
+
def create_cython_scope(context):
    """Return a fresh CythonScope bound to the given Main.Context."""
    # One could in fact probably make it a singleton,
    # but not sure yet whether any code mutates it (which would kill reusing
    # it across different contexts)
    return CythonScope(context)
|
| 155 |
+
|
| 156 |
+
# Load test utilities for the cython scope
|
| 157 |
+
|
| 158 |
+
def load_testscope_utility(cy_util_name, **kwargs):
    """Load the named utility from TestCythonScope.pyx (test-only helpers)."""
    return CythonUtilityCode.load(cy_util_name, "TestCythonScope.pyx", **kwargs)
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
# C prototypes for the TestClass methods, which are generated without
# decoration and therefore need explicit forward declarations.
undecorated_methods_protos = UtilityCode(proto=u"""
    /* These methods are undecorated and have therefore no prototype */
    static PyObject *__pyx_TestClass_cdef_method(
            struct __pyx_TestClass_obj *self, int value);
    static PyObject *__pyx_TestClass_cpdef_method(
            struct __pyx_TestClass_obj *self, int value, int skip_dispatch);
    static PyObject *__pyx_TestClass_def_method(
            PyObject *self, PyObject *value);
""")

cython_testscope_utility_code = load_testscope_utility("TestScope")

test_cython_utility_dep = load_testscope_utility("TestDep")

cython_test_extclass_utility_code = \
    load_testscope_utility("TestClass", name="TestClass",
                           requires=[undecorated_methods_protos,
                                     test_cython_utility_dep])

cythonview_testscope_utility_code = load_testscope_utility("View.TestScope")
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Dataclass.py
ADDED
|
@@ -0,0 +1,839 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# functions to transform a c class into a dataclass
|
| 2 |
+
|
| 3 |
+
from collections import OrderedDict
|
| 4 |
+
from textwrap import dedent
|
| 5 |
+
import operator
|
| 6 |
+
|
| 7 |
+
from . import ExprNodes
|
| 8 |
+
from . import Nodes
|
| 9 |
+
from . import PyrexTypes
|
| 10 |
+
from . import Builtin
|
| 11 |
+
from . import Naming
|
| 12 |
+
from .Errors import error, warning
|
| 13 |
+
from .Code import UtilityCode, TempitaUtilityCode, PyxCodeWriter
|
| 14 |
+
from .Visitor import VisitorTransform
|
| 15 |
+
from .StringEncoding import EncodedString
|
| 16 |
+
from .TreeFragment import TreeFragment
|
| 17 |
+
from .ParseTreeTransforms import NormalizeTree, SkipDeclarations
|
| 18 |
+
from .Options import copy_inherited_directives
|
| 19 |
+
|
| 20 |
+
# Cache for the utility code that loads the "dataclasses" shim module.
# Filled in lazily by make_dataclasses_module_callnode() so the utility
# code is only generated once per compilation run.
_dataclass_loader_utilitycode = None

def make_dataclasses_module_callnode(pos):
    """
    Return an expression node that, at runtime, yields the `dataclasses`
    module (loading the bundled Python fallback implementation if needed).
    """
    global _dataclass_loader_utilitycode
    if not _dataclass_loader_utilitycode:
        python_utility_code = UtilityCode.load_cached("Dataclasses_fallback", "Dataclasses.py")
        python_utility_code = EncodedString(python_utility_code.impl)
        _dataclass_loader_utilitycode = TempitaUtilityCode.load(
            "SpecificModuleLoader", "Dataclasses.c",
            context={'cname': "dataclasses", 'py_code': python_utility_code.as_c_string_literal()})
    return ExprNodes.PythonCapiCallNode(
        pos, "__Pyx_Load_dataclasses_Module",
        PyrexTypes.CFuncType(PyrexTypes.py_object_type, []),
        utility_code=_dataclass_loader_utilitycode,
        args=[],
    )
|
| 36 |
+
|
| 37 |
+
def make_dataclass_call_helper(pos, callable, kwds):
    """
    Return an expression node that calls `callable(**kwds)` through the
    __Pyx_DataclassesCallHelper C utility function.

    :param pos: source position for the generated nodes
    :param callable: expression node evaluating to the callable
    :param kwds: expression node evaluating to the keyword dict
    """
    utility_code = UtilityCode.load_cached("DataclassesCallHelper", "Dataclasses.c")
    func_type = PyrexTypes.CFuncType(
        PyrexTypes.py_object_type, [
            PyrexTypes.CFuncTypeArg("callable", PyrexTypes.py_object_type, None),
            PyrexTypes.CFuncTypeArg("kwds", PyrexTypes.py_object_type, None)
        ],
    )
    return ExprNodes.PythonCapiCallNode(
        pos,
        function_name="__Pyx_DataclassesCallHelper",
        func_type=func_type,
        utility_code=utility_code,
        args=[callable, kwds],
    )
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class RemoveAssignmentsToNames(VisitorTransform, SkipDeclarations):
    """
    Cython (and Python) normally treats

        class A:
            x = 1

    as generating a class attribute. However for dataclasses the `= 1` should be interpreted as
    a default value to initialize an instance attribute with.
    This transform therefore removes the `x=1` assignment so that the class attribute isn't
    generated, while recording what it has removed so that it can be used in the initialization.
    """
    def __init__(self, names):
        super(RemoveAssignmentsToNames, self).__init__()
        # names of the variables (i.e. dataclass fields) whose assignments to strip
        self.names = names
        # maps field name -> removed default-value expression node
        self.removed_assignments = {}

    # NOTE(review): the visitor dispatches on tree class names (CClassDefNode,
    # PyClassDefNode); the un-suffixed spellings seen in the incoming text would
    # never be called, so the Def names are used here — confirm against upstream.
    def visit_CClassDefNode(self, node):
        self.visitchildren(node)
        return node

    def visit_PyClassDefNode(self, node):
        return node  # go no further

    def visit_FuncDefNode(self, node):
        return node  # go no further

    def visit_SingleAssignmentNode(self, node):
        if node.lhs.is_name and node.lhs.name in self.names:
            if node.lhs.name in self.removed_assignments:
                warning(node.pos, ("Multiple assignments for '%s' in dataclass; "
                                   "using most recent") % node.lhs.name, 1)
            self.removed_assignments[node.lhs.name] = node.rhs
            return []  # drop the assignment from the tree
        return node

    # I believe cascaded assignment is always a syntax error with annotations
    # so there's no need to define visit_CascadedAssignmentNode

    def visit_Node(self, node):
        self.visitchildren(node)
        return node
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class TemplateCode(object):
    """
    Adds the ability to keep track of placeholder argument names to PyxCodeWriter.

    Also adds extra_stats which are nodes bundled at the end when this
    is converted to a tree.
    """
    _placeholder_count = 0

    def __init__(self, writer=None, placeholders=None, extra_stats=None):
        self.writer = PyxCodeWriter() if writer is None else writer
        self.placeholders = {} if placeholders is None else placeholders
        self.extra_stats = [] if extra_stats is None else extra_stats

    def add_code_line(self, code_line):
        """Append a single line of code to the underlying writer."""
        self.writer.putln(code_line)

    def add_code_lines(self, code_lines):
        """Append each line in ``code_lines`` to the underlying writer."""
        for line in code_lines:
            self.writer.putln(line)

    def reset(self):
        # don't attempt to reset placeholders - it really doesn't matter if
        # we have unused placeholders
        self.writer.reset()

    def empty(self):
        """True if no code has been written yet."""
        return self.writer.empty()

    def indenter(self):
        """Context manager that indents code written inside it."""
        return self.writer.indenter()

    def new_placeholder(self, field_names, value):
        """Register ``value`` under a fresh placeholder name and return the name."""
        name = self._new_placeholder_name(field_names)
        self.placeholders[name] = value
        return name

    def add_extra_statements(self, statements):
        """Queue statement nodes to be appended when the tree is generated."""
        if self.extra_stats is None:
            assert False, "Can only use add_extra_statements on top-level writer"
        self.extra_stats.extend(statements)

    def _new_placeholder_name(self, field_names):
        while True:
            name = "DATACLASS_PLACEHOLDER_%d" % self._placeholder_count
            if (name not in self.placeholders
                    and name not in field_names):
                # make sure name isn't already used and doesn't
                # conflict with a variable name (which is unlikely but possible)
                break
            self._placeholder_count += 1
        return name

    def generate_tree(self, level='c_class'):
        """Parse the accumulated code into a statement-list node, substituting
        the recorded placeholders, and append any extra statements."""
        stat_list_node = TreeFragment(
            self.writer.getvalue(),
            level=level,
            pipeline=[NormalizeTree(None)],
        ).substitute(self.placeholders)

        stat_list_node.stats += self.extra_stats
        return stat_list_node

    def insertion_point(self):
        """Return a TemplateCode writing at an insertion point of this writer;
        placeholders and extra_stats are shared with the parent."""
        new_writer = self.writer.insertion_point()
        return TemplateCode(
            writer=new_writer,
            placeholders=self.placeholders,
            extra_stats=self.extra_stats
        )
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class _MISSING_TYPE(object):
|
| 171 |
+
pass
|
| 172 |
+
MISSING = _MISSING_TYPE()
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
class Field(object):
    """
    Field is based on the dataclasses.field class from the standard library module.
    It is used internally during the generation of Cython dataclasses to keep track
    of the settings for individual attributes.

    Attributes of this class are stored as nodes so they can be used in code construction
    more readily (i.e. we store BoolNode rather than bool)
    """
    default = MISSING
    default_factory = MISSING
    private = False

    # keyword arguments that must be compile-time literals
    literal_keys = ("repr", "hash", "init", "compare", "metadata")

    # default values are defined by the CPython dataclasses.field
    def __init__(self, pos, default=MISSING, default_factory=MISSING,
                 repr=None, hash=None, init=None,
                 compare=None, metadata=None,
                 is_initvar=False, is_classvar=False,
                 **additional_kwds):
        if default is not MISSING:
            self.default = default
        if default_factory is not MISSING:
            self.default_factory = default_factory
        self.repr = repr or ExprNodes.BoolNode(pos, value=True)
        self.hash = hash or ExprNodes.NoneNode(pos)
        self.init = init or ExprNodes.BoolNode(pos, value=True)
        self.compare = compare or ExprNodes.BoolNode(pos, value=True)
        self.metadata = metadata or ExprNodes.NoneNode(pos)
        self.is_initvar = is_initvar
        self.is_classvar = is_classvar

        for k, v in additional_kwds.items():
            # There should not be any additional keywords!
            error(v.pos, "cython.dataclasses.field() got an unexpected keyword argument '%s'" % k)

        for field_name in self.literal_keys:
            field_value = getattr(self, field_name)
            if not field_value.is_literal:
                error(field_value.pos,
                      "cython.dataclasses.field parameter '%s' must be a literal value" % field_name)

    def iterate_record_node_arguments(self):
        """Yield (name, node) pairs for all settings that were provided, for
        recording in __dataclass_fields__."""
        for key in (self.literal_keys + ('default', 'default_factory')):
            value = getattr(self, key)
            if value is not MISSING:
                yield key, value
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
def process_class_get_fields(node):
    """
    Collect the dataclass fields of a cdef class node.

    Inherits fields from the first base type that has dataclass fields,
    strips default-value assignments from the class body (recording them),
    and returns an OrderedDict mapping field name -> Field. The result is
    also stored on ``node.entry.type.dataclass_fields``.
    """
    var_entries = node.scope.var_entries
    # order of definition is used in the dataclass
    var_entries = sorted(var_entries, key=operator.attrgetter('pos'))
    var_names = [entry.name for entry in var_entries]

    # don't treat `x = 1` as an assignment of a class attribute within the dataclass
    transform = RemoveAssignmentsToNames(var_names)
    transform(node)
    default_value_assignments = transform.removed_assignments

    base_type = node.base_type
    fields = OrderedDict()
    while base_type:
        if base_type.is_external or not base_type.scope.implemented:
            warning(node.pos, "Cannot reliably handle Cython dataclasses with base types "
                    "in external modules since it is not possible to tell what fields they have", 2)
        if base_type.dataclass_fields:
            fields = base_type.dataclass_fields.copy()
            break
        base_type = base_type.base_type

    for entry in var_entries:
        name = entry.name
        is_initvar = entry.declared_with_pytyping_modifier("dataclasses.InitVar")
        # TODO - classvars aren't included in "var_entries" so are missed here
        # and thus this code is never triggered
        is_classvar = entry.declared_with_pytyping_modifier("typing.ClassVar")
        if name in default_value_assignments:
            assignment = default_value_assignments[name]
            if (isinstance(assignment, ExprNodes.CallNode) and (
                    assignment.function.as_cython_attribute() == "dataclasses.field" or
                    Builtin.exprnode_to_known_standard_library_name(
                        assignment.function, node.scope) == "dataclasses.field")):
                # I believe most of this is well-enforced when it's treated as a directive
                # but it doesn't hurt to make sure
                valid_general_call = (isinstance(assignment, ExprNodes.GeneralCallNode)
                                      and isinstance(assignment.positional_args, ExprNodes.TupleNode)
                                      and not assignment.positional_args.args
                                      and (assignment.keyword_args is None
                                           or isinstance(assignment.keyword_args, ExprNodes.DictNode)))
                valid_simple_call = (isinstance(assignment, ExprNodes.SimpleCallNode) and not assignment.args)
                if not (valid_general_call or valid_simple_call):
                    error(assignment.pos, "Call to 'cython.dataclasses.field' must only consist "
                          "of compile-time keyword arguments")
                    continue
                keyword_args = (assignment.keyword_args.as_python_dict()
                                if valid_general_call and assignment.keyword_args else {})
                if 'default' in keyword_args and 'default_factory' in keyword_args:
                    error(assignment.pos, "cannot specify both default and default_factory")
                    continue
                field = Field(node.pos, **keyword_args)
            else:
                if assignment.type in [Builtin.list_type, Builtin.dict_type, Builtin.set_type]:
                    # The standard library module generates a TypeError at runtime
                    # in this situation.
                    # Error message is copied from CPython
                    error(assignment.pos, "mutable default <class '{0}'> for field {1} is not allowed: "
                          "use default_factory".format(assignment.type.name, name))

                field = Field(node.pos, default=assignment)
        else:
            field = Field(node.pos)
        field.is_initvar = is_initvar
        field.is_classvar = is_classvar
        if entry.visibility == "private":
            field.private = True
        fields[name] = field
    node.entry.type.dataclass_fields = fields
    return fields
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform):
    """
    Transform a cdef class decorated with @cython.dataclasses.dataclass:
    validate the decorator arguments, collect fields, attach
    __dataclass_params__ / __dataclass_fields__, and generate the special
    methods (__init__, __repr__, comparisons, __hash__) as requested.
    """
    # default argument values from https://docs.python.org/3/library/dataclasses.html
    kwargs = dict(init=True, repr=True, eq=True,
                  order=False, unsafe_hash=False,
                  frozen=False, kw_only=False)
    if dataclass_args is not None:
        if dataclass_args[0]:
            error(node.pos, "cython.dataclasses.dataclass takes no positional arguments")
        for k, v in dataclass_args[1].items():
            if k not in kwargs:
                error(node.pos,
                      "cython.dataclasses.dataclass() got an unexpected keyword argument '%s'" % k)
            if not isinstance(v, ExprNodes.BoolNode):
                error(node.pos,
                      "Arguments passed to cython.dataclasses.dataclass must be True or False")
            kwargs[k] = v.value

    kw_only = kwargs['kw_only']

    fields = process_class_get_fields(node)

    dataclass_module = make_dataclasses_module_callnode(node.pos)

    # create __dataclass_params__ attribute. I try to use the exact
    # `_DataclassParams` class defined in the standard library module if at all possible
    # for maximum duck-typing compatibility.
    dataclass_params_func = ExprNodes.AttributeNode(node.pos, obj=dataclass_module,
                                                    attribute=EncodedString("_DataclassParams"))
    # NOTE(review): 'kw_only' appears both in kwargs.items() and in the explicit
    # extra list below, so the pair is emitted twice — confirm the later entry
    # is the intended winner.
    dataclass_params_keywords = ExprNodes.DictNode.from_pairs(
        node.pos,
        [ (ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(k)),
           ExprNodes.BoolNode(node.pos, value=v))
          for k, v in kwargs.items() ] +
        [ (ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(k)),
           ExprNodes.BoolNode(node.pos, value=v))
          for k, v in [('kw_only', kw_only), ('match_args', False),
                       ('slots', False), ('weakref_slot', False)]
        ])
    dataclass_params = make_dataclass_call_helper(
        node.pos, dataclass_params_func, dataclass_params_keywords)
    dataclass_params_assignment = Nodes.SingleAssignmentNode(
        node.pos,
        lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__dataclass_params__")),
        rhs=dataclass_params)

    dataclass_fields_stats = _set_up_dataclass_fields(node, fields, dataclass_module)

    stats = Nodes.StatListNode(node.pos,
                               stats=[dataclass_params_assignment] + dataclass_fields_stats)

    code = TemplateCode()
    generate_init_code(code, kwargs['init'], node, fields, kw_only)
    generate_repr_code(code, kwargs['repr'], node, fields)
    generate_eq_code(code, kwargs['eq'], node, fields)
    generate_order_code(code, kwargs['order'], node, fields)
    generate_hash_code(code, kwargs['unsafe_hash'], kwargs['eq'], kwargs['frozen'], node, fields)

    stats.stats += code.generate_tree().stats

    # turn off annotation typing, so all arguments to __init__ are accepted as
    # generic objects and thus can accept _HAS_DEFAULT_FACTORY.
    # Type conversion comes later
    comp_directives = Nodes.CompilerDirectivesNode(node.pos,
        directives=copy_inherited_directives(node.scope.directives, annotation_typing=False),
        body=stats)

    comp_directives.analyse_declarations(node.scope)
    # probably already in this scope, but it doesn't hurt to make sure
    analyse_decs_transform.enter_scope(node, node.scope)
    analyse_decs_transform.visit(comp_directives)
    analyse_decs_transform.exit_scope()

    node.body.stats.append(comp_directives)
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
def generate_init_code(code, init, node, fields, kw_only):
    """
    Notes on CPython generated "__init__":
    * Implemented in `_init_fn`.
    * The use of the `dataclasses._HAS_DEFAULT_FACTORY` sentinel value as
      the default argument for fields that need constructing with a factory
      function is copied from the CPython implementation. (`None` isn't
      suitable because it could also be a value for the user to pass.)
      There's no real reason why it needs importing from the dataclasses module
      though - it could equally be a value generated by Cython when the module loads.
    * seen_default and the associated error message are copied directly from Python
    * Call to user-defined __post_init__ function (if it exists) is copied from
      CPython.

    Cython behaviour deviates a little here (to be decided if this is right...)
    Because the class variable from the assignment does not exist Cython fields will
    return None (or whatever their type default is) if not initialized while Python
    dataclasses will fall back to looking up the class variable.
    """
    if not init or node.scope.lookup_here("__init__"):
        return

    # selfname behaviour copied from the cpython module
    selfname = "__dataclass_self__" if "self" in fields else "self"
    args = [selfname]

    if kw_only:
        args.append("*")

    # the "def __init__(...):" line is written last, once the full argument
    # list is known, so reserve an insertion point for it here
    function_start_point = code.insertion_point()
    code = code.insertion_point()

    # create a temp to get _HAS_DEFAULT_FACTORY
    dataclass_module = make_dataclasses_module_callnode(node.pos)
    has_default_factory = ExprNodes.AttributeNode(
        node.pos,
        obj=dataclass_module,
        attribute=EncodedString("_HAS_DEFAULT_FACTORY")
    )

    default_factory_placeholder = code.new_placeholder(fields, has_default_factory)

    seen_default = False
    for name, field in fields.items():
        entry = node.scope.lookup(name)
        if entry.annotation:
            annotation = u": %s" % entry.annotation.string.value
        else:
            annotation = u""
        assignment = u''
        if field.default is not MISSING or field.default_factory is not MISSING:
            seen_default = True
            if field.default_factory is not MISSING:
                ph_name = default_factory_placeholder
            else:
                ph_name = code.new_placeholder(fields, field.default)  # 'default' should be a node
            assignment = u" = %s" % ph_name
        elif seen_default and not kw_only and field.init.value:
            error(entry.pos, ("non-default argument '%s' follows default argument "
                              "in dataclass __init__") % name)
            code.reset()
            return

        if field.init.value:
            args.append(u"%s%s%s" % (name, annotation, assignment))

        if field.is_initvar:
            continue
        elif field.default_factory is MISSING:
            if field.init.value:
                code.add_code_line(u"    %s.%s = %s" % (selfname, name, name))
            elif assignment:
                # not an argument to the function, but is still initialized
                code.add_code_line(u"    %s.%s%s" % (selfname, name, assignment))
        else:
            ph_name = code.new_placeholder(fields, field.default_factory)
            if field.init.value:
                # close to:
                # def __init__(self, name=_PLACEHOLDER_VALUE):
                #     self.name = name_default_factory() if name is _PLACEHOLDER_VALUE else name
                code.add_code_line(u"    %s.%s = %s() if %s is %s else %s" % (
                    selfname, name, ph_name, name, default_factory_placeholder, name))
            else:
                # still need to use the default factory to initialize
                code.add_code_line(u"    %s.%s = %s()" % (
                    selfname, name, ph_name))

    if node.scope.lookup("__post_init__"):
        post_init_vars = ", ".join(name for name, field in fields.items()
                                   if field.is_initvar)
        code.add_code_line("    %s.__post_init__(%s)" % (selfname, post_init_vars))

    if code.empty():
        code.add_code_line("    pass")

    args = u", ".join(args)
    function_start_point.add_code_line(u"def __init__(%s):" % args)
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def generate_repr_code(code, repr, node, fields):
    """
    The core of the CPython implementation is just:
    ['return self.__class__.__qualname__ + f"(' +
      ', '.join([f"{f.name}={{self.{f.name}!r}}"
                 for f in fields]) +
      ')"'],

    The only notable difference here is self.__class__.__qualname__ -> type(self).__name__
    which is because Cython currently supports Python 2.

    However, it also has some guards for recursive repr invocations. In the standard
    library implementation they're done with a wrapper decorator that captures a set
    (with the set keyed by id and thread). Here we create a set as a thread local
    variable and key only by id.
    """
    if not repr or node.scope.lookup("__repr__"):
        return

    # The recursive guard is likely a little costly, so skip it if possible.
    # is_gc_simple defines where it can contain recursive objects
    needs_recursive_guard = False
    for name in fields.keys():
        entry = node.scope.lookup(name)
        type_ = entry.type
        if type_.is_memoryviewslice:
            type_ = type_.dtype
        if not type_.is_pyobject:
            continue  # no GC
        if not type_.is_gc_simple:
            needs_recursive_guard = True
            break

    if needs_recursive_guard:
        code.add_code_line("__pyx_recursive_repr_guard = __import__('threading').local()")
        code.add_code_line("__pyx_recursive_repr_guard.running = set()")
    code.add_code_line("def __repr__(self):")
    if needs_recursive_guard:
        code.add_code_line("    key = id(self)")
        code.add_code_line("    guard_set = self.__pyx_recursive_repr_guard.running")
        code.add_code_line("    if key in guard_set: return '...'")
        code.add_code_line("    guard_set.add(key)")
        code.add_code_line("    try:")
    strs = [u"%s={self.%s!r}" % (name, name)
            for name, field in fields.items()
            if field.repr.value and not field.is_initvar]
    format_string = u", ".join(strs)

    # The shared body lines use an 8-space indent so they are valid both as the
    # plain function body and as the body of the optional "try:" above
    # (Python accepts any consistent indent width).
    # NOTE(review): generated-string indentation was reconstructed from a
    # whitespace-mangled rendering — confirm against upstream.
    code.add_code_line(u'        name = getattr(type(self), "__qualname__", type(self).__name__)')
    code.add_code_line(u"        return f'{name}(%s)'" % format_string)
    if needs_recursive_guard:
        code.add_code_line("    finally:")
        code.add_code_line("        guard_set.remove(key)")
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
def generate_cmp_code(code, op, funcname, node, fields):
    """
    Generate a rich-comparison method (e.g. __eq__, __lt__) named ``funcname``
    that compares the dataclass fields with ``op`` using tuple-comparison
    semantics, unless the user already defined the method.
    """
    if node.scope.lookup_here(funcname):
        return

    names = [name for name, field in fields.items() if (field.compare.value and not field.is_initvar)]

    # (Previously the condition and the "return NotImplemented" were a single
    # implicitly-concatenated string; emitted as two properly indented lines
    # here for clarity — the generated behaviour is identical.)
    code.add_code_lines([
        "def %s(self, other):" % funcname,
        "    if other.__class__ is not self.__class__:",
        "        return NotImplemented",
        "    cdef %s other_cast" % node.class_name,
        "    other_cast = <%s>other" % node.class_name,
    ])

    # The Python implementation of dataclasses.py does a tuple comparison
    # (roughly):
    #  return self._attributes_to_tuple() {op} other._attributes_to_tuple()
    #
    # For the Cython implementation a tuple comparison isn't an option because
    # not all attributes can be converted to Python objects and stored in a tuple
    #
    # TODO - better diagnostics of whether the types support comparison before
    #  generating the code. Plus, do we want to convert C structs to dicts and
    #  compare them that way (I think not, but it might be in demand)?
    op_without_equals = op.replace('=', '')

    for name in names:
        if op != '==':
            # tuple comparison rules - early elements take precedence
            code.add_code_line("    if self.%s %s other_cast.%s: return True" % (
                name, op_without_equals, name))
        code.add_code_line("    if self.%s != other_cast.%s: return False" % (
            name, name))
    if "=" in op:
        code.add_code_line("    return True")  # "() == ()" is True
    else:
        code.add_code_line("    return False")
|
| 563 |
+
|
| 564 |
+
|
| 565 |
+
def generate_eq_code(code, eq, node, fields):
    """Generate __eq__ for the dataclass if ``eq`` was requested."""
    if not eq:
        return
    generate_cmp_code(code, "==", "__eq__", node, fields)
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
def generate_order_code(code, order, node, fields):
    """Generate __lt__/__le__/__gt__/__ge__ for the dataclass if ``order``
    was requested."""
    if not order:
        return

    for op, name in [("<", "__lt__"),
                     ("<=", "__le__"),
                     (">", "__gt__"),
                     (">=", "__ge__")]:
        generate_cmp_code(code, op, name, node, fields)
|
| 580 |
+
|
| 581 |
+
|
| 582 |
+
def generate_hash_code(code, unsafe_hash, eq, frozen, node, fields):
    """
    Copied from CPython implementation - the intention is to follow this as far as
    is possible:
    #    +------------------- unsafe_hash= parameter
    #    |       +----------- eq= parameter
    #    |       |       +--- frozen= parameter
    #    |       |       |
    #    v       v       v    |        |        |
    #                         |   no   |  yes   |  <--- class has explicitly defined __hash__
    # +=======+=======+=======+========+========+
    # | False | False | False |        |        | No __eq__, use the base class __hash__
    # +-------+-------+-------+--------+--------+
    # | False | False | True  |        |        | No __eq__, use the base class __hash__
    # +-------+-------+-------+--------+--------+
    # | False | True  | False | None   |        | <-- the default, not hashable
    # +-------+-------+-------+--------+--------+
    # | False | True  | True  | add    |        | Frozen, so hashable, allows override
    # +-------+-------+-------+--------+--------+
    # | True  | False | False | add    | raise  | Has no __eq__, but hashable
    # +-------+-------+-------+--------+--------+
    # | True  | False | True  | add    | raise  | Has no __eq__, but hashable
    # +-------+-------+-------+--------+--------+
    # | True  | True  | False | add    | raise  | Not frozen, but hashable
    # +-------+-------+-------+--------+--------+
    # | True  | True  | True  | add    | raise  | Frozen, so hashable
    # +=======+=======+=======+========+========+
    # For boxes that are blank, __hash__ is untouched and therefore
    # inherited from the base class.  If the base is object, then
    # id-based hashing is used.

    The Python implementation creates a tuple of all the fields, then hashes them.
    This implementation creates a tuple of all the hashes of all the fields and hashes that.
    The reason for this slight difference is to avoid to-Python conversions for anything
    that Cython knows how to hash directly (It doesn't look like this currently applies to
    anything though...).
    """

    hash_entry = node.scope.lookup_here("__hash__")
    if hash_entry:
        # TODO ideally assignment of __hash__ to None shouldn't trigger this
        # but difficult to get the right information here
        if unsafe_hash:
            # error message taken from CPython dataclasses module
            error(node.pos, "Cannot overwrite attribute __hash__ in class %s" % node.class_name)
        return

    if not unsafe_hash:
        if not eq:
            return
        if not frozen:
            # default case: eq but not frozen -> explicitly unhashable
            code.add_extra_statements([
                Nodes.SingleAssignmentNode(
                    node.pos,
                    lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__hash__")),
                    rhs=ExprNodes.NoneNode(node.pos),
                )
            ])
            return

    # hash=None on a field means "follow compare"; otherwise the explicit value wins
    names = [
        name for name, field in fields.items()
        if not field.is_initvar and (
            field.compare.value if field.hash.value is None else field.hash.value)
    ]

    # make a tuple of the hashes
    # NOTE(review): the generated code below actually hashes a tuple of the
    # field *values* (hash((self.a, ...))) — see docstring caveat.
    hash_tuple_items = u", ".join(u"self.%s" % name for name in names)
    if hash_tuple_items:
        hash_tuple_items += u","  # ensure that one arg form is a tuple

    # if we're here we want to generate a hash
    code.add_code_lines([
        "def __hash__(self):",
        "    return hash((%s))" % hash_tuple_items,
    ])
|
| 658 |
+
|
| 659 |
+
|
| 660 |
+
def get_field_type(pos, entry):
    """
    sets the .type attribute for a field

    Returns the annotation if possible (since this is what the dataclasses
    module does). If not (for example, attributes defined with cdef) then
    it creates a string fallback.
    """
    if entry.annotation:
        # Right now it doesn't look like cdef classes generate an
        # __annotations__ dict, therefore it's safe to just return
        # entry.annotation
        # (TODO: remove .string if we ditch PEP563)
        return entry.annotation.string
        # If they do in future then we may need to look up into that
        # to duplicating the node. The code below should do this:
        #class_name_node = ExprNodes.NameNode(pos, name=entry.scope.name)
        #annotations = ExprNodes.AttributeNode(
        #    pos, obj=class_name_node,
        #    attribute=EncodedString("__annotations__")
        #)
        #return ExprNodes.IndexNode(
        #    pos, base=annotations,
        #    index=ExprNodes.StringNode(pos, value=entry.name)
        #)
    else:
        # it's slightly unclear what the best option is here - we could
        # try to return PyType_Type. This case should only happen with
        # attributes defined with cdef so Cython is free to make it's own
        # decision
        s = EncodedString(entry.type.declaration_code("", for_display=1))
        return ExprNodes.StringNode(pos, value=s)
|
| 692 |
+
|
| 693 |
+
|
| 694 |
+
class FieldRecordNode(ExprNodes.ExprNode):
|
| 695 |
+
"""
|
| 696 |
+
__dataclass_fields__ contains a bunch of field objects recording how each field
|
| 697 |
+
of the dataclass was initialized (mainly corresponding to the arguments passed to
|
| 698 |
+
the "field" function). This node is used for the attributes of these field objects.
|
| 699 |
+
|
| 700 |
+
If possible, coerces `arg` to a Python object.
|
| 701 |
+
Otherwise, generates a sensible backup string.
|
| 702 |
+
"""
|
| 703 |
+
subexprs = ['arg']
|
| 704 |
+
|
| 705 |
+
def __init__(self, pos, arg):
|
| 706 |
+
super(FieldRecordNode, self).__init__(pos, arg=arg)
|
| 707 |
+
|
| 708 |
+
def analyse_types(self, env):
|
| 709 |
+
self.arg.analyse_types(env)
|
| 710 |
+
self.type = self.arg.type
|
| 711 |
+
return self
|
| 712 |
+
|
| 713 |
+
def coerce_to_pyobject(self, env):
|
| 714 |
+
if self.arg.type.can_coerce_to_pyobject(env):
|
| 715 |
+
return self.arg.coerce_to_pyobject(env)
|
| 716 |
+
else:
|
| 717 |
+
# A string representation of the code that gave the field seems like a reasonable
|
| 718 |
+
# fallback. This'll mostly happen for "default" and "default_factory" where the
|
| 719 |
+
# type may be a C-type that can't be converted to Python.
|
| 720 |
+
return self._make_string()
|
| 721 |
+
|
| 722 |
+
def _make_string(self):
|
| 723 |
+
from .AutoDocTransforms import AnnotationWriter
|
| 724 |
+
writer = AnnotationWriter(description="Dataclass field")
|
| 725 |
+
string = writer.write(self.arg)
|
| 726 |
+
return ExprNodes.StringNode(self.pos, value=EncodedString(string))
|
| 727 |
+
|
| 728 |
+
def generate_evaluation_code(self, code):
|
| 729 |
+
return self.arg.generate_evaluation_code(code)
|
| 730 |
+
|
| 731 |
+
|
| 732 |
+
def _set_up_dataclass_fields(node, fields, dataclass_module):
|
| 733 |
+
# For defaults and default_factories containing things like lambda,
|
| 734 |
+
# they're already declared in the class scope, and it creates a big
|
| 735 |
+
# problem if multiple copies are floating around in both the __init__
|
| 736 |
+
# function, and in the __dataclass_fields__ structure.
|
| 737 |
+
# Therefore, create module-level constants holding these values and
|
| 738 |
+
# pass those around instead
|
| 739 |
+
#
|
| 740 |
+
# If possible we use the `Field` class defined in the standard library
|
| 741 |
+
# module so that the information stored here is as close to a regular
|
| 742 |
+
# dataclass as is possible.
|
| 743 |
+
variables_assignment_stats = []
|
| 744 |
+
for name, field in fields.items():
|
| 745 |
+
if field.private:
|
| 746 |
+
continue # doesn't appear in the public interface
|
| 747 |
+
for attrname in [ "default", "default_factory" ]:
|
| 748 |
+
field_default = getattr(field, attrname)
|
| 749 |
+
if field_default is MISSING or field_default.is_literal or field_default.is_name:
|
| 750 |
+
# some simple cases where we don't need to set up
|
| 751 |
+
# the variable as a module-level constant
|
| 752 |
+
continue
|
| 753 |
+
global_scope = node.scope.global_scope()
|
| 754 |
+
module_field_name = global_scope.mangle(
|
| 755 |
+
global_scope.mangle(Naming.dataclass_field_default_cname, node.class_name),
|
| 756 |
+
name)
|
| 757 |
+
# create an entry in the global scope for this variable to live
|
| 758 |
+
field_node = ExprNodes.NameNode(field_default.pos, name=EncodedString(module_field_name))
|
| 759 |
+
field_node.entry = global_scope.declare_var(
|
| 760 |
+
field_node.name, type=field_default.type or PyrexTypes.unspecified_type,
|
| 761 |
+
pos=field_default.pos, cname=field_node.name, is_cdef=True,
|
| 762 |
+
# TODO: do we need to set 'pytyping_modifiers' here?
|
| 763 |
+
)
|
| 764 |
+
# replace the field so that future users just receive the namenode
|
| 765 |
+
setattr(field, attrname, field_node)
|
| 766 |
+
|
| 767 |
+
variables_assignment_stats.append(
|
| 768 |
+
Nodes.SingleAssignmentNode(field_default.pos, lhs=field_node, rhs=field_default))
|
| 769 |
+
|
| 770 |
+
placeholders = {}
|
| 771 |
+
field_func = ExprNodes.AttributeNode(node.pos, obj=dataclass_module,
|
| 772 |
+
attribute=EncodedString("field"))
|
| 773 |
+
dc_fields = ExprNodes.DictNode(node.pos, key_value_pairs=[])
|
| 774 |
+
dc_fields_namevalue_assignments = []
|
| 775 |
+
|
| 776 |
+
for name, field in fields.items():
|
| 777 |
+
if field.private:
|
| 778 |
+
continue # doesn't appear in the public interface
|
| 779 |
+
type_placeholder_name = "PLACEHOLDER_%s" % name
|
| 780 |
+
placeholders[type_placeholder_name] = get_field_type(
|
| 781 |
+
node.pos, node.scope.entries[name]
|
| 782 |
+
)
|
| 783 |
+
|
| 784 |
+
# defining these make the fields introspect more like a Python dataclass
|
| 785 |
+
field_type_placeholder_name = "PLACEHOLDER_FIELD_TYPE_%s" % name
|
| 786 |
+
if field.is_initvar:
|
| 787 |
+
placeholders[field_type_placeholder_name] = ExprNodes.AttributeNode(
|
| 788 |
+
node.pos, obj=dataclass_module,
|
| 789 |
+
attribute=EncodedString("_FIELD_INITVAR")
|
| 790 |
+
)
|
| 791 |
+
elif field.is_classvar:
|
| 792 |
+
# TODO - currently this isn't triggered
|
| 793 |
+
placeholders[field_type_placeholder_name] = ExprNodes.AttributeNode(
|
| 794 |
+
node.pos, obj=dataclass_module,
|
| 795 |
+
attribute=EncodedString("_FIELD_CLASSVAR")
|
| 796 |
+
)
|
| 797 |
+
else:
|
| 798 |
+
placeholders[field_type_placeholder_name] = ExprNodes.AttributeNode(
|
| 799 |
+
node.pos, obj=dataclass_module,
|
| 800 |
+
attribute=EncodedString("_FIELD")
|
| 801 |
+
)
|
| 802 |
+
|
| 803 |
+
dc_field_keywords = ExprNodes.DictNode.from_pairs(
|
| 804 |
+
node.pos,
|
| 805 |
+
[(ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(k)),
|
| 806 |
+
FieldRecordNode(node.pos, arg=v))
|
| 807 |
+
for k, v in field.iterate_record_node_arguments()]
|
| 808 |
+
|
| 809 |
+
)
|
| 810 |
+
dc_field_call = make_dataclass_call_helper(
|
| 811 |
+
node.pos, field_func, dc_field_keywords
|
| 812 |
+
)
|
| 813 |
+
dc_fields.key_value_pairs.append(
|
| 814 |
+
ExprNodes.DictItemNode(
|
| 815 |
+
node.pos,
|
| 816 |
+
key=ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(name)),
|
| 817 |
+
value=dc_field_call))
|
| 818 |
+
dc_fields_namevalue_assignments.append(
|
| 819 |
+
dedent(u"""\
|
| 820 |
+
__dataclass_fields__[{0!r}].name = {0!r}
|
| 821 |
+
__dataclass_fields__[{0!r}].type = {1}
|
| 822 |
+
__dataclass_fields__[{0!r}]._field_type = {2}
|
| 823 |
+
""").format(name, type_placeholder_name, field_type_placeholder_name))
|
| 824 |
+
|
| 825 |
+
dataclass_fields_assignment = \
|
| 826 |
+
Nodes.SingleAssignmentNode(node.pos,
|
| 827 |
+
lhs = ExprNodes.NameNode(node.pos,
|
| 828 |
+
name=EncodedString("__dataclass_fields__")),
|
| 829 |
+
rhs = dc_fields)
|
| 830 |
+
|
| 831 |
+
dc_fields_namevalue_assignments = u"\n".join(dc_fields_namevalue_assignments)
|
| 832 |
+
dc_fields_namevalue_assignments = TreeFragment(dc_fields_namevalue_assignments,
|
| 833 |
+
level="c_class",
|
| 834 |
+
pipeline=[NormalizeTree(None)])
|
| 835 |
+
dc_fields_namevalue_assignments = dc_fields_namevalue_assignments.substitute(placeholders)
|
| 836 |
+
|
| 837 |
+
return (variables_assignment_stats
|
| 838 |
+
+ [dataclass_fields_assignment]
|
| 839 |
+
+ dc_fields_namevalue_assignments.stats)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/DebugFlags.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Can be enabled at the command line with --debug-xxx.
|
| 2 |
+
|
| 3 |
+
debug_disposal_code = 0
|
| 4 |
+
debug_temp_alloc = 0
|
| 5 |
+
debug_coercion = 0
|
| 6 |
+
|
| 7 |
+
# Write comments into the C code that show where temporary variables
|
| 8 |
+
# are allocated and released.
|
| 9 |
+
debug_temp_code_comments = 0
|
| 10 |
+
|
| 11 |
+
# Write a call trace of the code generation phase into the C code.
|
| 12 |
+
debug_trace_code_generation = 0
|
| 13 |
+
|
| 14 |
+
# Do not replace exceptions with user-friendly error messages.
|
| 15 |
+
debug_no_exception_intercept = 0
|
| 16 |
+
|
| 17 |
+
# Print a message each time a new stage in the pipeline is entered.
|
| 18 |
+
debug_verbose_pipeline = 0
|
| 19 |
+
|
| 20 |
+
# Raise an exception when an error is encountered.
|
| 21 |
+
debug_exception_on_error = 0
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Errors.py
ADDED
|
@@ -0,0 +1,300 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Errors
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
from __future__ import absolute_import
|
| 6 |
+
|
| 7 |
+
try:
|
| 8 |
+
from __builtin__ import basestring as any_string_type
|
| 9 |
+
except ImportError:
|
| 10 |
+
any_string_type = (bytes, str)
|
| 11 |
+
|
| 12 |
+
import sys
|
| 13 |
+
from contextlib import contextmanager
|
| 14 |
+
|
| 15 |
+
try:
|
| 16 |
+
from threading import local as _threadlocal
|
| 17 |
+
except ImportError:
|
| 18 |
+
class _threadlocal(object): pass
|
| 19 |
+
|
| 20 |
+
threadlocal = _threadlocal()
|
| 21 |
+
|
| 22 |
+
from ..Utils import open_new_file
|
| 23 |
+
from . import DebugFlags
|
| 24 |
+
from . import Options
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class PyrexError(Exception):
|
| 28 |
+
pass
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class PyrexWarning(Exception):
|
| 32 |
+
pass
|
| 33 |
+
|
| 34 |
+
class CannotSpecialize(PyrexError):
|
| 35 |
+
pass
|
| 36 |
+
|
| 37 |
+
def context(position):
|
| 38 |
+
source = position[0]
|
| 39 |
+
assert not (isinstance(source, any_string_type)), (
|
| 40 |
+
"Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
|
| 41 |
+
try:
|
| 42 |
+
F = source.get_lines()
|
| 43 |
+
except UnicodeDecodeError:
|
| 44 |
+
# file has an encoding problem
|
| 45 |
+
s = u"[unprintable code]\n"
|
| 46 |
+
else:
|
| 47 |
+
s = u''.join(F[max(0, position[1]-6):position[1]])
|
| 48 |
+
s = u'...\n%s%s^\n' % (s, u' '*(position[2]))
|
| 49 |
+
s = u'%s\n%s%s\n' % (u'-'*60, s, u'-'*60)
|
| 50 |
+
return s
|
| 51 |
+
|
| 52 |
+
def format_position(position):
|
| 53 |
+
if position:
|
| 54 |
+
return u"%s:%d:%d: " % (position[0].get_error_description(),
|
| 55 |
+
position[1], position[2])
|
| 56 |
+
return u''
|
| 57 |
+
|
| 58 |
+
def format_error(message, position):
|
| 59 |
+
if position:
|
| 60 |
+
pos_str = format_position(position)
|
| 61 |
+
cont = context(position)
|
| 62 |
+
message = u'\nError compiling Cython file:\n%s\n%s%s' % (cont, pos_str, message or u'')
|
| 63 |
+
return message
|
| 64 |
+
|
| 65 |
+
class CompileError(PyrexError):
|
| 66 |
+
|
| 67 |
+
def __init__(self, position = None, message = u""):
|
| 68 |
+
self.position = position
|
| 69 |
+
self.message_only = message
|
| 70 |
+
self.formatted_message = format_error(message, position)
|
| 71 |
+
self.reported = False
|
| 72 |
+
Exception.__init__(self, self.formatted_message)
|
| 73 |
+
# Python Exception subclass pickling is broken,
|
| 74 |
+
# see https://bugs.python.org/issue1692335
|
| 75 |
+
self.args = (position, message)
|
| 76 |
+
|
| 77 |
+
def __str__(self):
|
| 78 |
+
return self.formatted_message
|
| 79 |
+
|
| 80 |
+
class CompileWarning(PyrexWarning):
|
| 81 |
+
|
| 82 |
+
def __init__(self, position = None, message = ""):
|
| 83 |
+
self.position = position
|
| 84 |
+
Exception.__init__(self, format_position(position) + message)
|
| 85 |
+
|
| 86 |
+
class InternalError(Exception):
|
| 87 |
+
# If this is ever raised, there is a bug in the compiler.
|
| 88 |
+
|
| 89 |
+
def __init__(self, message):
|
| 90 |
+
self.message_only = message
|
| 91 |
+
Exception.__init__(self, u"Internal compiler error: %s"
|
| 92 |
+
% message)
|
| 93 |
+
|
| 94 |
+
class AbortError(Exception):
|
| 95 |
+
# Throw this to stop the compilation immediately.
|
| 96 |
+
|
| 97 |
+
def __init__(self, message):
|
| 98 |
+
self.message_only = message
|
| 99 |
+
Exception.__init__(self, u"Abort error: %s" % message)
|
| 100 |
+
|
| 101 |
+
class CompilerCrash(CompileError):
|
| 102 |
+
# raised when an unexpected exception occurs in a transform
|
| 103 |
+
def __init__(self, pos, context, message, cause, stacktrace=None):
|
| 104 |
+
if message:
|
| 105 |
+
message = u'\n' + message
|
| 106 |
+
else:
|
| 107 |
+
message = u'\n'
|
| 108 |
+
self.message_only = message
|
| 109 |
+
if context:
|
| 110 |
+
message = u"Compiler crash in %s%s" % (context, message)
|
| 111 |
+
if stacktrace:
|
| 112 |
+
import traceback
|
| 113 |
+
message += (
|
| 114 |
+
u'\n\nCompiler crash traceback from this point on:\n' +
|
| 115 |
+
u''.join(traceback.format_tb(stacktrace)))
|
| 116 |
+
if cause:
|
| 117 |
+
if not stacktrace:
|
| 118 |
+
message += u'\n'
|
| 119 |
+
message += u'%s: %s' % (cause.__class__.__name__, cause)
|
| 120 |
+
CompileError.__init__(self, pos, message)
|
| 121 |
+
# Python Exception subclass pickling is broken,
|
| 122 |
+
# see https://bugs.python.org/issue1692335
|
| 123 |
+
self.args = (pos, context, message, cause, stacktrace)
|
| 124 |
+
|
| 125 |
+
class NoElementTreeInstalledException(PyrexError):
|
| 126 |
+
"""raised when the user enabled options.gdb_debug but no ElementTree
|
| 127 |
+
implementation was found
|
| 128 |
+
"""
|
| 129 |
+
|
| 130 |
+
def open_listing_file(path, echo_to_stderr=True):
|
| 131 |
+
# Begin a new error listing. If path is None, no file
|
| 132 |
+
# is opened, the error counter is just reset.
|
| 133 |
+
if path is not None:
|
| 134 |
+
threadlocal.cython_errors_listing_file = open_new_file(path)
|
| 135 |
+
else:
|
| 136 |
+
threadlocal.cython_errors_listing_file = None
|
| 137 |
+
if echo_to_stderr:
|
| 138 |
+
threadlocal.cython_errors_echo_file = sys.stderr
|
| 139 |
+
else:
|
| 140 |
+
threadlocal.cython_errors_echo_file = None
|
| 141 |
+
threadlocal.cython_errors_count = 0
|
| 142 |
+
|
| 143 |
+
def close_listing_file():
|
| 144 |
+
if threadlocal.cython_errors_listing_file:
|
| 145 |
+
threadlocal.cython_errors_listing_file.close()
|
| 146 |
+
threadlocal.cython_errors_listing_file = None
|
| 147 |
+
|
| 148 |
+
def report_error(err, use_stack=True):
|
| 149 |
+
error_stack = threadlocal.cython_errors_stack
|
| 150 |
+
if error_stack and use_stack:
|
| 151 |
+
error_stack[-1].append(err)
|
| 152 |
+
else:
|
| 153 |
+
# See Main.py for why dual reporting occurs. Quick fix for now.
|
| 154 |
+
if err.reported: return
|
| 155 |
+
err.reported = True
|
| 156 |
+
try: line = u"%s\n" % err
|
| 157 |
+
except UnicodeEncodeError:
|
| 158 |
+
# Python <= 2.5 does this for non-ASCII Unicode exceptions
|
| 159 |
+
line = format_error(getattr(err, 'message_only', "[unprintable exception message]"),
|
| 160 |
+
getattr(err, 'position', None)) + u'\n'
|
| 161 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 162 |
+
if listing_file:
|
| 163 |
+
try: listing_file.write(line)
|
| 164 |
+
except UnicodeEncodeError:
|
| 165 |
+
listing_file.write(line.encode('ASCII', 'replace'))
|
| 166 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 167 |
+
if echo_file:
|
| 168 |
+
try: echo_file.write(line)
|
| 169 |
+
except UnicodeEncodeError:
|
| 170 |
+
echo_file.write(line.encode('ASCII', 'replace'))
|
| 171 |
+
threadlocal.cython_errors_count += 1
|
| 172 |
+
if Options.fast_fail:
|
| 173 |
+
raise AbortError("fatal errors")
|
| 174 |
+
|
| 175 |
+
def error(position, message):
|
| 176 |
+
#print("Errors.error:", repr(position), repr(message)) ###
|
| 177 |
+
if position is None:
|
| 178 |
+
raise InternalError(message)
|
| 179 |
+
err = CompileError(position, message)
|
| 180 |
+
if DebugFlags.debug_exception_on_error: raise Exception(err) # debug
|
| 181 |
+
report_error(err)
|
| 182 |
+
return err
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
LEVEL = 1 # warn about all errors level 1 or higher
|
| 186 |
+
|
| 187 |
+
def _write_file_encode(file, line):
|
| 188 |
+
try:
|
| 189 |
+
file.write(line)
|
| 190 |
+
except UnicodeEncodeError:
|
| 191 |
+
file.write(line.encode('ascii', 'replace'))
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def performance_hint(position, message, env):
|
| 195 |
+
if not env.directives['show_performance_hints']:
|
| 196 |
+
return
|
| 197 |
+
warn = CompileWarning(position, message)
|
| 198 |
+
line = "performance hint: %s\n" % warn
|
| 199 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 200 |
+
if listing_file:
|
| 201 |
+
_write_file_encode(listing_file, line)
|
| 202 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 203 |
+
if echo_file:
|
| 204 |
+
_write_file_encode(echo_file, line)
|
| 205 |
+
return warn
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
def message(position, message, level=1):
|
| 209 |
+
if level < LEVEL:
|
| 210 |
+
return
|
| 211 |
+
warn = CompileWarning(position, message)
|
| 212 |
+
line = u"note: %s\n" % warn
|
| 213 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 214 |
+
if listing_file:
|
| 215 |
+
_write_file_encode(listing_file, line)
|
| 216 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 217 |
+
if echo_file:
|
| 218 |
+
_write_file_encode(echo_file, line)
|
| 219 |
+
return warn
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def warning(position, message, level=0):
|
| 223 |
+
if level < LEVEL:
|
| 224 |
+
return
|
| 225 |
+
if Options.warning_errors and position:
|
| 226 |
+
return error(position, message)
|
| 227 |
+
warn = CompileWarning(position, message)
|
| 228 |
+
line = u"warning: %s\n" % warn
|
| 229 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 230 |
+
if listing_file:
|
| 231 |
+
_write_file_encode(listing_file, line)
|
| 232 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 233 |
+
if echo_file:
|
| 234 |
+
_write_file_encode(echo_file, line)
|
| 235 |
+
return warn
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def warn_once(position, message, level=0):
|
| 239 |
+
if level < LEVEL:
|
| 240 |
+
return
|
| 241 |
+
warn_once_seen = threadlocal.cython_errors_warn_once_seen
|
| 242 |
+
if message in warn_once_seen:
|
| 243 |
+
return
|
| 244 |
+
warn = CompileWarning(position, message)
|
| 245 |
+
line = u"warning: %s\n" % warn
|
| 246 |
+
listing_file = threadlocal.cython_errors_listing_file
|
| 247 |
+
if listing_file:
|
| 248 |
+
_write_file_encode(listing_file, line)
|
| 249 |
+
echo_file = threadlocal.cython_errors_echo_file
|
| 250 |
+
if echo_file:
|
| 251 |
+
_write_file_encode(echo_file, line)
|
| 252 |
+
warn_once_seen.add(message)
|
| 253 |
+
return warn
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
# These functions can be used to momentarily suppress errors.
|
| 257 |
+
|
| 258 |
+
def hold_errors():
|
| 259 |
+
errors = []
|
| 260 |
+
threadlocal.cython_errors_stack.append(errors)
|
| 261 |
+
return errors
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def release_errors(ignore=False):
|
| 265 |
+
held_errors = threadlocal.cython_errors_stack.pop()
|
| 266 |
+
if not ignore:
|
| 267 |
+
for err in held_errors:
|
| 268 |
+
report_error(err)
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def held_errors():
|
| 272 |
+
return threadlocal.cython_errors_stack[-1]
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
# same as context manager:
|
| 276 |
+
|
| 277 |
+
@contextmanager
|
| 278 |
+
def local_errors(ignore=False):
|
| 279 |
+
errors = hold_errors()
|
| 280 |
+
try:
|
| 281 |
+
yield errors
|
| 282 |
+
finally:
|
| 283 |
+
release_errors(ignore=ignore)
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
# Keep all global state in thread local storage to support parallel cythonisation in distutils.
|
| 287 |
+
|
| 288 |
+
def init_thread():
|
| 289 |
+
threadlocal.cython_errors_count = 0
|
| 290 |
+
threadlocal.cython_errors_listing_file = None
|
| 291 |
+
threadlocal.cython_errors_echo_file = None
|
| 292 |
+
threadlocal.cython_errors_warn_once_seen = set()
|
| 293 |
+
threadlocal.cython_errors_stack = []
|
| 294 |
+
|
| 295 |
+
def reset():
|
| 296 |
+
threadlocal.cython_errors_warn_once_seen.clear()
|
| 297 |
+
del threadlocal.cython_errors_stack[:]
|
| 298 |
+
|
| 299 |
+
def get_errors_count():
|
| 300 |
+
return threadlocal.cython_errors_count
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/ExprNodes.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FlowControl.cp39-win_amd64.pyd
ADDED
|
Binary file (431 kB). View file
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FlowControl.pxd
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# cython: language_level=3
|
| 2 |
+
|
| 3 |
+
cimport cython
|
| 4 |
+
|
| 5 |
+
from .Visitor cimport CythonTransform, TreeVisitor
|
| 6 |
+
|
| 7 |
+
cdef class ControlBlock:
|
| 8 |
+
cdef public set children
|
| 9 |
+
cdef public set parents
|
| 10 |
+
cdef public set positions
|
| 11 |
+
cdef public list stats
|
| 12 |
+
cdef public dict gen
|
| 13 |
+
cdef public set bounded
|
| 14 |
+
|
| 15 |
+
# Big integer bitsets
|
| 16 |
+
cdef public object i_input
|
| 17 |
+
cdef public object i_output
|
| 18 |
+
cdef public object i_gen
|
| 19 |
+
cdef public object i_kill
|
| 20 |
+
cdef public object i_state
|
| 21 |
+
|
| 22 |
+
cpdef bint empty(self)
|
| 23 |
+
cpdef detach(self)
|
| 24 |
+
cpdef add_child(self, block)
|
| 25 |
+
|
| 26 |
+
cdef class ExitBlock(ControlBlock):
|
| 27 |
+
cpdef bint empty(self)
|
| 28 |
+
|
| 29 |
+
cdef class NameAssignment:
|
| 30 |
+
cdef public bint is_arg
|
| 31 |
+
cdef public bint is_deletion
|
| 32 |
+
cdef public object lhs
|
| 33 |
+
cdef public object rhs
|
| 34 |
+
cdef public object entry
|
| 35 |
+
cdef public object pos
|
| 36 |
+
cdef public set refs
|
| 37 |
+
cdef public object bit
|
| 38 |
+
cdef public object inferred_type
|
| 39 |
+
cdef public object rhs_scope
|
| 40 |
+
|
| 41 |
+
cdef class AssignmentList:
|
| 42 |
+
cdef public object bit
|
| 43 |
+
cdef public object mask
|
| 44 |
+
cdef public list stats
|
| 45 |
+
|
| 46 |
+
cdef class AssignmentCollector(TreeVisitor):
|
| 47 |
+
cdef list assignments
|
| 48 |
+
|
| 49 |
+
@cython.final
|
| 50 |
+
cdef class ControlFlow:
|
| 51 |
+
cdef public set blocks
|
| 52 |
+
cdef public set entries
|
| 53 |
+
cdef public list loops
|
| 54 |
+
cdef public list exceptions
|
| 55 |
+
|
| 56 |
+
cdef public ControlBlock entry_point
|
| 57 |
+
cdef public ExitBlock exit_point
|
| 58 |
+
cdef public ControlBlock block
|
| 59 |
+
|
| 60 |
+
cdef public dict assmts
|
| 61 |
+
|
| 62 |
+
cdef public Py_ssize_t in_try_block
|
| 63 |
+
|
| 64 |
+
cpdef newblock(self, ControlBlock parent=*)
|
| 65 |
+
cpdef nextblock(self, ControlBlock parent=*)
|
| 66 |
+
cpdef bint is_tracked(self, entry)
|
| 67 |
+
cpdef bint is_statically_assigned(self, entry)
|
| 68 |
+
cpdef mark_position(self, node)
|
| 69 |
+
cpdef mark_assignment(self, lhs, rhs, entry, rhs_scope=*)
|
| 70 |
+
cpdef mark_argument(self, lhs, rhs, entry)
|
| 71 |
+
cpdef mark_deletion(self, node, entry)
|
| 72 |
+
cpdef mark_reference(self, node, entry)
|
| 73 |
+
|
| 74 |
+
@cython.locals(block=ControlBlock, parent=ControlBlock, unreachable=set)
|
| 75 |
+
cpdef normalize(self)
|
| 76 |
+
|
| 77 |
+
@cython.locals(bit=object, assmts=AssignmentList, block=ControlBlock)
|
| 78 |
+
cpdef initialize(self)
|
| 79 |
+
|
| 80 |
+
@cython.locals(assmts=AssignmentList, assmt=NameAssignment)
|
| 81 |
+
cpdef set map_one(self, istate, entry)
|
| 82 |
+
|
| 83 |
+
@cython.locals(block=ControlBlock, parent=ControlBlock)
|
| 84 |
+
cdef reaching_definitions(self)
|
| 85 |
+
|
| 86 |
+
cdef class Uninitialized:
|
| 87 |
+
pass
|
| 88 |
+
|
| 89 |
+
cdef class Unknown:
|
| 90 |
+
pass
|
| 91 |
+
|
| 92 |
+
cdef class MessageCollection:
|
| 93 |
+
cdef set messages
|
| 94 |
+
|
| 95 |
+
@cython.locals(dirty=bint, block=ControlBlock, parent=ControlBlock,
|
| 96 |
+
assmt=NameAssignment)
|
| 97 |
+
cdef check_definitions(ControlFlow flow, dict compiler_directives)
|
| 98 |
+
|
| 99 |
+
@cython.final
|
| 100 |
+
cdef class ControlFlowAnalysis(CythonTransform):
|
| 101 |
+
cdef object gv_ctx
|
| 102 |
+
cdef object constant_folder
|
| 103 |
+
cdef set reductions
|
| 104 |
+
cdef list stack # a stack of (env, flow) tuples
|
| 105 |
+
cdef object env
|
| 106 |
+
cdef ControlFlow flow
|
| 107 |
+
cdef object object_expr
|
| 108 |
+
cdef bint in_inplace_assignment
|
| 109 |
+
|
| 110 |
+
cpdef mark_assignment(self, lhs, rhs=*, rhs_scope=*)
|
| 111 |
+
cpdef mark_position(self, node)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FlowControl.py
ADDED
|
@@ -0,0 +1,1383 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# cython: language_level=3str
|
| 2 |
+
# cython: auto_pickle=True
|
| 3 |
+
|
| 4 |
+
from __future__ import absolute_import
|
| 5 |
+
|
| 6 |
+
import cython
|
| 7 |
+
cython.declare(PyrexTypes=object, ExprNodes=object, Nodes=object, Builtin=object,
|
| 8 |
+
Options=object, TreeVisitor=object, CythonTransform=object,
|
| 9 |
+
InternalError=object, error=object, warning=object,
|
| 10 |
+
fake_rhs_expr=object, TypedExprNode=object)
|
| 11 |
+
|
| 12 |
+
from . import Builtin
|
| 13 |
+
from . import ExprNodes
|
| 14 |
+
from . import Nodes
|
| 15 |
+
from . import Options
|
| 16 |
+
from . import PyrexTypes
|
| 17 |
+
|
| 18 |
+
from .Visitor import TreeVisitor, CythonTransform
|
| 19 |
+
from .Errors import error, warning, InternalError
|
| 20 |
+
|
| 21 |
+
|
class TypedExprNode(ExprNodes.ExprNode):
    # Used for declaring assignments of a specified type without a known entry.
    def __init__(self, type, may_be_none=None, pos=None):
        super(TypedExprNode, self).__init__(pos)
        self.type = type
        # Tri-state flag: True / False / None (None == unknown).
        self._may_be_none = may_be_none

    def may_be_none(self):
        # Only an explicit False means "cannot be None"; the unknown state
        # (None) conservatively counts as "may be None".  The `!= False`
        # comparison is deliberate - do not replace with `is not False`.
        return self._may_be_none != False
| 31 |
+
|
# Fake rhs to silence "unused variable" warning.  Assignments carrying this
# rhs are filtered out again in ControlFlowState and check_definitions
# (`stat.rhs is not fake_rhs_expr`).
fake_rhs_expr = TypedExprNode(PyrexTypes.unspecified_type)
| 34 |
+
|
| 35 |
+
|
class ControlBlock(object):
    """A single node of the control flow graph.

    Holds the statements executed in this basic block together with the
    per-block bookkeeping used by the reaching-definitions analysis.

    children    set of successor blocks
    parents     set of predecessor blocks
    positions   set of source position markers

    stats       ordered list of block statements
    gen         dict of assignments generated by this block
    bounded     set of entries that are definitely bounded in this block

    Example:

        a = 1
        b = a + c  # 'c' is already bounded or exception here

        stats = [Assignment(a), NameReference(a), NameReference(c),
                 Assignment(b)]
        gen = {Entry(a): Assignment(a), Entry(b): Assignment(b)}
        bounded = {Entry(a), Entry(c)}
    """

    def __init__(self):
        # Graph structure.
        self.children = set()
        self.parents = set()
        self.positions = set()

        # Per-block dataflow facts.
        self.stats = []
        self.gen = {}
        self.bounded = set()

        # Bit-vector state driven by ControlFlow.initialize() and
        # ControlFlow.reaching_definitions().
        self.i_input = 0
        self.i_output = 0
        self.i_gen = 0
        self.i_kill = 0
        self.i_state = 0

    def empty(self):
        # A block is removable if it carries neither statements nor
        # position markers.
        return not (self.stats or self.positions)

    def detach(self):
        """Unlink this block from all of its parents and children."""
        for successor in self.children:
            successor.parents.remove(self)
        for predecessor in self.parents:
            predecessor.children.remove(self)
        self.parents.clear()
        self.children.clear()

    def add_child(self, block):
        """Link `block` as a successor of this block (both directions)."""
        self.children.add(block)
        block.parents.add(self)
| 89 |
+
|
| 90 |
+
|
class ExitBlock(ControlBlock):
    """The graph's exit point.

    Reports itself as non-empty so that normalize() never splices it out.
    """

    def empty(self):
        # Must survive even with no stats or positions.
        return False
| 96 |
+
|
| 97 |
+
|
class AssignmentList(object):
    """Per-entry list of assignment statements.

    The `bit` and `mask` attributes are attached later by
    ControlFlow.initialize().
    """

    def __init__(self):
        self.stats = []
| 101 |
+
|
| 102 |
+
|
class ControlFlow(object):
    """Control-flow graph.

    entry_point ControlBlock entry point for this graph
    exit_point ControlBlock normal exit point
    block ControlBlock current block
    blocks set children nodes
    entries set tracked entries
    loops list stack for loop descriptors
    exceptions list stack for exception descriptors
    in_try_block int track if we're in a try...except or try...finally block
    """

    def __init__(self):
        self.blocks = set()
        self.entries = set()
        self.loops = []
        self.exceptions = []

        self.entry_point = ControlBlock()
        self.exit_point = ExitBlock()
        self.blocks.add(self.exit_point)
        self.block = self.entry_point
        self.in_try_block = 0

    def newblock(self, parent=None):
        """Create a floating block, linked to `parent` if given.

        NOTE(review): the block IS added to self.blocks below; an older
        version of this docstring claimed otherwise.
        """
        block = ControlBlock()
        self.blocks.add(block)
        if parent:
            parent.add_child(block)
        return block

    def nextblock(self, parent=None):
        """Create a child block linked to the current block or to `parent`
        if given, and make it the new current block.

        NOTE: Block is added to self.blocks
        """
        block = ControlBlock()
        self.blocks.add(block)
        if parent:
            parent.add_child(block)
        elif self.block:
            self.block.add_child(block)
        self.block = block
        return self.block

    def is_tracked(self, entry):
        # Anonymous entries never take part in flow analysis.
        if entry.is_anonymous:
            return False
        return (entry.is_local or entry.is_pyclass_attr or entry.is_arg or
                entry.from_closure or entry.in_closure or
                entry.error_on_uninitialized)

    def is_statically_assigned(self, entry):
        if (entry.is_local and entry.is_variable and
                (entry.type.is_struct_or_union or
                 entry.type.is_complex or
                 entry.type.is_array or
                 (entry.type.is_cpp_class and not entry.is_cpp_optional))):
            # stack allocated structured variable => never uninitialised
            return True
        return False

    def mark_position(self, node):
        """Mark position, will be used to draw graph nodes."""
        if self.block:
            self.block.positions.add(node.pos[:2])

    def mark_assignment(self, lhs, rhs, entry, rhs_scope=None):
        # Record an assignment in the current block; it becomes the
        # most recent generator for `entry` within this block.
        if self.block and self.is_tracked(entry):
            assignment = NameAssignment(lhs, rhs, entry, rhs_scope=rhs_scope)
            self.block.stats.append(assignment)
            self.block.gen[entry] = assignment
            self.entries.add(entry)

    def mark_argument(self, lhs, rhs, entry):
        if self.block and self.is_tracked(entry):
            assignment = Argument(lhs, rhs, entry)
            self.block.stats.append(assignment)
            self.block.gen[entry] = assignment
            self.entries.add(entry)

    def mark_deletion(self, node, entry):
        if self.block and self.is_tracked(entry):
            assignment = NameDeletion(node, entry)
            self.block.stats.append(assignment)
            # After `del`, the entry is uninitialised again.
            self.block.gen[entry] = Uninitialized
            self.entries.add(entry)

    def mark_reference(self, node, entry):
        if self.block and self.is_tracked(entry):
            self.block.stats.append(NameReference(node, entry))
            ## XXX: We don't track expression evaluation order so we can't use
            ## XXX: successful reference as initialization sign.
            ## # Local variable is definitely bound after this reference
            ## if not node.allow_null:
            ##     self.block.bounded.add(entry)
            self.entries.add(entry)

    def normalize(self):
        """Delete unreachable and orphan blocks."""
        # Flood-fill reachability from the entry point.
        queue = {self.entry_point}
        visited = set()
        while queue:
            root = queue.pop()
            visited.add(root)
            for child in root.children:
                if child not in visited:
                    queue.add(child)
        unreachable = self.blocks - visited
        for block in unreachable:
            block.detach()
        visited.remove(self.entry_point)
        for block in visited:
            if block.empty():
                # Splice empty blocks out: connect every parent directly
                # to every child, then drop the block.
                for parent in block.parents:  # Re-parent
                    for child in block.children:
                        parent.add_child(child)
                block.detach()
                unreachable.add(block)
        self.blocks -= unreachable

    def initialize(self):
        """Set initial state, map assignments to bits."""
        self.assmts = {}

        # One bit per entry, representing its "uninitialised" state...
        bit = 1
        for entry in self.entries:
            assmts = AssignmentList()
            assmts.mask = assmts.bit = bit
            self.assmts[entry] = assmts
            bit <<= 1

        # ...and one bit per individual assignment statement.
        for block in self.blocks:
            for stat in block.stats:
                if isinstance(stat, NameAssignment):
                    stat.bit = bit
                    assmts = self.assmts[stat.entry]
                    assmts.stats.append(stat)
                    assmts.mask |= bit
                    bit <<= 1

        # Translate each block's gen dict / bounded set into gen/kill
        # bit vectors for the dataflow iteration.
        for block in self.blocks:
            for entry, stat in block.gen.items():
                assmts = self.assmts[entry]
                if stat is Uninitialized:
                    block.i_gen |= assmts.bit
                else:
                    block.i_gen |= stat.bit
                block.i_kill |= assmts.mask
            block.i_output = block.i_gen
            for entry in block.bounded:
                block.i_kill |= self.assmts[entry].bit

        # At the entry point every entry starts out "uninitialised".
        for assmts in self.assmts.values():
            self.entry_point.i_gen |= assmts.bit
        self.entry_point.i_output = self.entry_point.i_gen

    def map_one(self, istate, entry):
        # Decode bit vector `istate` into the set of assignments (or the
        # Uninitialized/Unknown/StaticAssignment sentinels) reaching `entry`.
        ret = set()
        assmts = self.assmts[entry]
        if istate & assmts.bit:
            if self.is_statically_assigned(entry):
                ret.add(StaticAssignment(entry))
            elif entry.from_closure:
                ret.add(Unknown)
            else:
                ret.add(Uninitialized)
        for assmt in assmts.stats:
            if istate & assmt.bit:
                ret.add(assmt)
        return ret

    def reaching_definitions(self):
        """Per-block reaching definitions analysis."""
        # Standard iterative dataflow: repeat until a fixed point.
        dirty = True
        while dirty:
            dirty = False
            for block in self.blocks:
                i_input = 0
                for parent in block.parents:
                    i_input |= parent.i_output
                i_output = (i_input & ~block.i_kill) | block.i_gen
                if i_output != block.i_output:
                    dirty = True
                block.i_input = i_input
                block.i_output = i_output
| 294 |
+
|
| 295 |
+
|
class LoopDescr(object):
    """Bookkeeping for one loop on the analysis stack.

    next_block   block following the loop
    loop_block   block restarting the loop
    exceptions   exception descriptors entered inside this loop
    """

    def __init__(self, next_block, loop_block):
        self.next_block = next_block
        self.loop_block = loop_block
        self.exceptions = []
| 301 |
+
|
| 302 |
+
|
class ExceptionDescr(object):
    """Exception handling helper.

    entry_point ControlBlock Exception handling entry point
    finally_enter ControlBlock Normal finally clause entry point
    finally_exit ControlBlock Normal finally clause exit point
    """

    def __init__(self, entry_point, finally_enter=None, finally_exit=None):
        self.entry_point = entry_point
        # Both finally_* members stay None when there is no finally clause.
        self.finally_enter = finally_enter
        self.finally_exit = finally_exit
| 315 |
+
|
| 316 |
+
|
class NameAssignment(object):
    """One assignment to a tracked name inside the flow graph."""

    def __init__(self, lhs, rhs, entry, rhs_scope=None):
        # Make sure the target node can accumulate control-flow state.
        if lhs.cf_state is None:
            lhs.cf_state = set()
        self.lhs = lhs
        self.rhs = rhs
        self.entry = entry
        self.pos = lhs.pos
        # NameReference stats that may observe this value.
        self.refs = set()
        # Flags overridden by the Argument / NameDeletion subclasses.
        self.is_arg = False
        self.is_deletion = False
        self.inferred_type = None
        # For generator expression targets, the rhs can have a different
        # scope than the lhs.
        self.rhs_scope = rhs_scope

    def __repr__(self):
        return '%s(entry=%r)' % (self.__class__.__name__, self.entry)

    def infer_type(self):
        """Infer the assigned value's type and cache it."""
        scope = self.rhs_scope or self.entry.scope
        self.inferred_type = self.rhs.infer_type(scope)
        return self.inferred_type

    def type_dependencies(self):
        """Entries whose types this assignment's inferred type depends on."""
        scope = self.rhs_scope or self.entry.scope
        return self.rhs.type_dependencies(scope)

    @property
    def type(self):
        # Prefer the declared entry type; fall back to the inferred one.
        if not self.entry.type.is_unspecified:
            return self.entry.type
        return self.inferred_type
| 347 |
+
|
| 348 |
+
|
class StaticAssignment(NameAssignment):
    """Initialised at declaration time, e.g. stack allocation."""

    def __init__(self, entry):
        # Non-Python types can never hold None; for Python objects the
        # answer is unknown at declaration time.
        may_be_none = None if entry.type.is_pyobject else False
        lhs = TypedExprNode(
            entry.type, may_be_none=may_be_none, pos=entry.pos)
        super(StaticAssignment, self).__init__(lhs, lhs, entry)

    def infer_type(self):
        # The declared type is authoritative for static assignments.
        return self.entry.type

    def type_dependencies(self):
        # A declared type depends on no other entries.
        return ()
| 365 |
+
|
| 366 |
+
|
class Argument(NameAssignment):
    """Assignment of an incoming argument value to its parameter name."""

    def __init__(self, lhs, rhs, entry):
        super(Argument, self).__init__(lhs, rhs, entry)
        self.is_arg = True
| 371 |
+
|
| 372 |
+
|
class NameDeletion(NameAssignment):
    """A `del name`, modelled as an assignment of the old value to itself."""

    def __init__(self, lhs, entry):
        # The rhs is the lhs node itself; only the deletion flag differs.
        NameAssignment.__init__(self, lhs, lhs, entry)
        self.is_deletion = True

    def infer_type(self):
        inferred_type = self.rhs.infer_type(self.entry.scope)
        if (not inferred_type.is_pyobject
                and inferred_type.can_coerce_to_pyobject(self.entry.scope)):
            # Coercible non-Python values are deleted as Python objects.
            # NOTE(review): self.inferred_type is NOT updated on this path,
            # unlike the fall-through below - confirm the asymmetry is
            # intended before changing it.
            return PyrexTypes.py_object_type
        self.inferred_type = inferred_type
        return inferred_type
| 385 |
+
|
| 386 |
+
|
class Uninitialized(object):
    """Sentinel: the entry is definitely not initialised yet."""
| 389 |
+
|
| 390 |
+
|
class Unknown(object):
    """Sentinel: value comes from an outer closure; might be initialised or not."""
| 393 |
+
|
| 394 |
+
|
class NameReference(object):
    """A read of a tracked name at a given source position."""

    def __init__(self, node, entry):
        # Make sure the referencing node can accumulate control-flow state.
        if node.cf_state is None:
            node.cf_state = set()
        self.node = node
        self.entry = entry
        self.pos = node.pos

    def __repr__(self):
        return '%s(entry=%r)' % (self.__class__.__name__, self.entry)
| 405 |
+
|
| 406 |
+
|
class ControlFlowState(list):
    # Keeps track of a Node's entry assignments.
    #
    # cf_is_null [boolean] It is uninitialized
    # cf_maybe_null [boolean] May be uninitialized
    # is_single [boolean] Has only one assignment at this point

    cf_maybe_null = False
    cf_is_null = False
    is_single = False

    def __init__(self, state):
        # NOTE: mutates the caller's `state` set in place (the
        # Uninitialized/Unknown sentinels are discarded from it).
        if Uninitialized in state:
            state.discard(Uninitialized)
            self.cf_maybe_null = True
            if not state:
                # No real assignment reaches here: definitely uninitialised.
                self.cf_is_null = True
        elif Unknown in state:
            # From an outer closure: we cannot tell, so only "maybe null".
            state.discard(Unknown)
            self.cf_maybe_null = True
        else:
            if len(state) == 1:
                self.is_single = True
        # XXX: Remove fake_rhs_expr
        super(ControlFlowState, self).__init__(
            [i for i in state if i.rhs is not fake_rhs_expr])

    def one(self):
        # Only meaningful when is_single is True.
        return self[0]
| 436 |
+
|
| 437 |
+
|
class GVContext(object):
    """Graphviz subgraph object: shared state for rendering one digraph."""

    def __init__(self):
        self.blockids = {}   # block -> generated dot node name
        self.nextid = 0      # counter backing the generated names
        self.children = []   # GV sub-renderers
        self.sources = {}    # source descriptor -> cached list of lines

    def add(self, child):
        """Register a GV subgraph renderer."""
        self.children.append(child)

    def nodeid(self, block):
        """Return a stable dot identifier for `block`, allocating on first use."""
        try:
            return self.blockids[block]
        except KeyError:
            name = 'block%d' % self.nextid
            self.blockids[block] = name
            self.nextid += 1
            return name

    def extract_sources(self, block):
        """Return the stripped source text covered by `block`'s positions."""
        if not block.positions:
            return ''
        start = min(block.positions)
        stop = max(block.positions)
        srcdescr = start[0]
        if srcdescr not in self.sources:
            self.sources[srcdescr] = list(srcdescr.get_lines())
        lines = self.sources[srcdescr]
        return '\\n'.join(line.strip() for line in lines[start[1] - 1:stop[1]])

    def render(self, fp, name, annotate_defs=False):
        """Render graphviz dot graph"""
        fp.write('digraph %s {\n' % name)
        fp.write(' node [shape=box];\n')
        for child in self.children:
            child.render(fp, self, annotate_defs)
        fp.write('}\n')

    def escape(self, text):
        """Escape quotes and newlines for use inside a dot label."""
        return text.replace('"', '\\"').replace('\n', '\\n')
| 477 |
+
|
| 478 |
+
|
class GV(object):
    """Graphviz DOT renderer for one ControlFlow graph."""

    def __init__(self, name, flow):
        self.name = name   # subgraph name (the function's scope name)
        self.flow = flow   # the ControlFlow instance to draw

    def render(self, fp, ctx, annotate_defs=False):
        # Emit one dot node per block (labelled with its source text and,
        # optionally, its definitions/references), then one edge per
        # parent->child link.
        fp.write(' subgraph %s {\n' % self.name)
        for block in self.flow.blocks:
            label = ctx.extract_sources(block)
            if annotate_defs:
                for stat in block.stats:
                    if isinstance(stat, NameAssignment):
                        label += '\n %s [%s %s]' % (
                            stat.entry.name, 'deletion' if stat.is_deletion else 'definition', stat.pos[1])
                    elif isinstance(stat, NameReference):
                        if stat.entry:
                            label += '\n %s [reference %s]' % (stat.entry.name, stat.pos[1])
            if not label:
                label = 'empty'
            pid = ctx.nodeid(block)
            fp.write(' %s [label="%s"];\n' % (pid, ctx.escape(label)))
        for block in self.flow.blocks:
            pid = ctx.nodeid(block)
            for child in block.children:
                fp.write(' %s -> %s;\n' % (pid, ctx.nodeid(child)))
        fp.write(' }\n')
| 507 |
+
|
| 508 |
+
|
class MessageCollection(object):
    """Buffer error/warning messages, then report them in sorted order."""

    def __init__(self):
        # (pos, is_error, message) triples; the set deduplicates repeats
        # and permits position-sorted reporting.
        self.messages = set()

    def error(self, pos, message):
        """Queue an error message for `pos`."""
        self.messages.add((pos, True, message))

    def warning(self, pos, message):
        """Queue a warning message for `pos`."""
        self.messages.add((pos, False, message))

    def report(self):
        """Flush every queued message through the global error/warning hooks."""
        for pos, is_error, message in sorted(self.messages):
            if is_error:
                error(pos, message)
            else:
                warning(pos, message, 2)
| 526 |
+
|
| 527 |
+
|
def check_definitions(flow, compiler_directives):
    """Run reaching-definitions on `flow`, annotate nodes with cf_* hints,
    and report uninitialised/unused variable diagnostics."""
    flow.initialize()
    flow.reaching_definitions()

    # Track down state
    assignments = set()
    # Node to entry map
    references = {}
    assmt_nodes = set()

    # Walk every block, threading the reaching-definitions bit vector
    # through its statements in order.
    for block in flow.blocks:
        i_state = block.i_input
        for stat in block.stats:
            i_assmts = flow.assmts[stat.entry]
            state = flow.map_one(i_state, stat.entry)
            if isinstance(stat, NameAssignment):
                stat.lhs.cf_state.update(state)
                assmt_nodes.add(stat.lhs)
                # Kill all previous assignments of this entry, then gen
                # either the "uninitialised" bit (deletion) or this one.
                i_state = i_state & ~i_assmts.mask
                if stat.is_deletion:
                    i_state |= i_assmts.bit
                else:
                    i_state |= stat.bit
                assignments.add(stat)
                if stat.rhs is not fake_rhs_expr:
                    stat.entry.cf_assignments.append(stat)
            elif isinstance(stat, NameReference):
                references[stat.node] = stat.entry
                stat.entry.cf_references.append(stat)
                stat.node.cf_state.update(state)
                ## if not stat.node.allow_null:
                ##     i_state &= ~i_assmts.bit
                ## # after successful read, the state is known to be initialised
                state.discard(Uninitialized)
                state.discard(Unknown)
                # Link each possibly-read assignment to this reference.
                for assmt in state:
                    assmt.refs.add(stat)

    # Check variable usage
    warn_maybe_uninitialized = compiler_directives['warn.maybe_uninitialized']
    warn_unused_result = compiler_directives['warn.unused_result']
    warn_unused = compiler_directives['warn.unused']
    warn_unused_arg = compiler_directives['warn.unused_arg']

    messages = MessageCollection()

    # assignment hints
    for node in assmt_nodes:
        if Uninitialized in node.cf_state:
            node.cf_maybe_null = True
            if len(node.cf_state) == 1:
                node.cf_is_null = True
            else:
                node.cf_is_null = False
        elif Unknown in node.cf_state:
            node.cf_maybe_null = True
        else:
            node.cf_is_null = False
            node.cf_maybe_null = False

    # Find uninitialized references and cf-hints
    for node, entry in references.items():
        if Uninitialized in node.cf_state:
            node.cf_maybe_null = True
            if (not entry.from_closure and len(node.cf_state) == 1
                    and entry.name not in entry.scope.scope_predefined_names):
                node.cf_is_null = True
            if (node.allow_null or entry.from_closure
                    or entry.is_pyclass_attr or entry.type.is_error):
                pass  # Can be uninitialized here
            elif node.cf_is_null and not entry.in_closure:
                if entry.error_on_uninitialized or (
                        Options.error_on_uninitialized and (
                        entry.type.is_pyobject or entry.type.is_unspecified)):
                    messages.error(
                        node.pos,
                        "local variable '%s' referenced before assignment"
                        % entry.name)
                else:
                    messages.warning(
                        node.pos,
                        "local variable '%s' referenced before assignment"
                        % entry.name)
            elif warn_maybe_uninitialized:
                msg = "local variable '%s' might be referenced before assignment" % entry.name
                if entry.in_closure:
                    msg += " (maybe initialized inside a closure)"
                messages.warning(
                    node.pos,
                    msg)
        elif Unknown in node.cf_state:
            # TODO: better cross-closure analysis to know when inner functions
            # are being called before a variable is being set, and when
            # a variable is known to be set before even defining the
            # inner function, etc.
            node.cf_maybe_null = True
        else:
            node.cf_is_null = False
            node.cf_maybe_null = False

    # Unused result
    for assmt in assignments:
        if (not assmt.refs and not assmt.entry.is_pyclass_attr
                and not assmt.entry.in_closure):
            if assmt.entry.cf_references and warn_unused_result:
                if assmt.is_arg:
                    messages.warning(assmt.pos, "Unused argument value '%s'" %
                                     assmt.entry.name)
                else:
                    messages.warning(assmt.pos, "Unused result in '%s'" %
                                     assmt.entry.name)
            assmt.lhs.cf_used = False

    # Unused entries
    for entry in flow.entries:
        if (not entry.cf_references
                and not entry.is_pyclass_attr):
            if entry.name != '_' and not entry.name.startswith('unused'):
                # '_' is often used for unused variables, e.g. in loops
                if entry.is_arg:
                    if warn_unused_arg:
                        messages.warning(entry.pos, "Unused argument '%s'" %
                                         entry.name)
                else:
                    if warn_unused:
                        messages.warning(entry.pos, "Unused entry '%s'" %
                                         entry.name)
                entry.cf_used = False

    messages.report()

    # Freeze each node's accumulated state into a ControlFlowState.
    for node in assmt_nodes:
        node.cf_state = ControlFlowState(node.cf_state)
    for node in references:
        node.cf_state = ControlFlowState(node.cf_state)
| 663 |
+
|
| 664 |
+
|
class AssignmentCollector(TreeVisitor):
    """Collect (lhs, rhs) pairs from all assignment nodes below a node."""

    def __init__(self):
        super(AssignmentCollector, self).__init__()
        self.assignments = []

    def visit_Node(self, node):
        # Generic fallback: recurse into the children looking for
        # assignments.  Fixed: visitor dispatch calls this handler as
        # visit_Node(node), so it must accept the node argument and
        # recurse into *it* - the old signature (self only, recursing
        # into the visitor itself) raised TypeError whenever a
        # non-assignment node was visited.
        self._visitchildren(node, None, None)

    def visit_SingleAssignmentNode(self, node):
        self.assignments.append((node.lhs, node.rhs))

    def visit_CascadedAssignmentNode(self, node):
        # `a = b = rhs` yields one pair per target, all sharing the rhs.
        for lhs in node.lhs_list:
            self.assignments.append((lhs, node.rhs))
| 679 |
+
|
| 680 |
+
|
| 681 |
+
class ControlFlowAnalysis(CythonTransform):
|
| 682 |
+
|
| 683 |
+
def find_in_stack(self, env):
|
| 684 |
+
if env == self.env:
|
| 685 |
+
return self.flow
|
| 686 |
+
for e, flow in reversed(self.stack):
|
| 687 |
+
if e is env:
|
| 688 |
+
return flow
|
| 689 |
+
assert False
|
| 690 |
+
|
    def visit_ModuleNode(self, node):
        # Optional Graphviz output of the flow graphs, controlled by the
        # 'control_flow.dot_output' directive.
        dot_output = self.current_directives['control_flow.dot_output']
        self.gv_ctx = GVContext() if dot_output else None

        # Imported here to avoid a circular import at module load time.
        from .Optimize import ConstantFolding
        self.constant_folder = ConstantFolding()

        # Set of NameNode reductions
        self.reductions = set()

        self.in_inplace_assignment = False
        self.env = node.scope
        self.flow = ControlFlow()
        self.stack = []  # a stack of (env, flow) tuples
        self.object_expr = TypedExprNode(PyrexTypes.py_object_type, may_be_none=True)
        self.visitchildren(node)

        # Analyse the module-level flow itself.
        check_definitions(self.flow, self.current_directives)

        if dot_output:
            annotate_defs = self.current_directives['control_flow.dot_annotate_defs']
            with open(dot_output, 'wt') as fp:
                self.gv_ctx.render(fp, 'module', annotate_defs=annotate_defs)
        return node
| 715 |
+
|
    def visit_FuncDefNode(self, node):
        # Argument defaults and decorators are evaluated in the OUTER
        # scope, so visit them before pushing the function's own flow.
        for arg in node.args:
            if arg.default:
                self.visitchildren(arg)
        self.visitchildren(node, ('decorators',))
        self.stack.append((self.env, self.flow))
        self.env = node.local_scope
        self.flow = ControlFlow()

        # Collect all entries
        for entry in node.local_scope.entries.values():
            if self.flow.is_tracked(entry):
                self.flow.entries.add(entry)

        self.mark_position(node)
        # Function body block
        self.flow.nextblock()

        # Arguments are definitely assigned on entry; *args/**kwargs are
        # always a non-None tuple/dict respectively.
        for arg in node.args:
            self._visit(arg)
        if node.star_arg:
            self.flow.mark_argument(node.star_arg,
                                    TypedExprNode(Builtin.tuple_type,
                                                  may_be_none=False),
                                    node.star_arg.entry)
        if node.starstar_arg:
            self.flow.mark_argument(node.starstar_arg,
                                    TypedExprNode(Builtin.dict_type,
                                                  may_be_none=False),
                                    node.starstar_arg.entry)
        self._visit(node.body)
        # Workaround for generators
        if node.is_generator:
            self._visit(node.gbody.body)

        # Exit point
        if self.flow.block:
            self.flow.block.add_child(self.flow.exit_point)

        # Cleanup graph
        self.flow.normalize()
        check_definitions(self.flow, self.current_directives)
        self.flow.blocks.add(self.flow.entry_point)

        if self.gv_ctx is not None:
            self.gv_ctx.add(GV(node.local_scope.name, self.flow))

        # Restore the enclosing scope's flow.
        self.env, self.flow = self.stack.pop()
        return node
| 765 |
+
|
| 766 |
+
def visit_DefNode(self, node):
|
| 767 |
+
node.used = True
|
| 768 |
+
return self.visit_FuncDefNode(node)
|
| 769 |
+
|
| 770 |
+
def visit_GeneratorBodyDefNode(self, node):
|
| 771 |
+
return node
|
| 772 |
+
|
| 773 |
+
def visit_CTypeDefNode(self, node):
|
| 774 |
+
return node
|
| 775 |
+
|
    def mark_assignment(self, lhs, rhs=None, rhs_scope=None):
        """Record an assignment to `lhs` (a name, sequence or other target)
        in the current flow, splitting blocks around possible exceptions."""
        if not self.flow.block:
            # Unreachable code: nothing to record.
            return
        if self.flow.exceptions:
            # The assignment may raise: link to the innermost handler and
            # continue in a fresh block.
            exc_descr = self.flow.exceptions[-1]
            self.flow.block.add_child(exc_descr.entry_point)
            self.flow.nextblock()

        if not rhs:
            rhs = self.object_expr
        if lhs.is_name:
            if lhs.entry is not None:
                entry = lhs.entry
            else:
                entry = self.env.lookup(lhs.name)
            if entry is None:  # TODO: This shouldn't happen...
                return
            self.flow.mark_assignment(lhs, rhs, entry, rhs_scope=rhs_scope)
        elif lhs.is_sequence_constructor:
            # Unpack element-wise, recursing for each target.
            for i, arg in enumerate(lhs.args):
                if arg.is_starred:
                    # "a, *b = x" assigns a list to "b"
                    item_node = TypedExprNode(Builtin.list_type, may_be_none=False, pos=arg.pos)
                elif rhs is self.object_expr:
                    item_node = rhs
                else:
                    item_node = rhs.inferable_item_node(i)
                self.mark_assignment(arg, item_node)
        else:
            # Attribute/subscript/etc. targets: just visit normally.
            self._visit(lhs)

        if self.flow.exceptions:
            # The target evaluation itself may raise as well.
            exc_descr = self.flow.exceptions[-1]
            self.flow.block.add_child(exc_descr.entry_point)
            self.flow.nextblock()
| 811 |
+
|
| 812 |
+
def mark_position(self, node):
|
| 813 |
+
"""Mark position if DOT output is enabled."""
|
| 814 |
+
if self.current_directives['control_flow.dot_output']:
|
| 815 |
+
self.flow.mark_position(node)
|
| 816 |
+
|
| 817 |
+
def visit_FromImportStatNode(self, node):
|
| 818 |
+
for name, target in node.items:
|
| 819 |
+
if name != "*":
|
| 820 |
+
self.mark_assignment(target)
|
| 821 |
+
self.visitchildren(node)
|
| 822 |
+
return node
|
| 823 |
+
|
| 824 |
+
def visit_AssignmentNode(self, node):
|
| 825 |
+
raise InternalError("Unhandled assignment node %s" % type(node))
|
| 826 |
+
|
| 827 |
+
def visit_SingleAssignmentNode(self, node):
|
| 828 |
+
self._visit(node.rhs)
|
| 829 |
+
self.mark_assignment(node.lhs, node.rhs)
|
| 830 |
+
return node
|
| 831 |
+
|
| 832 |
+
def visit_CascadedAssignmentNode(self, node):
|
| 833 |
+
self._visit(node.rhs)
|
| 834 |
+
for lhs in node.lhs_list:
|
| 835 |
+
self.mark_assignment(lhs, node.rhs)
|
| 836 |
+
return node
|
| 837 |
+
|
| 838 |
+
def visit_ParallelAssignmentNode(self, node):
|
| 839 |
+
collector = AssignmentCollector()
|
| 840 |
+
collector.visitchildren(node)
|
| 841 |
+
for lhs, rhs in collector.assignments:
|
| 842 |
+
self._visit(rhs)
|
| 843 |
+
for lhs, rhs in collector.assignments:
|
| 844 |
+
self.mark_assignment(lhs, rhs)
|
| 845 |
+
return node
|
| 846 |
+
|
| 847 |
+
def visit_InPlaceAssignmentNode(self, node):
|
| 848 |
+
self.in_inplace_assignment = True
|
| 849 |
+
self.visitchildren(node)
|
| 850 |
+
self.in_inplace_assignment = False
|
| 851 |
+
self.mark_assignment(node.lhs, self.constant_folder(node.create_binop_node()))
|
| 852 |
+
return node
|
| 853 |
+
|
| 854 |
+
def visit_DelStatNode(self, node):
|
| 855 |
+
for arg in node.args:
|
| 856 |
+
if arg.is_name:
|
| 857 |
+
entry = arg.entry or self.env.lookup(arg.name)
|
| 858 |
+
if entry.in_closure or entry.from_closure:
|
| 859 |
+
error(arg.pos,
|
| 860 |
+
"can not delete variable '%s' "
|
| 861 |
+
"referenced in nested scope" % entry.name)
|
| 862 |
+
if not node.ignore_nonexisting:
|
| 863 |
+
self._visit(arg) # mark reference
|
| 864 |
+
self.flow.mark_deletion(arg, entry)
|
| 865 |
+
else:
|
| 866 |
+
self._visit(arg)
|
| 867 |
+
return node
|
| 868 |
+
|
| 869 |
+
def visit_CArgDeclNode(self, node):
|
| 870 |
+
entry = self.env.lookup(node.name)
|
| 871 |
+
if entry:
|
| 872 |
+
may_be_none = not node.not_none
|
| 873 |
+
self.flow.mark_argument(
|
| 874 |
+
node, TypedExprNode(entry.type, may_be_none), entry)
|
| 875 |
+
return node
|
| 876 |
+
|
| 877 |
+
def visit_NameNode(self, node):
|
| 878 |
+
if self.flow.block:
|
| 879 |
+
entry = node.entry or self.env.lookup(node.name)
|
| 880 |
+
if entry:
|
| 881 |
+
self.flow.mark_reference(node, entry)
|
| 882 |
+
|
| 883 |
+
if entry in self.reductions and not self.in_inplace_assignment:
|
| 884 |
+
error(node.pos,
|
| 885 |
+
"Cannot read reduction variable in loop body")
|
| 886 |
+
|
| 887 |
+
return node
|
| 888 |
+
|
| 889 |
+
def visit_StatListNode(self, node):
|
| 890 |
+
if self.flow.block:
|
| 891 |
+
for stat in node.stats:
|
| 892 |
+
self._visit(stat)
|
| 893 |
+
if not self.flow.block:
|
| 894 |
+
stat.is_terminator = True
|
| 895 |
+
break
|
| 896 |
+
return node
|
| 897 |
+
|
| 898 |
+
def visit_Node(self, node):
|
| 899 |
+
self.visitchildren(node)
|
| 900 |
+
self.mark_position(node)
|
| 901 |
+
return node
|
| 902 |
+
|
| 903 |
+
def visit_SizeofVarNode(self, node):
|
| 904 |
+
return node
|
| 905 |
+
|
| 906 |
+
def visit_TypeidNode(self, node):
|
| 907 |
+
return node
|
| 908 |
+
|
| 909 |
+
def visit_IfStatNode(self, node):
|
| 910 |
+
next_block = self.flow.newblock()
|
| 911 |
+
parent = self.flow.block
|
| 912 |
+
# If clauses
|
| 913 |
+
for clause in node.if_clauses:
|
| 914 |
+
parent = self.flow.nextblock(parent)
|
| 915 |
+
self._visit(clause.condition)
|
| 916 |
+
self.flow.nextblock()
|
| 917 |
+
self._visit(clause.body)
|
| 918 |
+
if self.flow.block:
|
| 919 |
+
self.flow.block.add_child(next_block)
|
| 920 |
+
# Else clause
|
| 921 |
+
if node.else_clause:
|
| 922 |
+
self.flow.nextblock(parent=parent)
|
| 923 |
+
self._visit(node.else_clause)
|
| 924 |
+
if self.flow.block:
|
| 925 |
+
self.flow.block.add_child(next_block)
|
| 926 |
+
else:
|
| 927 |
+
parent.add_child(next_block)
|
| 928 |
+
|
| 929 |
+
if next_block.parents:
|
| 930 |
+
self.flow.block = next_block
|
| 931 |
+
else:
|
| 932 |
+
self.flow.block = None
|
| 933 |
+
return node
|
| 934 |
+
|
| 935 |
+
def visit_AssertStatNode(self, node):
|
| 936 |
+
"""Essentially an if-condition that wraps a RaiseStatNode.
|
| 937 |
+
"""
|
| 938 |
+
self.mark_position(node)
|
| 939 |
+
next_block = self.flow.newblock()
|
| 940 |
+
parent = self.flow.block
|
| 941 |
+
# failure case
|
| 942 |
+
parent = self.flow.nextblock(parent)
|
| 943 |
+
self._visit(node.condition)
|
| 944 |
+
self.flow.nextblock()
|
| 945 |
+
self._visit(node.exception)
|
| 946 |
+
if self.flow.block:
|
| 947 |
+
self.flow.block.add_child(next_block)
|
| 948 |
+
parent.add_child(next_block)
|
| 949 |
+
if next_block.parents:
|
| 950 |
+
self.flow.block = next_block
|
| 951 |
+
else:
|
| 952 |
+
self.flow.block = None
|
| 953 |
+
return node
|
| 954 |
+
|
| 955 |
+
def visit_WhileStatNode(self, node):
|
| 956 |
+
condition_block = self.flow.nextblock()
|
| 957 |
+
next_block = self.flow.newblock()
|
| 958 |
+
# Condition block
|
| 959 |
+
self.flow.loops.append(LoopDescr(next_block, condition_block))
|
| 960 |
+
if node.condition:
|
| 961 |
+
self._visit(node.condition)
|
| 962 |
+
# Body block
|
| 963 |
+
self.flow.nextblock()
|
| 964 |
+
self._visit(node.body)
|
| 965 |
+
self.flow.loops.pop()
|
| 966 |
+
# Loop it
|
| 967 |
+
if self.flow.block:
|
| 968 |
+
self.flow.block.add_child(condition_block)
|
| 969 |
+
self.flow.block.add_child(next_block)
|
| 970 |
+
# Else clause
|
| 971 |
+
if node.else_clause:
|
| 972 |
+
self.flow.nextblock(parent=condition_block)
|
| 973 |
+
self._visit(node.else_clause)
|
| 974 |
+
if self.flow.block:
|
| 975 |
+
self.flow.block.add_child(next_block)
|
| 976 |
+
else:
|
| 977 |
+
condition_block.add_child(next_block)
|
| 978 |
+
|
| 979 |
+
if next_block.parents:
|
| 980 |
+
self.flow.block = next_block
|
| 981 |
+
else:
|
| 982 |
+
self.flow.block = None
|
| 983 |
+
return node
|
| 984 |
+
|
| 985 |
+
def mark_forloop_target(self, node):
|
| 986 |
+
# TODO: Remove redundancy with range optimization...
|
| 987 |
+
is_special = False
|
| 988 |
+
sequence = node.iterator.sequence
|
| 989 |
+
target = node.target
|
| 990 |
+
env = node.iterator.expr_scope or self.env
|
| 991 |
+
if isinstance(sequence, ExprNodes.SimpleCallNode):
|
| 992 |
+
function = sequence.function
|
| 993 |
+
if sequence.self is None and function.is_name:
|
| 994 |
+
entry = env.lookup(function.name)
|
| 995 |
+
if not entry or entry.is_builtin:
|
| 996 |
+
if function.name == 'reversed' and len(sequence.args) == 1:
|
| 997 |
+
sequence = sequence.args[0]
|
| 998 |
+
elif function.name == 'enumerate' and len(sequence.args) == 1:
|
| 999 |
+
if target.is_sequence_constructor and len(target.args) == 2:
|
| 1000 |
+
iterator = sequence.args[0]
|
| 1001 |
+
if iterator.is_name:
|
| 1002 |
+
iterator_type = iterator.infer_type(env)
|
| 1003 |
+
if iterator_type.is_builtin_type:
|
| 1004 |
+
# assume that builtin types have a length within Py_ssize_t
|
| 1005 |
+
self.mark_assignment(
|
| 1006 |
+
target.args[0],
|
| 1007 |
+
ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX',
|
| 1008 |
+
type=PyrexTypes.c_py_ssize_t_type),
|
| 1009 |
+
rhs_scope=node.iterator.expr_scope)
|
| 1010 |
+
target = target.args[1]
|
| 1011 |
+
sequence = sequence.args[0]
|
| 1012 |
+
if isinstance(sequence, ExprNodes.SimpleCallNode):
|
| 1013 |
+
function = sequence.function
|
| 1014 |
+
if sequence.self is None and function.is_name:
|
| 1015 |
+
entry = env.lookup(function.name)
|
| 1016 |
+
if not entry or entry.is_builtin:
|
| 1017 |
+
if function.name in ('range', 'xrange'):
|
| 1018 |
+
is_special = True
|
| 1019 |
+
for arg in sequence.args[:2]:
|
| 1020 |
+
self.mark_assignment(target, arg, rhs_scope=node.iterator.expr_scope)
|
| 1021 |
+
if len(sequence.args) > 2:
|
| 1022 |
+
self.mark_assignment(target, self.constant_folder(
|
| 1023 |
+
ExprNodes.binop_node(node.pos,
|
| 1024 |
+
'+',
|
| 1025 |
+
sequence.args[0],
|
| 1026 |
+
sequence.args[2])),
|
| 1027 |
+
rhs_scope=node.iterator.expr_scope)
|
| 1028 |
+
|
| 1029 |
+
if not is_special:
|
| 1030 |
+
# A for-loop basically translates to subsequent calls to
|
| 1031 |
+
# __getitem__(), so using an IndexNode here allows us to
|
| 1032 |
+
# naturally infer the base type of pointers, C arrays,
|
| 1033 |
+
# Python strings, etc., while correctly falling back to an
|
| 1034 |
+
# object type when the base type cannot be handled.
|
| 1035 |
+
|
| 1036 |
+
self.mark_assignment(target, node.item, rhs_scope=node.iterator.expr_scope)
|
| 1037 |
+
|
| 1038 |
+
def visit_AsyncForStatNode(self, node):
|
| 1039 |
+
return self.visit_ForInStatNode(node)
|
| 1040 |
+
|
| 1041 |
+
def visit_ForInStatNode(self, node):
|
| 1042 |
+
condition_block = self.flow.nextblock()
|
| 1043 |
+
next_block = self.flow.newblock()
|
| 1044 |
+
# Condition with iterator
|
| 1045 |
+
self.flow.loops.append(LoopDescr(next_block, condition_block))
|
| 1046 |
+
self._visit(node.iterator)
|
| 1047 |
+
# Target assignment
|
| 1048 |
+
self.flow.nextblock()
|
| 1049 |
+
|
| 1050 |
+
if isinstance(node, Nodes.ForInStatNode):
|
| 1051 |
+
self.mark_forloop_target(node)
|
| 1052 |
+
elif isinstance(node, Nodes.AsyncForStatNode):
|
| 1053 |
+
# not entirely correct, but good enough for now
|
| 1054 |
+
self.mark_assignment(node.target, node.item)
|
| 1055 |
+
else: # Parallel
|
| 1056 |
+
self.mark_assignment(node.target)
|
| 1057 |
+
|
| 1058 |
+
# Body block
|
| 1059 |
+
if isinstance(node, Nodes.ParallelRangeNode):
|
| 1060 |
+
# In case of an invalid
|
| 1061 |
+
self._delete_privates(node, exclude=node.target.entry)
|
| 1062 |
+
|
| 1063 |
+
self.flow.nextblock()
|
| 1064 |
+
self._visit(node.body)
|
| 1065 |
+
self.flow.loops.pop()
|
| 1066 |
+
|
| 1067 |
+
# Loop it
|
| 1068 |
+
if self.flow.block:
|
| 1069 |
+
self.flow.block.add_child(condition_block)
|
| 1070 |
+
# Else clause
|
| 1071 |
+
if node.else_clause:
|
| 1072 |
+
self.flow.nextblock(parent=condition_block)
|
| 1073 |
+
self._visit(node.else_clause)
|
| 1074 |
+
if self.flow.block:
|
| 1075 |
+
self.flow.block.add_child(next_block)
|
| 1076 |
+
else:
|
| 1077 |
+
condition_block.add_child(next_block)
|
| 1078 |
+
|
| 1079 |
+
if next_block.parents:
|
| 1080 |
+
self.flow.block = next_block
|
| 1081 |
+
else:
|
| 1082 |
+
self.flow.block = None
|
| 1083 |
+
return node
|
| 1084 |
+
|
| 1085 |
+
def _delete_privates(self, node, exclude=None):
|
| 1086 |
+
for private_node in node.assigned_nodes:
|
| 1087 |
+
if not exclude or private_node.entry is not exclude:
|
| 1088 |
+
self.flow.mark_deletion(private_node, private_node.entry)
|
| 1089 |
+
|
| 1090 |
+
def visit_ParallelRangeNode(self, node):
|
| 1091 |
+
reductions = self.reductions
|
| 1092 |
+
|
| 1093 |
+
# if node.target is None or not a NameNode, an error will have
|
| 1094 |
+
# been previously issued
|
| 1095 |
+
if hasattr(node.target, 'entry'):
|
| 1096 |
+
self.reductions = set(reductions)
|
| 1097 |
+
|
| 1098 |
+
for private_node in node.assigned_nodes:
|
| 1099 |
+
private_node.entry.error_on_uninitialized = True
|
| 1100 |
+
pos, reduction = node.assignments[private_node.entry]
|
| 1101 |
+
if reduction:
|
| 1102 |
+
self.reductions.add(private_node.entry)
|
| 1103 |
+
|
| 1104 |
+
node = self.visit_ForInStatNode(node)
|
| 1105 |
+
|
| 1106 |
+
self.reductions = reductions
|
| 1107 |
+
return node
|
| 1108 |
+
|
| 1109 |
+
def visit_ParallelWithBlockNode(self, node):
|
| 1110 |
+
for private_node in node.assigned_nodes:
|
| 1111 |
+
private_node.entry.error_on_uninitialized = True
|
| 1112 |
+
|
| 1113 |
+
self._delete_privates(node)
|
| 1114 |
+
self.visitchildren(node)
|
| 1115 |
+
self._delete_privates(node)
|
| 1116 |
+
|
| 1117 |
+
return node
|
| 1118 |
+
|
| 1119 |
+
def visit_ForFromStatNode(self, node):
|
| 1120 |
+
condition_block = self.flow.nextblock()
|
| 1121 |
+
next_block = self.flow.newblock()
|
| 1122 |
+
# Condition with iterator
|
| 1123 |
+
self.flow.loops.append(LoopDescr(next_block, condition_block))
|
| 1124 |
+
self._visit(node.bound1)
|
| 1125 |
+
self._visit(node.bound2)
|
| 1126 |
+
if node.step is not None:
|
| 1127 |
+
self._visit(node.step)
|
| 1128 |
+
# Target assignment
|
| 1129 |
+
self.flow.nextblock()
|
| 1130 |
+
self.mark_assignment(node.target, node.bound1)
|
| 1131 |
+
if node.step is not None:
|
| 1132 |
+
self.mark_assignment(node.target, self.constant_folder(
|
| 1133 |
+
ExprNodes.binop_node(node.pos, '+', node.bound1, node.step)))
|
| 1134 |
+
# Body block
|
| 1135 |
+
self.flow.nextblock()
|
| 1136 |
+
self._visit(node.body)
|
| 1137 |
+
self.flow.loops.pop()
|
| 1138 |
+
# Loop it
|
| 1139 |
+
if self.flow.block:
|
| 1140 |
+
self.flow.block.add_child(condition_block)
|
| 1141 |
+
# Else clause
|
| 1142 |
+
if node.else_clause:
|
| 1143 |
+
self.flow.nextblock(parent=condition_block)
|
| 1144 |
+
self._visit(node.else_clause)
|
| 1145 |
+
if self.flow.block:
|
| 1146 |
+
self.flow.block.add_child(next_block)
|
| 1147 |
+
else:
|
| 1148 |
+
condition_block.add_child(next_block)
|
| 1149 |
+
|
| 1150 |
+
if next_block.parents:
|
| 1151 |
+
self.flow.block = next_block
|
| 1152 |
+
else:
|
| 1153 |
+
self.flow.block = None
|
| 1154 |
+
return node
|
| 1155 |
+
|
| 1156 |
+
def visit_LoopNode(self, node):
|
| 1157 |
+
raise InternalError("Generic loops are not supported")
|
| 1158 |
+
|
| 1159 |
+
def visit_WithTargetAssignmentStatNode(self, node):
|
| 1160 |
+
self.mark_assignment(node.lhs, node.with_node.enter_call)
|
| 1161 |
+
return node
|
| 1162 |
+
|
| 1163 |
+
def visit_WithStatNode(self, node):
|
| 1164 |
+
self._visit(node.manager)
|
| 1165 |
+
self._visit(node.enter_call)
|
| 1166 |
+
self._visit(node.body)
|
| 1167 |
+
return node
|
| 1168 |
+
|
| 1169 |
+
def visit_TryExceptStatNode(self, node):
|
| 1170 |
+
# After exception handling
|
| 1171 |
+
next_block = self.flow.newblock()
|
| 1172 |
+
# Body block
|
| 1173 |
+
self.flow.newblock()
|
| 1174 |
+
# Exception entry point
|
| 1175 |
+
entry_point = self.flow.newblock()
|
| 1176 |
+
self.flow.exceptions.append(ExceptionDescr(entry_point))
|
| 1177 |
+
self.flow.nextblock()
|
| 1178 |
+
## XXX: links to exception handling point should be added by
|
| 1179 |
+
## XXX: children nodes
|
| 1180 |
+
self.flow.block.add_child(entry_point)
|
| 1181 |
+
self.flow.nextblock()
|
| 1182 |
+
self.flow.in_try_block += 1
|
| 1183 |
+
self._visit(node.body)
|
| 1184 |
+
self.flow.in_try_block -= 1
|
| 1185 |
+
self.flow.exceptions.pop()
|
| 1186 |
+
|
| 1187 |
+
# After exception
|
| 1188 |
+
if self.flow.block:
|
| 1189 |
+
if node.else_clause:
|
| 1190 |
+
self.flow.nextblock()
|
| 1191 |
+
self._visit(node.else_clause)
|
| 1192 |
+
if self.flow.block:
|
| 1193 |
+
self.flow.block.add_child(next_block)
|
| 1194 |
+
|
| 1195 |
+
for clause in node.except_clauses:
|
| 1196 |
+
self.flow.block = entry_point
|
| 1197 |
+
if clause.pattern:
|
| 1198 |
+
for pattern in clause.pattern:
|
| 1199 |
+
self._visit(pattern)
|
| 1200 |
+
else:
|
| 1201 |
+
# TODO: handle * pattern
|
| 1202 |
+
pass
|
| 1203 |
+
entry_point = self.flow.newblock(parent=self.flow.block)
|
| 1204 |
+
self.flow.nextblock()
|
| 1205 |
+
if clause.target:
|
| 1206 |
+
self.mark_assignment(clause.target)
|
| 1207 |
+
self._visit(clause.body)
|
| 1208 |
+
if self.flow.block:
|
| 1209 |
+
self.flow.block.add_child(next_block)
|
| 1210 |
+
|
| 1211 |
+
if self.flow.exceptions:
|
| 1212 |
+
entry_point.add_child(self.flow.exceptions[-1].entry_point)
|
| 1213 |
+
|
| 1214 |
+
if next_block.parents:
|
| 1215 |
+
self.flow.block = next_block
|
| 1216 |
+
else:
|
| 1217 |
+
self.flow.block = None
|
| 1218 |
+
return node
|
| 1219 |
+
|
| 1220 |
+
def visit_TryFinallyStatNode(self, node):
|
| 1221 |
+
body_block = self.flow.nextblock()
|
| 1222 |
+
|
| 1223 |
+
# Exception entry point
|
| 1224 |
+
entry_point = self.flow.newblock()
|
| 1225 |
+
self.flow.block = entry_point
|
| 1226 |
+
self._visit(node.finally_except_clause)
|
| 1227 |
+
|
| 1228 |
+
if self.flow.block and self.flow.exceptions:
|
| 1229 |
+
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
|
| 1230 |
+
|
| 1231 |
+
# Normal execution
|
| 1232 |
+
finally_enter = self.flow.newblock()
|
| 1233 |
+
self.flow.block = finally_enter
|
| 1234 |
+
self._visit(node.finally_clause)
|
| 1235 |
+
finally_exit = self.flow.block
|
| 1236 |
+
|
| 1237 |
+
descr = ExceptionDescr(entry_point, finally_enter, finally_exit)
|
| 1238 |
+
self.flow.exceptions.append(descr)
|
| 1239 |
+
if self.flow.loops:
|
| 1240 |
+
self.flow.loops[-1].exceptions.append(descr)
|
| 1241 |
+
self.flow.block = body_block
|
| 1242 |
+
body_block.add_child(entry_point)
|
| 1243 |
+
self.flow.nextblock()
|
| 1244 |
+
self.flow.in_try_block += 1
|
| 1245 |
+
self._visit(node.body)
|
| 1246 |
+
self.flow.in_try_block -= 1
|
| 1247 |
+
self.flow.exceptions.pop()
|
| 1248 |
+
if self.flow.loops:
|
| 1249 |
+
self.flow.loops[-1].exceptions.pop()
|
| 1250 |
+
|
| 1251 |
+
if self.flow.block:
|
| 1252 |
+
self.flow.block.add_child(finally_enter)
|
| 1253 |
+
if finally_exit:
|
| 1254 |
+
self.flow.block = self.flow.nextblock(parent=finally_exit)
|
| 1255 |
+
else:
|
| 1256 |
+
self.flow.block = None
|
| 1257 |
+
return node
|
| 1258 |
+
|
| 1259 |
+
def visit_RaiseStatNode(self, node):
|
| 1260 |
+
self.mark_position(node)
|
| 1261 |
+
self.visitchildren(node)
|
| 1262 |
+
if self.flow.exceptions:
|
| 1263 |
+
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
|
| 1264 |
+
self.flow.block = None
|
| 1265 |
+
if self.flow.in_try_block:
|
| 1266 |
+
node.in_try_block = True
|
| 1267 |
+
return node
|
| 1268 |
+
|
| 1269 |
+
def visit_ReraiseStatNode(self, node):
|
| 1270 |
+
self.mark_position(node)
|
| 1271 |
+
if self.flow.exceptions:
|
| 1272 |
+
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
|
| 1273 |
+
self.flow.block = None
|
| 1274 |
+
return node
|
| 1275 |
+
|
| 1276 |
+
def visit_ReturnStatNode(self, node):
|
| 1277 |
+
self.mark_position(node)
|
| 1278 |
+
self.visitchildren(node)
|
| 1279 |
+
|
| 1280 |
+
outer_exception_handlers = iter(self.flow.exceptions[::-1])
|
| 1281 |
+
for handler in outer_exception_handlers:
|
| 1282 |
+
if handler.finally_enter:
|
| 1283 |
+
self.flow.block.add_child(handler.finally_enter)
|
| 1284 |
+
if handler.finally_exit:
|
| 1285 |
+
# 'return' goes to function exit, or to the next outer 'finally' clause
|
| 1286 |
+
exit_point = self.flow.exit_point
|
| 1287 |
+
for next_handler in outer_exception_handlers:
|
| 1288 |
+
if next_handler.finally_enter:
|
| 1289 |
+
exit_point = next_handler.finally_enter
|
| 1290 |
+
break
|
| 1291 |
+
handler.finally_exit.add_child(exit_point)
|
| 1292 |
+
break
|
| 1293 |
+
else:
|
| 1294 |
+
if self.flow.block:
|
| 1295 |
+
self.flow.block.add_child(self.flow.exit_point)
|
| 1296 |
+
self.flow.block = None
|
| 1297 |
+
return node
|
| 1298 |
+
|
| 1299 |
+
def visit_BreakStatNode(self, node):
|
| 1300 |
+
if not self.flow.loops:
|
| 1301 |
+
#error(node.pos, "break statement not inside loop")
|
| 1302 |
+
return node
|
| 1303 |
+
loop = self.flow.loops[-1]
|
| 1304 |
+
self.mark_position(node)
|
| 1305 |
+
for exception in loop.exceptions[::-1]:
|
| 1306 |
+
if exception.finally_enter:
|
| 1307 |
+
self.flow.block.add_child(exception.finally_enter)
|
| 1308 |
+
if exception.finally_exit:
|
| 1309 |
+
exception.finally_exit.add_child(loop.next_block)
|
| 1310 |
+
break
|
| 1311 |
+
else:
|
| 1312 |
+
self.flow.block.add_child(loop.next_block)
|
| 1313 |
+
self.flow.block = None
|
| 1314 |
+
return node
|
| 1315 |
+
|
| 1316 |
+
def visit_ContinueStatNode(self, node):
|
| 1317 |
+
if not self.flow.loops:
|
| 1318 |
+
#error(node.pos, "continue statement not inside loop")
|
| 1319 |
+
return node
|
| 1320 |
+
loop = self.flow.loops[-1]
|
| 1321 |
+
self.mark_position(node)
|
| 1322 |
+
for exception in loop.exceptions[::-1]:
|
| 1323 |
+
if exception.finally_enter:
|
| 1324 |
+
self.flow.block.add_child(exception.finally_enter)
|
| 1325 |
+
if exception.finally_exit:
|
| 1326 |
+
exception.finally_exit.add_child(loop.loop_block)
|
| 1327 |
+
break
|
| 1328 |
+
else:
|
| 1329 |
+
self.flow.block.add_child(loop.loop_block)
|
| 1330 |
+
self.flow.block = None
|
| 1331 |
+
return node
|
| 1332 |
+
|
| 1333 |
+
def visit_ComprehensionNode(self, node):
|
| 1334 |
+
if node.expr_scope:
|
| 1335 |
+
self.stack.append((self.env, self.flow))
|
| 1336 |
+
self.env = node.expr_scope
|
| 1337 |
+
# Skip append node here
|
| 1338 |
+
self._visit(node.loop)
|
| 1339 |
+
if node.expr_scope:
|
| 1340 |
+
self.env, _ = self.stack.pop()
|
| 1341 |
+
return node
|
| 1342 |
+
|
| 1343 |
+
def visit_ScopedExprNode(self, node):
|
| 1344 |
+
# currently this is written to deal with these two types
|
| 1345 |
+
# (with comprehensions covered in their own function)
|
| 1346 |
+
assert isinstance(node, (ExprNodes.IteratorNode, ExprNodes.AsyncIteratorNode)), node
|
| 1347 |
+
if node.expr_scope:
|
| 1348 |
+
self.stack.append((self.env, self.flow))
|
| 1349 |
+
self.flow = self.find_in_stack(node.expr_scope)
|
| 1350 |
+
self.env = node.expr_scope
|
| 1351 |
+
self.visitchildren(node)
|
| 1352 |
+
if node.expr_scope:
|
| 1353 |
+
self.env, self.flow = self.stack.pop()
|
| 1354 |
+
return node
|
| 1355 |
+
|
| 1356 |
+
def visit_PyClassDefNode(self, node):
|
| 1357 |
+
self.visitchildren(node, ('dict', 'metaclass',
|
| 1358 |
+
'mkw', 'bases', 'class_result'))
|
| 1359 |
+
self.flow.mark_assignment(node.target, node.classobj,
|
| 1360 |
+
self.env.lookup(node.target.name))
|
| 1361 |
+
self.stack.append((self.env, self.flow))
|
| 1362 |
+
self.env = node.scope
|
| 1363 |
+
self.flow.nextblock()
|
| 1364 |
+
if node.doc_node:
|
| 1365 |
+
self.flow.mark_assignment(node.doc_node, fake_rhs_expr, node.doc_node.entry)
|
| 1366 |
+
self.visitchildren(node, ('body',))
|
| 1367 |
+
self.flow.nextblock()
|
| 1368 |
+
self.env, _ = self.stack.pop()
|
| 1369 |
+
return node
|
| 1370 |
+
|
| 1371 |
+
def visit_CClassDefNode(self, node):
|
| 1372 |
+
# just make sure the nodes scope is findable in-case there is a list comprehension in it
|
| 1373 |
+
self.stack.append((node.scope, self.flow))
|
| 1374 |
+
self.visitchildren(node)
|
| 1375 |
+
self.stack.pop()
|
| 1376 |
+
return node
|
| 1377 |
+
|
| 1378 |
+
def visit_AmpersandNode(self, node):
|
| 1379 |
+
if node.operand.is_name:
|
| 1380 |
+
# Fake assignment to silence warning
|
| 1381 |
+
self.mark_assignment(node.operand, fake_rhs_expr)
|
| 1382 |
+
self.visitchildren(node)
|
| 1383 |
+
return node
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FusedNode.cp39-win_amd64.pyd
ADDED
|
Binary file (297 kB). View file
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/FusedNode.py
ADDED
|
@@ -0,0 +1,1015 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import copy
|
| 4 |
+
|
| 5 |
+
from . import (ExprNodes, PyrexTypes, MemoryView,
|
| 6 |
+
ParseTreeTransforms, StringEncoding, Errors,
|
| 7 |
+
Naming)
|
| 8 |
+
from .ExprNodes import CloneNode, ProxyNode, TupleNode
|
| 9 |
+
from .Nodes import FuncDefNode, CFuncDefNode, StatListNode, DefNode
|
| 10 |
+
from ..Utils import OrderedSet
|
| 11 |
+
from .Errors import error, CannotSpecialize
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class FusedCFuncDefNode(StatListNode):
|
| 15 |
+
"""
|
| 16 |
+
This node replaces a function with fused arguments. It deep-copies the
|
| 17 |
+
function for every permutation of fused types, and allocates a new local
|
| 18 |
+
scope for it. It keeps track of the original function in self.node, and
|
| 19 |
+
the entry of the original function in the symbol table is given the
|
| 20 |
+
'fused_cfunction' attribute which points back to us.
|
| 21 |
+
Then when a function lookup occurs (to e.g. call it), the call can be
|
| 22 |
+
dispatched to the right function.
|
| 23 |
+
|
| 24 |
+
node FuncDefNode the original function
|
| 25 |
+
nodes [FuncDefNode] list of copies of node with different specific types
|
| 26 |
+
py_func DefNode the fused python function subscriptable from
|
| 27 |
+
Python space
|
| 28 |
+
__signatures__ A DictNode mapping signature specialization strings
|
| 29 |
+
to PyCFunction nodes
|
| 30 |
+
resulting_fused_function PyCFunction for the fused DefNode that delegates
|
| 31 |
+
to specializations
|
| 32 |
+
fused_func_assignment Assignment of the fused function to the function name
|
| 33 |
+
defaults_tuple TupleNode of defaults (letting PyCFunctionNode build
|
| 34 |
+
defaults would result in many different tuples)
|
| 35 |
+
specialized_pycfuncs List of synthesized pycfunction nodes for the
|
| 36 |
+
specializations
|
| 37 |
+
code_object CodeObjectNode shared by all specializations and the
|
| 38 |
+
fused function
|
| 39 |
+
|
| 40 |
+
fused_compound_types All fused (compound) types (e.g. floating[:])
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
__signatures__ = None
|
| 44 |
+
resulting_fused_function = None
|
| 45 |
+
fused_func_assignment = None
|
| 46 |
+
defaults_tuple = None
|
| 47 |
+
decorators = None
|
| 48 |
+
|
| 49 |
+
child_attrs = StatListNode.child_attrs + [
|
| 50 |
+
'__signatures__', 'resulting_fused_function', 'fused_func_assignment']
|
| 51 |
+
|
| 52 |
+
def __init__(self, node, env):
|
| 53 |
+
super(FusedCFuncDefNode, self).__init__(node.pos)
|
| 54 |
+
|
| 55 |
+
self.nodes = []
|
| 56 |
+
self.node = node
|
| 57 |
+
|
| 58 |
+
is_def = isinstance(self.node, DefNode)
|
| 59 |
+
if is_def:
|
| 60 |
+
# self.node.decorators = []
|
| 61 |
+
self.copy_def(env)
|
| 62 |
+
else:
|
| 63 |
+
self.copy_cdef(env)
|
| 64 |
+
|
| 65 |
+
# Perform some sanity checks. If anything fails, it's a bug
|
| 66 |
+
for n in self.nodes:
|
| 67 |
+
assert not n.entry.type.is_fused
|
| 68 |
+
assert not n.local_scope.return_type.is_fused
|
| 69 |
+
if node.return_type.is_fused:
|
| 70 |
+
assert not n.return_type.is_fused
|
| 71 |
+
|
| 72 |
+
if not is_def and n.cfunc_declarator.optional_arg_count:
|
| 73 |
+
assert n.type.op_arg_struct
|
| 74 |
+
|
| 75 |
+
node.entry.fused_cfunction = self
|
| 76 |
+
# Copy the nodes as AnalyseDeclarationsTransform will prepend
|
| 77 |
+
# self.py_func to self.stats, as we only want specialized
|
| 78 |
+
# CFuncDefNodes in self.nodes
|
| 79 |
+
self.stats = self.nodes[:]
|
| 80 |
+
|
| 81 |
+
def copy_def(self, env):
|
| 82 |
+
"""
|
| 83 |
+
Create a copy of the original def or lambda function for specialized
|
| 84 |
+
versions.
|
| 85 |
+
"""
|
| 86 |
+
fused_compound_types = PyrexTypes.unique(
|
| 87 |
+
[arg.type for arg in self.node.args if arg.type.is_fused])
|
| 88 |
+
fused_types = self._get_fused_base_types(fused_compound_types)
|
| 89 |
+
permutations = PyrexTypes.get_all_specialized_permutations(fused_types)
|
| 90 |
+
|
| 91 |
+
self.fused_compound_types = fused_compound_types
|
| 92 |
+
|
| 93 |
+
if self.node.entry in env.pyfunc_entries:
|
| 94 |
+
env.pyfunc_entries.remove(self.node.entry)
|
| 95 |
+
|
| 96 |
+
for cname, fused_to_specific in permutations:
|
| 97 |
+
copied_node = copy.deepcopy(self.node)
|
| 98 |
+
# keep signature object identity for special casing in DefNode.analyse_declarations()
|
| 99 |
+
copied_node.entry.signature = self.node.entry.signature
|
| 100 |
+
|
| 101 |
+
self._specialize_function_args(copied_node.args, fused_to_specific)
|
| 102 |
+
copied_node.return_type = self.node.return_type.specialize(
|
| 103 |
+
fused_to_specific)
|
| 104 |
+
|
| 105 |
+
copied_node.analyse_declarations(env)
|
| 106 |
+
# copied_node.is_staticmethod = self.node.is_staticmethod
|
| 107 |
+
# copied_node.is_classmethod = self.node.is_classmethod
|
| 108 |
+
self.create_new_local_scope(copied_node, env, fused_to_specific)
|
| 109 |
+
self.specialize_copied_def(copied_node, cname, self.node.entry,
|
| 110 |
+
fused_to_specific, fused_compound_types)
|
| 111 |
+
|
| 112 |
+
PyrexTypes.specialize_entry(copied_node.entry, cname)
|
| 113 |
+
copied_node.entry.used = True
|
| 114 |
+
env.entries[copied_node.entry.name] = copied_node.entry
|
| 115 |
+
|
| 116 |
+
if not self.replace_fused_typechecks(copied_node):
|
| 117 |
+
break
|
| 118 |
+
|
| 119 |
+
self.orig_py_func = self.node
|
| 120 |
+
self.py_func = self.make_fused_cpdef(self.node, env, is_def=True)
|
| 121 |
+
|
| 122 |
+
def copy_cdef(self, env):
|
| 123 |
+
"""
|
| 124 |
+
Create a copy of the original c(p)def function for all specialized
|
| 125 |
+
versions.
|
| 126 |
+
"""
|
| 127 |
+
permutations = self.node.type.get_all_specialized_permutations()
|
| 128 |
+
# print 'Node %s has %d specializations:' % (self.node.entry.name,
|
| 129 |
+
# len(permutations))
|
| 130 |
+
# import pprint; pprint.pprint([d for cname, d in permutations])
|
| 131 |
+
|
| 132 |
+
# Prevent copying of the python function
|
| 133 |
+
self.orig_py_func = orig_py_func = self.node.py_func
|
| 134 |
+
self.node.py_func = None
|
| 135 |
+
if orig_py_func:
|
| 136 |
+
env.pyfunc_entries.remove(orig_py_func.entry)
|
| 137 |
+
|
| 138 |
+
fused_types = self.node.type.get_fused_types()
|
| 139 |
+
self.fused_compound_types = fused_types
|
| 140 |
+
|
| 141 |
+
new_cfunc_entries = []
|
| 142 |
+
for cname, fused_to_specific in permutations:
|
| 143 |
+
copied_node = copy.deepcopy(self.node)
|
| 144 |
+
|
| 145 |
+
# Make the types in our CFuncType specific.
|
| 146 |
+
try:
|
| 147 |
+
type = copied_node.type.specialize(fused_to_specific)
|
| 148 |
+
except CannotSpecialize:
|
| 149 |
+
# unlike for the argument types, specializing the return type can fail
|
| 150 |
+
error(copied_node.pos, "Return type is a fused type that cannot "
|
| 151 |
+
"be determined from the function arguments")
|
| 152 |
+
self.py_func = None # this is just to let the compiler exit gracefully
|
| 153 |
+
return
|
| 154 |
+
entry = copied_node.entry
|
| 155 |
+
type.specialize_entry(entry, cname)
|
| 156 |
+
|
| 157 |
+
# Reuse existing Entries (e.g. from .pxd files).
|
| 158 |
+
for i, orig_entry in enumerate(env.cfunc_entries):
|
| 159 |
+
if entry.cname == orig_entry.cname and type.same_as_resolved_type(orig_entry.type):
|
| 160 |
+
copied_node.entry = env.cfunc_entries[i]
|
| 161 |
+
if not copied_node.entry.func_cname:
|
| 162 |
+
copied_node.entry.func_cname = entry.func_cname
|
| 163 |
+
entry = copied_node.entry
|
| 164 |
+
type = entry.type
|
| 165 |
+
break
|
| 166 |
+
else:
|
| 167 |
+
new_cfunc_entries.append(entry)
|
| 168 |
+
|
| 169 |
+
copied_node.type = type
|
| 170 |
+
entry.type, type.entry = type, entry
|
| 171 |
+
|
| 172 |
+
entry.used = (entry.used or
|
| 173 |
+
self.node.entry.defined_in_pxd or
|
| 174 |
+
env.is_c_class_scope or
|
| 175 |
+
entry.is_cmethod)
|
| 176 |
+
|
| 177 |
+
if self.node.cfunc_declarator.optional_arg_count:
|
| 178 |
+
self.node.cfunc_declarator.declare_optional_arg_struct(
|
| 179 |
+
type, env, fused_cname=cname)
|
| 180 |
+
|
| 181 |
+
copied_node.return_type = type.return_type
|
| 182 |
+
self.create_new_local_scope(copied_node, env, fused_to_specific)
|
| 183 |
+
|
| 184 |
+
# Make the argument types in the CFuncDeclarator specific
|
| 185 |
+
self._specialize_function_args(copied_node.cfunc_declarator.args,
|
| 186 |
+
fused_to_specific)
|
| 187 |
+
|
| 188 |
+
# If a cpdef, declare all specialized cpdefs (this
|
| 189 |
+
# also calls analyse_declarations)
|
| 190 |
+
copied_node.declare_cpdef_wrapper(env)
|
| 191 |
+
if copied_node.py_func:
|
| 192 |
+
env.pyfunc_entries.remove(copied_node.py_func.entry)
|
| 193 |
+
|
| 194 |
+
self.specialize_copied_def(
|
| 195 |
+
copied_node.py_func, cname, self.node.entry.as_variable,
|
| 196 |
+
fused_to_specific, fused_types)
|
| 197 |
+
|
| 198 |
+
if not self.replace_fused_typechecks(copied_node):
|
| 199 |
+
break
|
| 200 |
+
|
| 201 |
+
# replace old entry with new entries
|
| 202 |
+
if self.node.entry in env.cfunc_entries:
|
| 203 |
+
cindex = env.cfunc_entries.index(self.node.entry)
|
| 204 |
+
env.cfunc_entries[cindex:cindex+1] = new_cfunc_entries
|
| 205 |
+
else:
|
| 206 |
+
env.cfunc_entries.extend(new_cfunc_entries)
|
| 207 |
+
|
| 208 |
+
if orig_py_func:
|
| 209 |
+
self.py_func = self.make_fused_cpdef(orig_py_func, env,
|
| 210 |
+
is_def=False)
|
| 211 |
+
else:
|
| 212 |
+
self.py_func = orig_py_func
|
| 213 |
+
|
| 214 |
+
def _get_fused_base_types(self, fused_compound_types):
|
| 215 |
+
"""
|
| 216 |
+
Get a list of unique basic fused types, from a list of
|
| 217 |
+
(possibly) compound fused types.
|
| 218 |
+
"""
|
| 219 |
+
base_types = []
|
| 220 |
+
seen = set()
|
| 221 |
+
for fused_type in fused_compound_types:
|
| 222 |
+
fused_type.get_fused_types(result=base_types, seen=seen)
|
| 223 |
+
return base_types
|
| 224 |
+
|
| 225 |
+
def _specialize_function_args(self, args, fused_to_specific):
|
| 226 |
+
for arg in args:
|
| 227 |
+
if arg.type.is_fused:
|
| 228 |
+
arg.type = arg.type.specialize(fused_to_specific)
|
| 229 |
+
if arg.type.is_memoryviewslice:
|
| 230 |
+
arg.type.validate_memslice_dtype(arg.pos)
|
| 231 |
+
if arg.annotation:
|
| 232 |
+
# TODO might be nice if annotations were specialized instead?
|
| 233 |
+
# (Or might be hard to do reliably)
|
| 234 |
+
arg.annotation.untyped = True
|
| 235 |
+
|
| 236 |
+
def create_new_local_scope(self, node, env, f2s):
|
| 237 |
+
"""
|
| 238 |
+
Create a new local scope for the copied node and append it to
|
| 239 |
+
self.nodes. A new local scope is needed because the arguments with the
|
| 240 |
+
fused types are already in the local scope, and we need the specialized
|
| 241 |
+
entries created after analyse_declarations on each specialized version
|
| 242 |
+
of the (CFunc)DefNode.
|
| 243 |
+
f2s is a dict mapping each fused type to its specialized version
|
| 244 |
+
"""
|
| 245 |
+
node.create_local_scope(env)
|
| 246 |
+
node.local_scope.fused_to_specific = f2s
|
| 247 |
+
|
| 248 |
+
# This is copied from the original function, set it to false to
|
| 249 |
+
# stop recursion
|
| 250 |
+
node.has_fused_arguments = False
|
| 251 |
+
self.nodes.append(node)
|
| 252 |
+
|
| 253 |
+
def specialize_copied_def(self, node, cname, py_entry, f2s, fused_compound_types):
|
| 254 |
+
"""Specialize the copy of a DefNode given the copied node,
|
| 255 |
+
the specialization cname and the original DefNode entry"""
|
| 256 |
+
fused_types = self._get_fused_base_types(fused_compound_types)
|
| 257 |
+
type_strings = [
|
| 258 |
+
PyrexTypes.specialization_signature_string(fused_type, f2s)
|
| 259 |
+
for fused_type in fused_types
|
| 260 |
+
]
|
| 261 |
+
|
| 262 |
+
node.specialized_signature_string = '|'.join(type_strings)
|
| 263 |
+
|
| 264 |
+
node.entry.pymethdef_cname = PyrexTypes.get_fused_cname(
|
| 265 |
+
cname, node.entry.pymethdef_cname)
|
| 266 |
+
node.entry.doc = py_entry.doc
|
| 267 |
+
node.entry.doc_cname = py_entry.doc_cname
|
| 268 |
+
|
| 269 |
+
def replace_fused_typechecks(self, copied_node):
|
| 270 |
+
"""
|
| 271 |
+
Branch-prune fused type checks like
|
| 272 |
+
|
| 273 |
+
if fused_t is int:
|
| 274 |
+
...
|
| 275 |
+
|
| 276 |
+
Returns whether an error was issued and whether we should stop in
|
| 277 |
+
in order to prevent a flood of errors.
|
| 278 |
+
"""
|
| 279 |
+
num_errors = Errors.get_errors_count()
|
| 280 |
+
transform = ParseTreeTransforms.ReplaceFusedTypeChecks(
|
| 281 |
+
copied_node.local_scope)
|
| 282 |
+
transform(copied_node)
|
| 283 |
+
|
| 284 |
+
if Errors.get_errors_count() > num_errors:
|
| 285 |
+
return False
|
| 286 |
+
|
| 287 |
+
return True
|
| 288 |
+
|
| 289 |
+
def _fused_instance_checks(self, normal_types, pyx_code, env):
|
| 290 |
+
"""
|
| 291 |
+
Generate Cython code for instance checks, matching an object to
|
| 292 |
+
specialized types.
|
| 293 |
+
"""
|
| 294 |
+
for specialized_type in normal_types:
|
| 295 |
+
# all_numeric = all_numeric and specialized_type.is_numeric
|
| 296 |
+
py_type_name = specialized_type.py_type_name()
|
| 297 |
+
if py_type_name == 'int':
|
| 298 |
+
# Support Python 2 long
|
| 299 |
+
py_type_name = '(int, long)'
|
| 300 |
+
pyx_code.context.update(
|
| 301 |
+
py_type_name=py_type_name,
|
| 302 |
+
specialized_type_name=specialized_type.specialization_string,
|
| 303 |
+
)
|
| 304 |
+
pyx_code.put_chunk(
|
| 305 |
+
u"""
|
| 306 |
+
if isinstance(arg, {{py_type_name}}):
|
| 307 |
+
dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'; break
|
| 308 |
+
""")
|
| 309 |
+
|
| 310 |
+
def _dtype_name(self, dtype):
|
| 311 |
+
name = str(dtype).replace('_', '__').replace(' ', '_')
|
| 312 |
+
if dtype.is_typedef:
|
| 313 |
+
name = Naming.fused_dtype_prefix + name
|
| 314 |
+
return name
|
| 315 |
+
|
| 316 |
+
def _dtype_type(self, dtype):
|
| 317 |
+
if dtype.is_typedef:
|
| 318 |
+
return self._dtype_name(dtype)
|
| 319 |
+
return str(dtype)
|
| 320 |
+
|
| 321 |
+
def _sizeof_dtype(self, dtype):
|
| 322 |
+
if dtype.is_pyobject:
|
| 323 |
+
return 'sizeof(void *)'
|
| 324 |
+
else:
|
| 325 |
+
return "sizeof(%s)" % self._dtype_type(dtype)
|
| 326 |
+
|
| 327 |
+
def _buffer_check_numpy_dtype_setup_cases(self, pyx_code):
|
| 328 |
+
"Setup some common cases to match dtypes against specializations"
|
| 329 |
+
with pyx_code.indenter("if kind in u'iu':"):
|
| 330 |
+
pyx_code.putln("pass")
|
| 331 |
+
pyx_code.named_insertion_point("dtype_int")
|
| 332 |
+
|
| 333 |
+
with pyx_code.indenter("elif kind == u'f':"):
|
| 334 |
+
pyx_code.putln("pass")
|
| 335 |
+
pyx_code.named_insertion_point("dtype_float")
|
| 336 |
+
|
| 337 |
+
with pyx_code.indenter("elif kind == u'c':"):
|
| 338 |
+
pyx_code.putln("pass")
|
| 339 |
+
pyx_code.named_insertion_point("dtype_complex")
|
| 340 |
+
|
| 341 |
+
with pyx_code.indenter("elif kind == u'O':"):
|
| 342 |
+
pyx_code.putln("pass")
|
| 343 |
+
pyx_code.named_insertion_point("dtype_object")
|
| 344 |
+
|
| 345 |
+
match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'"
|
| 346 |
+
no_match = "dest_sig[{{dest_sig_idx}}] = None"
|
| 347 |
+
def _buffer_check_numpy_dtype(self, pyx_code, specialized_buffer_types, pythran_types):
|
| 348 |
+
"""
|
| 349 |
+
Match a numpy dtype object to the individual specializations.
|
| 350 |
+
"""
|
| 351 |
+
self._buffer_check_numpy_dtype_setup_cases(pyx_code)
|
| 352 |
+
|
| 353 |
+
for specialized_type in pythran_types+specialized_buffer_types:
|
| 354 |
+
final_type = specialized_type
|
| 355 |
+
if specialized_type.is_pythran_expr:
|
| 356 |
+
specialized_type = specialized_type.org_buffer
|
| 357 |
+
dtype = specialized_type.dtype
|
| 358 |
+
pyx_code.context.update(
|
| 359 |
+
itemsize_match=self._sizeof_dtype(dtype) + " == itemsize",
|
| 360 |
+
signed_match="not (%s_is_signed ^ dtype_signed)" % self._dtype_name(dtype),
|
| 361 |
+
dtype=dtype,
|
| 362 |
+
specialized_type_name=final_type.specialization_string)
|
| 363 |
+
|
| 364 |
+
dtypes = [
|
| 365 |
+
(dtype.is_int, pyx_code.dtype_int),
|
| 366 |
+
(dtype.is_float, pyx_code.dtype_float),
|
| 367 |
+
(dtype.is_complex, pyx_code.dtype_complex)
|
| 368 |
+
]
|
| 369 |
+
|
| 370 |
+
for dtype_category, codewriter in dtypes:
|
| 371 |
+
if not dtype_category:
|
| 372 |
+
continue
|
| 373 |
+
cond = '{{itemsize_match}} and (<Py_ssize_t>arg.ndim) == %d' % (
|
| 374 |
+
specialized_type.ndim,)
|
| 375 |
+
if dtype.is_int:
|
| 376 |
+
cond += ' and {{signed_match}}'
|
| 377 |
+
|
| 378 |
+
if final_type.is_pythran_expr:
|
| 379 |
+
cond += ' and arg_is_pythran_compatible'
|
| 380 |
+
|
| 381 |
+
with codewriter.indenter("if %s:" % cond):
|
| 382 |
+
#codewriter.putln("print 'buffer match found based on numpy dtype'")
|
| 383 |
+
codewriter.putln(self.match)
|
| 384 |
+
codewriter.putln("break")
|
| 385 |
+
|
| 386 |
+
def _buffer_parse_format_string_check(self, pyx_code, decl_code,
|
| 387 |
+
specialized_type, env):
|
| 388 |
+
"""
|
| 389 |
+
For each specialized type, try to coerce the object to a memoryview
|
| 390 |
+
slice of that type. This means obtaining a buffer and parsing the
|
| 391 |
+
format string.
|
| 392 |
+
TODO: separate buffer acquisition from format parsing
|
| 393 |
+
"""
|
| 394 |
+
dtype = specialized_type.dtype
|
| 395 |
+
if specialized_type.is_buffer:
|
| 396 |
+
axes = [('direct', 'strided')] * specialized_type.ndim
|
| 397 |
+
else:
|
| 398 |
+
axes = specialized_type.axes
|
| 399 |
+
|
| 400 |
+
memslice_type = PyrexTypes.MemoryViewSliceType(dtype, axes)
|
| 401 |
+
memslice_type.create_from_py_utility_code(env)
|
| 402 |
+
pyx_code.context.update(
|
| 403 |
+
coerce_from_py_func=memslice_type.from_py_function,
|
| 404 |
+
dtype=dtype)
|
| 405 |
+
decl_code.putln(
|
| 406 |
+
"{{memviewslice_cname}} {{coerce_from_py_func}}(object, int)")
|
| 407 |
+
|
| 408 |
+
pyx_code.context.update(
|
| 409 |
+
specialized_type_name=specialized_type.specialization_string,
|
| 410 |
+
sizeof_dtype=self._sizeof_dtype(dtype),
|
| 411 |
+
ndim_dtype=specialized_type.ndim,
|
| 412 |
+
dtype_is_struct_obj=int(dtype.is_struct or dtype.is_pyobject))
|
| 413 |
+
|
| 414 |
+
# use the memoryview object to check itemsize and ndim.
|
| 415 |
+
# In principle it could check more, but these are the easiest to do quickly
|
| 416 |
+
pyx_code.put_chunk(
|
| 417 |
+
u"""
|
| 418 |
+
# try {{dtype}}
|
| 419 |
+
if (((itemsize == -1 and arg_as_memoryview.itemsize == {{sizeof_dtype}})
|
| 420 |
+
or itemsize == {{sizeof_dtype}})
|
| 421 |
+
and arg_as_memoryview.ndim == {{ndim_dtype}}):
|
| 422 |
+
{{if dtype_is_struct_obj}}
|
| 423 |
+
if __PYX_IS_PYPY2:
|
| 424 |
+
# I wasn't able to diagnose why, but PyPy2 fails to convert a
|
| 425 |
+
# memoryview to a Cython memoryview in this case
|
| 426 |
+
memslice = {{coerce_from_py_func}}(arg, 0)
|
| 427 |
+
else:
|
| 428 |
+
{{else}}
|
| 429 |
+
if True:
|
| 430 |
+
{{endif}}
|
| 431 |
+
memslice = {{coerce_from_py_func}}(arg_as_memoryview, 0)
|
| 432 |
+
if memslice.memview:
|
| 433 |
+
__PYX_XCLEAR_MEMVIEW(&memslice, 1)
|
| 434 |
+
# print 'found a match for the buffer through format parsing'
|
| 435 |
+
%s
|
| 436 |
+
break
|
| 437 |
+
else:
|
| 438 |
+
__pyx_PyErr_Clear()
|
| 439 |
+
""" % self.match)
|
| 440 |
+
|
| 441 |
+
def _buffer_checks(self, buffer_types, pythran_types, pyx_code, decl_code, accept_none, env):
|
| 442 |
+
"""
|
| 443 |
+
Generate Cython code to match objects to buffer specializations.
|
| 444 |
+
First try to get a numpy dtype object and match it against the individual
|
| 445 |
+
specializations. If that fails, try naively to coerce the object
|
| 446 |
+
to each specialization, which obtains the buffer each time and tries
|
| 447 |
+
to match the format string.
|
| 448 |
+
"""
|
| 449 |
+
# The first thing to find a match in this loop breaks out of the loop
|
| 450 |
+
pyx_code.put_chunk(
|
| 451 |
+
u"""
|
| 452 |
+
""" + (u"arg_is_pythran_compatible = False" if pythran_types else u"") + u"""
|
| 453 |
+
if ndarray is not None:
|
| 454 |
+
if isinstance(arg, ndarray):
|
| 455 |
+
dtype = arg.dtype
|
| 456 |
+
""" + (u"arg_is_pythran_compatible = True" if pythran_types else u"") + u"""
|
| 457 |
+
elif __pyx_memoryview_check(arg):
|
| 458 |
+
arg_base = arg.base
|
| 459 |
+
if isinstance(arg_base, ndarray):
|
| 460 |
+
dtype = arg_base.dtype
|
| 461 |
+
else:
|
| 462 |
+
dtype = None
|
| 463 |
+
else:
|
| 464 |
+
dtype = None
|
| 465 |
+
|
| 466 |
+
itemsize = -1
|
| 467 |
+
if dtype is not None:
|
| 468 |
+
itemsize = dtype.itemsize
|
| 469 |
+
kind = ord(dtype.kind)
|
| 470 |
+
dtype_signed = kind == u'i'
|
| 471 |
+
""")
|
| 472 |
+
pyx_code.indent(2)
|
| 473 |
+
if pythran_types:
|
| 474 |
+
pyx_code.put_chunk(
|
| 475 |
+
u"""
|
| 476 |
+
# Pythran only supports the endianness of the current compiler
|
| 477 |
+
byteorder = dtype.byteorder
|
| 478 |
+
if byteorder == "<" and not __Pyx_Is_Little_Endian():
|
| 479 |
+
arg_is_pythran_compatible = False
|
| 480 |
+
elif byteorder == ">" and __Pyx_Is_Little_Endian():
|
| 481 |
+
arg_is_pythran_compatible = False
|
| 482 |
+
if arg_is_pythran_compatible:
|
| 483 |
+
cur_stride = itemsize
|
| 484 |
+
shape = arg.shape
|
| 485 |
+
strides = arg.strides
|
| 486 |
+
for i in range(arg.ndim-1, -1, -1):
|
| 487 |
+
if (<Py_ssize_t>strides[i]) != cur_stride:
|
| 488 |
+
arg_is_pythran_compatible = False
|
| 489 |
+
break
|
| 490 |
+
cur_stride *= <Py_ssize_t> shape[i]
|
| 491 |
+
else:
|
| 492 |
+
arg_is_pythran_compatible = not (arg.flags.f_contiguous and (<Py_ssize_t>arg.ndim) > 1)
|
| 493 |
+
""")
|
| 494 |
+
pyx_code.named_insertion_point("numpy_dtype_checks")
|
| 495 |
+
self._buffer_check_numpy_dtype(pyx_code, buffer_types, pythran_types)
|
| 496 |
+
pyx_code.dedent(2)
|
| 497 |
+
|
| 498 |
+
if accept_none:
|
| 499 |
+
# If None is acceptable, then Cython <3.0 matched None with the
|
| 500 |
+
# first type. This behaviour isn't ideal, but keep it for backwards
|
| 501 |
+
# compatibility. Better behaviour would be to see if subsequent
|
| 502 |
+
# arguments give a stronger match.
|
| 503 |
+
pyx_code.context.update(
|
| 504 |
+
specialized_type_name=buffer_types[0].specialization_string
|
| 505 |
+
)
|
| 506 |
+
pyx_code.put_chunk(
|
| 507 |
+
"""
|
| 508 |
+
if arg is None:
|
| 509 |
+
%s
|
| 510 |
+
break
|
| 511 |
+
""" % self.match)
|
| 512 |
+
|
| 513 |
+
# creating a Cython memoryview from a Python memoryview avoids the
|
| 514 |
+
# need to get the buffer multiple times, and we can
|
| 515 |
+
# also use it to check itemsizes etc
|
| 516 |
+
pyx_code.put_chunk(
|
| 517 |
+
"""
|
| 518 |
+
try:
|
| 519 |
+
arg_as_memoryview = memoryview(arg)
|
| 520 |
+
except (ValueError, TypeError):
|
| 521 |
+
pass
|
| 522 |
+
""")
|
| 523 |
+
with pyx_code.indenter("else:"):
|
| 524 |
+
for specialized_type in buffer_types:
|
| 525 |
+
self._buffer_parse_format_string_check(
|
| 526 |
+
pyx_code, decl_code, specialized_type, env)
|
| 527 |
+
|
| 528 |
+
def _buffer_declarations(self, pyx_code, decl_code, all_buffer_types, pythran_types):
|
| 529 |
+
"""
|
| 530 |
+
If we have any buffer specializations, write out some variable
|
| 531 |
+
declarations and imports.
|
| 532 |
+
"""
|
| 533 |
+
decl_code.put_chunk(
|
| 534 |
+
u"""
|
| 535 |
+
ctypedef struct {{memviewslice_cname}}:
|
| 536 |
+
void *memview
|
| 537 |
+
|
| 538 |
+
void __PYX_XCLEAR_MEMVIEW({{memviewslice_cname}} *, int have_gil)
|
| 539 |
+
bint __pyx_memoryview_check(object)
|
| 540 |
+
bint __PYX_IS_PYPY2 "(CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION == 2)"
|
| 541 |
+
""")
|
| 542 |
+
|
| 543 |
+
pyx_code.local_variable_declarations.put_chunk(
|
| 544 |
+
u"""
|
| 545 |
+
cdef {{memviewslice_cname}} memslice
|
| 546 |
+
cdef Py_ssize_t itemsize
|
| 547 |
+
cdef bint dtype_signed
|
| 548 |
+
cdef Py_UCS4 kind
|
| 549 |
+
|
| 550 |
+
itemsize = -1
|
| 551 |
+
""")
|
| 552 |
+
|
| 553 |
+
if pythran_types:
|
| 554 |
+
pyx_code.local_variable_declarations.put_chunk(u"""
|
| 555 |
+
cdef bint arg_is_pythran_compatible
|
| 556 |
+
cdef Py_ssize_t cur_stride
|
| 557 |
+
""")
|
| 558 |
+
|
| 559 |
+
pyx_code.imports.put_chunk(
|
| 560 |
+
u"""
|
| 561 |
+
cdef type ndarray
|
| 562 |
+
ndarray = __Pyx_ImportNumPyArrayTypeIfAvailable()
|
| 563 |
+
""")
|
| 564 |
+
|
| 565 |
+
pyx_code.imports.put_chunk(
|
| 566 |
+
u"""
|
| 567 |
+
cdef memoryview arg_as_memoryview
|
| 568 |
+
"""
|
| 569 |
+
)
|
| 570 |
+
|
| 571 |
+
seen_typedefs = set()
|
| 572 |
+
seen_int_dtypes = set()
|
| 573 |
+
for buffer_type in all_buffer_types:
|
| 574 |
+
dtype = buffer_type.dtype
|
| 575 |
+
dtype_name = self._dtype_name(dtype)
|
| 576 |
+
if dtype.is_typedef:
|
| 577 |
+
if dtype_name not in seen_typedefs:
|
| 578 |
+
seen_typedefs.add(dtype_name)
|
| 579 |
+
decl_code.putln(
|
| 580 |
+
'ctypedef %s %s "%s"' % (dtype.resolve(), dtype_name,
|
| 581 |
+
dtype.empty_declaration_code()))
|
| 582 |
+
|
| 583 |
+
if buffer_type.dtype.is_int:
|
| 584 |
+
if str(dtype) not in seen_int_dtypes:
|
| 585 |
+
seen_int_dtypes.add(str(dtype))
|
| 586 |
+
pyx_code.context.update(dtype_name=dtype_name,
|
| 587 |
+
dtype_type=self._dtype_type(dtype))
|
| 588 |
+
pyx_code.local_variable_declarations.put_chunk(
|
| 589 |
+
u"""
|
| 590 |
+
cdef bint {{dtype_name}}_is_signed
|
| 591 |
+
{{dtype_name}}_is_signed = not (<{{dtype_type}}> -1 > 0)
|
| 592 |
+
""")
|
| 593 |
+
|
| 594 |
+
def _split_fused_types(self, arg):
|
| 595 |
+
"""
|
| 596 |
+
Specialize fused types and split into normal types and buffer types.
|
| 597 |
+
"""
|
| 598 |
+
specialized_types = PyrexTypes.get_specialized_types(arg.type)
|
| 599 |
+
|
| 600 |
+
# Prefer long over int, etc by sorting (see type classes in PyrexTypes.py)
|
| 601 |
+
specialized_types.sort()
|
| 602 |
+
|
| 603 |
+
seen_py_type_names = set()
|
| 604 |
+
normal_types, buffer_types, pythran_types = [], [], []
|
| 605 |
+
has_object_fallback = False
|
| 606 |
+
for specialized_type in specialized_types:
|
| 607 |
+
py_type_name = specialized_type.py_type_name()
|
| 608 |
+
if py_type_name:
|
| 609 |
+
if py_type_name in seen_py_type_names:
|
| 610 |
+
continue
|
| 611 |
+
seen_py_type_names.add(py_type_name)
|
| 612 |
+
if py_type_name == 'object':
|
| 613 |
+
has_object_fallback = True
|
| 614 |
+
else:
|
| 615 |
+
normal_types.append(specialized_type)
|
| 616 |
+
elif specialized_type.is_pythran_expr:
|
| 617 |
+
pythran_types.append(specialized_type)
|
| 618 |
+
elif specialized_type.is_buffer or specialized_type.is_memoryviewslice:
|
| 619 |
+
buffer_types.append(specialized_type)
|
| 620 |
+
|
| 621 |
+
return normal_types, buffer_types, pythran_types, has_object_fallback
|
| 622 |
+
|
| 623 |
+
def _unpack_argument(self, pyx_code):
|
| 624 |
+
pyx_code.put_chunk(
|
| 625 |
+
u"""
|
| 626 |
+
# PROCESSING ARGUMENT {{arg_tuple_idx}}
|
| 627 |
+
if {{arg_tuple_idx}} < len(<tuple>args):
|
| 628 |
+
arg = (<tuple>args)[{{arg_tuple_idx}}]
|
| 629 |
+
elif kwargs is not None and '{{arg.name}}' in <dict>kwargs:
|
| 630 |
+
arg = (<dict>kwargs)['{{arg.name}}']
|
| 631 |
+
else:
|
| 632 |
+
{{if arg.default}}
|
| 633 |
+
arg = (<tuple>defaults)[{{default_idx}}]
|
| 634 |
+
{{else}}
|
| 635 |
+
{{if arg_tuple_idx < min_positional_args}}
|
| 636 |
+
raise TypeError("Expected at least %d argument%s, got %d" % (
|
| 637 |
+
{{min_positional_args}}, {{'"s"' if min_positional_args != 1 else '""'}}, len(<tuple>args)))
|
| 638 |
+
{{else}}
|
| 639 |
+
raise TypeError("Missing keyword-only argument: '%s'" % "{{arg.default}}")
|
| 640 |
+
{{endif}}
|
| 641 |
+
{{endif}}
|
| 642 |
+
""")
|
| 643 |
+
|
| 644 |
+
def _fused_signature_index(self, pyx_code):
|
| 645 |
+
"""
|
| 646 |
+
Generate Cython code for constructing a persistent nested dictionary index of
|
| 647 |
+
fused type specialization signatures.
|
| 648 |
+
"""
|
| 649 |
+
pyx_code.put_chunk(
|
| 650 |
+
u"""
|
| 651 |
+
if not _fused_sigindex:
|
| 652 |
+
for sig in <dict> signatures:
|
| 653 |
+
sigindex_node = <dict> _fused_sigindex
|
| 654 |
+
*sig_series, last_type = sig.strip('()').split('|')
|
| 655 |
+
for sig_type in sig_series:
|
| 656 |
+
if sig_type not in sigindex_node:
|
| 657 |
+
sigindex_node[sig_type] = sigindex_node = {}
|
| 658 |
+
else:
|
| 659 |
+
sigindex_node = <dict> sigindex_node[sig_type]
|
| 660 |
+
sigindex_node[last_type] = sig
|
| 661 |
+
"""
|
| 662 |
+
)
|
| 663 |
+
|
| 664 |
+
def make_fused_cpdef(self, orig_py_func, env, is_def):
|
| 665 |
+
"""
|
| 666 |
+
This creates the function that is indexable from Python and does
|
| 667 |
+
runtime dispatch based on the argument types. The function gets the
|
| 668 |
+
arg tuple and kwargs dict (or None) and the defaults tuple
|
| 669 |
+
as arguments from the Binding Fused Function's tp_call.
|
| 670 |
+
"""
|
| 671 |
+
from . import TreeFragment, Code, UtilityCode
|
| 672 |
+
|
| 673 |
+
fused_types = self._get_fused_base_types([
|
| 674 |
+
arg.type for arg in self.node.args if arg.type.is_fused])
|
| 675 |
+
|
| 676 |
+
context = {
|
| 677 |
+
'memviewslice_cname': MemoryView.memviewslice_cname,
|
| 678 |
+
'func_args': self.node.args,
|
| 679 |
+
'n_fused': len(fused_types),
|
| 680 |
+
'min_positional_args':
|
| 681 |
+
self.node.num_required_args - self.node.num_required_kw_args
|
| 682 |
+
if is_def else
|
| 683 |
+
sum(1 for arg in self.node.args if arg.default is None),
|
| 684 |
+
'name': orig_py_func.entry.name,
|
| 685 |
+
}
|
| 686 |
+
|
| 687 |
+
pyx_code = Code.PyxCodeWriter(context=context)
|
| 688 |
+
decl_code = Code.PyxCodeWriter(context=context)
|
| 689 |
+
decl_code.put_chunk(
|
| 690 |
+
u"""
|
| 691 |
+
cdef extern from *:
|
| 692 |
+
void __pyx_PyErr_Clear "PyErr_Clear" ()
|
| 693 |
+
type __Pyx_ImportNumPyArrayTypeIfAvailable()
|
| 694 |
+
int __Pyx_Is_Little_Endian()
|
| 695 |
+
""")
|
| 696 |
+
decl_code.indent()
|
| 697 |
+
|
| 698 |
+
pyx_code.put_chunk(
|
| 699 |
+
u"""
|
| 700 |
+
def __pyx_fused_cpdef(signatures, args, kwargs, defaults, _fused_sigindex={}):
|
| 701 |
+
# FIXME: use a typed signature - currently fails badly because
|
| 702 |
+
# default arguments inherit the types we specify here!
|
| 703 |
+
|
| 704 |
+
cdef list search_list
|
| 705 |
+
cdef dict sigindex_node
|
| 706 |
+
|
| 707 |
+
dest_sig = [None] * {{n_fused}}
|
| 708 |
+
|
| 709 |
+
if kwargs is not None and not kwargs:
|
| 710 |
+
kwargs = None
|
| 711 |
+
|
| 712 |
+
cdef Py_ssize_t i
|
| 713 |
+
|
| 714 |
+
# instance check body
|
| 715 |
+
""")
|
| 716 |
+
|
| 717 |
+
pyx_code.indent() # indent following code to function body
|
| 718 |
+
pyx_code.named_insertion_point("imports")
|
| 719 |
+
pyx_code.named_insertion_point("func_defs")
|
| 720 |
+
pyx_code.named_insertion_point("local_variable_declarations")
|
| 721 |
+
|
| 722 |
+
fused_index = 0
|
| 723 |
+
default_idx = 0
|
| 724 |
+
all_buffer_types = OrderedSet()
|
| 725 |
+
seen_fused_types = set()
|
| 726 |
+
for i, arg in enumerate(self.node.args):
|
| 727 |
+
if arg.type.is_fused:
|
| 728 |
+
arg_fused_types = arg.type.get_fused_types()
|
| 729 |
+
if len(arg_fused_types) > 1:
|
| 730 |
+
raise NotImplementedError("Determination of more than one fused base "
|
| 731 |
+
"type per argument is not implemented.")
|
| 732 |
+
fused_type = arg_fused_types[0]
|
| 733 |
+
|
| 734 |
+
if arg.type.is_fused and fused_type not in seen_fused_types:
|
| 735 |
+
seen_fused_types.add(fused_type)
|
| 736 |
+
|
| 737 |
+
context.update(
|
| 738 |
+
arg_tuple_idx=i,
|
| 739 |
+
arg=arg,
|
| 740 |
+
dest_sig_idx=fused_index,
|
| 741 |
+
default_idx=default_idx,
|
| 742 |
+
)
|
| 743 |
+
|
| 744 |
+
normal_types, buffer_types, pythran_types, has_object_fallback = self._split_fused_types(arg)
|
| 745 |
+
self._unpack_argument(pyx_code)
|
| 746 |
+
|
| 747 |
+
# 'unrolled' loop, first match breaks out of it
|
| 748 |
+
with pyx_code.indenter("while 1:"):
|
| 749 |
+
if normal_types:
|
| 750 |
+
self._fused_instance_checks(normal_types, pyx_code, env)
|
| 751 |
+
if buffer_types or pythran_types:
|
| 752 |
+
env.use_utility_code(Code.UtilityCode.load_cached("IsLittleEndian", "ModuleSetupCode.c"))
|
| 753 |
+
self._buffer_checks(
|
| 754 |
+
buffer_types, pythran_types, pyx_code, decl_code,
|
| 755 |
+
arg.accept_none, env)
|
| 756 |
+
if has_object_fallback:
|
| 757 |
+
pyx_code.context.update(specialized_type_name='object')
|
| 758 |
+
pyx_code.putln(self.match)
|
| 759 |
+
else:
|
| 760 |
+
pyx_code.putln(self.no_match)
|
| 761 |
+
pyx_code.putln("break")
|
| 762 |
+
|
| 763 |
+
fused_index += 1
|
| 764 |
+
all_buffer_types.update(buffer_types)
|
| 765 |
+
all_buffer_types.update(ty.org_buffer for ty in pythran_types)
|
| 766 |
+
|
| 767 |
+
if arg.default:
|
| 768 |
+
default_idx += 1
|
| 769 |
+
|
| 770 |
+
if all_buffer_types:
|
| 771 |
+
self._buffer_declarations(pyx_code, decl_code, all_buffer_types, pythran_types)
|
| 772 |
+
env.use_utility_code(Code.UtilityCode.load_cached("Import", "ImportExport.c"))
|
| 773 |
+
env.use_utility_code(Code.UtilityCode.load_cached("ImportNumPyArray", "ImportExport.c"))
|
| 774 |
+
|
| 775 |
+
self._fused_signature_index(pyx_code)
|
| 776 |
+
|
| 777 |
+
pyx_code.put_chunk(
|
| 778 |
+
u"""
|
| 779 |
+
sigindex_matches = []
|
| 780 |
+
sigindex_candidates = [_fused_sigindex]
|
| 781 |
+
|
| 782 |
+
for dst_type in dest_sig:
|
| 783 |
+
found_matches = []
|
| 784 |
+
found_candidates = []
|
| 785 |
+
# Make two separate lists: One for signature sub-trees
|
| 786 |
+
# with at least one definite match, and another for
|
| 787 |
+
# signature sub-trees with only ambiguous matches
|
| 788 |
+
# (where `dest_sig[i] is None`).
|
| 789 |
+
if dst_type is None:
|
| 790 |
+
for sn in sigindex_matches:
|
| 791 |
+
found_matches.extend((<dict> sn).values())
|
| 792 |
+
for sn in sigindex_candidates:
|
| 793 |
+
found_candidates.extend((<dict> sn).values())
|
| 794 |
+
else:
|
| 795 |
+
for search_list in (sigindex_matches, sigindex_candidates):
|
| 796 |
+
for sn in search_list:
|
| 797 |
+
type_match = (<dict> sn).get(dst_type)
|
| 798 |
+
if type_match is not None:
|
| 799 |
+
found_matches.append(type_match)
|
| 800 |
+
sigindex_matches = found_matches
|
| 801 |
+
sigindex_candidates = found_candidates
|
| 802 |
+
if not (found_matches or found_candidates):
|
| 803 |
+
break
|
| 804 |
+
|
| 805 |
+
candidates = sigindex_matches
|
| 806 |
+
|
| 807 |
+
if not candidates:
|
| 808 |
+
raise TypeError("No matching signature found")
|
| 809 |
+
elif len(candidates) > 1:
|
| 810 |
+
raise TypeError("Function call with ambiguous argument types")
|
| 811 |
+
else:
|
| 812 |
+
return (<dict>signatures)[candidates[0]]
|
| 813 |
+
""")
|
| 814 |
+
|
| 815 |
+
fragment_code = pyx_code.getvalue()
|
| 816 |
+
# print decl_code.getvalue()
|
| 817 |
+
# print fragment_code
|
| 818 |
+
from .Optimize import ConstantFolding
|
| 819 |
+
fragment = TreeFragment.TreeFragment(
|
| 820 |
+
fragment_code, level='module', pipeline=[ConstantFolding()])
|
| 821 |
+
ast = TreeFragment.SetPosTransform(self.node.pos)(fragment.root)
|
| 822 |
+
UtilityCode.declare_declarations_in_scope(
|
| 823 |
+
decl_code.getvalue(), env.global_scope())
|
| 824 |
+
ast.scope = env
|
| 825 |
+
# FIXME: for static methods of cdef classes, we build the wrong signature here: first arg becomes 'self'
|
| 826 |
+
ast.analyse_declarations(env)
|
| 827 |
+
py_func = ast.stats[-1] # the DefNode
|
| 828 |
+
self.fragment_scope = ast.scope
|
| 829 |
+
|
| 830 |
+
if isinstance(self.node, DefNode):
|
| 831 |
+
py_func.specialized_cpdefs = self.nodes[:]
|
| 832 |
+
else:
|
| 833 |
+
py_func.specialized_cpdefs = [n.py_func for n in self.nodes]
|
| 834 |
+
|
| 835 |
+
return py_func
|
| 836 |
+
|
| 837 |
+
def update_fused_defnode_entry(self, env):
    """Make the dispatcher's entry impersonate the original (pre-fused)
    function: copy its identifying attributes, re-register it in *env*,
    and link every specialization back to the shared dispatcher.
    """
    # Attributes copied verbatim from the original function's entry onto
    # the dispatcher's entry so callers see the original identity.
    copy_attributes = (
        'name', 'pos', 'cname', 'func_cname', 'pyfunc_cname',
        'pymethdef_cname', 'doc', 'doc_cname', 'is_member',
        'scope'
    )

    entry = self.py_func.entry

    for attr in copy_attributes:
        setattr(entry, attr,
                getattr(self.orig_py_func.entry, attr))

    self.py_func.name = self.orig_py_func.name
    self.py_func.doc = self.orig_py_func.doc

    # Remove the placeholder name the dispatcher was registered under
    # (if present), then expose the dispatcher under the real name.
    env.entries.pop('__pyx_fused_cpdef', None)
    if isinstance(self.node, DefNode):
        # Plain 'def': the dispatcher entry simply replaces the original.
        env.entries[entry.name] = entry
    else:
        # 'cpdef'/C function: keep the existing entry and attach the
        # dispatcher as its Python-visible variable.
        env.entries[entry.name].as_variable = entry

    env.pyfunc_entries.append(entry)

    # Cross-link dispatcher and specializations.
    self.py_func.entry.fused_cfunction = self
    for node in self.nodes:
        if isinstance(self.node, DefNode):
            node.fused_py_func = self.py_func
        else:
            node.py_func.fused_py_func = self.py_func
            node.entry.as_variable = entry

    # Build the __signatures__ dict of specializations and schedule it
    # for evaluation with this node's statements.
    self.synthesize_defnodes()
    self.stats.append(self.__signatures__)
|
| 871 |
+
|
| 872 |
+
def analyse_expressions(self, env):
    """
    Analyse the expressions. Take care to only evaluate default arguments
    once and clone the result for all specializations
    """
    # Complex specializations need their declaration utility code emitted.
    for fused_compound_type in self.fused_compound_types:
        for fused_type in fused_compound_type.get_fused_types():
            for specialization_type in fused_type.types:
                if specialization_type.is_complex:
                    specialization_type.create_declaration_utility_code(env)

    if self.py_func:
        self.__signatures__ = self.__signatures__.analyse_expressions(env)
        self.py_func = self.py_func.analyse_expressions(env)
        self.resulting_fused_function = self.resulting_fused_function.analyse_expressions(env)
        self.fused_func_assignment = self.fused_func_assignment.analyse_expressions(env)

    # One slot per argument: a (proxied) default expression or None.
    self.defaults = defaults = []

    for arg in self.node.args:
        if arg.default:
            arg.default = arg.default.analyse_expressions(env)
            if arg.default.is_literal:
                defaults.append(copy.copy(arg.default))
            else:
                # coerce the argument to temp since CloneNode really requires a temp
                defaults.append(ProxyNode(arg.default.coerce_to_temp(env)))
        else:
            defaults.append(None)

    for i, stat in enumerate(self.stats):
        stat = self.stats[i] = stat.analyse_expressions(env)
        if isinstance(stat, FuncDefNode) and stat is not self.py_func:
            # the dispatcher specifically doesn't want its defaults overriding
            for arg, default in zip(stat.args, defaults):
                if default is not None:
                    if default.is_literal:
                        arg.default = default.coerce_to(arg.type, env)
                    else:
                        arg.default = CloneNode(default).analyse_expressions(env).coerce_to(arg.type, env)

    if self.py_func:
        # Share one defaults tuple and one code object across all
        # specializations via Proxy/Clone nodes.
        args = [CloneNode(default) for default in defaults if default]
        self.defaults_tuple = TupleNode(self.pos, args=args)
        self.defaults_tuple = self.defaults_tuple.analyse_types(env, skip_children=True).coerce_to_pyobject(env)
        self.defaults_tuple = ProxyNode(self.defaults_tuple)
        self.code_object = ProxyNode(self.specialized_pycfuncs[0].code_object)

        fused_func = self.resulting_fused_function.arg
        fused_func.defaults_tuple = CloneNode(self.defaults_tuple)
        fused_func.code_object = CloneNode(self.code_object)

        for i, pycfunc in enumerate(self.specialized_pycfuncs):
            pycfunc.code_object = CloneNode(self.code_object)
            pycfunc = self.specialized_pycfuncs[i] = pycfunc.analyse_types(env)
            pycfunc.defaults_tuple = CloneNode(self.defaults_tuple)
    return self
|
| 929 |
+
|
| 930 |
+
def synthesize_defnodes(self):
    """
    Create the __signatures__ dict of PyCFunctionNode specializations.
    """
    # For cdef/cpdef specializations, work with their Python wrappers.
    if isinstance(self.nodes[0], CFuncDefNode):
        nodes = [node.py_func for node in self.nodes]
    else:
        nodes = self.nodes

    # For the moment, fused functions do not support METH_FASTCALL
    for node in nodes:
        node.entry.signature.use_fastcall = False

    # Map each specialization's signature string to a bound PyCFunction.
    signatures = [StringEncoding.EncodedString(node.specialized_signature_string)
                  for node in nodes]
    keys = [ExprNodes.StringNode(node.pos, value=sig)
            for node, sig in zip(nodes, signatures)]
    values = [ExprNodes.PyCFunctionNode.from_defnode(node, binding=True)
              for node in nodes]

    self.__signatures__ = ExprNodes.DictNode.from_pairs(self.pos, zip(keys, values))

    self.specialized_pycfuncs = values
    for pycfuncnode in values:
        pycfuncnode.is_specialization = True
|
| 955 |
+
|
| 956 |
+
def generate_function_definitions(self, env, code):
    """Emit definitions for the dispatcher assignment (when present) and
    for every specialization that is used, or that must be kept because
    ``Options.cimport_from_pyx`` is enabled and it is not 'extern'."""
    if self.py_func:
        self.py_func.pymethdef_required = True
        self.fused_func_assignment.generate_function_definitions(env, code)

    from . import Options
    for stat in self.stats:
        if not isinstance(stat, FuncDefNode):
            continue
        keep = stat.entry.used or (
            Options.cimport_from_pyx and stat.entry.visibility != 'extern')
        if keep:
            code.mark_pos(stat.pos)
            stat.generate_function_definitions(env, code)
|
| 968 |
+
|
| 969 |
+
def generate_execution_code(self, code):
    """Evaluate shared defaults and all statements, then attach the
    __signatures__ dict to the resulting fused function object."""
    # Note: all def function specialization are wrapped in PyCFunction
    # nodes in the self.__signatures__ dictnode.
    for default in self.defaults:
        if default is not None:
            default.generate_evaluation_code(code)

    if self.py_func:
        self.defaults_tuple.generate_evaluation_code(code)
        self.code_object.generate_evaluation_code(code)

    for stat in self.stats:
        code.mark_pos(stat.pos)
        if isinstance(stat, ExprNodes.ExprNode):
            stat.generate_evaluation_code(code)
        else:
            stat.generate_execution_code(code)

    if self.__signatures__:
        self.resulting_fused_function.generate_evaluation_code(code)

        # Store the signatures dict directly on the fused function object.
        code.putln(
            "((__pyx_FusedFunctionObject *) %s)->__signatures__ = %s;" %
            (self.resulting_fused_function.result(),
             self.__signatures__.result()))
        self.__signatures__.generate_giveref(code)
        self.__signatures__.generate_post_assignment_code(code)
        self.__signatures__.free_temps(code)

        self.fused_func_assignment.generate_execution_code(code)

        # Dispose of results
        self.resulting_fused_function.generate_disposal_code(code)
        self.resulting_fused_function.free_temps(code)
        self.defaults_tuple.generate_disposal_code(code)
        self.defaults_tuple.free_temps(code)
        self.code_object.generate_disposal_code(code)
        self.code_object.free_temps(code)

    # Defaults were evaluated unconditionally above; release them here.
    for default in self.defaults:
        if default is not None:
            default.generate_disposal_code(code)
            default.free_temps(code)
|
| 1012 |
+
|
| 1013 |
+
def annotate(self, code):
    """Forward annotation to every statement owned by this node."""
    for child in self.stats:
        child.annotate(code)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Future.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
def _lookup(feature_name):
    """Return the named __future__ feature object.

    Features unknown to this Python version are replaced by a fresh,
    unique dummy object so that identity comparisons always fail for them.
    """
    import __future__
    return getattr(__future__, feature_name, object())


unicode_literals = _lookup("unicode_literals")
with_statement = _lookup("with_statement")  # dummy on modern Pythons
division = _lookup("division")
print_function = _lookup("print_function")
absolute_import = _lookup("absolute_import")
nested_scopes = _lookup("nested_scopes")  # dummy on modern Pythons
generators = _lookup("generators")  # dummy on modern Pythons
generator_stop = _lookup("generator_stop")
annotations = _lookup("annotations")

# The helper is module-internal scaffolding; drop it from the namespace.
del _lookup
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Interpreter.py
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module deals with interpreting the parse tree as Python
|
| 3 |
+
would have done, in the compiler.
|
| 4 |
+
|
| 5 |
+
For now this only covers parse tree to value conversion of
|
| 6 |
+
compile-time values.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
from __future__ import absolute_import
|
| 10 |
+
|
| 11 |
+
from .Nodes import *
|
| 12 |
+
from .ExprNodes import *
|
| 13 |
+
from .Errors import CompileError
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class EmptyScope(object):
    """A scope that contains no names: every lookup misses."""

    def lookup(self, name):
        # Nothing is ever defined in an empty scope.
        return None


# Shared singleton used for compile-time evaluation without a namespace.
empty_scope = EmptyScope()
|
| 21 |
+
|
| 22 |
+
def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=()):
    """
    Tries to interpret a list of compile time option nodes.
    The result will be a tuple (optlist, optdict) but where
    all expression nodes have been interpreted. The result is
    in the form of tuples (value, pos).

    optlist is a list of nodes, while optdict is a DictNode (the
    result optdict is a dict)

    If type_env is set, all type nodes will be analysed and the resulting
    type set. Otherwise only interpretable ExprNodes
    are allowed, other nodes raise errors.

    A CompileError will be raised if there are problems.
    """

    def interpret(node, ix):
        # Positions listed in type_args are interpreted as type names.
        if ix in type_args:
            if type_env:
                type = node.analyse_as_type(type_env)
                if not type:
                    raise CompileError(node.pos, "Invalid type.")
                return (type, node.pos)
            else:
                raise CompileError(node.pos, "Type not allowed here.")
        else:
            # On Py3, prefer the unicode value of a string node when present.
            if (sys.version_info[0] >= 3 and
                    isinstance(node, StringNode) and
                    node.unicode_value is not None):
                return (node.unicode_value, node.pos)
            return (node.compile_time_value(empty_scope), node.pos)

    if optlist:
        optlist = [interpret(x, ix) for ix, x in enumerate(optlist)]
    if optdict:
        assert isinstance(optdict, DictNode)
        new_optdict = {}
        for item in optdict.key_value_pairs:
            new_key, dummy = interpret(item.key, None)
            new_optdict[new_key] = interpret(item.value, item.key.value)
        optdict = new_optdict
    # BUG FIX: this previously returned `new_optdict`, which is unbound
    # (NameError) whenever `optdict` is empty or None.  `optdict` already
    # holds the interpreted dict when one was built above.
    return (optlist, optdict)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Lexicon.py
ADDED
|
@@ -0,0 +1,342 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# cython: language_level=3, py2_import=True
#
# Cython Scanner - Lexical Definitions
#

from __future__ import absolute_import, unicode_literals

# Letters that may appear as prefixes in front of a string literal.
raw_prefixes = "rR"
bytes_prefixes = "bB"
string_prefixes = "fFuU" + bytes_prefixes
char_prefixes = "cC"
# Every character that can begin a string-literal prefix.
any_string_prefix = raw_prefixes + string_prefixes + char_prefixes
# Symbolic token name for identifiers.
IDENT = 'IDENT'
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def make_lexicon():
    """Build and return the Plex Lexicon that drives Cython's scanner.

    Defines the token patterns (identifiers, numeric literals, operators,
    string delimiters, comments) plus sub-states for indentation handling
    and each kind of string literal.  Relies on the generated
    unicode_* tables defined at module level below.
    """
    from ..Plex import \
        Str, Any, AnyBut, AnyChar, Rep, Rep1, Opt, Bol, Eol, Eof, \
        TEXT, IGNORE, Method, State, Lexicon, Range

    # Basic character classes.
    nonzero_digit = Any("123456789")
    digit = Any("0123456789")
    bindigit = Any("01")
    octdigit = Any("01234567")
    hexdigit = Any("0123456789ABCDEFabcdef")
    indentation = Bol + Rep(Any(" \t"))

    # The list of valid unicode identifier characters are pretty slow to generate at runtime,
    # and require Python3, so are just included directly here
    # (via the generated code block at the bottom of the file)
    unicode_start_character = (Any(unicode_start_ch_any) | Range(unicode_start_ch_range))
    unicode_continuation_character = (
        unicode_start_character |
        Any(unicode_continuation_ch_any) | Range(unicode_continuation_ch_range))

    # Digits optionally separated by single underscores (PEP 515 style).
    def underscore_digits(d):
        return Rep1(d) + Rep(Str("_") + Rep1(d))

    # A radix prefix followed by underscore-separated digits.
    def prefixed_digits(prefix, digits):
        return prefix + Opt(Str("_")) + underscore_digits(digits)

    decimal = underscore_digits(digit)
    dot = Str(".")
    exponent = Any("Ee") + Opt(Any("+-")) + decimal
    decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)

    #name = letter + Rep(letter | digit)
    name = unicode_start_character + Rep(unicode_continuation_character)
    intconst = (prefixed_digits(nonzero_digit, digit) |  # decimal literals with underscores must not start with '0'
                (Str("0") + (prefixed_digits(Any("Xx"), hexdigit) |
                             prefixed_digits(Any("Oo"), octdigit) |
                             prefixed_digits(Any("Bb"), bindigit) )) |
                underscore_digits(Str('0'))  # 0_0_0_0... is allowed as a decimal literal
                | Rep1(digit)  # FIXME: remove these Py2 style decimal/octal literals (PY_VERSION_HEX < 3)
                )
    # C-style integer suffixes in either order (e.g. 'UL', 'LLu').
    intsuffix = (Opt(Any("Uu")) + Opt(Any("Ll")) + Opt(Any("Ll"))) | (Opt(Any("Ll")) + Opt(Any("Ll")) + Opt(Any("Uu")))
    intliteral = intconst + intsuffix
    fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
    imagconst = (intconst | fltconst) + Any("jJ")

    # invalid combinations of prefixes are caught in p_string_literal
    beginstring = Opt(Rep(Any(string_prefixes + raw_prefixes)) |
                      Any(char_prefixes)
                      ) + (Str("'") | Str('"') | Str("'''") | Str('"""'))
    two_oct = octdigit + octdigit
    three_oct = octdigit + octdigit + octdigit
    two_hex = hexdigit + hexdigit
    four_hex = two_hex + two_hex
    # All recognized backslash escapes inside string literals.
    escapeseq = Str("\\") + (two_oct | three_oct |
                             Str('N{') + Rep(AnyBut('}')) + Str('}') |
                             Str('u') + four_hex | Str('x') + two_hex |
                             Str('U') + four_hex + four_hex | AnyChar)

    bra = Any("([{")
    ket = Any(")]}")
    ellipsis = Str("...")
    punct = Any(":,;+-*/|&<>=.%`~^?!@")
    diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//",
                    "+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=",
                    "<<=", ">>=", "**=", "//=", "->", "@=", "&&", "||", ':=')
    spaces = Rep1(Any(" \t\f"))
    escaped_newline = Str("\\\n")
    lineterm = Eol + Opt(Str("\n"))

    comment = Str("#") + Rep(AnyBut("\n"))

    return Lexicon([
        (name, Method('normalize_ident')),
        (intliteral, Method('strip_underscores', symbol='INT')),
        (fltconst, Method('strip_underscores', symbol='FLOAT')),
        (imagconst, Method('strip_underscores', symbol='IMAG')),
        (ellipsis | punct | diphthong, TEXT),

        (bra, Method('open_bracket_action')),
        (ket, Method('close_bracket_action')),
        (lineterm, Method('newline_action')),

        (beginstring, Method('begin_string_action')),

        (comment, IGNORE),
        (spaces, IGNORE),
        (escaped_newline, IGNORE),

        # Start-of-line state: measures indentation, skips blank lines.
        State('INDENT', [
            (comment + lineterm, Method('commentline')),
            (Opt(spaces) + Opt(comment) + lineterm, IGNORE),
            (indentation, Method('indentation_action')),
            (Eof, Method('eof_action'))
        ]),

        # Single-quoted string body.
        State('SQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
            (Str('"'), 'CHARS'),
            (Str("\n"), Method('unclosed_string_action')),
            (Str("'"), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        # Double-quoted string body.
        State('DQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut('"\n\\')), 'CHARS'),
            (Str("'"), 'CHARS'),
            (Str("\n"), Method('unclosed_string_action')),
            (Str('"'), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        # Triple-single-quoted string body (newlines allowed).
        State('TSQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
            (Any("'\""), 'CHARS'),
            (Str("\n"), 'NEWLINE'),
            (Str("'''"), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        # Triple-double-quoted string body (newlines allowed).
        State('TDQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut('"\'\n\\')), 'CHARS'),
            (Any("'\""), 'CHARS'),
            (Str("\n"), 'NEWLINE'),
            (Str('"""'), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        (Eof, Method('eof_action'))
        ],

        # FIXME: Plex 1.9 needs different args here from Plex 1.1.4
        #debug_flags = scanner_debug_flags,
        #debug_file = scanner_dump_file
        )
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
# BEGIN GENERATED CODE
|
| 158 |
+
# Generated with 'cython-generate-lexicon.py' from:
|
| 159 |
+
# cpython 3.12.0a7+ (heads/master:4cd1cc843a, Apr 11 2023, 10:32:26) [GCC 11.3.0]
|
| 160 |
+
|
| 161 |
+
unicode_start_ch_any = (
|
| 162 |
+
u"\u005f\u00aa\u00b5\u00ba\u02ec\u02ee\u037f\u0386\u038c\u0559\u06d5"
|
| 163 |
+
u"\u06ff\u0710\u07b1\u07fa\u081a\u0824\u0828\u093d\u0950\u09b2\u09bd"
|
| 164 |
+
u"\u09ce\u09fc\u0a5e\u0abd\u0ad0\u0af9\u0b3d\u0b71\u0b83\u0b9c\u0bd0"
|
| 165 |
+
u"\u0c3d\u0c5d\u0c80\u0cbd\u0d3d\u0d4e\u0dbd\u0e32\u0e84\u0ea5\u0eb2"
|
| 166 |
+
u"\u0ebd\u0ec6\u0f00\u103f\u1061\u108e\u10c7\u10cd\u1258\u12c0\u17d7"
|
| 167 |
+
u"\u17dc\u18aa\u1aa7\u1cfa\u1f59\u1f5b\u1f5d\u1fbe\u2071\u207f\u2102"
|
| 168 |
+
u"\u2107\u2115\u2124\u2126\u2128\u214e\u2d27\u2d2d\u2d6f\ua7d3\ua8fb"
|
| 169 |
+
u"\ua9cf\uaa7a\uaab1\uaac0\uaac2\ufb1d\ufb3e\ufe71\ufe73\ufe77\ufe79"
|
| 170 |
+
u"\ufe7b\ufe7d\U00010808\U0001083c\U00010a00\U00010f27\U00011075\U00011144\U00011147\U00011176\U000111da"
|
| 171 |
+
u"\U000111dc\U00011288\U0001133d\U00011350\U000114c7\U00011644\U000116b8\U00011909\U0001193f\U00011941\U000119e1"
|
| 172 |
+
u"\U000119e3\U00011a00\U00011a3a\U00011a50\U00011a9d\U00011c40\U00011d46\U00011d98\U00011f02\U00011fb0\U00016f50"
|
| 173 |
+
u"\U00016fe3\U0001b132\U0001b155\U0001d4a2\U0001d4bb\U0001d546\U0001e14e\U0001e94b\U0001ee24\U0001ee27\U0001ee39"
|
| 174 |
+
u"\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f"
|
| 175 |
+
u"\U0001ee64\U0001ee7e"
|
| 176 |
+
)
|
| 177 |
+
unicode_start_ch_range = (
|
| 178 |
+
u"\u0041\u005a\u0061\u007a\u00c0\u00d6\u00d8\u00f6\u00f8\u02c1\u02c6"
|
| 179 |
+
u"\u02d1\u02e0\u02e4\u0370\u0374\u0376\u0377\u037b\u037d\u0388\u038a"
|
| 180 |
+
u"\u038e\u03a1\u03a3\u03f5\u03f7\u0481\u048a\u052f\u0531\u0556\u0560"
|
| 181 |
+
u"\u0588\u05d0\u05ea\u05ef\u05f2\u0620\u064a\u066e\u066f\u0671\u06d3"
|
| 182 |
+
u"\u06e5\u06e6\u06ee\u06ef\u06fa\u06fc\u0712\u072f\u074d\u07a5\u07ca"
|
| 183 |
+
u"\u07ea\u07f4\u07f5\u0800\u0815\u0840\u0858\u0860\u086a\u0870\u0887"
|
| 184 |
+
u"\u0889\u088e\u08a0\u08c9\u0904\u0939\u0958\u0961\u0971\u0980\u0985"
|
| 185 |
+
u"\u098c\u098f\u0990\u0993\u09a8\u09aa\u09b0\u09b6\u09b9\u09dc\u09dd"
|
| 186 |
+
u"\u09df\u09e1\u09f0\u09f1\u0a05\u0a0a\u0a0f\u0a10\u0a13\u0a28\u0a2a"
|
| 187 |
+
u"\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59\u0a5c\u0a72\u0a74"
|
| 188 |
+
u"\u0a85\u0a8d\u0a8f\u0a91\u0a93\u0aa8\u0aaa\u0ab0\u0ab2\u0ab3\u0ab5"
|
| 189 |
+
u"\u0ab9\u0ae0\u0ae1\u0b05\u0b0c\u0b0f\u0b10\u0b13\u0b28\u0b2a\u0b30"
|
| 190 |
+
u"\u0b32\u0b33\u0b35\u0b39\u0b5c\u0b5d\u0b5f\u0b61\u0b85\u0b8a\u0b8e"
|
| 191 |
+
u"\u0b90\u0b92\u0b95\u0b99\u0b9a\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8\u0baa"
|
| 192 |
+
u"\u0bae\u0bb9\u0c05\u0c0c\u0c0e\u0c10\u0c12\u0c28\u0c2a\u0c39\u0c58"
|
| 193 |
+
u"\u0c5a\u0c60\u0c61\u0c85\u0c8c\u0c8e\u0c90\u0c92\u0ca8\u0caa\u0cb3"
|
| 194 |
+
u"\u0cb5\u0cb9\u0cdd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d04\u0d0c\u0d0e"
|
| 195 |
+
u"\u0d10\u0d12\u0d3a\u0d54\u0d56\u0d5f\u0d61\u0d7a\u0d7f\u0d85\u0d96"
|
| 196 |
+
u"\u0d9a\u0db1\u0db3\u0dbb\u0dc0\u0dc6\u0e01\u0e30\u0e40\u0e46\u0e81"
|
| 197 |
+
u"\u0e82\u0e86\u0e8a\u0e8c\u0ea3\u0ea7\u0eb0\u0ec0\u0ec4\u0edc\u0edf"
|
| 198 |
+
u"\u0f40\u0f47\u0f49\u0f6c\u0f88\u0f8c\u1000\u102a\u1050\u1055\u105a"
|
| 199 |
+
u"\u105d\u1065\u1066\u106e\u1070\u1075\u1081\u10a0\u10c5\u10d0\u10fa"
|
| 200 |
+
u"\u10fc\u1248\u124a\u124d\u1250\u1256\u125a\u125d\u1260\u1288\u128a"
|
| 201 |
+
u"\u128d\u1290\u12b0\u12b2\u12b5\u12b8\u12be\u12c2\u12c5\u12c8\u12d6"
|
| 202 |
+
u"\u12d8\u1310\u1312\u1315\u1318\u135a\u1380\u138f\u13a0\u13f5\u13f8"
|
| 203 |
+
u"\u13fd\u1401\u166c\u166f\u167f\u1681\u169a\u16a0\u16ea\u16ee\u16f8"
|
| 204 |
+
u"\u1700\u1711\u171f\u1731\u1740\u1751\u1760\u176c\u176e\u1770\u1780"
|
| 205 |
+
u"\u17b3\u1820\u1878\u1880\u18a8\u18b0\u18f5\u1900\u191e\u1950\u196d"
|
| 206 |
+
u"\u1970\u1974\u1980\u19ab\u19b0\u19c9\u1a00\u1a16\u1a20\u1a54\u1b05"
|
| 207 |
+
u"\u1b33\u1b45\u1b4c\u1b83\u1ba0\u1bae\u1baf\u1bba\u1be5\u1c00\u1c23"
|
| 208 |
+
u"\u1c4d\u1c4f\u1c5a\u1c7d\u1c80\u1c88\u1c90\u1cba\u1cbd\u1cbf\u1ce9"
|
| 209 |
+
u"\u1cec\u1cee\u1cf3\u1cf5\u1cf6\u1d00\u1dbf\u1e00\u1f15\u1f18\u1f1d"
|
| 210 |
+
u"\u1f20\u1f45\u1f48\u1f4d\u1f50\u1f57\u1f5f\u1f7d\u1f80\u1fb4\u1fb6"
|
| 211 |
+
u"\u1fbc\u1fc2\u1fc4\u1fc6\u1fcc\u1fd0\u1fd3\u1fd6\u1fdb\u1fe0\u1fec"
|
| 212 |
+
u"\u1ff2\u1ff4\u1ff6\u1ffc\u2090\u209c\u210a\u2113\u2118\u211d\u212a"
|
| 213 |
+
u"\u2139\u213c\u213f\u2145\u2149\u2160\u2188\u2c00\u2ce4\u2ceb\u2cee"
|
| 214 |
+
u"\u2cf2\u2cf3\u2d00\u2d25\u2d30\u2d67\u2d80\u2d96\u2da0\u2da6\u2da8"
|
| 215 |
+
u"\u2dae\u2db0\u2db6\u2db8\u2dbe\u2dc0\u2dc6\u2dc8\u2dce\u2dd0\u2dd6"
|
| 216 |
+
u"\u2dd8\u2dde\u3005\u3007\u3021\u3029\u3031\u3035\u3038\u303c\u3041"
|
| 217 |
+
u"\u3096\u309d\u309f\u30a1\u30fa\u30fc\u30ff\u3105\u312f\u3131\u318e"
|
| 218 |
+
u"\u31a0\u31bf\u31f0\u31ff\u3400\u4dbf\u4e00\ua48c\ua4d0\ua4fd\ua500"
|
| 219 |
+
u"\ua60c\ua610\ua61f\ua62a\ua62b\ua640\ua66e\ua67f\ua69d\ua6a0\ua6ef"
|
| 220 |
+
u"\ua717\ua71f\ua722\ua788\ua78b\ua7ca\ua7d0\ua7d1\ua7d5\ua7d9\ua7f2"
|
| 221 |
+
u"\ua801\ua803\ua805\ua807\ua80a\ua80c\ua822\ua840\ua873\ua882\ua8b3"
|
| 222 |
+
u"\ua8f2\ua8f7\ua8fd\ua8fe\ua90a\ua925\ua930\ua946\ua960\ua97c\ua984"
|
| 223 |
+
u"\ua9b2\ua9e0\ua9e4\ua9e6\ua9ef\ua9fa\ua9fe\uaa00\uaa28\uaa40\uaa42"
|
| 224 |
+
u"\uaa44\uaa4b\uaa60\uaa76\uaa7e\uaaaf\uaab5\uaab6\uaab9\uaabd\uaadb"
|
| 225 |
+
u"\uaadd\uaae0\uaaea\uaaf2\uaaf4\uab01\uab06\uab09\uab0e\uab11\uab16"
|
| 226 |
+
u"\uab20\uab26\uab28\uab2e\uab30\uab5a\uab5c\uab69\uab70\uabe2\uac00"
|
| 227 |
+
u"\ud7a3\ud7b0\ud7c6\ud7cb\ud7fb\uf900\ufa6d\ufa70\ufad9\ufb00\ufb06"
|
| 228 |
+
u"\ufb13\ufb17\ufb1f\ufb28\ufb2a\ufb36\ufb38\ufb3c\ufb40\ufb41\ufb43"
|
| 229 |
+
u"\ufb44\ufb46\ufbb1\ufbd3\ufc5d\ufc64\ufd3d\ufd50\ufd8f\ufd92\ufdc7"
|
| 230 |
+
u"\ufdf0\ufdf9\ufe7f\ufefc\uff21\uff3a\uff41\uff5a\uff66\uff9d\uffa0"
|
| 231 |
+
u"\uffbe\uffc2\uffc7\uffca\uffcf\uffd2\uffd7\uffda\uffdc\U00010000\U0001000b"
|
| 232 |
+
u"\U0001000d\U00010026\U00010028\U0001003a\U0001003c\U0001003d\U0001003f\U0001004d\U00010050\U0001005d\U00010080"
|
| 233 |
+
u"\U000100fa\U00010140\U00010174\U00010280\U0001029c\U000102a0\U000102d0\U00010300\U0001031f\U0001032d\U0001034a"
|
| 234 |
+
u"\U00010350\U00010375\U00010380\U0001039d\U000103a0\U000103c3\U000103c8\U000103cf\U000103d1\U000103d5\U00010400"
|
| 235 |
+
u"\U0001049d\U000104b0\U000104d3\U000104d8\U000104fb\U00010500\U00010527\U00010530\U00010563\U00010570\U0001057a"
|
| 236 |
+
u"\U0001057c\U0001058a\U0001058c\U00010592\U00010594\U00010595\U00010597\U000105a1\U000105a3\U000105b1\U000105b3"
|
| 237 |
+
u"\U000105b9\U000105bb\U000105bc\U00010600\U00010736\U00010740\U00010755\U00010760\U00010767\U00010780\U00010785"
|
| 238 |
+
u"\U00010787\U000107b0\U000107b2\U000107ba\U00010800\U00010805\U0001080a\U00010835\U00010837\U00010838\U0001083f"
|
| 239 |
+
u"\U00010855\U00010860\U00010876\U00010880\U0001089e\U000108e0\U000108f2\U000108f4\U000108f5\U00010900\U00010915"
|
| 240 |
+
u"\U00010920\U00010939\U00010980\U000109b7\U000109be\U000109bf\U00010a10\U00010a13\U00010a15\U00010a17\U00010a19"
|
| 241 |
+
u"\U00010a35\U00010a60\U00010a7c\U00010a80\U00010a9c\U00010ac0\U00010ac7\U00010ac9\U00010ae4\U00010b00\U00010b35"
|
| 242 |
+
u"\U00010b40\U00010b55\U00010b60\U00010b72\U00010b80\U00010b91\U00010c00\U00010c48\U00010c80\U00010cb2\U00010cc0"
|
| 243 |
+
u"\U00010cf2\U00010d00\U00010d23\U00010e80\U00010ea9\U00010eb0\U00010eb1\U00010f00\U00010f1c\U00010f30\U00010f45"
|
| 244 |
+
u"\U00010f70\U00010f81\U00010fb0\U00010fc4\U00010fe0\U00010ff6\U00011003\U00011037\U00011071\U00011072\U00011083"
|
| 245 |
+
u"\U000110af\U000110d0\U000110e8\U00011103\U00011126\U00011150\U00011172\U00011183\U000111b2\U000111c1\U000111c4"
|
| 246 |
+
u"\U00011200\U00011211\U00011213\U0001122b\U0001123f\U00011240\U00011280\U00011286\U0001128a\U0001128d\U0001128f"
|
| 247 |
+
u"\U0001129d\U0001129f\U000112a8\U000112b0\U000112de\U00011305\U0001130c\U0001130f\U00011310\U00011313\U00011328"
|
| 248 |
+
u"\U0001132a\U00011330\U00011332\U00011333\U00011335\U00011339\U0001135d\U00011361\U00011400\U00011434\U00011447"
|
| 249 |
+
u"\U0001144a\U0001145f\U00011461\U00011480\U000114af\U000114c4\U000114c5\U00011580\U000115ae\U000115d8\U000115db"
|
| 250 |
+
u"\U00011600\U0001162f\U00011680\U000116aa\U00011700\U0001171a\U00011740\U00011746\U00011800\U0001182b\U000118a0"
|
| 251 |
+
u"\U000118df\U000118ff\U00011906\U0001190c\U00011913\U00011915\U00011916\U00011918\U0001192f\U000119a0\U000119a7"
|
| 252 |
+
u"\U000119aa\U000119d0\U00011a0b\U00011a32\U00011a5c\U00011a89\U00011ab0\U00011af8\U00011c00\U00011c08\U00011c0a"
|
| 253 |
+
u"\U00011c2e\U00011c72\U00011c8f\U00011d00\U00011d06\U00011d08\U00011d09\U00011d0b\U00011d30\U00011d60\U00011d65"
|
| 254 |
+
u"\U00011d67\U00011d68\U00011d6a\U00011d89\U00011ee0\U00011ef2\U00011f04\U00011f10\U00011f12\U00011f33\U00012000"
|
| 255 |
+
u"\U00012399\U00012400\U0001246e\U00012480\U00012543\U00012f90\U00012ff0\U00013000\U0001342f\U00013441\U00013446"
|
| 256 |
+
u"\U00014400\U00014646\U00016800\U00016a38\U00016a40\U00016a5e\U00016a70\U00016abe\U00016ad0\U00016aed\U00016b00"
|
| 257 |
+
u"\U00016b2f\U00016b40\U00016b43\U00016b63\U00016b77\U00016b7d\U00016b8f\U00016e40\U00016e7f\U00016f00\U00016f4a"
|
| 258 |
+
u"\U00016f93\U00016f9f\U00016fe0\U00016fe1\U00017000\U000187f7\U00018800\U00018cd5\U00018d00\U00018d08\U0001aff0"
|
| 259 |
+
u"\U0001aff3\U0001aff5\U0001affb\U0001affd\U0001affe\U0001b000\U0001b122\U0001b150\U0001b152\U0001b164\U0001b167"
|
| 260 |
+
u"\U0001b170\U0001b2fb\U0001bc00\U0001bc6a\U0001bc70\U0001bc7c\U0001bc80\U0001bc88\U0001bc90\U0001bc99\U0001d400"
|
| 261 |
+
u"\U0001d454\U0001d456\U0001d49c\U0001d49e\U0001d49f\U0001d4a5\U0001d4a6\U0001d4a9\U0001d4ac\U0001d4ae\U0001d4b9"
|
| 262 |
+
u"\U0001d4bd\U0001d4c3\U0001d4c5\U0001d505\U0001d507\U0001d50a\U0001d50d\U0001d514\U0001d516\U0001d51c\U0001d51e"
|
| 263 |
+
u"\U0001d539\U0001d53b\U0001d53e\U0001d540\U0001d544\U0001d54a\U0001d550\U0001d552\U0001d6a5\U0001d6a8\U0001d6c0"
|
| 264 |
+
u"\U0001d6c2\U0001d6da\U0001d6dc\U0001d6fa\U0001d6fc\U0001d714\U0001d716\U0001d734\U0001d736\U0001d74e\U0001d750"
|
| 265 |
+
u"\U0001d76e\U0001d770\U0001d788\U0001d78a\U0001d7a8\U0001d7aa\U0001d7c2\U0001d7c4\U0001d7cb\U0001df00\U0001df1e"
|
| 266 |
+
u"\U0001df25\U0001df2a\U0001e030\U0001e06d\U0001e100\U0001e12c\U0001e137\U0001e13d\U0001e290\U0001e2ad\U0001e2c0"
|
| 267 |
+
u"\U0001e2eb\U0001e4d0\U0001e4eb\U0001e7e0\U0001e7e6\U0001e7e8\U0001e7eb\U0001e7ed\U0001e7ee\U0001e7f0\U0001e7fe"
|
| 268 |
+
u"\U0001e800\U0001e8c4\U0001e900\U0001e943\U0001ee00\U0001ee03\U0001ee05\U0001ee1f\U0001ee21\U0001ee22\U0001ee29"
|
| 269 |
+
u"\U0001ee32\U0001ee34\U0001ee37\U0001ee4d\U0001ee4f\U0001ee51\U0001ee52\U0001ee61\U0001ee62\U0001ee67\U0001ee6a"
|
| 270 |
+
u"\U0001ee6c\U0001ee72\U0001ee74\U0001ee77\U0001ee79\U0001ee7c\U0001ee80\U0001ee89\U0001ee8b\U0001ee9b\U0001eea1"
|
| 271 |
+
u"\U0001eea3\U0001eea5\U0001eea9\U0001eeab\U0001eebb\U00020000\U0002a6df\U0002a700\U0002b739\U0002b740\U0002b81d"
|
| 272 |
+
u"\U0002b820\U0002cea1\U0002ceb0\U0002ebe0\U0002f800\U0002fa1d\U00030000\U0003134a"
|
| 273 |
+
)
|
| 274 |
+
unicode_continuation_ch_any = (
|
| 275 |
+
u"\u00b7\u0387\u05bf\u05c7\u0670\u0711\u07fd\u09bc\u09d7\u09fe\u0a3c"
|
| 276 |
+
u"\u0a51\u0a75\u0abc\u0b3c\u0b82\u0bd7\u0c3c\u0cbc\u0cf3\u0d57\u0dca"
|
| 277 |
+
u"\u0dd6\u0e31\u0eb1\u0f35\u0f37\u0f39\u0fc6\u17dd\u18a9\u1ced\u1cf4"
|
| 278 |
+
u"\u2054\u20e1\u2d7f\ua66f\ua802\ua806\ua80b\ua82c\ua9e5\uaa43\uaab0"
|
| 279 |
+
u"\uaac1\ufb1e\uff3f\U000101fd\U000102e0\U00010a3f\U000110c2\U00011173\U0001123e\U00011241\U00011357"
|
| 280 |
+
u"\U0001145e\U00011940\U000119e4\U00011a47\U00011d3a\U00011d47\U00011f03\U00013440\U00016f4f\U00016fe4\U0001da75"
|
| 281 |
+
u"\U0001da84\U0001e08f\U0001e2ae"
|
| 282 |
+
)
|
| 283 |
+
unicode_continuation_ch_range = (
|
| 284 |
+
u"\u0030\u0039\u0300\u036f\u0483\u0487\u0591\u05bd\u05c1\u05c2\u05c4"
|
| 285 |
+
u"\u05c5\u0610\u061a\u064b\u0669\u06d6\u06dc\u06df\u06e4\u06e7\u06e8"
|
| 286 |
+
u"\u06ea\u06ed\u06f0\u06f9\u0730\u074a\u07a6\u07b0\u07c0\u07c9\u07eb"
|
| 287 |
+
u"\u07f3\u0816\u0819\u081b\u0823\u0825\u0827\u0829\u082d\u0859\u085b"
|
| 288 |
+
u"\u0898\u089f\u08ca\u08e1\u08e3\u0903\u093a\u093c\u093e\u094f\u0951"
|
| 289 |
+
u"\u0957\u0962\u0963\u0966\u096f\u0981\u0983\u09be\u09c4\u09c7\u09c8"
|
| 290 |
+
u"\u09cb\u09cd\u09e2\u09e3\u09e6\u09ef\u0a01\u0a03\u0a3e\u0a42\u0a47"
|
| 291 |
+
u"\u0a48\u0a4b\u0a4d\u0a66\u0a71\u0a81\u0a83\u0abe\u0ac5\u0ac7\u0ac9"
|
| 292 |
+
u"\u0acb\u0acd\u0ae2\u0ae3\u0ae6\u0aef\u0afa\u0aff\u0b01\u0b03\u0b3e"
|
| 293 |
+
u"\u0b44\u0b47\u0b48\u0b4b\u0b4d\u0b55\u0b57\u0b62\u0b63\u0b66\u0b6f"
|
| 294 |
+
u"\u0bbe\u0bc2\u0bc6\u0bc8\u0bca\u0bcd\u0be6\u0bef\u0c00\u0c04\u0c3e"
|
| 295 |
+
u"\u0c44\u0c46\u0c48\u0c4a\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66\u0c6f"
|
| 296 |
+
u"\u0c81\u0c83\u0cbe\u0cc4\u0cc6\u0cc8\u0cca\u0ccd\u0cd5\u0cd6\u0ce2"
|
| 297 |
+
u"\u0ce3\u0ce6\u0cef\u0d00\u0d03\u0d3b\u0d3c\u0d3e\u0d44\u0d46\u0d48"
|
| 298 |
+
u"\u0d4a\u0d4d\u0d62\u0d63\u0d66\u0d6f\u0d81\u0d83\u0dcf\u0dd4\u0dd8"
|
| 299 |
+
u"\u0ddf\u0de6\u0def\u0df2\u0df3\u0e33\u0e3a\u0e47\u0e4e\u0e50\u0e59"
|
| 300 |
+
u"\u0eb3\u0ebc\u0ec8\u0ece\u0ed0\u0ed9\u0f18\u0f19\u0f20\u0f29\u0f3e"
|
| 301 |
+
u"\u0f3f\u0f71\u0f84\u0f86\u0f87\u0f8d\u0f97\u0f99\u0fbc\u102b\u103e"
|
| 302 |
+
u"\u1040\u1049\u1056\u1059\u105e\u1060\u1062\u1064\u1067\u106d\u1071"
|
| 303 |
+
u"\u1074\u1082\u108d\u108f\u109d\u135d\u135f\u1369\u1371\u1712\u1715"
|
| 304 |
+
u"\u1732\u1734\u1752\u1753\u1772\u1773\u17b4\u17d3\u17e0\u17e9\u180b"
|
| 305 |
+
u"\u180d\u180f\u1819\u1920\u192b\u1930\u193b\u1946\u194f\u19d0\u19da"
|
| 306 |
+
u"\u1a17\u1a1b\u1a55\u1a5e\u1a60\u1a7c\u1a7f\u1a89\u1a90\u1a99\u1ab0"
|
| 307 |
+
u"\u1abd\u1abf\u1ace\u1b00\u1b04\u1b34\u1b44\u1b50\u1b59\u1b6b\u1b73"
|
| 308 |
+
u"\u1b80\u1b82\u1ba1\u1bad\u1bb0\u1bb9\u1be6\u1bf3\u1c24\u1c37\u1c40"
|
| 309 |
+
u"\u1c49\u1c50\u1c59\u1cd0\u1cd2\u1cd4\u1ce8\u1cf7\u1cf9\u1dc0\u1dff"
|
| 310 |
+
u"\u203f\u2040\u20d0\u20dc\u20e5\u20f0\u2cef\u2cf1\u2de0\u2dff\u302a"
|
| 311 |
+
u"\u302f\u3099\u309a\ua620\ua629\ua674\ua67d\ua69e\ua69f\ua6f0\ua6f1"
|
| 312 |
+
u"\ua823\ua827\ua880\ua881\ua8b4\ua8c5\ua8d0\ua8d9\ua8e0\ua8f1\ua8ff"
|
| 313 |
+
u"\ua909\ua926\ua92d\ua947\ua953\ua980\ua983\ua9b3\ua9c0\ua9d0\ua9d9"
|
| 314 |
+
u"\ua9f0\ua9f9\uaa29\uaa36\uaa4c\uaa4d\uaa50\uaa59\uaa7b\uaa7d\uaab2"
|
| 315 |
+
u"\uaab4\uaab7\uaab8\uaabe\uaabf\uaaeb\uaaef\uaaf5\uaaf6\uabe3\uabea"
|
| 316 |
+
u"\uabec\uabed\uabf0\uabf9\ufe00\ufe0f\ufe20\ufe2f\ufe33\ufe34\ufe4d"
|
| 317 |
+
u"\ufe4f\uff10\uff19\uff9e\uff9f\U00010376\U0001037a\U000104a0\U000104a9\U00010a01\U00010a03"
|
| 318 |
+
u"\U00010a05\U00010a06\U00010a0c\U00010a0f\U00010a38\U00010a3a\U00010ae5\U00010ae6\U00010d24\U00010d27\U00010d30"
|
| 319 |
+
u"\U00010d39\U00010eab\U00010eac\U00010efd\U00010eff\U00010f46\U00010f50\U00010f82\U00010f85\U00011000\U00011002"
|
| 320 |
+
u"\U00011038\U00011046\U00011066\U00011070\U00011073\U00011074\U0001107f\U00011082\U000110b0\U000110ba\U000110f0"
|
| 321 |
+
u"\U000110f9\U00011100\U00011102\U00011127\U00011134\U00011136\U0001113f\U00011145\U00011146\U00011180\U00011182"
|
| 322 |
+
u"\U000111b3\U000111c0\U000111c9\U000111cc\U000111ce\U000111d9\U0001122c\U00011237\U000112df\U000112ea\U000112f0"
|
| 323 |
+
u"\U000112f9\U00011300\U00011303\U0001133b\U0001133c\U0001133e\U00011344\U00011347\U00011348\U0001134b\U0001134d"
|
| 324 |
+
u"\U00011362\U00011363\U00011366\U0001136c\U00011370\U00011374\U00011435\U00011446\U00011450\U00011459\U000114b0"
|
| 325 |
+
u"\U000114c3\U000114d0\U000114d9\U000115af\U000115b5\U000115b8\U000115c0\U000115dc\U000115dd\U00011630\U00011640"
|
| 326 |
+
u"\U00011650\U00011659\U000116ab\U000116b7\U000116c0\U000116c9\U0001171d\U0001172b\U00011730\U00011739\U0001182c"
|
| 327 |
+
u"\U0001183a\U000118e0\U000118e9\U00011930\U00011935\U00011937\U00011938\U0001193b\U0001193e\U00011942\U00011943"
|
| 328 |
+
u"\U00011950\U00011959\U000119d1\U000119d7\U000119da\U000119e0\U00011a01\U00011a0a\U00011a33\U00011a39\U00011a3b"
|
| 329 |
+
u"\U00011a3e\U00011a51\U00011a5b\U00011a8a\U00011a99\U00011c2f\U00011c36\U00011c38\U00011c3f\U00011c50\U00011c59"
|
| 330 |
+
u"\U00011c92\U00011ca7\U00011ca9\U00011cb6\U00011d31\U00011d36\U00011d3c\U00011d3d\U00011d3f\U00011d45\U00011d50"
|
| 331 |
+
u"\U00011d59\U00011d8a\U00011d8e\U00011d90\U00011d91\U00011d93\U00011d97\U00011da0\U00011da9\U00011ef3\U00011ef6"
|
| 332 |
+
u"\U00011f00\U00011f01\U00011f34\U00011f3a\U00011f3e\U00011f42\U00011f50\U00011f59\U00013447\U00013455\U00016a60"
|
| 333 |
+
u"\U00016a69\U00016ac0\U00016ac9\U00016af0\U00016af4\U00016b30\U00016b36\U00016b50\U00016b59\U00016f51\U00016f87"
|
| 334 |
+
u"\U00016f8f\U00016f92\U00016ff0\U00016ff1\U0001bc9d\U0001bc9e\U0001cf00\U0001cf2d\U0001cf30\U0001cf46\U0001d165"
|
| 335 |
+
u"\U0001d169\U0001d16d\U0001d172\U0001d17b\U0001d182\U0001d185\U0001d18b\U0001d1aa\U0001d1ad\U0001d242\U0001d244"
|
| 336 |
+
u"\U0001d7ce\U0001d7ff\U0001da00\U0001da36\U0001da3b\U0001da6c\U0001da9b\U0001da9f\U0001daa1\U0001daaf\U0001e000"
|
| 337 |
+
u"\U0001e006\U0001e008\U0001e018\U0001e01b\U0001e021\U0001e023\U0001e024\U0001e026\U0001e02a\U0001e130\U0001e136"
|
| 338 |
+
u"\U0001e140\U0001e149\U0001e2ec\U0001e2f9\U0001e4ec\U0001e4f9\U0001e8d0\U0001e8d6\U0001e944\U0001e94a\U0001e950"
|
| 339 |
+
u"\U0001e959\U0001fbf0\U0001fbf9"
|
| 340 |
+
)
|
| 341 |
+
|
| 342 |
+
# END GENERATED CODE
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Main.py
ADDED
|
@@ -0,0 +1,789 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Cython Top Level
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
from __future__ import absolute_import, print_function
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
import re
|
| 9 |
+
import sys
|
| 10 |
+
import io
|
| 11 |
+
|
| 12 |
+
if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 3):
|
| 13 |
+
sys.stderr.write("Sorry, Cython requires Python 2.7 or 3.3+, found %d.%d\n" % tuple(sys.version_info[:2]))
|
| 14 |
+
sys.exit(1)
|
| 15 |
+
|
| 16 |
+
try:
|
| 17 |
+
from __builtin__ import basestring
|
| 18 |
+
except ImportError:
|
| 19 |
+
basestring = str
|
| 20 |
+
|
| 21 |
+
# Do not import Parsing here, import it when needed, because Parsing imports
|
| 22 |
+
# Nodes, which globally needs debug command line options initialized to set a
|
| 23 |
+
# conditional metaclass. These options are processed by CmdLine called from
|
| 24 |
+
# main() in this file.
|
| 25 |
+
# import Parsing
|
| 26 |
+
from . import Errors
|
| 27 |
+
from .StringEncoding import EncodedString
|
| 28 |
+
from .Scanning import PyrexScanner, FileSourceDescriptor
|
| 29 |
+
from .Errors import PyrexError, CompileError, error, warning
|
| 30 |
+
from .Symtab import ModuleScope
|
| 31 |
+
from .. import Utils
|
| 32 |
+
from . import Options
|
| 33 |
+
from .Options import CompilationOptions, default_options
|
| 34 |
+
from .CmdLine import parse_command_line
|
| 35 |
+
from .Lexicon import (unicode_start_ch_any, unicode_continuation_ch_any,
|
| 36 |
+
unicode_start_ch_range, unicode_continuation_ch_range)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _make_range_re(chrs):
|
| 40 |
+
out = []
|
| 41 |
+
for i in range(0, len(chrs), 2):
|
| 42 |
+
out.append(u"{0}-{1}".format(chrs[i], chrs[i+1]))
|
| 43 |
+
return u"".join(out)
|
| 44 |
+
|
| 45 |
+
# py2 version looked like r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)*$"
|
| 46 |
+
module_name_pattern = u"[{0}{1}][{0}{2}{1}{3}]*".format(
|
| 47 |
+
unicode_start_ch_any, _make_range_re(unicode_start_ch_range),
|
| 48 |
+
unicode_continuation_ch_any,
|
| 49 |
+
_make_range_re(unicode_continuation_ch_range))
|
| 50 |
+
module_name_pattern = re.compile(u"{0}(\\.{0})*$".format(module_name_pattern))
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
standard_include_path = os.path.abspath(
|
| 54 |
+
os.path.join(os.path.dirname(os.path.dirname(__file__)), 'Includes'))
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class Context(object):
|
| 58 |
+
# This class encapsulates the context needed for compiling
|
| 59 |
+
# one or more Cython implementation files along with their
|
| 60 |
+
# associated and imported declaration files. It includes
|
| 61 |
+
# the root of the module import namespace and the list
|
| 62 |
+
# of directories to search for include files.
|
| 63 |
+
#
|
| 64 |
+
# modules {string : ModuleScope}
|
| 65 |
+
# include_directories [string]
|
| 66 |
+
# future_directives [object]
|
| 67 |
+
# language_level int currently 2 or 3 for Python 2/3
|
| 68 |
+
|
| 69 |
+
cython_scope = None
|
| 70 |
+
language_level = None # warn when not set but default to Py2
|
| 71 |
+
|
| 72 |
+
def __init__(self, include_directories, compiler_directives, cpp=False,
|
| 73 |
+
language_level=None, options=None):
|
| 74 |
+
# cython_scope is a hack, set to False by subclasses, in order to break
|
| 75 |
+
# an infinite loop.
|
| 76 |
+
# Better code organization would fix it.
|
| 77 |
+
|
| 78 |
+
from . import Builtin, CythonScope
|
| 79 |
+
self.modules = {"__builtin__" : Builtin.builtin_scope}
|
| 80 |
+
self.cython_scope = CythonScope.create_cython_scope(self)
|
| 81 |
+
self.modules["cython"] = self.cython_scope
|
| 82 |
+
self.include_directories = include_directories
|
| 83 |
+
self.future_directives = set()
|
| 84 |
+
self.compiler_directives = compiler_directives
|
| 85 |
+
self.cpp = cpp
|
| 86 |
+
self.options = options
|
| 87 |
+
|
| 88 |
+
self.pxds = {} # full name -> node tree
|
| 89 |
+
self._interned = {} # (type(value), value, *key_args) -> interned_value
|
| 90 |
+
|
| 91 |
+
if language_level is not None:
|
| 92 |
+
self.set_language_level(language_level)
|
| 93 |
+
|
| 94 |
+
self.legacy_implicit_noexcept = self.compiler_directives.get('legacy_implicit_noexcept', False)
|
| 95 |
+
|
| 96 |
+
self.gdb_debug_outputwriter = None
|
| 97 |
+
|
| 98 |
+
@classmethod
|
| 99 |
+
def from_options(cls, options):
|
| 100 |
+
return cls(options.include_path, options.compiler_directives,
|
| 101 |
+
options.cplus, options.language_level, options=options)
|
| 102 |
+
|
| 103 |
+
def set_language_level(self, level):
|
| 104 |
+
from .Future import print_function, unicode_literals, absolute_import, division, generator_stop
|
| 105 |
+
future_directives = set()
|
| 106 |
+
if level == '3str':
|
| 107 |
+
level = 3
|
| 108 |
+
else:
|
| 109 |
+
level = int(level)
|
| 110 |
+
if level >= 3:
|
| 111 |
+
future_directives.add(unicode_literals)
|
| 112 |
+
if level >= 3:
|
| 113 |
+
future_directives.update([print_function, absolute_import, division, generator_stop])
|
| 114 |
+
self.language_level = level
|
| 115 |
+
self.future_directives = future_directives
|
| 116 |
+
if level >= 3:
|
| 117 |
+
self.modules['builtins'] = self.modules['__builtin__']
|
| 118 |
+
|
| 119 |
+
def intern_ustring(self, value, encoding=None):
|
| 120 |
+
key = (EncodedString, value, encoding)
|
| 121 |
+
try:
|
| 122 |
+
return self._interned[key]
|
| 123 |
+
except KeyError:
|
| 124 |
+
pass
|
| 125 |
+
value = EncodedString(value)
|
| 126 |
+
if encoding:
|
| 127 |
+
value.encoding = encoding
|
| 128 |
+
self._interned[key] = value
|
| 129 |
+
return value
|
| 130 |
+
|
| 131 |
+
# pipeline creation functions can now be found in Pipeline.py
|
| 132 |
+
|
| 133 |
+
def process_pxd(self, source_desc, scope, module_name):
|
| 134 |
+
from . import Pipeline
|
| 135 |
+
if isinstance(source_desc, FileSourceDescriptor) and source_desc._file_type == 'pyx':
|
| 136 |
+
source = CompilationSource(source_desc, module_name, os.getcwd())
|
| 137 |
+
result_sink = create_default_resultobj(source, self.options)
|
| 138 |
+
pipeline = Pipeline.create_pyx_as_pxd_pipeline(self, result_sink)
|
| 139 |
+
result = Pipeline.run_pipeline(pipeline, source)
|
| 140 |
+
else:
|
| 141 |
+
pipeline = Pipeline.create_pxd_pipeline(self, scope, module_name)
|
| 142 |
+
result = Pipeline.run_pipeline(pipeline, source_desc)
|
| 143 |
+
return result
|
| 144 |
+
|
| 145 |
+
def nonfatal_error(self, exc):
|
| 146 |
+
return Errors.report_error(exc)
|
| 147 |
+
|
| 148 |
+
def _split_qualified_name(self, qualified_name, relative_import=False):
|
| 149 |
+
# Splits qualified_name into parts in form of 2-tuples: (PART_NAME, IS_PACKAGE).
|
| 150 |
+
qualified_name_parts = qualified_name.split('.')
|
| 151 |
+
last_part = qualified_name_parts.pop()
|
| 152 |
+
qualified_name_parts = [(p, True) for p in qualified_name_parts]
|
| 153 |
+
if last_part != '__init__':
|
| 154 |
+
# If Last part is __init__, then it is omitted. Otherwise, we need to check whether we can find
|
| 155 |
+
# __init__.pyx/__init__.py file to determine if last part is package or not.
|
| 156 |
+
is_package = False
|
| 157 |
+
for suffix in ('.py', '.pyx'):
|
| 158 |
+
path = self.search_include_directories(
|
| 159 |
+
qualified_name, suffix=suffix, source_pos=None, source_file_path=None, sys_path=not relative_import)
|
| 160 |
+
if path:
|
| 161 |
+
is_package = self._is_init_file(path)
|
| 162 |
+
break
|
| 163 |
+
|
| 164 |
+
qualified_name_parts.append((last_part, is_package))
|
| 165 |
+
return qualified_name_parts
|
| 166 |
+
|
| 167 |
+
@staticmethod
|
| 168 |
+
def _is_init_file(path):
|
| 169 |
+
return os.path.basename(path) in ('__init__.pyx', '__init__.py', '__init__.pxd') if path else False
|
| 170 |
+
|
| 171 |
+
@staticmethod
|
| 172 |
+
def _check_pxd_filename(pos, pxd_pathname, qualified_name):
|
| 173 |
+
if not pxd_pathname:
|
| 174 |
+
return
|
| 175 |
+
pxd_filename = os.path.basename(pxd_pathname)
|
| 176 |
+
if '.' in qualified_name and qualified_name == os.path.splitext(pxd_filename)[0]:
|
| 177 |
+
warning(pos, "Dotted filenames ('%s') are deprecated."
|
| 178 |
+
" Please use the normal Python package directory layout." % pxd_filename, level=1)
|
| 179 |
+
|
| 180 |
+
def find_module(self, module_name, from_module=None, pos=None, need_pxd=1,
|
| 181 |
+
absolute_fallback=True, relative_import=False):
|
| 182 |
+
# Finds and returns the module scope corresponding to
|
| 183 |
+
# the given relative or absolute module name. If this
|
| 184 |
+
# is the first time the module has been requested, finds
|
| 185 |
+
# the corresponding .pxd file and process it.
|
| 186 |
+
# If from_module is not None, it must be a module scope,
|
| 187 |
+
# and the module will first be searched for relative to
|
| 188 |
+
# that module, provided its name is not a dotted name.
|
| 189 |
+
debug_find_module = 0
|
| 190 |
+
if debug_find_module:
|
| 191 |
+
print("Context.find_module: module_name = %s, from_module = %s, pos = %s, need_pxd = %s" % (
|
| 192 |
+
module_name, from_module, pos, need_pxd))
|
| 193 |
+
|
| 194 |
+
scope = None
|
| 195 |
+
pxd_pathname = None
|
| 196 |
+
if from_module:
|
| 197 |
+
if module_name:
|
| 198 |
+
# from .module import ...
|
| 199 |
+
qualified_name = from_module.qualify_name(module_name)
|
| 200 |
+
else:
|
| 201 |
+
# from . import ...
|
| 202 |
+
qualified_name = from_module.qualified_name
|
| 203 |
+
scope = from_module
|
| 204 |
+
from_module = None
|
| 205 |
+
else:
|
| 206 |
+
qualified_name = module_name
|
| 207 |
+
|
| 208 |
+
if not module_name_pattern.match(qualified_name):
|
| 209 |
+
raise CompileError(pos or (module_name, 0, 0),
|
| 210 |
+
u"'%s' is not a valid module name" % module_name)
|
| 211 |
+
|
| 212 |
+
if from_module:
|
| 213 |
+
if debug_find_module:
|
| 214 |
+
print("...trying relative import")
|
| 215 |
+
scope = from_module.lookup_submodule(module_name)
|
| 216 |
+
if not scope:
|
| 217 |
+
pxd_pathname = self.find_pxd_file(qualified_name, pos, sys_path=not relative_import)
|
| 218 |
+
self._check_pxd_filename(pos, pxd_pathname, qualified_name)
|
| 219 |
+
if pxd_pathname:
|
| 220 |
+
is_package = self._is_init_file(pxd_pathname)
|
| 221 |
+
scope = from_module.find_submodule(module_name, as_package=is_package)
|
| 222 |
+
if not scope:
|
| 223 |
+
if debug_find_module:
|
| 224 |
+
print("...trying absolute import")
|
| 225 |
+
if absolute_fallback:
|
| 226 |
+
qualified_name = module_name
|
| 227 |
+
scope = self
|
| 228 |
+
for name, is_package in self._split_qualified_name(qualified_name, relative_import=relative_import):
|
| 229 |
+
scope = scope.find_submodule(name, as_package=is_package)
|
| 230 |
+
if debug_find_module:
|
| 231 |
+
print("...scope = %s" % scope)
|
| 232 |
+
if not scope.pxd_file_loaded:
|
| 233 |
+
if debug_find_module:
|
| 234 |
+
print("...pxd not loaded")
|
| 235 |
+
if not pxd_pathname:
|
| 236 |
+
if debug_find_module:
|
| 237 |
+
print("...looking for pxd file")
|
| 238 |
+
# Only look in sys.path if we are explicitly looking
|
| 239 |
+
# for a .pxd file.
|
| 240 |
+
pxd_pathname = self.find_pxd_file(qualified_name, pos, sys_path=need_pxd and not relative_import)
|
| 241 |
+
self._check_pxd_filename(pos, pxd_pathname, qualified_name)
|
| 242 |
+
if debug_find_module:
|
| 243 |
+
print("......found %s" % pxd_pathname)
|
| 244 |
+
if not pxd_pathname and need_pxd:
|
| 245 |
+
# Set pxd_file_loaded such that we don't need to
|
| 246 |
+
# look for the non-existing pxd file next time.
|
| 247 |
+
scope.pxd_file_loaded = True
|
| 248 |
+
package_pathname = self.search_include_directories(
|
| 249 |
+
qualified_name, suffix=".py", source_pos=pos, sys_path=not relative_import)
|
| 250 |
+
if package_pathname and package_pathname.endswith(Utils.PACKAGE_FILES):
|
| 251 |
+
pass
|
| 252 |
+
else:
|
| 253 |
+
error(pos, "'%s.pxd' not found" % qualified_name.replace('.', os.sep))
|
| 254 |
+
if pxd_pathname:
|
| 255 |
+
scope.pxd_file_loaded = True
|
| 256 |
+
try:
|
| 257 |
+
if debug_find_module:
|
| 258 |
+
print("Context.find_module: Parsing %s" % pxd_pathname)
|
| 259 |
+
rel_path = module_name.replace('.', os.sep) + os.path.splitext(pxd_pathname)[1]
|
| 260 |
+
if not pxd_pathname.endswith(rel_path):
|
| 261 |
+
rel_path = pxd_pathname # safety measure to prevent printing incorrect paths
|
| 262 |
+
source_desc = FileSourceDescriptor(pxd_pathname, rel_path)
|
| 263 |
+
err, result = self.process_pxd(source_desc, scope, qualified_name)
|
| 264 |
+
if err:
|
| 265 |
+
raise err
|
| 266 |
+
(pxd_codenodes, pxd_scope) = result
|
| 267 |
+
self.pxds[module_name] = (pxd_codenodes, pxd_scope)
|
| 268 |
+
except CompileError:
|
| 269 |
+
pass
|
| 270 |
+
return scope
|
| 271 |
+
|
| 272 |
+
def find_pxd_file(self, qualified_name, pos=None, sys_path=True, source_file_path=None):
|
| 273 |
+
# Search include path (and sys.path if sys_path is True) for
|
| 274 |
+
# the .pxd file corresponding to the given fully-qualified
|
| 275 |
+
# module name.
|
| 276 |
+
# Will find either a dotted filename or a file in a
|
| 277 |
+
# package directory. If a source file position is given,
|
| 278 |
+
# the directory containing the source file is searched first
|
| 279 |
+
# for a dotted filename, and its containing package root
|
| 280 |
+
# directory is searched first for a non-dotted filename.
|
| 281 |
+
pxd = self.search_include_directories(
|
| 282 |
+
qualified_name, suffix=".pxd", source_pos=pos, sys_path=sys_path, source_file_path=source_file_path)
|
| 283 |
+
if pxd is None and Options.cimport_from_pyx:
|
| 284 |
+
return self.find_pyx_file(qualified_name, pos, sys_path=sys_path)
|
| 285 |
+
return pxd
|
| 286 |
+
|
| 287 |
+
def find_pyx_file(self, qualified_name, pos=None, sys_path=True, source_file_path=None):
|
| 288 |
+
# Search include path for the .pyx file corresponding to the
|
| 289 |
+
# given fully-qualified module name, as for find_pxd_file().
|
| 290 |
+
return self.search_include_directories(
|
| 291 |
+
qualified_name, suffix=".pyx", source_pos=pos, sys_path=sys_path, source_file_path=source_file_path)
|
| 292 |
+
|
| 293 |
+
def find_include_file(self, filename, pos=None, source_file_path=None):
|
| 294 |
+
# Search list of include directories for filename.
|
| 295 |
+
# Reports an error and returns None if not found.
|
| 296 |
+
path = self.search_include_directories(
|
| 297 |
+
filename, source_pos=pos, include=True, source_file_path=source_file_path)
|
| 298 |
+
if not path:
|
| 299 |
+
error(pos, "'%s' not found" % filename)
|
| 300 |
+
return path
|
| 301 |
+
|
| 302 |
+
def search_include_directories(self, qualified_name,
|
| 303 |
+
suffix=None, source_pos=None, include=False, sys_path=False, source_file_path=None):
|
| 304 |
+
include_dirs = self.include_directories
|
| 305 |
+
if sys_path:
|
| 306 |
+
include_dirs = include_dirs + sys.path
|
| 307 |
+
# include_dirs must be hashable for caching in @cached_function
|
| 308 |
+
include_dirs = tuple(include_dirs + [standard_include_path])
|
| 309 |
+
return search_include_directories(
|
| 310 |
+
include_dirs, qualified_name, suffix or "", source_pos, include, source_file_path)
|
| 311 |
+
|
| 312 |
+
def find_root_package_dir(self, file_path):
|
| 313 |
+
return Utils.find_root_package_dir(file_path)
|
| 314 |
+
|
| 315 |
+
def check_package_dir(self, dir, package_names):
|
| 316 |
+
return Utils.check_package_dir(dir, tuple(package_names))
|
| 317 |
+
|
| 318 |
+
def c_file_out_of_date(self, source_path, output_path):
|
| 319 |
+
if not os.path.exists(output_path):
|
| 320 |
+
return 1
|
| 321 |
+
c_time = Utils.modification_time(output_path)
|
| 322 |
+
if Utils.file_newer_than(source_path, c_time):
|
| 323 |
+
return 1
|
| 324 |
+
pxd_path = Utils.replace_suffix(source_path, ".pxd")
|
| 325 |
+
if os.path.exists(pxd_path) and Utils.file_newer_than(pxd_path, c_time):
|
| 326 |
+
return 1
|
| 327 |
+
for kind, name in self.read_dependency_file(source_path):
|
| 328 |
+
if kind == "cimport":
|
| 329 |
+
dep_path = self.find_pxd_file(name, source_file_path=source_path)
|
| 330 |
+
elif kind == "include":
|
| 331 |
+
dep_path = self.search_include_directories(name, source_file_path=source_path)
|
| 332 |
+
else:
|
| 333 |
+
continue
|
| 334 |
+
if dep_path and Utils.file_newer_than(dep_path, c_time):
|
| 335 |
+
return 1
|
| 336 |
+
return 0
|
| 337 |
+
|
| 338 |
+
def find_cimported_module_names(self, source_path):
|
| 339 |
+
return [ name for kind, name in self.read_dependency_file(source_path)
|
| 340 |
+
if kind == "cimport" ]
|
| 341 |
+
|
| 342 |
+
def is_package_dir(self, dir_path):
|
| 343 |
+
return Utils.is_package_dir(dir_path)
|
| 344 |
+
|
| 345 |
+
def read_dependency_file(self, source_path):
|
| 346 |
+
dep_path = Utils.replace_suffix(source_path, ".dep")
|
| 347 |
+
if os.path.exists(dep_path):
|
| 348 |
+
with open(dep_path, "rU") as f:
|
| 349 |
+
chunks = [ line.split(" ", 1)
|
| 350 |
+
for line in (l.strip() for l in f)
|
| 351 |
+
if " " in line ]
|
| 352 |
+
return chunks
|
| 353 |
+
else:
|
| 354 |
+
return ()
|
| 355 |
+
|
| 356 |
+
def lookup_submodule(self, name):
|
| 357 |
+
# Look up a top-level module. Returns None if not found.
|
| 358 |
+
return self.modules.get(name, None)
|
| 359 |
+
|
| 360 |
+
def find_submodule(self, name, as_package=False):
|
| 361 |
+
# Find a top-level module, creating a new one if needed.
|
| 362 |
+
scope = self.lookup_submodule(name)
|
| 363 |
+
if not scope:
|
| 364 |
+
scope = ModuleScope(name,
|
| 365 |
+
parent_module = None, context = self, is_package=as_package)
|
| 366 |
+
self.modules[name] = scope
|
| 367 |
+
return scope
|
| 368 |
+
|
| 369 |
+
def parse(self, source_desc, scope, pxd, full_module_name):
|
| 370 |
+
if not isinstance(source_desc, FileSourceDescriptor):
|
| 371 |
+
raise RuntimeError("Only file sources for code supported")
|
| 372 |
+
source_filename = source_desc.filename
|
| 373 |
+
scope.cpp = self.cpp
|
| 374 |
+
# Parse the given source file and return a parse tree.
|
| 375 |
+
num_errors = Errors.get_errors_count()
|
| 376 |
+
try:
|
| 377 |
+
with Utils.open_source_file(source_filename) as f:
|
| 378 |
+
from . import Parsing
|
| 379 |
+
s = PyrexScanner(f, source_desc, source_encoding = f.encoding,
|
| 380 |
+
scope = scope, context = self)
|
| 381 |
+
tree = Parsing.p_module(s, pxd, full_module_name)
|
| 382 |
+
if self.options.formal_grammar:
|
| 383 |
+
try:
|
| 384 |
+
from ..Parser import ConcreteSyntaxTree
|
| 385 |
+
except ImportError:
|
| 386 |
+
raise RuntimeError(
|
| 387 |
+
"Formal grammar can only be used with compiled Cython with an available pgen.")
|
| 388 |
+
ConcreteSyntaxTree.p_module(source_filename)
|
| 389 |
+
except UnicodeDecodeError as e:
|
| 390 |
+
#import traceback
|
| 391 |
+
#traceback.print_exc()
|
| 392 |
+
raise self._report_decode_error(source_desc, e)
|
| 393 |
+
|
| 394 |
+
if Errors.get_errors_count() > num_errors:
|
| 395 |
+
raise CompileError()
|
| 396 |
+
return tree
|
| 397 |
+
|
| 398 |
+
def _report_decode_error(self, source_desc, exc):
|
| 399 |
+
msg = exc.args[-1]
|
| 400 |
+
position = exc.args[2]
|
| 401 |
+
encoding = exc.args[0]
|
| 402 |
+
|
| 403 |
+
line = 1
|
| 404 |
+
column = idx = 0
|
| 405 |
+
with io.open(source_desc.filename, "r", encoding='iso8859-1', newline='') as f:
|
| 406 |
+
for line, data in enumerate(f, 1):
|
| 407 |
+
idx += len(data)
|
| 408 |
+
if idx >= position:
|
| 409 |
+
column = position - (idx - len(data)) + 1
|
| 410 |
+
break
|
| 411 |
+
|
| 412 |
+
return error((source_desc, line, column),
|
| 413 |
+
"Decoding error, missing or incorrect coding=<encoding-name> "
|
| 414 |
+
"at top of source (cannot decode with encoding %r: %s)" % (encoding, msg))
|
| 415 |
+
|
| 416 |
+
def extract_module_name(self, path, options):
|
| 417 |
+
# Find fully_qualified module name from the full pathname
|
| 418 |
+
# of a source file.
|
| 419 |
+
dir, filename = os.path.split(path)
|
| 420 |
+
module_name, _ = os.path.splitext(filename)
|
| 421 |
+
if "." in module_name:
|
| 422 |
+
return module_name
|
| 423 |
+
names = [module_name]
|
| 424 |
+
while self.is_package_dir(dir):
|
| 425 |
+
parent, package_name = os.path.split(dir)
|
| 426 |
+
if parent == dir:
|
| 427 |
+
break
|
| 428 |
+
names.append(package_name)
|
| 429 |
+
dir = parent
|
| 430 |
+
names.reverse()
|
| 431 |
+
return ".".join(names)
|
| 432 |
+
|
| 433 |
+
    def setup_errors(self, options, result):
        """Initialise per-thread error state and open the listing file
        (a ".lis" sibling of the main source) if requested by *options*.
        """
        Errors.init_thread()
        if options.use_listing_file:
            path = result.listing_file = Utils.replace_suffix(result.main_source_file, ".lis")
        else:
            path = None
        Errors.open_listing_file(path=path, echo_to_stderr=options.errors_to_stderr)
|
| 440 |
+
|
| 441 |
+
    def teardown_errors(self, err, options, result):
        """Finalise error reporting for a compilation run.

        Records the error count on *result* and, if the run failed, truncates
        the (possibly partial) generated C file and clears result.c_file so
        callers do not pick up a broken output.
        """
        source_desc = result.compilation_source.source_desc
        if not isinstance(source_desc, FileSourceDescriptor):
            raise RuntimeError("Only file sources for code supported")
        Errors.close_listing_file()
        result.num_errors = Errors.get_errors_count()
        if result.num_errors > 0:
            err = True
        if err and result.c_file:
            try:
                # Truncate the generated file but keep its timestamp in sync
                # with the source (best effort; ignore filesystem errors).
                Utils.castrate_file(result.c_file, os.stat(source_desc.filename))
            except EnvironmentError:
                pass
            result.c_file = None
|
| 455 |
+
|
| 456 |
+
|
| 457 |
+
def get_output_filename(source_filename, cwd, options):
    """Compute the path of the generated C/C++ file for *source_filename*.

    Honours ``options.output_file`` (resolved relative to *cwd*; if it names
    an existing directory, the default basename is placed inside it).
    Otherwise the source suffix is simply replaced with ".c" or ".cpp".
    """
    c_suffix = ".cpp" if options.cplus else ".c"
    suggested_file_name = Utils.replace_suffix(source_filename, c_suffix)
    if not options.output_file:
        return suggested_file_name
    out_path = os.path.join(cwd, options.output_file)
    if os.path.isdir(out_path):
        return os.path.join(out_path, os.path.basename(suggested_file_name))
    return out_path
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def create_default_resultobj(compilation_source, options):
    """Build a CompilationResult pre-populated with the source file,
    the compilation source, the computed C output path and the
    embedded-metadata option.
    """
    result = CompilationResult()
    result.main_source_file = compilation_source.source_desc.filename
    result.compilation_source = compilation_source
    source_desc = compilation_source.source_desc
    result.c_file = get_output_filename(source_desc.filename,
                                        compilation_source.cwd, options)
    result.embedded_metadata = options.embedded_metadata
    return result
|
| 482 |
+
|
| 483 |
+
|
| 484 |
+
def run_pipeline(source, options, full_module_name=None, context=None):
    """Compile a single source file through the full Cython pipeline.

    Resolves the module name, builds the CompilationSource/CompilationResult
    pair, selects the .py or .pyx pipeline, runs it, and optionally writes a
    depfile. Returns the CompilationResult.
    """
    from . import Pipeline

    # ensure that the inputs are unicode (for Python 2)
    if sys.version_info[0] == 2:
        source = Utils.decode_filename(source)
        if full_module_name:
            full_module_name = Utils.decode_filename(full_module_name)

    source_ext = os.path.splitext(source)[1]
    options.configure_language_defaults(source_ext[1:])  # py/pyx
    if context is None:
        context = Context.from_options(options)

    # Set up source object
    cwd = os.getcwd()
    abs_path = os.path.abspath(source)
    full_module_name = full_module_name or context.extract_module_name(source, options)
    full_module_name = EncodedString(full_module_name)

    Utils.raise_error_if_module_name_forbidden(full_module_name)

    if options.relative_path_in_code_position_comments:
        rel_path = full_module_name.replace('.', os.sep) + source_ext
        if not abs_path.endswith(rel_path):
            rel_path = source  # safety measure to prevent printing incorrect paths
    else:
        rel_path = abs_path
    source_desc = FileSourceDescriptor(abs_path, rel_path)
    source = CompilationSource(source_desc, full_module_name, cwd)

    # Set up result object
    result = create_default_resultobj(source, options)

    if options.annotate is None:
        # By default, decide based on whether an html file already exists.
        html_filename = os.path.splitext(result.c_file)[0] + ".html"
        if os.path.exists(html_filename):
            with io.open(html_filename, "r", encoding="UTF-8") as html_file:
                # Only re-annotate files that Cython generated previously.
                if u'<!-- Generated by Cython' in html_file.read(100):
                    options.annotate = True

    # Get pipeline
    if source_ext.lower() == '.py' or not source_ext:
        pipeline = Pipeline.create_py_pipeline(context, options, result)
    else:
        pipeline = Pipeline.create_pyx_pipeline(context, options, result)

    context.setup_errors(options, result)

    if '.' in full_module_name and '.' in os.path.splitext(os.path.basename(abs_path))[0]:
        warning((source_desc, 1, 0),
                "Dotted filenames ('%s') are deprecated."
                " Please use the normal Python package directory layout." % os.path.basename(abs_path), level=1)

    err, enddata = Pipeline.run_pipeline(pipeline, source)
    context.teardown_errors(err, options, result)
    if err is None and options.depfile:
        # Only write the depfile on a successful run.
        from ..Build.Dependencies import create_dependency_tree
        dependencies = create_dependency_tree(context).all_dependencies(result.main_source_file)
        Utils.write_depfile(result.c_file, result.main_source_file, dependencies)
    return result
|
| 546 |
+
|
| 547 |
+
|
| 548 |
+
# ------------------------------------------------------------------------
|
| 549 |
+
#
|
| 550 |
+
# Main Python entry points
|
| 551 |
+
#
|
| 552 |
+
# ------------------------------------------------------------------------
|
| 553 |
+
|
| 554 |
+
class CompilationSource(object):
    """
    Bundles the data needed to start a compilation pipeline for one
    compilation unit: the source descriptor, the fully qualified module
    name, and the directory the compilation was started from.
    """
    def __init__(self, source_desc, full_module_name, cwd):
        self.source_desc, self.full_module_name, self.cwd = (
            source_desc, full_module_name, cwd)
|
| 563 |
+
|
| 564 |
+
|
| 565 |
+
class CompilationResult(object):
    """
    Results from the Cython compiler:

    c_file           string or None   The generated C source file
    h_file           string or None   The generated C header file
    i_file           string or None   The generated .pxi file
    api_file         string or None   The generated C API .h file
    listing_file     string or None   File of error messages
    object_file      string or None   Result of compiling the C file
    extension_file   string or None   Result of linking the object file
    num_errors       integer          Number of compilation errors
    compilation_source CompilationSource
    """

    def __init__(self):
        # All output paths start out unset; the pipeline fills them in.
        for attr in ('c_file', 'h_file', 'i_file', 'api_file',
                     'listing_file', 'object_file', 'extension_file',
                     'main_source_file'):
            setattr(self, attr, None)
|
| 589 |
+
|
| 590 |
+
|
| 591 |
+
class CompilationResultSet(dict):
    """
    Results from compiling multiple Pyrex source files: a mapping from
    source file path to CompilationResult. Also tracks:

    num_errors   integer   Total number of compilation errors
    """

    num_errors = 0

    def add(self, source, result):
        """Record *result* under *source* and accumulate its error count."""
        self[source] = result
        # Attribute assignment shadows the class default on first use.
        self.num_errors = self.num_errors + result.num_errors
|
| 605 |
+
|
| 606 |
+
|
| 607 |
+
def compile_single(source, options, full_module_name = None):
    """
    compile_single(source, options, full_module_name)

    Compile the given Pyrex implementation file and return a CompilationResult.
    Always compiles a single file; does not perform timestamp checking or
    recursion.
    """
    return run_pipeline(source, options, full_module_name)
|
| 616 |
+
|
| 617 |
+
|
| 618 |
+
def compile_multiple(sources, options):
    """
    compile_multiple(sources, options)

    Compiles the given sequence of Pyrex implementation files and returns
    a CompilationResultSet. Performs timestamp checking and/or recursion
    if these are specified in the options.
    """
    if len(sources) > 1 and options.module_name:
        raise RuntimeError('Full module name can only be set '
                           'for single source compilation')
    # run_pipeline creates the context
    # context = Context.from_options(options)
    sources = [os.path.abspath(source) for source in sources]
    processed = set()
    results = CompilationResultSet()
    timestamps = options.timestamps
    verbose = options.verbose
    context = None
    cwd = os.getcwd()
    for source in sources:
        if source not in processed:
            if context is None:
                context = Context.from_options(options)
            output_filename = get_output_filename(source, cwd, options)
            out_of_date = context.c_file_out_of_date(source, output_filename)
            # With timestamp checking enabled, only recompile stale outputs.
            if (not timestamps) or out_of_date:
                if verbose:
                    sys.stderr.write("Compiling %s\n" % source)
                result = run_pipeline(source, options,
                                      full_module_name=options.module_name,
                                      context=context)
                results.add(source, result)
                # Compiling multiple sources in one context doesn't quite
                # work properly yet.
                context = None
            processed.add(source)
    return results
|
| 656 |
+
|
| 657 |
+
|
| 658 |
+
def compile(source, options = None, full_module_name = None, **kwds):
    """
    compile(source [, options], [, <option> = <value>]...)

    Compile one or more Pyrex implementation files, with optional timestamp
    checking and recursing on dependencies. The source argument may be a string
    or a sequence of strings. If it is a string and no recursion or timestamp
    checking is requested, a CompilationResult is returned, otherwise a
    CompilationResultSet is returned.
    """
    options = CompilationOptions(defaults = options, **kwds)
    # `basestring` here is the py2/py3 compatibility alias defined elsewhere
    # in this module.
    if isinstance(source, basestring):
        if not options.timestamps:
            return compile_single(source, options, full_module_name)
        source = [source]
    return compile_multiple(source, options)
|
| 674 |
+
|
| 675 |
+
|
| 676 |
+
@Utils.cached_function
def search_include_directories(dirs, qualified_name, suffix="", pos=None, include=False, source_file_path=None):
    """
    Search the list of include directories for the given file name.

    If a source file path or position is given, first searches the directory
    containing that file. Returns None if not found, but does not report an error.

    The 'include' option will disable package dereferencing.
    """
    if pos and not source_file_path:
        file_desc = pos[0]
        if not isinstance(file_desc, FileSourceDescriptor):
            raise RuntimeError("Only file sources for code supported")
        source_file_path = file_desc.filename
    if source_file_path:
        if include:
            # Plain include search: look next to the including file first.
            dirs = (os.path.dirname(source_file_path),) + dirs
        else:
            # Package-aware search: start at the root package directory.
            dirs = (Utils.find_root_package_dir(source_file_path),) + dirs

    # search for dotted filename e.g. <dir>/foo.bar.pxd
    dotted_filename = qualified_name
    if suffix:
        dotted_filename += suffix

    for dirname in dirs:
        path = os.path.join(dirname, dotted_filename)
        if os.path.exists(path):
            return path

    # search for filename in package structure e.g. <dir>/foo/bar.pxd or <dir>/foo/bar/__init__.pxd
    if not include:

        names = qualified_name.split('.')
        package_names = tuple(names[:-1])
        module_name = names[-1]

        # search for standard packages first - PEP420
        namespace_dirs = []
        for dirname in dirs:
            package_dir, is_namespace = Utils.check_package_dir(dirname, package_names)
            if package_dir is not None:
                if is_namespace:
                    # Defer namespace packages until regular ones are exhausted.
                    namespace_dirs.append(package_dir)
                    continue
                path = search_module_in_dir(package_dir, module_name, suffix)
                if path:
                    return path

        # search for namespaces second - PEP420
        for package_dir in namespace_dirs:
            path = search_module_in_dir(package_dir, module_name, suffix)
            if path:
                return path

    return None
|
| 733 |
+
|
| 734 |
+
|
| 735 |
+
@Utils.cached_function
def search_module_in_dir(package_dir, module_name, suffix):
    """Locate *module_name* (with *suffix*) inside *package_dir*, trying the
    plain module file first and an __init__ file second. Returns the path
    or a falsy value if not found.
    """
    # matches modules of the form: <dir>/foo/bar.pxd
    path = Utils.find_versioned_file(package_dir, module_name, suffix)

    # matches modules of the form: <dir>/foo/bar/__init__.pxd
    if not path and suffix:
        path = Utils.find_versioned_file(os.path.join(package_dir, module_name), "__init__", suffix)

    return path
|
| 745 |
+
|
| 746 |
+
|
| 747 |
+
# ------------------------------------------------------------------------
|
| 748 |
+
#
|
| 749 |
+
# Main command-line entry point
|
| 750 |
+
#
|
| 751 |
+
# ------------------------------------------------------------------------
|
| 752 |
+
|
| 753 |
+
def setuptools_main():
    """Console-script entry point: run main() in command-line mode."""
    return main(command_line = 1)
|
| 755 |
+
|
| 756 |
+
|
| 757 |
+
def main(command_line = 0):
    """Command-line driver: parse arguments (when *command_line* is true),
    compile the given sources, and exit with status 1 on any failure.
    """
    args = sys.argv[1:]
    any_failures = 0
    if command_line:
        try:
            options, sources = parse_command_line(args)
        except IOError as e:
            # TODO: IOError can be replaced with FileNotFoundError in Cython 3.1
            import errno
            if errno.ENOENT != e.errno:
                # Raised IOError is not caused by missing file.
                raise
            print("{}: No such file or directory: '{}'".format(sys.argv[0], e.filename), file=sys.stderr)
            sys.exit(1)
    else:
        options = CompilationOptions(default_options)
        sources = args

    if options.show_version:
        Utils.print_version()

    if options.working_path!="":
        os.chdir(options.working_path)

    try:
        result = compile(sources, options)
        if result.num_errors > 0:
            any_failures = 1
    except (EnvironmentError, PyrexError) as e:
        sys.stderr.write(str(e) + '\n')
        any_failures = 1
    if any_failures:
        sys.exit(1)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/MemoryView.py
ADDED
|
@@ -0,0 +1,863 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
from .Errors import CompileError, error
|
| 4 |
+
from . import ExprNodes
|
| 5 |
+
from .ExprNodes import IntNode, NameNode, AttributeNode
|
| 6 |
+
from . import Options
|
| 7 |
+
from .Code import UtilityCode, TempitaUtilityCode
|
| 8 |
+
from .UtilityCode import CythonUtilityCode
|
| 9 |
+
from . import Buffer
|
| 10 |
+
from . import PyrexTypes
|
| 11 |
+
from . import ModuleNode
|
| 12 |
+
|
| 13 |
+
# --- Error messages for memoryview axis-spec validation -------------------
START_ERR = "Start must not be given."
STOP_ERR = "Axis specification only allowed in the 'step' slot."
STEP_ERR = "Step must be omitted, 1, or a valid specifier."
BOTH_CF_ERR = "Cannot specify an array that is both C and Fortran contiguous."
INVALID_ERR = "Invalid axis specification."
NOT_CIMPORTED_ERR = "Variable was not cimported from cython.view"
EXPR_ERR = "no expressions allowed in axis spec, only names and literals."
CF_ERR = "Invalid axis specification for a C/Fortran contiguous array."
ERR_UNINITIALIZED = ("Cannot check if memoryview %s is initialized without the "
                     "GIL, consider using initializedcheck(False)")


# --- Python buffer-protocol flag expressions (emitted into C code) --------
format_flag = "PyBUF_FORMAT"

memview_c_contiguous = "(PyBUF_C_CONTIGUOUS | PyBUF_FORMAT)"
memview_f_contiguous = "(PyBUF_F_CONTIGUOUS | PyBUF_FORMAT)"
memview_any_contiguous = "(PyBUF_ANY_CONTIGUOUS | PyBUF_FORMAT)"
memview_full_access = "PyBUF_FULL_RO"
#memview_strided_access = "PyBUF_STRIDED_RO"
memview_strided_access = "PyBUF_RECORDS_RO"

# --- C-level axis access/packing constants --------------------------------
MEMVIEW_DIRECT = '__Pyx_MEMVIEW_DIRECT'
MEMVIEW_PTR = '__Pyx_MEMVIEW_PTR'
MEMVIEW_FULL = '__Pyx_MEMVIEW_FULL'
MEMVIEW_CONTIG = '__Pyx_MEMVIEW_CONTIG'
MEMVIEW_STRIDED = '__Pyx_MEMVIEW_STRIDED'
MEMVIEW_FOLLOW = '__Pyx_MEMVIEW_FOLLOW'

# Map axis-spec names to the C constants above.
_spec_to_const = {
    'direct' : MEMVIEW_DIRECT,
    'ptr'    : MEMVIEW_PTR,
    'full'   : MEMVIEW_FULL,
    'contig' : MEMVIEW_CONTIG,
    'strided': MEMVIEW_STRIDED,
    'follow' : MEMVIEW_FOLLOW,
    }

# Single-character abbreviations used when encoding axis specs in names.
_spec_to_abbrev = {
    'direct'  : 'd',
    'ptr'     : 'p',
    'full'    : 'f',
    'contig'  : 'c',
    'strided' : 's',
    'follow'  : '_',
    }

# C initialiser for a zeroed memoryview slice struct.
memslice_entry_init = "{ 0, 0, { 0 }, { 0 }, { 0 } }"

# C-side names of the memoryview runtime type/struct.
memview_name = u'memoryview'
memview_typeptr_cname = '__pyx_memoryview_type'
memview_objstruct_cname = '__pyx_memoryview_obj'
memviewslice_cname = u'__Pyx_memviewslice'
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def put_init_entry(mv_cname, code):
    """Emit C code that null-initialises the memoryview slice *mv_cname*."""
    for field in ("data", "memview"):
        code.putln("%s.%s = NULL;" % (mv_cname, field))
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
#def axes_to_str(axes):
|
| 73 |
+
# return "".join([access[0].upper()+packing[0] for (access, packing) in axes])
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def put_acquire_memoryviewslice(lhs_cname, lhs_type, lhs_pos, rhs, code,
                                have_gil=False, first_assignment=True):
    "We can avoid decreffing the lhs if we know it is the first assignment"
    assert rhs.type.is_memoryviewslice

    # If the rhs is already a temp or simple expression, use its result
    # directly; otherwise stage it in an unmanaged temp first.
    pretty_rhs = rhs.result_in_temp() or rhs.is_simple()
    if pretty_rhs:
        rhstmp = rhs.result()
    else:
        rhstmp = code.funcstate.allocate_temp(lhs_type, manage_ref=False)
        code.putln("%s = %s;" % (rhstmp, rhs.result_as(lhs_type)))

    # Allow uninitialized assignment
    #code.putln(code.put_error_if_unbound(lhs_pos, rhs.entry))
    put_assign_to_memviewslice(lhs_cname, rhs, rhstmp, lhs_type, code,
                               have_gil=have_gil, first_assignment=first_assignment)

    if not pretty_rhs:
        code.funcstate.release_temp(rhstmp)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def put_assign_to_memviewslice(lhs_cname, rhs, rhs_cname, memviewslicetype, code,
                               have_gil=False, first_assignment=False):
    """Emit C code assigning memoryview slice *rhs_cname* to *lhs_cname*,
    handling refcounting and the self-assignment corner case.
    """
    if lhs_cname == rhs_cname:
        # self assignment is tricky because memoryview xdecref clears the memoryview
        # thus invalidating both sides of the assignment. Therefore make it actually do nothing
        code.putln("/* memoryview self assignment no-op */")
        return

    if not first_assignment:
        # Drop the reference held by the previous lhs value.
        code.put_xdecref(lhs_cname, memviewslicetype,
                         have_gil=have_gil)

    if not rhs.result_in_temp():
        rhs.make_owned_memoryviewslice(code)

    code.putln("%s = %s;" % (lhs_cname, rhs_cname))
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def get_buf_flags(specs):
    """Return the PyBUF_* flag expression matching the axis *specs*.

    Contiguous layouts get the dedicated contiguity flags; otherwise full
    access is required whenever any axis is indirect ('full'/'ptr'), and
    strided access suffices for the rest.
    """
    is_c_contig, is_f_contig = is_cf_contig(specs)

    if is_c_contig:
        return memview_c_contiguous
    elif is_f_contig:
        return memview_f_contiguous

    access, packing = zip(*specs)

    if 'full' in access or 'ptr' in access:
        return memview_full_access
    else:
        return memview_strided_access
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def insert_newaxes(memoryviewtype, n):
    """Return a new memoryview slice type with *n* extra leading
    direct/strided axes prepended to *memoryviewtype*'s axes."""
    new_axes = [('direct', 'strided') for _ in range(n)]
    new_axes.extend(memoryviewtype.axes)
    return PyrexTypes.MemoryViewSliceType(memoryviewtype.dtype, new_axes)
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def broadcast_types(src, dst):
    """Pad whichever of the two memoryview types has fewer dimensions with
    leading axes so both share the same ndim, and return the pair."""
    diff = abs(src.ndim - dst.ndim)
    if src.ndim < dst.ndim:
        src = insert_newaxes(src, diff)
    else:
        dst = insert_newaxes(dst, diff)
    return src, dst
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def valid_memslice_dtype(dtype, i=0):
    """
    Return whether type dtype can be used as the base type of a
    memoryview slice.

    We support structs, numeric types and objects
    """
    # Integer-complex and bint have no stable buffer representation.
    if dtype.is_complex and dtype.real_type.is_int:
        return False

    if dtype is PyrexTypes.c_bint_type:
        return False

    if dtype.is_struct and dtype.kind == 'struct':
        # A struct is valid only if every member is.
        for member in dtype.scope.var_entries:
            if not valid_memslice_dtype(member.type):
                return False

        return True

    return (
        dtype.is_error or
        # Pointers are not valid (yet)
        # (dtype.is_ptr and valid_memslice_dtype(dtype.base_type)) or
        # Nested arrays are allowed up to a depth of 8 (i counts recursion).
        (dtype.is_array and i < 8 and
         valid_memslice_dtype(dtype.base_type, i + 1)) or
        dtype.is_numeric or
        dtype.is_pyobject or
        dtype.is_fused or  # accept this as it will be replaced by specializations later
        (dtype.is_typedef and valid_memslice_dtype(dtype.typedef_base_type))
    )
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
class MemoryViewSliceBufferEntry(Buffer.BufferEntry):
|
| 179 |
+
"""
|
| 180 |
+
May be used during code generation time to be queried for
|
| 181 |
+
shape/strides/suboffsets attributes, or to perform indexing or slicing.
|
| 182 |
+
"""
|
| 183 |
+
    def __init__(self, entry):
        """Wrap a symbol-table *entry* of memoryview slice type for
        buffer-style code generation."""
        self.entry = entry
        self.type = entry.type
        self.cname = entry.cname

        # C expression for the slice's raw data pointer.
        self.buf_ptr = "%s.data" % self.cname

        dtype = self.entry.type.dtype
        self.buf_ptr_type = PyrexTypes.CPtrType(dtype)
        self.init_attributes()
|
| 193 |
+
|
| 194 |
+
    def get_buf_suboffsetvars(self):
        # Per-dimension C expressions for the slice's suboffsets.
        return self._for_all_ndim("%s.suboffsets[%d]")

    def get_buf_stridevars(self):
        # Per-dimension C expressions for the slice's strides.
        return self._for_all_ndim("%s.strides[%d]")

    def get_buf_shapevars(self):
        # Per-dimension C expressions for the slice's shape (extents).
        return self._for_all_ndim("%s.shape[%d]")
|
| 202 |
+
|
| 203 |
+
    def generate_buffer_lookup_code(self, code, index_cnames):
        """Pair each index C-name with its axis spec and delegate to
        _generate_buffer_lookup_code."""
        axes = [(dim, index_cnames[dim], access, packing)
                for dim, (access, packing) in enumerate(self.type.axes)]
        return self._generate_buffer_lookup_code(code, axes)
|
| 207 |
+
|
| 208 |
+
    def _generate_buffer_lookup_code(self, code, axes, cast_result=True):
        """
        Generate a single expression that indexes the memory view slice
        in each dimension.
        """
        bufp = self.buf_ptr
        type_decl = self.type.dtype.empty_declaration_code()

        # Fold one pointer-arithmetic step into `bufp` per dimension; the
        # form depends on that dimension's access/packing flag.
        for dim, index, access, packing in axes:
            shape = "%s.shape[%d]" % (self.cname, dim)
            stride = "%s.strides[%d]" % (self.cname, dim)
            suboffset = "%s.suboffsets[%d]" % (self.cname, dim)

            flag = get_memoryview_flag(access, packing)

            if flag in ("generic", "generic_contiguous"):
                # Note: we cannot do cast tricks to avoid stride multiplication
                # for generic_contiguous, as we may have to do (dtype *)
                # or (dtype **) arithmetic, we won't know which unless
                # we check suboffsets
                code.globalstate.use_utility_code(memviewslice_index_helpers)
                bufp = ('__pyx_memviewslice_index_full(%s, %s, %s, %s)' %
                        (bufp, index, stride, suboffset))

            elif flag == "indirect":
                bufp = "(%s + %s * %s)" % (bufp, index, stride)
                bufp = ("(*((char **) %s) + %s)" % (bufp, suboffset))

            elif flag == "indirect_contiguous":
                # Note: we do char ** arithmetic
                bufp = "(*((char **) %s + %s) + %s)" % (bufp, index, suboffset)

            elif flag == "strided":
                bufp = "(%s + %s * %s)" % (bufp, index, stride)

            else:
                assert flag == 'contiguous', flag
                # Contiguous: index directly in dtype units, cast back to char*.
                bufp = '((char *) (((%s *) %s) + %s))' % (type_decl, bufp, index)

            bufp = '( /* dim=%d */ %s )' % (dim, bufp)

        if cast_result:
            return "((%s *) %s)" % (type_decl, bufp)

        return bufp
|
| 253 |
+
|
| 254 |
+
def generate_buffer_slice_code(self, code, indices, dst, dst_type, have_gil,
|
| 255 |
+
have_slices, directives):
|
| 256 |
+
"""
|
| 257 |
+
Slice a memoryviewslice.
|
| 258 |
+
|
| 259 |
+
indices - list of index nodes. If not a SliceNode, or NoneNode,
|
| 260 |
+
then it must be coercible to Py_ssize_t
|
| 261 |
+
|
| 262 |
+
Simply call __pyx_memoryview_slice_memviewslice with the right
|
| 263 |
+
arguments, unless the dimension is omitted or a bare ':', in which
|
| 264 |
+
case we copy over the shape/strides/suboffsets attributes directly
|
| 265 |
+
for that dimension.
|
| 266 |
+
"""
|
| 267 |
+
src = self.cname
|
| 268 |
+
|
| 269 |
+
code.putln("%(dst)s.data = %(src)s.data;" % locals())
|
| 270 |
+
code.putln("%(dst)s.memview = %(src)s.memview;" % locals())
|
| 271 |
+
code.put_incref_memoryviewslice(dst, dst_type, have_gil=have_gil)
|
| 272 |
+
|
| 273 |
+
all_dimensions_direct = all(access == 'direct' for access, packing in self.type.axes)
|
| 274 |
+
suboffset_dim_temp = []
|
| 275 |
+
|
| 276 |
+
def get_suboffset_dim():
|
| 277 |
+
# create global temp variable at request
|
| 278 |
+
if not suboffset_dim_temp:
|
| 279 |
+
suboffset_dim = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
|
| 280 |
+
code.putln("%s = -1;" % suboffset_dim)
|
| 281 |
+
suboffset_dim_temp.append(suboffset_dim)
|
| 282 |
+
return suboffset_dim_temp[0]
|
| 283 |
+
|
| 284 |
+
dim = -1
|
| 285 |
+
new_ndim = 0
|
| 286 |
+
for index in indices:
|
| 287 |
+
if index.is_none:
|
| 288 |
+
# newaxis
|
| 289 |
+
for attrib, value in [('shape', 1), ('strides', 0), ('suboffsets', -1)]:
|
| 290 |
+
code.putln("%s.%s[%d] = %d;" % (dst, attrib, new_ndim, value))
|
| 291 |
+
|
| 292 |
+
new_ndim += 1
|
| 293 |
+
continue
|
| 294 |
+
|
| 295 |
+
dim += 1
|
| 296 |
+
access, packing = self.type.axes[dim]
|
| 297 |
+
|
| 298 |
+
if index.is_slice:
|
| 299 |
+
# slice, unspecified dimension, or part of ellipsis
|
| 300 |
+
d = dict(locals())
|
| 301 |
+
for s in "start stop step".split():
|
| 302 |
+
idx = getattr(index, s)
|
| 303 |
+
have_idx = d['have_' + s] = not idx.is_none
|
| 304 |
+
d[s] = idx.result() if have_idx else "0"
|
| 305 |
+
|
| 306 |
+
if not (d['have_start'] or d['have_stop'] or d['have_step']):
|
| 307 |
+
# full slice (:), simply copy over the extent, stride
|
| 308 |
+
# and suboffset. Also update suboffset_dim if needed
|
| 309 |
+
d['access'] = access
|
| 310 |
+
util_name = "SimpleSlice"
|
| 311 |
+
else:
|
| 312 |
+
util_name = "ToughSlice"
|
| 313 |
+
d['error_goto'] = code.error_goto(index.pos)
|
| 314 |
+
|
| 315 |
+
new_ndim += 1
|
| 316 |
+
else:
|
| 317 |
+
# normal index
|
| 318 |
+
idx = index.result()
|
| 319 |
+
|
| 320 |
+
indirect = access != 'direct'
|
| 321 |
+
if indirect:
|
| 322 |
+
generic = access == 'full'
|
| 323 |
+
if new_ndim != 0:
|
| 324 |
+
return error(index.pos,
|
| 325 |
+
"All preceding dimensions must be "
|
| 326 |
+
"indexed and not sliced")
|
| 327 |
+
|
| 328 |
+
d = dict(
|
| 329 |
+
locals(),
|
| 330 |
+
wraparound=int(directives['wraparound']),
|
| 331 |
+
boundscheck=int(directives['boundscheck']),
|
| 332 |
+
)
|
| 333 |
+
if d['boundscheck']:
|
| 334 |
+
d['error_goto'] = code.error_goto(index.pos)
|
| 335 |
+
util_name = "SliceIndex"
|
| 336 |
+
|
| 337 |
+
_, impl = TempitaUtilityCode.load_as_string(util_name, "MemoryView_C.c", context=d)
|
| 338 |
+
code.put(impl)
|
| 339 |
+
|
| 340 |
+
if suboffset_dim_temp:
|
| 341 |
+
code.funcstate.release_temp(suboffset_dim_temp[0])
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
def empty_slice(pos):
|
| 345 |
+
none = ExprNodes.NoneNode(pos)
|
| 346 |
+
return ExprNodes.SliceNode(pos, start=none,
|
| 347 |
+
stop=none, step=none)
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def unellipsify(indices, ndim):
|
| 351 |
+
result = []
|
| 352 |
+
seen_ellipsis = False
|
| 353 |
+
have_slices = False
|
| 354 |
+
|
| 355 |
+
newaxes = [newaxis for newaxis in indices if newaxis.is_none]
|
| 356 |
+
n_indices = len(indices) - len(newaxes)
|
| 357 |
+
|
| 358 |
+
for index in indices:
|
| 359 |
+
if isinstance(index, ExprNodes.EllipsisNode):
|
| 360 |
+
have_slices = True
|
| 361 |
+
full_slice = empty_slice(index.pos)
|
| 362 |
+
|
| 363 |
+
if seen_ellipsis:
|
| 364 |
+
result.append(full_slice)
|
| 365 |
+
else:
|
| 366 |
+
nslices = ndim - n_indices + 1
|
| 367 |
+
result.extend([full_slice] * nslices)
|
| 368 |
+
seen_ellipsis = True
|
| 369 |
+
else:
|
| 370 |
+
have_slices = have_slices or index.is_slice or index.is_none
|
| 371 |
+
result.append(index)
|
| 372 |
+
|
| 373 |
+
result_length = len(result) - len(newaxes)
|
| 374 |
+
if result_length < ndim:
|
| 375 |
+
have_slices = True
|
| 376 |
+
nslices = ndim - result_length
|
| 377 |
+
result.extend([empty_slice(indices[-1].pos)] * nslices)
|
| 378 |
+
|
| 379 |
+
return have_slices, result, newaxes
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def get_memoryview_flag(access, packing):
|
| 383 |
+
if access == 'full' and packing in ('strided', 'follow'):
|
| 384 |
+
return 'generic'
|
| 385 |
+
elif access == 'full' and packing == 'contig':
|
| 386 |
+
return 'generic_contiguous'
|
| 387 |
+
elif access == 'ptr' and packing in ('strided', 'follow'):
|
| 388 |
+
return 'indirect'
|
| 389 |
+
elif access == 'ptr' and packing == 'contig':
|
| 390 |
+
return 'indirect_contiguous'
|
| 391 |
+
elif access == 'direct' and packing in ('strided', 'follow'):
|
| 392 |
+
return 'strided'
|
| 393 |
+
else:
|
| 394 |
+
assert (access, packing) == ('direct', 'contig'), (access, packing)
|
| 395 |
+
return 'contiguous'
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
def get_is_contig_func_name(contig_type, ndim):
|
| 399 |
+
assert contig_type in ('C', 'F')
|
| 400 |
+
return "__pyx_memviewslice_is_contig_%s%d" % (contig_type, ndim)
|
| 401 |
+
|
| 402 |
+
|
| 403 |
+
def get_is_contig_utility(contig_type, ndim):
|
| 404 |
+
assert contig_type in ('C', 'F')
|
| 405 |
+
C = dict(context, ndim=ndim, contig_type=contig_type)
|
| 406 |
+
utility = load_memview_c_utility("MemviewSliceCheckContig", C, requires=[is_contig_utility])
|
| 407 |
+
return utility
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
def slice_iter(slice_type, slice_result, ndim, code, force_strided=False):
|
| 411 |
+
if (slice_type.is_c_contig or slice_type.is_f_contig) and not force_strided:
|
| 412 |
+
return ContigSliceIter(slice_type, slice_result, ndim, code)
|
| 413 |
+
else:
|
| 414 |
+
return StridedSliceIter(slice_type, slice_result, ndim, code)
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
class SliceIter(object):
|
| 418 |
+
def __init__(self, slice_type, slice_result, ndim, code):
|
| 419 |
+
self.slice_type = slice_type
|
| 420 |
+
self.slice_result = slice_result
|
| 421 |
+
self.code = code
|
| 422 |
+
self.ndim = ndim
|
| 423 |
+
|
| 424 |
+
|
| 425 |
+
class ContigSliceIter(SliceIter):
|
| 426 |
+
def start_loops(self):
|
| 427 |
+
code = self.code
|
| 428 |
+
code.begin_block()
|
| 429 |
+
|
| 430 |
+
type_decl = self.slice_type.dtype.empty_declaration_code()
|
| 431 |
+
|
| 432 |
+
total_size = ' * '.join("%s.shape[%d]" % (self.slice_result, i)
|
| 433 |
+
for i in range(self.ndim))
|
| 434 |
+
code.putln("Py_ssize_t __pyx_temp_extent = %s;" % total_size)
|
| 435 |
+
code.putln("Py_ssize_t __pyx_temp_idx;")
|
| 436 |
+
code.putln("%s *__pyx_temp_pointer = (%s *) %s.data;" % (
|
| 437 |
+
type_decl, type_decl, self.slice_result))
|
| 438 |
+
code.putln("for (__pyx_temp_idx = 0; "
|
| 439 |
+
"__pyx_temp_idx < __pyx_temp_extent; "
|
| 440 |
+
"__pyx_temp_idx++) {")
|
| 441 |
+
|
| 442 |
+
return "__pyx_temp_pointer"
|
| 443 |
+
|
| 444 |
+
def end_loops(self):
|
| 445 |
+
self.code.putln("__pyx_temp_pointer += 1;")
|
| 446 |
+
self.code.putln("}")
|
| 447 |
+
self.code.end_block()
|
| 448 |
+
|
| 449 |
+
|
| 450 |
+
class StridedSliceIter(SliceIter):
|
| 451 |
+
def start_loops(self):
|
| 452 |
+
code = self.code
|
| 453 |
+
code.begin_block()
|
| 454 |
+
|
| 455 |
+
for i in range(self.ndim):
|
| 456 |
+
t = i, self.slice_result, i
|
| 457 |
+
code.putln("Py_ssize_t __pyx_temp_extent_%d = %s.shape[%d];" % t)
|
| 458 |
+
code.putln("Py_ssize_t __pyx_temp_stride_%d = %s.strides[%d];" % t)
|
| 459 |
+
code.putln("char *__pyx_temp_pointer_%d;" % i)
|
| 460 |
+
code.putln("Py_ssize_t __pyx_temp_idx_%d;" % i)
|
| 461 |
+
|
| 462 |
+
code.putln("__pyx_temp_pointer_0 = %s.data;" % self.slice_result)
|
| 463 |
+
|
| 464 |
+
for i in range(self.ndim):
|
| 465 |
+
if i > 0:
|
| 466 |
+
code.putln("__pyx_temp_pointer_%d = __pyx_temp_pointer_%d;" % (i, i - 1))
|
| 467 |
+
|
| 468 |
+
code.putln("for (__pyx_temp_idx_%d = 0; "
|
| 469 |
+
"__pyx_temp_idx_%d < __pyx_temp_extent_%d; "
|
| 470 |
+
"__pyx_temp_idx_%d++) {" % (i, i, i, i))
|
| 471 |
+
|
| 472 |
+
return "__pyx_temp_pointer_%d" % (self.ndim - 1)
|
| 473 |
+
|
| 474 |
+
def end_loops(self):
|
| 475 |
+
code = self.code
|
| 476 |
+
for i in range(self.ndim - 1, -1, -1):
|
| 477 |
+
code.putln("__pyx_temp_pointer_%d += __pyx_temp_stride_%d;" % (i, i))
|
| 478 |
+
code.putln("}")
|
| 479 |
+
|
| 480 |
+
code.end_block()
|
| 481 |
+
|
| 482 |
+
|
| 483 |
+
def copy_c_or_fortran_cname(memview):
|
| 484 |
+
if memview.is_c_contig:
|
| 485 |
+
c_or_f = 'c'
|
| 486 |
+
else:
|
| 487 |
+
c_or_f = 'f'
|
| 488 |
+
|
| 489 |
+
return "__pyx_memoryview_copy_slice_%s_%s" % (
|
| 490 |
+
memview.specialization_suffix(), c_or_f)
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
def get_copy_new_utility(pos, from_memview, to_memview):
|
| 494 |
+
if (from_memview.dtype != to_memview.dtype and
|
| 495 |
+
not (from_memview.dtype.is_cv_qualified and from_memview.dtype.cv_base_type == to_memview.dtype)):
|
| 496 |
+
error(pos, "dtypes must be the same!")
|
| 497 |
+
return
|
| 498 |
+
if len(from_memview.axes) != len(to_memview.axes):
|
| 499 |
+
error(pos, "number of dimensions must be same")
|
| 500 |
+
return
|
| 501 |
+
if not (to_memview.is_c_contig or to_memview.is_f_contig):
|
| 502 |
+
error(pos, "to_memview must be c or f contiguous.")
|
| 503 |
+
return
|
| 504 |
+
|
| 505 |
+
for (access, packing) in from_memview.axes:
|
| 506 |
+
if access != 'direct':
|
| 507 |
+
error(pos, "cannot handle 'full' or 'ptr' access at this time.")
|
| 508 |
+
return
|
| 509 |
+
|
| 510 |
+
if to_memview.is_c_contig:
|
| 511 |
+
mode = 'c'
|
| 512 |
+
contig_flag = memview_c_contiguous
|
| 513 |
+
else:
|
| 514 |
+
assert to_memview.is_f_contig
|
| 515 |
+
mode = 'fortran'
|
| 516 |
+
contig_flag = memview_f_contiguous
|
| 517 |
+
|
| 518 |
+
return load_memview_c_utility(
|
| 519 |
+
"CopyContentsUtility",
|
| 520 |
+
context=dict(
|
| 521 |
+
context,
|
| 522 |
+
mode=mode,
|
| 523 |
+
dtype_decl=to_memview.dtype.empty_declaration_code(),
|
| 524 |
+
contig_flag=contig_flag,
|
| 525 |
+
ndim=to_memview.ndim,
|
| 526 |
+
func_cname=copy_c_or_fortran_cname(to_memview),
|
| 527 |
+
dtype_is_object=int(to_memview.dtype.is_pyobject)),
|
| 528 |
+
requires=[copy_contents_new_utility])
|
| 529 |
+
|
| 530 |
+
|
| 531 |
+
def get_axes_specs(env, axes):
|
| 532 |
+
'''
|
| 533 |
+
get_axes_specs(env, axes) -> list of (access, packing) specs for each axis.
|
| 534 |
+
access is one of 'full', 'ptr' or 'direct'
|
| 535 |
+
packing is one of 'contig', 'strided' or 'follow'
|
| 536 |
+
'''
|
| 537 |
+
|
| 538 |
+
cythonscope = env.global_scope().context.cython_scope
|
| 539 |
+
cythonscope.load_cythonscope()
|
| 540 |
+
viewscope = cythonscope.viewscope
|
| 541 |
+
|
| 542 |
+
access_specs = tuple([viewscope.lookup(name)
|
| 543 |
+
for name in ('full', 'direct', 'ptr')])
|
| 544 |
+
packing_specs = tuple([viewscope.lookup(name)
|
| 545 |
+
for name in ('contig', 'strided', 'follow')])
|
| 546 |
+
|
| 547 |
+
is_f_contig, is_c_contig = False, False
|
| 548 |
+
default_access, default_packing = 'direct', 'strided'
|
| 549 |
+
cf_access, cf_packing = default_access, 'follow'
|
| 550 |
+
|
| 551 |
+
axes_specs = []
|
| 552 |
+
# analyse all axes.
|
| 553 |
+
for idx, axis in enumerate(axes):
|
| 554 |
+
if not axis.start.is_none:
|
| 555 |
+
raise CompileError(axis.start.pos, START_ERR)
|
| 556 |
+
|
| 557 |
+
if not axis.stop.is_none:
|
| 558 |
+
raise CompileError(axis.stop.pos, STOP_ERR)
|
| 559 |
+
|
| 560 |
+
if axis.step.is_none:
|
| 561 |
+
axes_specs.append((default_access, default_packing))
|
| 562 |
+
|
| 563 |
+
elif isinstance(axis.step, IntNode):
|
| 564 |
+
# the packing for the ::1 axis is contiguous,
|
| 565 |
+
# all others are cf_packing.
|
| 566 |
+
if axis.step.compile_time_value(env) != 1:
|
| 567 |
+
raise CompileError(axis.step.pos, STEP_ERR)
|
| 568 |
+
|
| 569 |
+
axes_specs.append((cf_access, 'cfcontig'))
|
| 570 |
+
|
| 571 |
+
elif isinstance(axis.step, (NameNode, AttributeNode)):
|
| 572 |
+
entry = _get_resolved_spec(env, axis.step)
|
| 573 |
+
if entry.name in view_constant_to_access_packing:
|
| 574 |
+
axes_specs.append(view_constant_to_access_packing[entry.name])
|
| 575 |
+
else:
|
| 576 |
+
raise CompileError(axis.step.pos, INVALID_ERR)
|
| 577 |
+
|
| 578 |
+
else:
|
| 579 |
+
raise CompileError(axis.step.pos, INVALID_ERR)
|
| 580 |
+
|
| 581 |
+
# First, find out if we have a ::1 somewhere
|
| 582 |
+
contig_dim = 0
|
| 583 |
+
is_contig = False
|
| 584 |
+
for idx, (access, packing) in enumerate(axes_specs):
|
| 585 |
+
if packing == 'cfcontig':
|
| 586 |
+
if is_contig:
|
| 587 |
+
raise CompileError(axis.step.pos, BOTH_CF_ERR)
|
| 588 |
+
|
| 589 |
+
contig_dim = idx
|
| 590 |
+
axes_specs[idx] = (access, 'contig')
|
| 591 |
+
is_contig = True
|
| 592 |
+
|
| 593 |
+
if is_contig:
|
| 594 |
+
# We have a ::1 somewhere, see if we're C or Fortran contiguous
|
| 595 |
+
if contig_dim == len(axes) - 1:
|
| 596 |
+
is_c_contig = True
|
| 597 |
+
else:
|
| 598 |
+
is_f_contig = True
|
| 599 |
+
|
| 600 |
+
if contig_dim and not axes_specs[contig_dim - 1][0] in ('full', 'ptr'):
|
| 601 |
+
raise CompileError(axes[contig_dim].pos,
|
| 602 |
+
"Fortran contiguous specifier must follow an indirect dimension")
|
| 603 |
+
|
| 604 |
+
if is_c_contig:
|
| 605 |
+
# Contiguous in the last dimension, find the last indirect dimension
|
| 606 |
+
contig_dim = -1
|
| 607 |
+
for idx, (access, packing) in enumerate(reversed(axes_specs)):
|
| 608 |
+
if access in ('ptr', 'full'):
|
| 609 |
+
contig_dim = len(axes) - idx - 1
|
| 610 |
+
|
| 611 |
+
# Replace 'strided' with 'follow' for any dimension following the last
|
| 612 |
+
# indirect dimension, the first dimension or the dimension following
|
| 613 |
+
# the ::1.
|
| 614 |
+
# int[::indirect, ::1, :, :]
|
| 615 |
+
# ^ ^
|
| 616 |
+
# int[::indirect, :, :, ::1]
|
| 617 |
+
# ^ ^
|
| 618 |
+
start = contig_dim + 1
|
| 619 |
+
stop = len(axes) - is_c_contig
|
| 620 |
+
for idx, (access, packing) in enumerate(axes_specs[start:stop]):
|
| 621 |
+
idx = contig_dim + 1 + idx
|
| 622 |
+
if access != 'direct':
|
| 623 |
+
raise CompileError(axes[idx].pos,
|
| 624 |
+
"Indirect dimension may not follow "
|
| 625 |
+
"Fortran contiguous dimension")
|
| 626 |
+
if packing == 'contig':
|
| 627 |
+
raise CompileError(axes[idx].pos,
|
| 628 |
+
"Dimension may not be contiguous")
|
| 629 |
+
axes_specs[idx] = (access, cf_packing)
|
| 630 |
+
|
| 631 |
+
if is_c_contig:
|
| 632 |
+
# For C contiguity, we need to fix the 'contig' dimension
|
| 633 |
+
# after the loop
|
| 634 |
+
a, p = axes_specs[-1]
|
| 635 |
+
axes_specs[-1] = a, 'contig'
|
| 636 |
+
|
| 637 |
+
validate_axes_specs([axis.start.pos for axis in axes],
|
| 638 |
+
axes_specs,
|
| 639 |
+
is_c_contig,
|
| 640 |
+
is_f_contig)
|
| 641 |
+
|
| 642 |
+
return axes_specs
|
| 643 |
+
|
| 644 |
+
|
| 645 |
+
def validate_axes(pos, axes):
|
| 646 |
+
if len(axes) >= Options.buffer_max_dims:
|
| 647 |
+
error(pos, "More dimensions than the maximum number"
|
| 648 |
+
" of buffer dimensions were used.")
|
| 649 |
+
return False
|
| 650 |
+
|
| 651 |
+
return True
|
| 652 |
+
|
| 653 |
+
|
| 654 |
+
def is_cf_contig(specs):
|
| 655 |
+
is_c_contig = is_f_contig = False
|
| 656 |
+
|
| 657 |
+
if len(specs) == 1 and specs == [('direct', 'contig')]:
|
| 658 |
+
is_c_contig = True
|
| 659 |
+
|
| 660 |
+
elif (specs[-1] == ('direct','contig') and
|
| 661 |
+
all(axis == ('direct','follow') for axis in specs[:-1])):
|
| 662 |
+
# c_contiguous: 'follow', 'follow', ..., 'follow', 'contig'
|
| 663 |
+
is_c_contig = True
|
| 664 |
+
|
| 665 |
+
elif (len(specs) > 1 and
|
| 666 |
+
specs[0] == ('direct','contig') and
|
| 667 |
+
all(axis == ('direct','follow') for axis in specs[1:])):
|
| 668 |
+
# f_contiguous: 'contig', 'follow', 'follow', ..., 'follow'
|
| 669 |
+
is_f_contig = True
|
| 670 |
+
|
| 671 |
+
return is_c_contig, is_f_contig
|
| 672 |
+
|
| 673 |
+
|
| 674 |
+
def get_mode(specs):
|
| 675 |
+
is_c_contig, is_f_contig = is_cf_contig(specs)
|
| 676 |
+
|
| 677 |
+
if is_c_contig:
|
| 678 |
+
return 'c'
|
| 679 |
+
elif is_f_contig:
|
| 680 |
+
return 'fortran'
|
| 681 |
+
|
| 682 |
+
for access, packing in specs:
|
| 683 |
+
if access in ('ptr', 'full'):
|
| 684 |
+
return 'full'
|
| 685 |
+
|
| 686 |
+
return 'strided'
|
| 687 |
+
|
| 688 |
+
view_constant_to_access_packing = {
|
| 689 |
+
'generic': ('full', 'strided'),
|
| 690 |
+
'strided': ('direct', 'strided'),
|
| 691 |
+
'indirect': ('ptr', 'strided'),
|
| 692 |
+
'generic_contiguous': ('full', 'contig'),
|
| 693 |
+
'contiguous': ('direct', 'contig'),
|
| 694 |
+
'indirect_contiguous': ('ptr', 'contig'),
|
| 695 |
+
}
|
| 696 |
+
|
| 697 |
+
def validate_axes_specs(positions, specs, is_c_contig, is_f_contig):
|
| 698 |
+
|
| 699 |
+
packing_specs = ('contig', 'strided', 'follow')
|
| 700 |
+
access_specs = ('direct', 'ptr', 'full')
|
| 701 |
+
|
| 702 |
+
# is_c_contig, is_f_contig = is_cf_contig(specs)
|
| 703 |
+
|
| 704 |
+
has_contig = has_follow = has_strided = has_generic_contig = False
|
| 705 |
+
|
| 706 |
+
last_indirect_dimension = -1
|
| 707 |
+
for idx, (access, packing) in enumerate(specs):
|
| 708 |
+
if access == 'ptr':
|
| 709 |
+
last_indirect_dimension = idx
|
| 710 |
+
|
| 711 |
+
for idx, (pos, (access, packing)) in enumerate(zip(positions, specs)):
|
| 712 |
+
|
| 713 |
+
if not (access in access_specs and
|
| 714 |
+
packing in packing_specs):
|
| 715 |
+
raise CompileError(pos, "Invalid axes specification.")
|
| 716 |
+
|
| 717 |
+
if packing == 'strided':
|
| 718 |
+
has_strided = True
|
| 719 |
+
elif packing == 'contig':
|
| 720 |
+
if has_contig:
|
| 721 |
+
raise CompileError(pos, "Only one direct contiguous "
|
| 722 |
+
"axis may be specified.")
|
| 723 |
+
|
| 724 |
+
valid_contig_dims = last_indirect_dimension + 1, len(specs) - 1
|
| 725 |
+
if idx not in valid_contig_dims and access != 'ptr':
|
| 726 |
+
if last_indirect_dimension + 1 != len(specs) - 1:
|
| 727 |
+
dims = "dimensions %d and %d" % valid_contig_dims
|
| 728 |
+
else:
|
| 729 |
+
dims = "dimension %d" % valid_contig_dims[0]
|
| 730 |
+
|
| 731 |
+
raise CompileError(pos, "Only %s may be contiguous and direct" % dims)
|
| 732 |
+
|
| 733 |
+
has_contig = access != 'ptr'
|
| 734 |
+
elif packing == 'follow':
|
| 735 |
+
if has_strided:
|
| 736 |
+
raise CompileError(pos, "A memoryview cannot have both follow and strided axis specifiers.")
|
| 737 |
+
if not (is_c_contig or is_f_contig):
|
| 738 |
+
raise CompileError(pos, "Invalid use of the follow specifier.")
|
| 739 |
+
|
| 740 |
+
if access in ('ptr', 'full'):
|
| 741 |
+
has_strided = False
|
| 742 |
+
|
| 743 |
+
def _get_resolved_spec(env, spec):
|
| 744 |
+
# spec must be a NameNode or an AttributeNode
|
| 745 |
+
if isinstance(spec, NameNode):
|
| 746 |
+
return _resolve_NameNode(env, spec)
|
| 747 |
+
elif isinstance(spec, AttributeNode):
|
| 748 |
+
return _resolve_AttributeNode(env, spec)
|
| 749 |
+
else:
|
| 750 |
+
raise CompileError(spec.pos, INVALID_ERR)
|
| 751 |
+
|
| 752 |
+
def _resolve_NameNode(env, node):
|
| 753 |
+
try:
|
| 754 |
+
resolved_name = env.lookup(node.name).name
|
| 755 |
+
except AttributeError:
|
| 756 |
+
raise CompileError(node.pos, INVALID_ERR)
|
| 757 |
+
|
| 758 |
+
viewscope = env.global_scope().context.cython_scope.viewscope
|
| 759 |
+
entry = viewscope.lookup(resolved_name)
|
| 760 |
+
if entry is None:
|
| 761 |
+
raise CompileError(node.pos, NOT_CIMPORTED_ERR)
|
| 762 |
+
|
| 763 |
+
return entry
|
| 764 |
+
|
| 765 |
+
def _resolve_AttributeNode(env, node):
|
| 766 |
+
path = []
|
| 767 |
+
while isinstance(node, AttributeNode):
|
| 768 |
+
path.insert(0, node.attribute)
|
| 769 |
+
node = node.obj
|
| 770 |
+
if isinstance(node, NameNode):
|
| 771 |
+
path.insert(0, node.name)
|
| 772 |
+
else:
|
| 773 |
+
raise CompileError(node.pos, EXPR_ERR)
|
| 774 |
+
modnames = path[:-1]
|
| 775 |
+
# must be at least 1 module name, o/w not an AttributeNode.
|
| 776 |
+
assert modnames
|
| 777 |
+
|
| 778 |
+
scope = env
|
| 779 |
+
for modname in modnames:
|
| 780 |
+
mod = scope.lookup(modname)
|
| 781 |
+
if not mod or not mod.as_module:
|
| 782 |
+
raise CompileError(
|
| 783 |
+
node.pos, "undeclared name not builtin: %s" % modname)
|
| 784 |
+
scope = mod.as_module
|
| 785 |
+
|
| 786 |
+
entry = scope.lookup(path[-1])
|
| 787 |
+
if not entry:
|
| 788 |
+
raise CompileError(node.pos, "No such attribute '%s'" % path[-1])
|
| 789 |
+
|
| 790 |
+
return entry
|
| 791 |
+
|
| 792 |
+
#
|
| 793 |
+
### Utility loading
|
| 794 |
+
#
|
| 795 |
+
|
| 796 |
+
def load_memview_cy_utility(util_code_name, context=None, **kwargs):
|
| 797 |
+
return CythonUtilityCode.load(util_code_name, "MemoryView.pyx",
|
| 798 |
+
context=context, **kwargs)
|
| 799 |
+
|
| 800 |
+
def load_memview_c_utility(util_code_name, context=None, **kwargs):
|
| 801 |
+
if context is None:
|
| 802 |
+
return UtilityCode.load(util_code_name, "MemoryView_C.c", **kwargs)
|
| 803 |
+
else:
|
| 804 |
+
return TempitaUtilityCode.load(util_code_name, "MemoryView_C.c",
|
| 805 |
+
context=context, **kwargs)
|
| 806 |
+
|
| 807 |
+
def use_cython_array_utility_code(env):
|
| 808 |
+
cython_scope = env.global_scope().context.cython_scope
|
| 809 |
+
cython_scope.load_cythonscope()
|
| 810 |
+
cython_scope.viewscope.lookup('array_cwrapper').used = True
|
| 811 |
+
|
| 812 |
+
context = {
|
| 813 |
+
'memview_struct_name': memview_objstruct_cname,
|
| 814 |
+
'max_dims': Options.buffer_max_dims,
|
| 815 |
+
'memviewslice_name': memviewslice_cname,
|
| 816 |
+
'memslice_init': PyrexTypes.MemoryViewSliceType.default_value,
|
| 817 |
+
'THREAD_LOCKS_PREALLOCATED': 8,
|
| 818 |
+
}
|
| 819 |
+
memviewslice_declare_code = load_memview_c_utility(
|
| 820 |
+
"MemviewSliceStruct",
|
| 821 |
+
context=context,
|
| 822 |
+
requires=[])
|
| 823 |
+
|
| 824 |
+
atomic_utility = load_memview_c_utility("Atomics", context)
|
| 825 |
+
|
| 826 |
+
memviewslice_init_code = load_memview_c_utility(
|
| 827 |
+
"MemviewSliceInit",
|
| 828 |
+
context=dict(context, BUF_MAX_NDIMS=Options.buffer_max_dims),
|
| 829 |
+
requires=[memviewslice_declare_code,
|
| 830 |
+
atomic_utility],
|
| 831 |
+
)
|
| 832 |
+
|
| 833 |
+
memviewslice_index_helpers = load_memview_c_utility("MemviewSliceIndex")
|
| 834 |
+
|
| 835 |
+
typeinfo_to_format_code = load_memview_cy_utility(
|
| 836 |
+
"BufferFormatFromTypeInfo", requires=[Buffer._typeinfo_to_format_code])
|
| 837 |
+
|
| 838 |
+
is_contig_utility = load_memview_c_utility("MemviewSliceIsContig", context)
|
| 839 |
+
overlapping_utility = load_memview_c_utility("OverlappingSlices", context)
|
| 840 |
+
copy_contents_new_utility = load_memview_c_utility(
|
| 841 |
+
"MemviewSliceCopyTemplate",
|
| 842 |
+
context,
|
| 843 |
+
requires=[], # require cython_array_utility_code
|
| 844 |
+
)
|
| 845 |
+
|
| 846 |
+
view_utility_code = load_memview_cy_utility(
|
| 847 |
+
"View.MemoryView",
|
| 848 |
+
context=context,
|
| 849 |
+
requires=[Buffer.GetAndReleaseBufferUtilityCode(),
|
| 850 |
+
Buffer.buffer_struct_declare_code,
|
| 851 |
+
Buffer.buffer_formats_declare_code,
|
| 852 |
+
memviewslice_init_code,
|
| 853 |
+
is_contig_utility,
|
| 854 |
+
overlapping_utility,
|
| 855 |
+
copy_contents_new_utility,
|
| 856 |
+
],
|
| 857 |
+
)
|
| 858 |
+
view_utility_allowlist = ('array', 'memoryview', 'array_cwrapper',
|
| 859 |
+
'generic', 'strided', 'indirect', 'contiguous',
|
| 860 |
+
'indirect_contiguous')
|
| 861 |
+
|
| 862 |
+
memviewslice_declare_code.requires.append(view_utility_code)
|
| 863 |
+
copy_contents_new_utility.requires.append(view_utility_code)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/ModuleNode.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Naming.py
ADDED
|
@@ -0,0 +1,198 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# C naming conventions
|
| 3 |
+
#
|
| 4 |
+
#
|
| 5 |
+
# Prefixes for generating C names.
|
| 6 |
+
# Collected here to facilitate ensuring uniqueness.
|
| 7 |
+
#
|
| 8 |
+
from .. import __version__
|
| 9 |
+
|
| 10 |
+
pyrex_prefix = "__pyx_"
|
| 11 |
+
cyversion = __version__.replace('.', '_')
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
codewriter_temp_prefix = pyrex_prefix + "t_"
|
| 15 |
+
|
| 16 |
+
temp_prefix = u"__cyt_"
|
| 17 |
+
|
| 18 |
+
pyunicode_identifier_prefix = pyrex_prefix + 'U'
|
| 19 |
+
|
| 20 |
+
builtin_prefix = pyrex_prefix + "builtin_"
|
| 21 |
+
arg_prefix = pyrex_prefix + "arg_"
|
| 22 |
+
genexpr_arg_prefix = pyrex_prefix + "genexpr_arg_"
|
| 23 |
+
funcdoc_prefix = pyrex_prefix + "doc_"
|
| 24 |
+
enum_prefix = pyrex_prefix + "e_"
|
| 25 |
+
func_prefix = pyrex_prefix + "f_"
|
| 26 |
+
func_prefix_api = pyrex_prefix + "api_f_"
|
| 27 |
+
pyfunc_prefix = pyrex_prefix + "pf_"
|
| 28 |
+
pywrap_prefix = pyrex_prefix + "pw_"
|
| 29 |
+
genbody_prefix = pyrex_prefix + "gb_"
|
| 30 |
+
gstab_prefix = pyrex_prefix + "getsets_"
|
| 31 |
+
prop_get_prefix = pyrex_prefix + "getprop_"
|
| 32 |
+
const_prefix = pyrex_prefix + "k_"
|
| 33 |
+
py_const_prefix = pyrex_prefix + "kp_"
|
| 34 |
+
label_prefix = pyrex_prefix + "L"
|
| 35 |
+
pymethdef_prefix = pyrex_prefix + "mdef_"
|
| 36 |
+
method_wrapper_prefix = pyrex_prefix + "specialmethod_"
|
| 37 |
+
methtab_prefix = pyrex_prefix + "methods_"
|
| 38 |
+
memtab_prefix = pyrex_prefix + "members_"
|
| 39 |
+
objstruct_prefix = pyrex_prefix + "obj_"
|
| 40 |
+
typeptr_prefix = pyrex_prefix + "ptype_"
|
| 41 |
+
prop_set_prefix = pyrex_prefix + "setprop_"
|
| 42 |
+
type_prefix = pyrex_prefix + "t_"
|
| 43 |
+
typeobj_prefix = pyrex_prefix + "type_"
|
| 44 |
+
var_prefix = pyrex_prefix + "v_"
|
| 45 |
+
varptr_prefix = pyrex_prefix + "vp_"
|
| 46 |
+
varptr_prefix_api = pyrex_prefix + "api_vp_"
|
| 47 |
+
wrapperbase_prefix= pyrex_prefix + "wrapperbase_"
|
| 48 |
+
pybuffernd_prefix = pyrex_prefix + "pybuffernd_"
|
| 49 |
+
pybufferstruct_prefix = pyrex_prefix + "pybuffer_"
|
| 50 |
+
vtable_prefix = pyrex_prefix + "vtable_"
|
| 51 |
+
vtabptr_prefix = pyrex_prefix + "vtabptr_"
|
| 52 |
+
vtabstruct_prefix = pyrex_prefix + "vtabstruct_"
|
| 53 |
+
unicode_vtabentry_prefix = pyrex_prefix + "Uvtabentry_"
|
| 54 |
+
# vtab entries aren't normally mangled,
|
| 55 |
+
# but punycode names sometimes start with numbers leading to a C syntax error
|
| 56 |
+
unicode_structmember_prefix = pyrex_prefix + "Umember_"
|
| 57 |
+
# as above -
|
| 58 |
+
# not normally mangled but punycode names cause specific problems
|
| 59 |
+
opt_arg_prefix = pyrex_prefix + "opt_args_"
|
| 60 |
+
convert_func_prefix = pyrex_prefix + "convert_"
|
| 61 |
+
closure_scope_prefix = pyrex_prefix + "scope_"
|
| 62 |
+
closure_class_prefix = pyrex_prefix + "scope_struct_"
|
| 63 |
+
lambda_func_prefix = pyrex_prefix + "lambda_"
|
| 64 |
+
module_is_main = pyrex_prefix + "module_is_main"
|
| 65 |
+
defaults_struct_prefix = pyrex_prefix + "defaults"
|
| 66 |
+
dynamic_args_cname = pyrex_prefix + "dynamic_args"
|
| 67 |
+
|
| 68 |
+
interned_prefixes = {
|
| 69 |
+
'str': pyrex_prefix + "n_",
|
| 70 |
+
'int': pyrex_prefix + "int_",
|
| 71 |
+
'float': pyrex_prefix + "float_",
|
| 72 |
+
'tuple': pyrex_prefix + "tuple_",
|
| 73 |
+
'codeobj': pyrex_prefix + "codeobj_",
|
| 74 |
+
'slice': pyrex_prefix + "slice_",
|
| 75 |
+
'ustring': pyrex_prefix + "ustring_",
|
| 76 |
+
'umethod': pyrex_prefix + "umethod_",
|
| 77 |
+
}
|
| 78 |
+
|
| 79 |
+
ctuple_type_prefix = pyrex_prefix + "ctuple_"
|
| 80 |
+
args_cname = pyrex_prefix + "args"
|
| 81 |
+
nargs_cname = pyrex_prefix + "nargs"
|
| 82 |
+
kwvalues_cname = pyrex_prefix + "kwvalues"
|
| 83 |
+
generator_cname = pyrex_prefix + "generator"
|
| 84 |
+
sent_value_cname = pyrex_prefix + "sent_value"
|
| 85 |
+
pykwdlist_cname = pyrex_prefix + "pyargnames"
|
| 86 |
+
obj_base_cname = pyrex_prefix + "base"
|
| 87 |
+
builtins_cname = pyrex_prefix + "b"
|
| 88 |
+
preimport_cname = pyrex_prefix + "i"
|
| 89 |
+
moddict_cname = pyrex_prefix + "d"
|
| 90 |
+
dummy_cname = pyrex_prefix + "dummy"
|
| 91 |
+
filename_cname = pyrex_prefix + "filename"
|
| 92 |
+
modulename_cname = pyrex_prefix + "modulename"
|
| 93 |
+
filetable_cname = pyrex_prefix + "f"
|
| 94 |
+
intern_tab_cname = pyrex_prefix + "intern_tab"
|
| 95 |
+
kwds_cname = pyrex_prefix + "kwds"
|
| 96 |
+
lineno_cname = pyrex_prefix + "lineno"
|
| 97 |
+
clineno_cname = pyrex_prefix + "clineno"
|
| 98 |
+
cfilenm_cname = pyrex_prefix + "cfilenm"
|
| 99 |
+
local_tstate_cname = pyrex_prefix + "tstate"
|
| 100 |
+
module_cname = pyrex_prefix + "m"
|
| 101 |
+
modulestate_cname = pyrex_prefix + "mstate"
|
| 102 |
+
modulestateglobal_cname = pyrex_prefix + "mstate_global"
|
| 103 |
+
moddoc_cname = pyrex_prefix + "mdoc"
|
| 104 |
+
methtable_cname = pyrex_prefix + "methods"
|
| 105 |
+
retval_cname = pyrex_prefix + "r"
|
| 106 |
+
reqd_kwds_cname = pyrex_prefix + "reqd_kwds"
|
| 107 |
+
self_cname = pyrex_prefix + "self"
|
| 108 |
+
stringtab_cname = pyrex_prefix + "string_tab"
|
| 109 |
+
vtabslot_cname = pyrex_prefix + "vtab"
|
| 110 |
+
c_api_tab_cname = pyrex_prefix + "c_api_tab"
|
| 111 |
+
gilstate_cname = pyrex_prefix + "state"
|
| 112 |
+
skip_dispatch_cname = pyrex_prefix + "skip_dispatch"
|
| 113 |
+
empty_tuple = pyrex_prefix + "empty_tuple"
|
| 114 |
+
empty_bytes = pyrex_prefix + "empty_bytes"
|
| 115 |
+
empty_unicode = pyrex_prefix + "empty_unicode"
|
| 116 |
+
print_function = pyrex_prefix + "print"
|
| 117 |
+
print_function_kwargs = pyrex_prefix + "print_kwargs"
|
| 118 |
+
cleanup_cname = pyrex_prefix + "module_cleanup"
|
| 119 |
+
pymoduledef_cname = pyrex_prefix + "moduledef"
|
| 120 |
+
pymoduledef_slots_cname = pyrex_prefix + "moduledef_slots"
|
| 121 |
+
pymodinit_module_arg = pyrex_prefix + "pyinit_module"
|
| 122 |
+
pymodule_create_func_cname = pyrex_prefix + "pymod_create"
|
| 123 |
+
pymodule_exec_func_cname = pyrex_prefix + "pymod_exec"
|
| 124 |
+
optional_args_cname = pyrex_prefix + "optional_args"
|
| 125 |
+
import_star = pyrex_prefix + "import_star"
|
| 126 |
+
import_star_set = pyrex_prefix + "import_star_set"
|
| 127 |
+
outer_scope_cname= pyrex_prefix + "outer_scope"
|
| 128 |
+
cur_scope_cname = pyrex_prefix + "cur_scope"
|
| 129 |
+
enc_scope_cname = pyrex_prefix + "enc_scope"
|
| 130 |
+
frame_cname = pyrex_prefix + "frame"
|
| 131 |
+
frame_code_cname = pyrex_prefix + "frame_code"
|
| 132 |
+
error_without_exception_cname = pyrex_prefix + "error_without_exception"
|
| 133 |
+
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
|
| 134 |
+
fused_func_prefix = pyrex_prefix + 'fuse_'
|
| 135 |
+
fused_dtype_prefix = pyrex_prefix + 'fused_dtype_'
|
| 136 |
+
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
|
| 137 |
+
tp_dict_version_temp = pyrex_prefix + "tp_dict_version"
|
| 138 |
+
obj_dict_version_temp = pyrex_prefix + "obj_dict_version"
|
| 139 |
+
type_dict_guard_temp = pyrex_prefix + "typedict_guard"
|
| 140 |
+
cython_runtime_cname = pyrex_prefix + "cython_runtime"
|
| 141 |
+
cyfunction_type_cname = pyrex_prefix + "CyFunctionType"
|
| 142 |
+
fusedfunction_type_cname = pyrex_prefix + "FusedFunctionType"
|
| 143 |
+
# the name "dflt" was picked by analogy with the CPython dataclass module which stores
|
| 144 |
+
# the default values in variables named f"_dflt_{field.name}" in a hidden scope that's
|
| 145 |
+
# passed to the __init__ function. (The name is unimportant to the exact workings though)
|
| 146 |
+
dataclass_field_default_cname = pyrex_prefix + "dataclass_dflt"
|
| 147 |
+
|
| 148 |
+
global_code_object_cache_find = pyrex_prefix + 'find_code_object'
|
| 149 |
+
global_code_object_cache_insert = pyrex_prefix + 'insert_code_object'
|
| 150 |
+
|
| 151 |
+
genexpr_id_ref = 'genexpr'
|
| 152 |
+
freelist_name = 'freelist'
|
| 153 |
+
freecount_name = 'freecount'
|
| 154 |
+
|
| 155 |
+
line_c_macro = "__LINE__"
|
| 156 |
+
|
| 157 |
+
file_c_macro = "__FILE__"
|
| 158 |
+
|
| 159 |
+
extern_c_macro = pyrex_prefix.upper() + "EXTERN_C"
|
| 160 |
+
|
| 161 |
+
exc_type_name = pyrex_prefix + "exc_type"
|
| 162 |
+
exc_value_name = pyrex_prefix + "exc_value"
|
| 163 |
+
exc_tb_name = pyrex_prefix + "exc_tb"
|
| 164 |
+
exc_lineno_name = pyrex_prefix + "exc_lineno"
|
| 165 |
+
|
| 166 |
+
parallel_exc_type = pyrex_prefix + "parallel_exc_type"
|
| 167 |
+
parallel_exc_value = pyrex_prefix + "parallel_exc_value"
|
| 168 |
+
parallel_exc_tb = pyrex_prefix + "parallel_exc_tb"
|
| 169 |
+
parallel_filename = pyrex_prefix + "parallel_filename"
|
| 170 |
+
parallel_lineno = pyrex_prefix + "parallel_lineno"
|
| 171 |
+
parallel_clineno = pyrex_prefix + "parallel_clineno"
|
| 172 |
+
parallel_why = pyrex_prefix + "parallel_why"
|
| 173 |
+
|
| 174 |
+
exc_vars = (exc_type_name, exc_value_name, exc_tb_name)
|
| 175 |
+
|
| 176 |
+
api_name = pyrex_prefix + "capi__"
|
| 177 |
+
|
| 178 |
+
# the h and api guards get changed to:
|
| 179 |
+
# __PYX_HAVE__FILENAME (for ascii filenames)
|
| 180 |
+
# __PYX_HAVE_U_PUNYCODEFILENAME (for non-ascii filenames)
|
| 181 |
+
h_guard_prefix = "__PYX_HAVE_"
|
| 182 |
+
api_guard_prefix = "__PYX_HAVE_API_"
|
| 183 |
+
api_func_guard = "__PYX_HAVE_API_FUNC_"
|
| 184 |
+
|
| 185 |
+
PYX_NAN = "__PYX_NAN()"
|
| 186 |
+
|
| 187 |
+
def py_version_hex(major, minor=0, micro=0, release_level=0, release_serial=0):
|
| 188 |
+
return (major << 24) | (minor << 16) | (micro << 8) | (release_level << 4) | (release_serial)
|
| 189 |
+
|
| 190 |
+
# there's a few places where it's useful to iterate over all of these
|
| 191 |
+
used_types_and_macros = [
|
| 192 |
+
(cyfunction_type_cname, '__Pyx_CyFunction_USED'),
|
| 193 |
+
(fusedfunction_type_cname, '__Pyx_FusedFunction_USED'),
|
| 194 |
+
('__pyx_GeneratorType', '__Pyx_Generator_USED'),
|
| 195 |
+
('__pyx_IterableCoroutineType', '__Pyx_IterableCoroutine_USED'),
|
| 196 |
+
('__pyx_CoroutineAwaitType', '__Pyx_Coroutine_USED'),
|
| 197 |
+
('__pyx_CoroutineType', '__Pyx_Coroutine_USED'),
|
| 198 |
+
]
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Nodes.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Optimize.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Options.py
ADDED
|
@@ -0,0 +1,799 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Cython - Compilation-wide options and pragma declarations
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
from __future__ import absolute_import
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
|
| 9 |
+
from .. import Utils
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class ShouldBeFromDirective(object):
|
| 13 |
+
|
| 14 |
+
known_directives = []
|
| 15 |
+
|
| 16 |
+
def __init__(self, options_name, directive_name=None, disallow=False):
|
| 17 |
+
self.options_name = options_name
|
| 18 |
+
self.directive_name = directive_name or options_name
|
| 19 |
+
self.disallow = disallow
|
| 20 |
+
self.known_directives.append(self)
|
| 21 |
+
|
| 22 |
+
def __nonzero__(self):
|
| 23 |
+
self._bad_access()
|
| 24 |
+
|
| 25 |
+
def __int__(self):
|
| 26 |
+
self._bad_access()
|
| 27 |
+
|
| 28 |
+
def _bad_access(self):
|
| 29 |
+
raise RuntimeError(repr(self))
|
| 30 |
+
|
| 31 |
+
def __repr__(self):
|
| 32 |
+
return "Illegal access of '%s' from Options module rather than directive '%s'" % (
|
| 33 |
+
self.options_name, self.directive_name)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
"""
|
| 37 |
+
The members of this module are documented using autodata in
|
| 38 |
+
Cython/docs/src/reference/compilation.rst.
|
| 39 |
+
See https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#directive-autoattribute
|
| 40 |
+
for how autodata works.
|
| 41 |
+
Descriptions of those members should start with a #:
|
| 42 |
+
Donc forget to keep the docs in sync by removing and adding
|
| 43 |
+
the members in both this file and the .rst file.
|
| 44 |
+
"""
|
| 45 |
+
|
| 46 |
+
#: Whether or not to include docstring in the Python extension. If False, the binary size
|
| 47 |
+
#: will be smaller, but the ``__doc__`` attribute of any class or function will be an
|
| 48 |
+
#: empty string.
|
| 49 |
+
docstrings = True
|
| 50 |
+
|
| 51 |
+
#: Embed the source code position in the docstrings of functions and classes.
|
| 52 |
+
embed_pos_in_docstring = False
|
| 53 |
+
|
| 54 |
+
# undocumented
|
| 55 |
+
pre_import = None
|
| 56 |
+
|
| 57 |
+
#: Decref global variables in each module on exit for garbage collection.
|
| 58 |
+
#: 0: None, 1+: interned objects, 2+: cdef globals, 3+: types objects
|
| 59 |
+
#: Mostly for reducing noise in Valgrind as it typically executes at process exit
|
| 60 |
+
#: (when all memory will be reclaimed anyways).
|
| 61 |
+
#: Note that directly or indirectly executed cleanup code that makes use of global
|
| 62 |
+
#: variables or types may no longer be safe when enabling the respective level since
|
| 63 |
+
#: there is no guaranteed order in which the (reference counted) objects will
|
| 64 |
+
#: be cleaned up. The order can change due to live references and reference cycles.
|
| 65 |
+
generate_cleanup_code = False
|
| 66 |
+
|
| 67 |
+
#: Should tp_clear() set object fields to None instead of clearing them to NULL?
|
| 68 |
+
clear_to_none = True
|
| 69 |
+
|
| 70 |
+
#: Generate an annotated HTML version of the input source files for debugging and optimisation purposes.
|
| 71 |
+
#: This has the same effect as the ``annotate`` argument in :func:`cythonize`.
|
| 72 |
+
annotate = False
|
| 73 |
+
|
| 74 |
+
# When annotating source files in HTML, include coverage information from
|
| 75 |
+
# this file.
|
| 76 |
+
annotate_coverage_xml = None
|
| 77 |
+
|
| 78 |
+
#: This will abort the compilation on the first error occurred rather than trying
|
| 79 |
+
#: to keep going and printing further error messages.
|
| 80 |
+
fast_fail = False
|
| 81 |
+
|
| 82 |
+
#: Turn all warnings into errors.
|
| 83 |
+
warning_errors = False
|
| 84 |
+
|
| 85 |
+
#: Make unknown names an error. Python raises a NameError when
|
| 86 |
+
#: encountering unknown names at runtime, whereas this option makes
|
| 87 |
+
#: them a compile time error. If you want full Python compatibility,
|
| 88 |
+
#: you should disable this option and also 'cache_builtins'.
|
| 89 |
+
error_on_unknown_names = True
|
| 90 |
+
|
| 91 |
+
#: Make uninitialized local variable reference a compile time error.
|
| 92 |
+
#: Python raises UnboundLocalError at runtime, whereas this option makes
|
| 93 |
+
#: them a compile time error. Note that this option affects only variables
|
| 94 |
+
#: of "python object" type.
|
| 95 |
+
error_on_uninitialized = True
|
| 96 |
+
|
| 97 |
+
#: This will convert statements of the form ``for i in range(...)``
|
| 98 |
+
#: to ``for i from ...`` when ``i`` is a C integer type, and the direction
|
| 99 |
+
#: (i.e. sign of step) can be determined.
|
| 100 |
+
#: WARNING: This may change the semantics if the range causes assignment to
|
| 101 |
+
#: i to overflow. Specifically, if this option is set, an error will be
|
| 102 |
+
#: raised before the loop is entered, whereas without this option the loop
|
| 103 |
+
#: will execute until an overflowing value is encountered.
|
| 104 |
+
convert_range = True
|
| 105 |
+
|
| 106 |
+
#: Perform lookups on builtin names only once, at module initialisation
|
| 107 |
+
#: time. This will prevent the module from getting imported if a
|
| 108 |
+
#: builtin name that it uses cannot be found during initialisation.
|
| 109 |
+
#: Default is True.
|
| 110 |
+
#: Note that some legacy builtins are automatically remapped
|
| 111 |
+
#: from their Python 2 names to their Python 3 names by Cython
|
| 112 |
+
#: when building in Python 3.x,
|
| 113 |
+
#: so that they do not get in the way even if this option is enabled.
|
| 114 |
+
cache_builtins = True
|
| 115 |
+
|
| 116 |
+
#: Generate branch prediction hints to speed up error handling etc.
|
| 117 |
+
gcc_branch_hints = True
|
| 118 |
+
|
| 119 |
+
#: Enable this to allow one to write ``your_module.foo = ...`` to overwrite the
|
| 120 |
+
#: definition if the cpdef function foo, at the cost of an extra dictionary
|
| 121 |
+
#: lookup on every call.
|
| 122 |
+
#: If this is false it generates only the Python wrapper and no override check.
|
| 123 |
+
lookup_module_cpdef = False
|
| 124 |
+
|
| 125 |
+
#: Whether or not to embed the Python interpreter, for use in making a
|
| 126 |
+
#: standalone executable or calling from external libraries.
|
| 127 |
+
#: This will provide a C function which initialises the interpreter and
|
| 128 |
+
#: executes the body of this module.
|
| 129 |
+
#: See `this demo <https://github.com/cython/cython/tree/master/Demos/embed>`_
|
| 130 |
+
#: for a concrete example.
|
| 131 |
+
#: If true, the initialisation function is the C main() function, but
|
| 132 |
+
#: this option can also be set to a non-empty string to provide a function name explicitly.
|
| 133 |
+
#: Default is False.
|
| 134 |
+
embed = None
|
| 135 |
+
|
| 136 |
+
# In previous iterations of Cython, globals() gave the first non-Cython module
|
| 137 |
+
# globals in the call stack. Sage relies on this behavior for variable injection.
|
| 138 |
+
old_style_globals = ShouldBeFromDirective('old_style_globals')
|
| 139 |
+
|
| 140 |
+
#: Allows cimporting from a pyx file without a pxd file.
|
| 141 |
+
cimport_from_pyx = False
|
| 142 |
+
|
| 143 |
+
#: Maximum number of dimensions for buffers -- set lower than number of
|
| 144 |
+
#: dimensions in numpy, as
|
| 145 |
+
#: slices are passed by value and involve a lot of copying.
|
| 146 |
+
buffer_max_dims = 8
|
| 147 |
+
|
| 148 |
+
#: Number of function closure instances to keep in a freelist (0: no freelists)
|
| 149 |
+
closure_freelist_size = 8
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def get_directive_defaults():
|
| 153 |
+
# To add an item to this list, all accesses should be changed to use the new
|
| 154 |
+
# directive, and the global option itself should be set to an instance of
|
| 155 |
+
# ShouldBeFromDirective.
|
| 156 |
+
for old_option in ShouldBeFromDirective.known_directives:
|
| 157 |
+
value = globals().get(old_option.options_name)
|
| 158 |
+
assert old_option.directive_name in _directive_defaults
|
| 159 |
+
if not isinstance(value, ShouldBeFromDirective):
|
| 160 |
+
if old_option.disallow:
|
| 161 |
+
raise RuntimeError(
|
| 162 |
+
"Option '%s' must be set from directive '%s'" % (
|
| 163 |
+
old_option.option_name, old_option.directive_name))
|
| 164 |
+
else:
|
| 165 |
+
# Warn?
|
| 166 |
+
_directive_defaults[old_option.directive_name] = value
|
| 167 |
+
return _directive_defaults
|
| 168 |
+
|
| 169 |
+
def copy_inherited_directives(outer_directives, **new_directives):
|
| 170 |
+
# A few directives are not copied downwards and this function removes them.
|
| 171 |
+
# For example, test_assert_path_exists and test_fail_if_path_exists should not be inherited
|
| 172 |
+
# otherwise they can produce very misleading test failures
|
| 173 |
+
new_directives_out = dict(outer_directives)
|
| 174 |
+
for name in ('test_assert_path_exists', 'test_fail_if_path_exists', 'test_assert_c_code_has', 'test_fail_if_c_code_has'):
|
| 175 |
+
new_directives_out.pop(name, None)
|
| 176 |
+
new_directives_out.update(new_directives)
|
| 177 |
+
return new_directives_out
|
| 178 |
+
|
| 179 |
+
# Declare compiler directives
|
| 180 |
+
_directive_defaults = {
|
| 181 |
+
'binding': True, # was False before 3.0
|
| 182 |
+
'boundscheck' : True,
|
| 183 |
+
'nonecheck' : False,
|
| 184 |
+
'initializedcheck' : True,
|
| 185 |
+
'embedsignature': False,
|
| 186 |
+
'embedsignature.format': 'c',
|
| 187 |
+
'auto_cpdef': False,
|
| 188 |
+
'auto_pickle': None,
|
| 189 |
+
'cdivision': False, # was True before 0.12
|
| 190 |
+
'cdivision_warnings': False,
|
| 191 |
+
'cpow': None, # was True before 3.0
|
| 192 |
+
# None (not set by user) is treated as slightly different from False
|
| 193 |
+
'c_api_binop_methods': False, # was True before 3.0
|
| 194 |
+
'overflowcheck': False,
|
| 195 |
+
'overflowcheck.fold': True,
|
| 196 |
+
'always_allow_keywords': True,
|
| 197 |
+
'allow_none_for_extension_args': True,
|
| 198 |
+
'wraparound' : True,
|
| 199 |
+
'ccomplex' : False, # use C99/C++ for complex types and arith
|
| 200 |
+
'callspec' : "",
|
| 201 |
+
'nogil' : False,
|
| 202 |
+
'gil' : False,
|
| 203 |
+
'with_gil' : False,
|
| 204 |
+
'profile': False,
|
| 205 |
+
'linetrace': False,
|
| 206 |
+
'emit_code_comments': True, # copy original source code into C code comments
|
| 207 |
+
'annotation_typing': True, # read type declarations from Python function annotations
|
| 208 |
+
'infer_types': None,
|
| 209 |
+
'infer_types.verbose': False,
|
| 210 |
+
'autotestdict': True,
|
| 211 |
+
'autotestdict.cdef': False,
|
| 212 |
+
'autotestdict.all': False,
|
| 213 |
+
'language_level': None,
|
| 214 |
+
'fast_getattr': False, # Undocumented until we come up with a better way to handle this everywhere.
|
| 215 |
+
'py2_import': False, # For backward compatibility of Cython's source code in Py3 source mode
|
| 216 |
+
'preliminary_late_includes_cy28': False, # Temporary directive in 0.28, to be removed in a later version (see GH#2079).
|
| 217 |
+
'iterable_coroutine': False, # Make async coroutines backwards compatible with the old asyncio yield-from syntax.
|
| 218 |
+
'c_string_type': 'bytes',
|
| 219 |
+
'c_string_encoding': '',
|
| 220 |
+
'type_version_tag': True, # enables Py_TPFLAGS_HAVE_VERSION_TAG on extension types
|
| 221 |
+
'unraisable_tracebacks': True,
|
| 222 |
+
'old_style_globals': False,
|
| 223 |
+
'np_pythran': False,
|
| 224 |
+
'fast_gil': False,
|
| 225 |
+
'cpp_locals': False, # uses std::optional for C++ locals, so that they work more like Python locals
|
| 226 |
+
'legacy_implicit_noexcept': False,
|
| 227 |
+
|
| 228 |
+
# set __file__ and/or __path__ to known source/target path at import time (instead of not having them available)
|
| 229 |
+
'set_initial_path' : None, # SOURCEFILE or "/full/path/to/module"
|
| 230 |
+
|
| 231 |
+
'warn': None,
|
| 232 |
+
'warn.undeclared': False,
|
| 233 |
+
'warn.unreachable': True,
|
| 234 |
+
'warn.maybe_uninitialized': False,
|
| 235 |
+
'warn.unused': False,
|
| 236 |
+
'warn.unused_arg': False,
|
| 237 |
+
'warn.unused_result': False,
|
| 238 |
+
'warn.multiple_declarators': True,
|
| 239 |
+
'show_performance_hints': True,
|
| 240 |
+
|
| 241 |
+
# optimizations
|
| 242 |
+
'optimize.inline_defnode_calls': True,
|
| 243 |
+
'optimize.unpack_method_calls': True, # increases code size when True
|
| 244 |
+
'optimize.unpack_method_calls_in_pyinit': False, # uselessly increases code size when True
|
| 245 |
+
'optimize.use_switch': True,
|
| 246 |
+
|
| 247 |
+
# remove unreachable code
|
| 248 |
+
'remove_unreachable': True,
|
| 249 |
+
|
| 250 |
+
# control flow debug directives
|
| 251 |
+
'control_flow.dot_output': "", # Graphviz output filename
|
| 252 |
+
'control_flow.dot_annotate_defs': False, # Annotate definitions
|
| 253 |
+
|
| 254 |
+
# test support
|
| 255 |
+
'test_assert_path_exists' : [],
|
| 256 |
+
'test_fail_if_path_exists' : [],
|
| 257 |
+
'test_assert_c_code_has' : [],
|
| 258 |
+
'test_fail_if_c_code_has' : [],
|
| 259 |
+
|
| 260 |
+
# experimental, subject to change
|
| 261 |
+
'formal_grammar': False,
|
| 262 |
+
}
|
| 263 |
+
|
| 264 |
+
# Extra warning directives
|
| 265 |
+
extra_warnings = {
|
| 266 |
+
'warn.maybe_uninitialized': True,
|
| 267 |
+
'warn.unreachable': True,
|
| 268 |
+
'warn.unused': True,
|
| 269 |
+
}
|
| 270 |
+
|
| 271 |
+
def one_of(*args):
|
| 272 |
+
def validate(name, value):
|
| 273 |
+
if value not in args:
|
| 274 |
+
raise ValueError("%s directive must be one of %s, got '%s'" % (
|
| 275 |
+
name, args, value))
|
| 276 |
+
else:
|
| 277 |
+
return value
|
| 278 |
+
return validate
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def normalise_encoding_name(option_name, encoding):
|
| 282 |
+
"""
|
| 283 |
+
>>> normalise_encoding_name('c_string_encoding', 'ascii')
|
| 284 |
+
'ascii'
|
| 285 |
+
>>> normalise_encoding_name('c_string_encoding', 'AsCIi')
|
| 286 |
+
'ascii'
|
| 287 |
+
>>> normalise_encoding_name('c_string_encoding', 'us-ascii')
|
| 288 |
+
'ascii'
|
| 289 |
+
>>> normalise_encoding_name('c_string_encoding', 'utF8')
|
| 290 |
+
'utf8'
|
| 291 |
+
>>> normalise_encoding_name('c_string_encoding', 'utF-8')
|
| 292 |
+
'utf8'
|
| 293 |
+
>>> normalise_encoding_name('c_string_encoding', 'deFAuLT')
|
| 294 |
+
'default'
|
| 295 |
+
>>> normalise_encoding_name('c_string_encoding', 'default')
|
| 296 |
+
'default'
|
| 297 |
+
>>> normalise_encoding_name('c_string_encoding', 'SeriousLyNoSuch--Encoding')
|
| 298 |
+
'SeriousLyNoSuch--Encoding'
|
| 299 |
+
"""
|
| 300 |
+
if not encoding:
|
| 301 |
+
return ''
|
| 302 |
+
if encoding.lower() in ('default', 'ascii', 'utf8'):
|
| 303 |
+
return encoding.lower()
|
| 304 |
+
import codecs
|
| 305 |
+
try:
|
| 306 |
+
decoder = codecs.getdecoder(encoding)
|
| 307 |
+
except LookupError:
|
| 308 |
+
return encoding # may exists at runtime ...
|
| 309 |
+
for name in ('ascii', 'utf8'):
|
| 310 |
+
if codecs.getdecoder(name) == decoder:
|
| 311 |
+
return name
|
| 312 |
+
return encoding
|
| 313 |
+
|
| 314 |
+
# use as a sential value to defer analysis of the arguments
|
| 315 |
+
# instead of analysing them in InterpretCompilerDirectives. The dataclass directives are quite
|
| 316 |
+
# complicated and it's easier to deal with them at the point the dataclass is created
|
| 317 |
+
class DEFER_ANALYSIS_OF_ARGUMENTS:
|
| 318 |
+
pass
|
| 319 |
+
DEFER_ANALYSIS_OF_ARGUMENTS = DEFER_ANALYSIS_OF_ARGUMENTS()
|
| 320 |
+
|
| 321 |
+
# Override types possibilities above, if needed
|
| 322 |
+
directive_types = {
|
| 323 |
+
'language_level': str, # values can be None/2/3/'3str', where None == 2+warning
|
| 324 |
+
'auto_pickle': bool,
|
| 325 |
+
'locals': dict,
|
| 326 |
+
'final' : bool, # final cdef classes and methods
|
| 327 |
+
'collection_type': one_of('sequence'),
|
| 328 |
+
'nogil' : DEFER_ANALYSIS_OF_ARGUMENTS,
|
| 329 |
+
'gil' : DEFER_ANALYSIS_OF_ARGUMENTS,
|
| 330 |
+
'with_gil' : None,
|
| 331 |
+
'internal' : bool, # cdef class visibility in the module dict
|
| 332 |
+
'infer_types' : bool, # values can be True/None/False
|
| 333 |
+
'binding' : bool,
|
| 334 |
+
'cfunc' : None, # decorators do not take directive value
|
| 335 |
+
'ccall' : None,
|
| 336 |
+
'ufunc': None,
|
| 337 |
+
'cpow' : bool,
|
| 338 |
+
'inline' : None,
|
| 339 |
+
'staticmethod' : None,
|
| 340 |
+
'cclass' : None,
|
| 341 |
+
'no_gc_clear' : bool,
|
| 342 |
+
'no_gc' : bool,
|
| 343 |
+
'returns' : type,
|
| 344 |
+
'exceptval': type, # actually (type, check=True/False), but has its own parser
|
| 345 |
+
'set_initial_path': str,
|
| 346 |
+
'freelist': int,
|
| 347 |
+
'c_string_type': one_of('bytes', 'bytearray', 'str', 'unicode'),
|
| 348 |
+
'c_string_encoding': normalise_encoding_name,
|
| 349 |
+
'trashcan': bool,
|
| 350 |
+
'total_ordering': None,
|
| 351 |
+
'dataclasses.dataclass': DEFER_ANALYSIS_OF_ARGUMENTS,
|
| 352 |
+
'dataclasses.field': DEFER_ANALYSIS_OF_ARGUMENTS,
|
| 353 |
+
'embedsignature.format': one_of('c', 'clinic', 'python'),
|
| 354 |
+
}
|
| 355 |
+
|
| 356 |
+
for key, val in _directive_defaults.items():
|
| 357 |
+
if key not in directive_types:
|
| 358 |
+
directive_types[key] = type(val)
|
| 359 |
+
|
| 360 |
+
directive_scopes = { # defaults to available everywhere
|
| 361 |
+
# 'module', 'function', 'class', 'with statement'
|
| 362 |
+
'auto_pickle': ('module', 'cclass'),
|
| 363 |
+
'final' : ('cclass', 'function'),
|
| 364 |
+
'collection_type': ('cclass',),
|
| 365 |
+
'nogil' : ('function', 'with statement'),
|
| 366 |
+
'gil' : ('with statement'),
|
| 367 |
+
'with_gil' : ('function',),
|
| 368 |
+
'inline' : ('function',),
|
| 369 |
+
'cfunc' : ('function', 'with statement'),
|
| 370 |
+
'ccall' : ('function', 'with statement'),
|
| 371 |
+
'returns' : ('function',),
|
| 372 |
+
'exceptval' : ('function',),
|
| 373 |
+
'locals' : ('function',),
|
| 374 |
+
'staticmethod' : ('function',), # FIXME: analysis currently lacks more specific function scope
|
| 375 |
+
'no_gc_clear' : ('cclass',),
|
| 376 |
+
'no_gc' : ('cclass',),
|
| 377 |
+
'internal' : ('cclass',),
|
| 378 |
+
'cclass' : ('class', 'cclass', 'with statement'),
|
| 379 |
+
'autotestdict' : ('module',),
|
| 380 |
+
'autotestdict.all' : ('module',),
|
| 381 |
+
'autotestdict.cdef' : ('module',),
|
| 382 |
+
'set_initial_path' : ('module',),
|
| 383 |
+
'test_assert_path_exists' : ('function', 'class', 'cclass'),
|
| 384 |
+
'test_fail_if_path_exists' : ('function', 'class', 'cclass'),
|
| 385 |
+
'test_assert_c_code_has' : ('module',),
|
| 386 |
+
'test_fail_if_c_code_has' : ('module',),
|
| 387 |
+
'freelist': ('cclass',),
|
| 388 |
+
'emit_code_comments': ('module',),
|
| 389 |
+
# Avoid scope-specific to/from_py_functions for c_string.
|
| 390 |
+
'c_string_type': ('module',),
|
| 391 |
+
'c_string_encoding': ('module',),
|
| 392 |
+
'type_version_tag': ('module', 'cclass'),
|
| 393 |
+
'language_level': ('module',),
|
| 394 |
+
# globals() could conceivably be controlled at a finer granularity,
|
| 395 |
+
# but that would complicate the implementation
|
| 396 |
+
'old_style_globals': ('module',),
|
| 397 |
+
'np_pythran': ('module',),
|
| 398 |
+
'fast_gil': ('module',),
|
| 399 |
+
'iterable_coroutine': ('module', 'function'),
|
| 400 |
+
'trashcan' : ('cclass',),
|
| 401 |
+
'total_ordering': ('class', 'cclass'),
|
| 402 |
+
'dataclasses.dataclass' : ('class', 'cclass'),
|
| 403 |
+
'cpp_locals': ('module', 'function', 'cclass'), # I don't think they make sense in a with_statement
|
| 404 |
+
'ufunc': ('function',),
|
| 405 |
+
'legacy_implicit_noexcept': ('module', ),
|
| 406 |
+
}
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
# A list of directives that (when used as a decorator) are only applied to
|
| 410 |
+
# the object they decorate and not to its children.
|
| 411 |
+
immediate_decorator_directives = {
|
| 412 |
+
'cfunc', 'ccall', 'cclass', 'dataclasses.dataclass', 'ufunc',
|
| 413 |
+
# function signature directives
|
| 414 |
+
'inline', 'exceptval', 'returns', 'with_gil', # 'nogil',
|
| 415 |
+
# class directives
|
| 416 |
+
'freelist', 'no_gc', 'no_gc_clear', 'type_version_tag', 'final',
|
| 417 |
+
'auto_pickle', 'internal', 'collection_type', 'total_ordering',
|
| 418 |
+
# testing directives
|
| 419 |
+
'test_fail_if_path_exists', 'test_assert_path_exists',
|
| 420 |
+
}
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
def parse_directive_value(name, value, relaxed_bool=False):
|
| 424 |
+
"""
|
| 425 |
+
Parses value as an option value for the given name and returns
|
| 426 |
+
the interpreted value. None is returned if the option does not exist.
|
| 427 |
+
|
| 428 |
+
>>> print(parse_directive_value('nonexisting', 'asdf asdfd'))
|
| 429 |
+
None
|
| 430 |
+
>>> parse_directive_value('boundscheck', 'True')
|
| 431 |
+
True
|
| 432 |
+
>>> parse_directive_value('boundscheck', 'true')
|
| 433 |
+
Traceback (most recent call last):
|
| 434 |
+
...
|
| 435 |
+
ValueError: boundscheck directive must be set to True or False, got 'true'
|
| 436 |
+
|
| 437 |
+
>>> parse_directive_value('c_string_encoding', 'us-ascii')
|
| 438 |
+
'ascii'
|
| 439 |
+
>>> parse_directive_value('c_string_type', 'str')
|
| 440 |
+
'str'
|
| 441 |
+
>>> parse_directive_value('c_string_type', 'bytes')
|
| 442 |
+
'bytes'
|
| 443 |
+
>>> parse_directive_value('c_string_type', 'bytearray')
|
| 444 |
+
'bytearray'
|
| 445 |
+
>>> parse_directive_value('c_string_type', 'unicode')
|
| 446 |
+
'unicode'
|
| 447 |
+
>>> parse_directive_value('c_string_type', 'unnicode')
|
| 448 |
+
Traceback (most recent call last):
|
| 449 |
+
ValueError: c_string_type directive must be one of ('bytes', 'bytearray', 'str', 'unicode'), got 'unnicode'
|
| 450 |
+
"""
|
| 451 |
+
type = directive_types.get(name)
|
| 452 |
+
if not type:
|
| 453 |
+
return None
|
| 454 |
+
orig_value = value
|
| 455 |
+
if type is bool:
|
| 456 |
+
value = str(value)
|
| 457 |
+
if value == 'True':
|
| 458 |
+
return True
|
| 459 |
+
if value == 'False':
|
| 460 |
+
return False
|
| 461 |
+
if relaxed_bool:
|
| 462 |
+
value = value.lower()
|
| 463 |
+
if value in ("true", "yes"):
|
| 464 |
+
return True
|
| 465 |
+
elif value in ("false", "no"):
|
| 466 |
+
return False
|
| 467 |
+
raise ValueError("%s directive must be set to True or False, got '%s'" % (
|
| 468 |
+
name, orig_value))
|
| 469 |
+
elif type is int:
|
| 470 |
+
try:
|
| 471 |
+
return int(value)
|
| 472 |
+
except ValueError:
|
| 473 |
+
raise ValueError("%s directive must be set to an integer, got '%s'" % (
|
| 474 |
+
name, orig_value))
|
| 475 |
+
elif type is str:
|
| 476 |
+
return str(value)
|
| 477 |
+
elif callable(type):
|
| 478 |
+
return type(name, value)
|
| 479 |
+
else:
|
| 480 |
+
assert False
|
| 481 |
+
|
| 482 |
+
|
| 483 |
+
def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False,
|
| 484 |
+
current_settings=None):
|
| 485 |
+
"""
|
| 486 |
+
Parses a comma-separated list of pragma options. Whitespace
|
| 487 |
+
is not considered.
|
| 488 |
+
|
| 489 |
+
>>> parse_directive_list(' ')
|
| 490 |
+
{}
|
| 491 |
+
>>> (parse_directive_list('boundscheck=True') ==
|
| 492 |
+
... {'boundscheck': True})
|
| 493 |
+
True
|
| 494 |
+
>>> parse_directive_list(' asdf')
|
| 495 |
+
Traceback (most recent call last):
|
| 496 |
+
...
|
| 497 |
+
ValueError: Expected "=" in option "asdf"
|
| 498 |
+
>>> parse_directive_list('boundscheck=hey')
|
| 499 |
+
Traceback (most recent call last):
|
| 500 |
+
...
|
| 501 |
+
ValueError: boundscheck directive must be set to True or False, got 'hey'
|
| 502 |
+
>>> parse_directive_list('unknown=True')
|
| 503 |
+
Traceback (most recent call last):
|
| 504 |
+
...
|
| 505 |
+
ValueError: Unknown option: "unknown"
|
| 506 |
+
>>> warnings = parse_directive_list('warn.all=True')
|
| 507 |
+
>>> len(warnings) > 1
|
| 508 |
+
True
|
| 509 |
+
>>> sum(warnings.values()) == len(warnings) # all true.
|
| 510 |
+
True
|
| 511 |
+
"""
|
| 512 |
+
if current_settings is None:
|
| 513 |
+
result = {}
|
| 514 |
+
else:
|
| 515 |
+
result = current_settings
|
| 516 |
+
for item in s.split(','):
|
| 517 |
+
item = item.strip()
|
| 518 |
+
if not item:
|
| 519 |
+
continue
|
| 520 |
+
if '=' not in item:
|
| 521 |
+
raise ValueError('Expected "=" in option "%s"' % item)
|
| 522 |
+
name, value = [s.strip() for s in item.strip().split('=', 1)]
|
| 523 |
+
if name not in _directive_defaults:
|
| 524 |
+
found = False
|
| 525 |
+
if name.endswith('.all'):
|
| 526 |
+
prefix = name[:-3]
|
| 527 |
+
for directive in _directive_defaults:
|
| 528 |
+
if directive.startswith(prefix):
|
| 529 |
+
found = True
|
| 530 |
+
parsed_value = parse_directive_value(directive, value, relaxed_bool=relaxed_bool)
|
| 531 |
+
result[directive] = parsed_value
|
| 532 |
+
if not found and not ignore_unknown:
|
| 533 |
+
raise ValueError('Unknown option: "%s"' % name)
|
| 534 |
+
elif directive_types.get(name) is list:
|
| 535 |
+
if name in result:
|
| 536 |
+
result[name].append(value)
|
| 537 |
+
else:
|
| 538 |
+
result[name] = [value]
|
| 539 |
+
else:
|
| 540 |
+
parsed_value = parse_directive_value(name, value, relaxed_bool=relaxed_bool)
|
| 541 |
+
result[name] = parsed_value
|
| 542 |
+
return result
|
| 543 |
+
|
| 544 |
+
|
| 545 |
+
def parse_variable_value(value):
|
| 546 |
+
"""
|
| 547 |
+
Parses value as an option value for the given name and returns
|
| 548 |
+
the interpreted value.
|
| 549 |
+
|
| 550 |
+
>>> parse_variable_value('True')
|
| 551 |
+
True
|
| 552 |
+
>>> parse_variable_value('true')
|
| 553 |
+
'true'
|
| 554 |
+
>>> parse_variable_value('us-ascii')
|
| 555 |
+
'us-ascii'
|
| 556 |
+
>>> parse_variable_value('str')
|
| 557 |
+
'str'
|
| 558 |
+
>>> parse_variable_value('123')
|
| 559 |
+
123
|
| 560 |
+
>>> parse_variable_value('1.23')
|
| 561 |
+
1.23
|
| 562 |
+
|
| 563 |
+
"""
|
| 564 |
+
if value == "True":
|
| 565 |
+
return True
|
| 566 |
+
elif value == "False":
|
| 567 |
+
return False
|
| 568 |
+
elif value == "None":
|
| 569 |
+
return None
|
| 570 |
+
elif value.isdigit():
|
| 571 |
+
return int(value)
|
| 572 |
+
else:
|
| 573 |
+
try:
|
| 574 |
+
value = float(value)
|
| 575 |
+
except Exception:
|
| 576 |
+
# Not a float
|
| 577 |
+
pass
|
| 578 |
+
return value
|
| 579 |
+
|
| 580 |
+
|
| 581 |
+
def parse_compile_time_env(s, current_settings=None):
|
| 582 |
+
"""
|
| 583 |
+
Parses a comma-separated list of pragma options. Whitespace
|
| 584 |
+
is not considered.
|
| 585 |
+
|
| 586 |
+
>>> parse_compile_time_env(' ')
|
| 587 |
+
{}
|
| 588 |
+
>>> (parse_compile_time_env('HAVE_OPENMP=True') ==
|
| 589 |
+
... {'HAVE_OPENMP': True})
|
| 590 |
+
True
|
| 591 |
+
>>> parse_compile_time_env(' asdf')
|
| 592 |
+
Traceback (most recent call last):
|
| 593 |
+
...
|
| 594 |
+
ValueError: Expected "=" in option "asdf"
|
| 595 |
+
>>> parse_compile_time_env('NUM_THREADS=4') == {'NUM_THREADS': 4}
|
| 596 |
+
True
|
| 597 |
+
>>> parse_compile_time_env('unknown=anything') == {'unknown': 'anything'}
|
| 598 |
+
True
|
| 599 |
+
"""
|
| 600 |
+
if current_settings is None:
|
| 601 |
+
result = {}
|
| 602 |
+
else:
|
| 603 |
+
result = current_settings
|
| 604 |
+
for item in s.split(','):
|
| 605 |
+
item = item.strip()
|
| 606 |
+
if not item:
|
| 607 |
+
continue
|
| 608 |
+
if '=' not in item:
|
| 609 |
+
raise ValueError('Expected "=" in option "%s"' % item)
|
| 610 |
+
name, value = [s.strip() for s in item.split('=', 1)]
|
| 611 |
+
result[name] = parse_variable_value(value)
|
| 612 |
+
return result
|
| 613 |
+
|
| 614 |
+
|
| 615 |
+
# ------------------------------------------------------------------------
|
| 616 |
+
# CompilationOptions are constructed from user input and are the `option`
|
| 617 |
+
# object passed throughout the compilation pipeline.
|
| 618 |
+
|
| 619 |
+
class CompilationOptions(object):
|
| 620 |
+
r"""
|
| 621 |
+
See default_options at the end of this module for a list of all possible
|
| 622 |
+
options and CmdLine.usage and CmdLine.parse_command_line() for their
|
| 623 |
+
meaning.
|
| 624 |
+
"""
|
| 625 |
+
def __init__(self, defaults=None, **kw):
|
| 626 |
+
self.include_path = []
|
| 627 |
+
if defaults:
|
| 628 |
+
if isinstance(defaults, CompilationOptions):
|
| 629 |
+
defaults = defaults.__dict__
|
| 630 |
+
else:
|
| 631 |
+
defaults = default_options
|
| 632 |
+
|
| 633 |
+
options = dict(defaults)
|
| 634 |
+
options.update(kw)
|
| 635 |
+
|
| 636 |
+
# let's assume 'default_options' contains a value for most known compiler options
|
| 637 |
+
# and validate against them
|
| 638 |
+
unknown_options = set(options) - set(default_options)
|
| 639 |
+
# ignore valid options that are not in the defaults
|
| 640 |
+
unknown_options.difference_update(['include_path'])
|
| 641 |
+
if unknown_options:
|
| 642 |
+
message = "got unknown compilation option%s, please remove: %s" % (
|
| 643 |
+
's' if len(unknown_options) > 1 else '',
|
| 644 |
+
', '.join(unknown_options))
|
| 645 |
+
raise ValueError(message)
|
| 646 |
+
|
| 647 |
+
directive_defaults = get_directive_defaults()
|
| 648 |
+
directives = dict(options['compiler_directives']) # copy mutable field
|
| 649 |
+
# check for invalid directives
|
| 650 |
+
unknown_directives = set(directives) - set(directive_defaults)
|
| 651 |
+
if unknown_directives:
|
| 652 |
+
message = "got unknown compiler directive%s: %s" % (
|
| 653 |
+
's' if len(unknown_directives) > 1 else '',
|
| 654 |
+
', '.join(unknown_directives))
|
| 655 |
+
raise ValueError(message)
|
| 656 |
+
options['compiler_directives'] = directives
|
| 657 |
+
if directives.get('np_pythran', False) and not options['cplus']:
|
| 658 |
+
import warnings
|
| 659 |
+
warnings.warn("C++ mode forced when in Pythran mode!")
|
| 660 |
+
options['cplus'] = True
|
| 661 |
+
if 'language_level' not in kw and directives.get('language_level'):
|
| 662 |
+
options['language_level'] = directives['language_level']
|
| 663 |
+
elif not options.get('language_level'):
|
| 664 |
+
options['language_level'] = directive_defaults.get('language_level')
|
| 665 |
+
if 'formal_grammar' in directives and 'formal_grammar' not in kw:
|
| 666 |
+
options['formal_grammar'] = directives['formal_grammar']
|
| 667 |
+
if options['cache'] is True:
|
| 668 |
+
options['cache'] = os.path.join(Utils.get_cython_cache_dir(), 'compiler')
|
| 669 |
+
|
| 670 |
+
self.__dict__.update(options)
|
| 671 |
+
|
| 672 |
+
def configure_language_defaults(self, source_extension):
|
| 673 |
+
if source_extension == 'py':
|
| 674 |
+
if self.compiler_directives.get('binding') is None:
|
| 675 |
+
self.compiler_directives['binding'] = True
|
| 676 |
+
|
| 677 |
+
def get_fingerprint(self):
|
| 678 |
+
r"""
|
| 679 |
+
Return a string that contains all the options that are relevant for cache invalidation.
|
| 680 |
+
"""
|
| 681 |
+
# Collect only the data that can affect the generated file(s).
|
| 682 |
+
data = {}
|
| 683 |
+
|
| 684 |
+
for key, value in self.__dict__.items():
|
| 685 |
+
if key in ['show_version', 'errors_to_stderr', 'verbose', 'quiet']:
|
| 686 |
+
# verbosity flags have no influence on the compilation result
|
| 687 |
+
continue
|
| 688 |
+
elif key in ['output_file', 'output_dir']:
|
| 689 |
+
# ignore the exact name of the output file
|
| 690 |
+
continue
|
| 691 |
+
elif key in ['depfile']:
|
| 692 |
+
# external build system dependency tracking file does not influence outputs
|
| 693 |
+
continue
|
| 694 |
+
elif key in ['timestamps']:
|
| 695 |
+
# the cache cares about the content of files, not about the timestamps of sources
|
| 696 |
+
continue
|
| 697 |
+
elif key in ['cache']:
|
| 698 |
+
# hopefully caching has no influence on the compilation result
|
| 699 |
+
continue
|
| 700 |
+
elif key in ['compiler_directives']:
|
| 701 |
+
# directives passed on to the C compiler do not influence the generated C code
|
| 702 |
+
continue
|
| 703 |
+
elif key in ['include_path']:
|
| 704 |
+
# this path changes which headers are tracked as dependencies,
|
| 705 |
+
# it has no influence on the generated C code
|
| 706 |
+
continue
|
| 707 |
+
elif key in ['working_path']:
|
| 708 |
+
# this path changes where modules and pxd files are found;
|
| 709 |
+
# their content is part of the fingerprint anyway, their
|
| 710 |
+
# absolute path does not matter
|
| 711 |
+
continue
|
| 712 |
+
elif key in ['create_extension']:
|
| 713 |
+
# create_extension() has already mangled the options, e.g.,
|
| 714 |
+
# embedded_metadata, when the fingerprint is computed so we
|
| 715 |
+
# ignore it here.
|
| 716 |
+
continue
|
| 717 |
+
elif key in ['build_dir']:
|
| 718 |
+
# the (temporary) directory where we collect dependencies
|
| 719 |
+
# has no influence on the C output
|
| 720 |
+
continue
|
| 721 |
+
elif key in ['use_listing_file', 'generate_pxi', 'annotate', 'annotate_coverage_xml']:
|
| 722 |
+
# all output files are contained in the cache so the types of
|
| 723 |
+
# files generated must be part of the fingerprint
|
| 724 |
+
data[key] = value
|
| 725 |
+
elif key in ['formal_grammar', 'evaluate_tree_assertions']:
|
| 726 |
+
# these bits can change whether compilation to C passes/fails
|
| 727 |
+
data[key] = value
|
| 728 |
+
elif key in ['embedded_metadata', 'emit_linenums',
|
| 729 |
+
'c_line_in_traceback', 'gdb_debug',
|
| 730 |
+
'relative_path_in_code_position_comments']:
|
| 731 |
+
# the generated code contains additional bits when these are set
|
| 732 |
+
data[key] = value
|
| 733 |
+
elif key in ['cplus', 'language_level', 'compile_time_env', 'np_pythran']:
|
| 734 |
+
# assorted bits that, e.g., influence the parser
|
| 735 |
+
data[key] = value
|
| 736 |
+
elif key == ['capi_reexport_cincludes']:
|
| 737 |
+
if self.capi_reexport_cincludes:
|
| 738 |
+
# our caching implementation does not yet include fingerprints of all the header files
|
| 739 |
+
raise NotImplementedError('capi_reexport_cincludes is not compatible with Cython caching')
|
| 740 |
+
elif key == ['common_utility_include_dir']:
|
| 741 |
+
if self.common_utility_include_dir:
|
| 742 |
+
raise NotImplementedError('common_utility_include_dir is not compatible with Cython caching yet')
|
| 743 |
+
else:
|
| 744 |
+
# any unexpected option should go into the fingerprint; it's better
|
| 745 |
+
# to recompile than to return incorrect results from the cache.
|
| 746 |
+
data[key] = value
|
| 747 |
+
|
| 748 |
+
def to_fingerprint(item):
|
| 749 |
+
r"""
|
| 750 |
+
Recursively turn item into a string, turning dicts into lists with
|
| 751 |
+
deterministic ordering.
|
| 752 |
+
"""
|
| 753 |
+
if isinstance(item, dict):
|
| 754 |
+
item = sorted([(repr(key), to_fingerprint(value)) for key, value in item.items()])
|
| 755 |
+
return repr(item)
|
| 756 |
+
|
| 757 |
+
return to_fingerprint(data)
|
| 758 |
+
|
| 759 |
+
|
| 760 |
+
# ------------------------------------------------------------------------
|
| 761 |
+
#
|
| 762 |
+
# Set the default options depending on the platform
|
| 763 |
+
#
|
| 764 |
+
# ------------------------------------------------------------------------
|
| 765 |
+
|
| 766 |
+
default_options = dict(
|
| 767 |
+
show_version=0,
|
| 768 |
+
use_listing_file=0,
|
| 769 |
+
errors_to_stderr=1,
|
| 770 |
+
cplus=0,
|
| 771 |
+
output_file=None,
|
| 772 |
+
depfile=None,
|
| 773 |
+
annotate=None,
|
| 774 |
+
annotate_coverage_xml=None,
|
| 775 |
+
generate_pxi=0,
|
| 776 |
+
capi_reexport_cincludes=0,
|
| 777 |
+
working_path="",
|
| 778 |
+
timestamps=None,
|
| 779 |
+
verbose=0,
|
| 780 |
+
quiet=0,
|
| 781 |
+
compiler_directives={},
|
| 782 |
+
embedded_metadata={},
|
| 783 |
+
evaluate_tree_assertions=False,
|
| 784 |
+
emit_linenums=False,
|
| 785 |
+
relative_path_in_code_position_comments=True,
|
| 786 |
+
c_line_in_traceback=True,
|
| 787 |
+
language_level=None, # warn but default to 2
|
| 788 |
+
formal_grammar=False,
|
| 789 |
+
gdb_debug=False,
|
| 790 |
+
compile_time_env=None,
|
| 791 |
+
module_name=None,
|
| 792 |
+
common_utility_include_dir=None,
|
| 793 |
+
output_dir=None,
|
| 794 |
+
build_dir=None,
|
| 795 |
+
cache=None,
|
| 796 |
+
create_extension=None,
|
| 797 |
+
np_pythran=False,
|
| 798 |
+
legacy_implicit_noexcept=None,
|
| 799 |
+
)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/ParseTreeTransforms.pxd
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# cython: language_level=3str
|
| 2 |
+
|
| 3 |
+
cimport cython
|
| 4 |
+
|
| 5 |
+
from .Visitor cimport (
|
| 6 |
+
CythonTransform, VisitorTransform, TreeVisitor,
|
| 7 |
+
ScopeTrackingTransform, EnvTransform)
|
| 8 |
+
|
| 9 |
+
# Don't include mixins, only the main classes.
|
| 10 |
+
#cdef class SkipDeclarations:
|
| 11 |
+
|
| 12 |
+
cdef class NormalizeTree(CythonTransform):
|
| 13 |
+
cdef bint is_in_statlist
|
| 14 |
+
cdef bint is_in_expr
|
| 15 |
+
cpdef visit_StatNode(self, node, is_listcontainer=*)
|
| 16 |
+
|
| 17 |
+
cdef class PostParse(ScopeTrackingTransform):
|
| 18 |
+
cdef dict specialattribute_handlers
|
| 19 |
+
cdef size_t lambda_counter
|
| 20 |
+
cdef size_t genexpr_counter
|
| 21 |
+
cdef _visit_assignment_node(self, node, list expr_list)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
#def eliminate_rhs_duplicates(list expr_list_list, list ref_node_sequence)
|
| 25 |
+
#def sort_common_subsequences(list items)
|
| 26 |
+
@cython.locals(starred_targets=Py_ssize_t, lhs_size=Py_ssize_t, rhs_size=Py_ssize_t)
|
| 27 |
+
cdef flatten_parallel_assignments(list input, list output)
|
| 28 |
+
cdef map_starred_assignment(list lhs_targets, list starred_assignments, list lhs_args, list rhs_args)
|
| 29 |
+
|
| 30 |
+
#class PxdPostParse(CythonTransform, SkipDeclarations):
|
| 31 |
+
#class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
|
| 32 |
+
#class WithTransform(VisitorTransform, SkipDeclarations):
|
| 33 |
+
#class DecoratorTransform(CythonTransform, SkipDeclarations):
|
| 34 |
+
|
| 35 |
+
#class AnalyseDeclarationsTransform(EnvTransform):
|
| 36 |
+
|
| 37 |
+
cdef class AnalyseExpressionsTransform(CythonTransform):
|
| 38 |
+
pass
|
| 39 |
+
|
| 40 |
+
cdef class ExpandInplaceOperators(EnvTransform):
|
| 41 |
+
pass
|
| 42 |
+
|
| 43 |
+
cdef class AlignFunctionDefinitions(CythonTransform):
|
| 44 |
+
cdef dict directives
|
| 45 |
+
cdef set imported_names
|
| 46 |
+
cdef object scope
|
| 47 |
+
|
| 48 |
+
@cython.final
|
| 49 |
+
cdef class YieldNodeCollector(TreeVisitor):
|
| 50 |
+
cdef public list yields
|
| 51 |
+
cdef public list returns
|
| 52 |
+
cdef public list finallys
|
| 53 |
+
cdef public list excepts
|
| 54 |
+
cdef public bint has_return_value
|
| 55 |
+
cdef public bint has_yield
|
| 56 |
+
cdef public bint has_await
|
| 57 |
+
cdef list excludes
|
| 58 |
+
|
| 59 |
+
@cython.final
|
| 60 |
+
cdef class MarkClosureVisitor(CythonTransform):
|
| 61 |
+
cdef bint needs_closure
|
| 62 |
+
cdef list excludes
|
| 63 |
+
|
| 64 |
+
@cython.final
|
| 65 |
+
cdef class CreateClosureClasses(CythonTransform):
|
| 66 |
+
cdef list path
|
| 67 |
+
cdef bint in_lambda
|
| 68 |
+
cdef module_scope
|
| 69 |
+
cdef generator_class
|
| 70 |
+
|
| 71 |
+
cdef create_class_from_scope(self, node, target_module_scope, inner_node=*)
|
| 72 |
+
cdef find_entries_used_in_closures(self, node)
|
| 73 |
+
|
| 74 |
+
#cdef class InjectGilHandling(VisitorTransform, SkipDeclarations):
|
| 75 |
+
# cdef bint nogil
|
| 76 |
+
|
| 77 |
+
cdef class GilCheck(VisitorTransform):
|
| 78 |
+
cdef list env_stack
|
| 79 |
+
cdef bint nogil
|
| 80 |
+
cdef bint nogil_declarator_only
|
| 81 |
+
cdef bint current_gilstat_node_knows_gil_state
|
| 82 |
+
|
| 83 |
+
cdef class TransformBuiltinMethods(EnvTransform):
|
| 84 |
+
cdef visit_cython_attribute(self, node)
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/ParseTreeTransforms.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Parsing.cp39-win_amd64.pyd
ADDED
|
Binary file (713 kB). View file
|
|
|
.eggs/Cython-3.0.8-py3.9-win-amd64.egg/Cython/Compiler/Parsing.pxd
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# cython: language_level=3
|
| 2 |
+
|
| 3 |
+
# We declare all of these here to type the first argument.
|
| 4 |
+
|
| 5 |
+
from __future__ import absolute_import
|
| 6 |
+
|
| 7 |
+
cimport cython
|
| 8 |
+
from .Scanning cimport PyrexScanner
|
| 9 |
+
|
| 10 |
+
ctypedef object (*p_sub_expr_func)(PyrexScanner obj)
|
| 11 |
+
|
| 12 |
+
# entry points
|
| 13 |
+
|
| 14 |
+
cpdef p_module(PyrexScanner s, pxd, full_module_name, ctx=*)
|
| 15 |
+
cpdef p_code(PyrexScanner s, level= *, ctx=*)
|
| 16 |
+
|
| 17 |
+
# internal parser states
|
| 18 |
+
|
| 19 |
+
cdef p_ident(PyrexScanner s, message =*)
|
| 20 |
+
cdef p_ident_list(PyrexScanner s)
|
| 21 |
+
|
| 22 |
+
cdef tuple p_binop_operator(PyrexScanner s)
|
| 23 |
+
cdef p_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_sub_expr)
|
| 24 |
+
cdef p_lambdef(PyrexScanner s)
|
| 25 |
+
cdef p_test(PyrexScanner s)
|
| 26 |
+
cdef p_test_allow_walrus_after(PyrexScanner s)
|
| 27 |
+
cdef p_namedexpr_test(PyrexScanner s)
|
| 28 |
+
cdef p_or_test(PyrexScanner s)
|
| 29 |
+
cdef p_rassoc_binop_expr(PyrexScanner s, unicode op, p_sub_expr_func p_subexpr)
|
| 30 |
+
cdef p_and_test(PyrexScanner s)
|
| 31 |
+
cdef p_not_test(PyrexScanner s)
|
| 32 |
+
cdef p_comparison(PyrexScanner s)
|
| 33 |
+
cdef p_test_or_starred_expr(PyrexScanner s)
|
| 34 |
+
cdef p_namedexpr_test_or_starred_expr(PyrexScanner s)
|
| 35 |
+
cdef p_starred_expr(PyrexScanner s)
|
| 36 |
+
cdef p_cascaded_cmp(PyrexScanner s)
|
| 37 |
+
cdef p_cmp_op(PyrexScanner s)
|
| 38 |
+
cdef p_bit_expr(PyrexScanner s)
|
| 39 |
+
cdef p_xor_expr(PyrexScanner s)
|
| 40 |
+
cdef p_and_expr(PyrexScanner s)
|
| 41 |
+
cdef p_shift_expr(PyrexScanner s)
|
| 42 |
+
cdef p_arith_expr(PyrexScanner s)
|
| 43 |
+
cdef p_term(PyrexScanner s)
|
| 44 |
+
cdef p_factor(PyrexScanner s)
|
| 45 |
+
cdef _p_factor(PyrexScanner s)
|
| 46 |
+
cdef p_typecast(PyrexScanner s)
|
| 47 |
+
cdef p_sizeof(PyrexScanner s)
|
| 48 |
+
cdef p_yield_expression(PyrexScanner s)
|
| 49 |
+
cdef p_yield_statement(PyrexScanner s)
|
| 50 |
+
cdef p_async_statement(PyrexScanner s, ctx, decorators)
|
| 51 |
+
cdef p_power(PyrexScanner s)
|
| 52 |
+
cdef p_new_expr(PyrexScanner s)
|
| 53 |
+
cdef p_trailer(PyrexScanner s, node1)
|
| 54 |
+
cdef p_call_parse_args(PyrexScanner s, bint allow_genexp = *)
|
| 55 |
+
cdef p_call_build_packed_args(pos, positional_args, keyword_args)
|
| 56 |
+
cdef p_call(PyrexScanner s, function)
|
| 57 |
+
cdef p_index(PyrexScanner s, base)
|
| 58 |
+
cdef tuple p_subscript_list(PyrexScanner s)
|
| 59 |
+
cdef p_subscript(PyrexScanner s)
|
| 60 |
+
cdef p_slice_element(PyrexScanner s, follow_set)
|
| 61 |
+
cdef expect_ellipsis(PyrexScanner s)
|
| 62 |
+
cdef make_slice_nodes(pos, subscripts)
|
| 63 |
+
cpdef make_slice_node(pos, start, stop = *, step = *)
|
| 64 |
+
cdef p_atom(PyrexScanner s)
|
| 65 |
+
@cython.locals(value=unicode)
|
| 66 |
+
cdef p_int_literal(PyrexScanner s)
|
| 67 |
+
cdef p_name(PyrexScanner s, name)
|
| 68 |
+
cdef wrap_compile_time_constant(pos, value)
|
| 69 |
+
cdef p_cat_string_literal(PyrexScanner s)
|
| 70 |
+
cdef p_opt_string_literal(PyrexScanner s, required_type=*)
|
| 71 |
+
cdef bint check_for_non_ascii_characters(unicode string)
|
| 72 |
+
@cython.locals(systr=unicode, is_python3_source=bint, is_raw=bint)
|
| 73 |
+
cdef p_string_literal(PyrexScanner s, kind_override=*)
|
| 74 |
+
cdef _append_escape_sequence(kind, builder, unicode escape_sequence, PyrexScanner s)
|
| 75 |
+
cdef tuple _f_string_error_pos(pos, string, Py_ssize_t i)
|
| 76 |
+
@cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, next_start=Py_ssize_t)
|
| 77 |
+
cdef list p_f_string(PyrexScanner s, unicode_value, pos, bint is_raw)
|
| 78 |
+
@cython.locals(i=Py_ssize_t, size=Py_ssize_t, c=Py_UCS4, quote_char=Py_UCS4, NO_CHAR=Py_UCS4)
|
| 79 |
+
cdef tuple p_f_string_expr(PyrexScanner s, unicode_value, pos, Py_ssize_t starting_index, bint is_raw)
|
| 80 |
+
cdef p_list_maker(PyrexScanner s)
|
| 81 |
+
cdef p_comp_iter(PyrexScanner s, body)
|
| 82 |
+
cdef p_comp_for(PyrexScanner s, body)
|
| 83 |
+
cdef p_comp_if(PyrexScanner s, body)
|
| 84 |
+
cdef p_dict_or_set_maker(PyrexScanner s)
|
| 85 |
+
cdef p_backquote_expr(PyrexScanner s)
|
| 86 |
+
cdef p_simple_expr_list(PyrexScanner s, expr=*)
|
| 87 |
+
cdef p_test_or_starred_expr_list(PyrexScanner s, expr=*)
|
| 88 |
+
cdef p_namedexpr_test_or_starred_expr_list(s, expr=*)
|
| 89 |
+
cdef p_testlist(PyrexScanner s)
|
| 90 |
+
cdef p_testlist_star_expr(PyrexScanner s)
|
| 91 |
+
cdef p_testlist_comp(PyrexScanner s)
|
| 92 |
+
cdef p_genexp(PyrexScanner s, expr)
|
| 93 |
+
|
| 94 |
+
#-------------------------------------------------------
|
| 95 |
+
#
|
| 96 |
+
# Statements
|
| 97 |
+
#
|
| 98 |
+
#-------------------------------------------------------
|
| 99 |
+
|
| 100 |
+
cdef p_global_statement(PyrexScanner s)
|
| 101 |
+
cdef p_nonlocal_statement(PyrexScanner s)
|
| 102 |
+
cdef p_expression_or_assignment(PyrexScanner s)
|
| 103 |
+
cdef p_print_statement(PyrexScanner s)
|
| 104 |
+
cdef p_exec_statement(PyrexScanner s)
|
| 105 |
+
cdef p_del_statement(PyrexScanner s)
|
| 106 |
+
cdef p_pass_statement(PyrexScanner s, bint with_newline = *)
|
| 107 |
+
cdef p_break_statement(PyrexScanner s)
|
| 108 |
+
cdef p_continue_statement(PyrexScanner s)
|
| 109 |
+
cdef p_return_statement(PyrexScanner s)
|
| 110 |
+
cdef p_raise_statement(PyrexScanner s)
|
| 111 |
+
cdef p_import_statement(PyrexScanner s)
|
| 112 |
+
cdef p_from_import_statement(PyrexScanner s, bint first_statement = *)
|
| 113 |
+
cdef p_imported_name(PyrexScanner s)
|
| 114 |
+
cdef p_dotted_name(PyrexScanner s, bint as_allowed)
|
| 115 |
+
cdef p_as_name(PyrexScanner s)
|
| 116 |
+
cdef p_assert_statement(PyrexScanner s)
|
| 117 |
+
cdef p_if_statement(PyrexScanner s)
|
| 118 |
+
cdef p_if_clause(PyrexScanner s)
|
| 119 |
+
cdef p_else_clause(PyrexScanner s)
|
| 120 |
+
cdef p_while_statement(PyrexScanner s)
|
| 121 |
+
cdef p_for_statement(PyrexScanner s, bint is_async=*)
|
| 122 |
+
cdef dict p_for_bounds(PyrexScanner s, bint allow_testlist=*, bint is_async=*)
|
| 123 |
+
cdef p_for_from_relation(PyrexScanner s)
|
| 124 |
+
cdef p_for_from_step(PyrexScanner s)
|
| 125 |
+
cdef p_target(PyrexScanner s, terminator)
|
| 126 |
+
cdef p_for_target(PyrexScanner s)
|
| 127 |
+
cdef p_for_iterator(PyrexScanner s, bint allow_testlist=*, bint is_async=*)
|
| 128 |
+
cdef p_try_statement(PyrexScanner s)
|
| 129 |
+
cdef p_except_clause(PyrexScanner s)
|
| 130 |
+
cdef p_include_statement(PyrexScanner s, ctx)
|
| 131 |
+
cdef p_with_statement(PyrexScanner s)
|
| 132 |
+
cdef p_with_items(PyrexScanner s, bint is_async=*)
|
| 133 |
+
cdef p_with_items_list(PyrexScanner s, bint is_async)
|
| 134 |
+
cdef tuple p_with_item(PyrexScanner s, bint is_async)
|
| 135 |
+
cdef p_with_template(PyrexScanner s)
|
| 136 |
+
cdef p_simple_statement(PyrexScanner s, bint first_statement = *)
|
| 137 |
+
cdef p_simple_statement_list(PyrexScanner s, ctx, bint first_statement = *)
|
| 138 |
+
cdef p_compile_time_expr(PyrexScanner s)
|
| 139 |
+
cdef p_DEF_statement(PyrexScanner s)
|
| 140 |
+
cdef p_IF_statement(PyrexScanner s, ctx)
|
| 141 |
+
cdef p_statement(PyrexScanner s, ctx, bint first_statement = *)
|
| 142 |
+
cdef p_statement_list(PyrexScanner s, ctx, bint first_statement = *)
|
| 143 |
+
cdef p_suite(PyrexScanner s, ctx = *)
|
| 144 |
+
cdef tuple p_suite_with_docstring(PyrexScanner s, ctx, bint with_doc_only=*)
|
| 145 |
+
cdef tuple _extract_docstring(node)
|
| 146 |
+
cdef p_positional_and_keyword_args(PyrexScanner s, end_sy_set, templates = *)
|
| 147 |
+
|
| 148 |
+
cpdef p_c_base_type(PyrexScanner s, bint nonempty = *, templates = *)
|
| 149 |
+
cdef p_calling_convention(PyrexScanner s)
|
| 150 |
+
cdef p_c_complex_base_type(PyrexScanner s, templates = *)
|
| 151 |
+
cdef p_c_simple_base_type(PyrexScanner s, bint nonempty, templates = *)
|
| 152 |
+
cdef p_buffer_or_template(PyrexScanner s, base_type_node, templates)
|
| 153 |
+
cdef p_bracketed_base_type(PyrexScanner s, base_type_node, nonempty, empty)
|
| 154 |
+
cdef is_memoryviewslice_access(PyrexScanner s)
|
| 155 |
+
cdef p_memoryviewslice_access(PyrexScanner s, base_type_node)
|
| 156 |
+
cdef bint looking_at_name(PyrexScanner s) except -2
|
| 157 |
+
cdef object looking_at_expr(PyrexScanner s)# except -2
|
| 158 |
+
cdef bint looking_at_base_type(PyrexScanner s) except -2
|
| 159 |
+
cdef bint looking_at_dotted_name(PyrexScanner s) except -2
|
| 160 |
+
cdef p_sign_and_longness(PyrexScanner s)
|
| 161 |
+
cdef p_opt_cname(PyrexScanner s)
|
| 162 |
+
cpdef p_c_declarator(PyrexScanner s, ctx = *, bint empty = *, bint is_type = *, bint cmethod_flag = *,
|
| 163 |
+
bint assignable = *, bint nonempty = *,
|
| 164 |
+
bint calling_convention_allowed = *)
|
| 165 |
+
cdef p_c_array_declarator(PyrexScanner s, base)
|
| 166 |
+
cdef p_c_func_declarator(PyrexScanner s, pos, ctx, base, bint cmethod_flag)
|
| 167 |
+
cdef p_c_simple_declarator(PyrexScanner s, ctx, bint empty, bint is_type, bint cmethod_flag,
|
| 168 |
+
bint assignable, bint nonempty)
|
| 169 |
+
cdef p_nogil(PyrexScanner s)
|
| 170 |
+
cdef p_with_gil(PyrexScanner s)
|
| 171 |
+
cdef p_exception_value_clause(PyrexScanner s, bint is_extern)
|
| 172 |
+
cpdef p_c_arg_list(PyrexScanner s, ctx = *, bint in_pyfunc = *, bint cmethod_flag = *,
|
| 173 |
+
bint nonempty_declarators = *, bint kw_only = *, bint annotated = *)
|
| 174 |
+
cdef p_optional_ellipsis(PyrexScanner s)
|
| 175 |
+
cdef p_c_arg_decl(PyrexScanner s, ctx, in_pyfunc, bint cmethod_flag = *, bint nonempty = *, bint kw_only = *, bint annotated = *)
|
| 176 |
+
cdef p_api(PyrexScanner s)
|
| 177 |
+
cdef p_cdef_statement(PyrexScanner s, ctx)
|
| 178 |
+
cdef p_cdef_block(PyrexScanner s, ctx)
|
| 179 |
+
cdef p_cdef_extern_block(PyrexScanner s, pos, ctx)
|
| 180 |
+
cdef p_c_enum_definition(PyrexScanner s, pos, ctx)
|
| 181 |
+
cdef p_c_enum_line(PyrexScanner s, ctx, list items)
|
| 182 |
+
cdef p_c_enum_item(PyrexScanner s, ctx, list items)
|
| 183 |
+
cdef p_c_struct_or_union_definition(PyrexScanner s, pos, ctx)
|
| 184 |
+
cdef p_fused_definition(PyrexScanner s, pos, ctx)
|
| 185 |
+
cdef p_struct_enum(PyrexScanner s, pos, ctx)
|
| 186 |
+
cdef p_visibility(PyrexScanner s, prev_visibility)
|
| 187 |
+
cdef p_c_modifiers(PyrexScanner s)
|
| 188 |
+
cdef p_c_func_or_var_declaration(PyrexScanner s, pos, ctx)
|
| 189 |
+
cdef p_ctypedef_statement(PyrexScanner s, ctx)
|
| 190 |
+
cdef p_decorators(PyrexScanner s)
|
| 191 |
+
cdef _reject_cdef_modifier_in_py(PyrexScanner s, name)
|
| 192 |
+
cdef p_def_statement(PyrexScanner s, list decorators=*, bint is_async_def=*)
|
| 193 |
+
cdef p_varargslist(PyrexScanner s, terminator=*, bint annotated = *)
|
| 194 |
+
cdef p_py_arg_decl(PyrexScanner s, bint annotated = *)
|
| 195 |
+
cdef p_class_statement(PyrexScanner s, decorators)
|
| 196 |
+
cdef p_c_class_definition(PyrexScanner s, pos, ctx)
|
| 197 |
+
cdef tuple p_c_class_options(PyrexScanner s)
|
| 198 |
+
cdef p_property_decl(PyrexScanner s)
|
| 199 |
+
cdef p_doc_string(PyrexScanner s)
|
| 200 |
+
cdef p_ignorable_statement(PyrexScanner s)
|
| 201 |
+
cdef dict p_compiler_directive_comments(PyrexScanner s)
|
| 202 |
+
cdef p_template_definition(PyrexScanner s)
|
| 203 |
+
cdef p_cpp_class_definition(PyrexScanner s, pos, ctx)
|
| 204 |
+
cdef p_cpp_class_attribute(PyrexScanner s, ctx)
|
| 205 |
+
cdef p_annotation(PyrexScanner s)
|