Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/BuildExecutable.py +170 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Cythonize.py +255 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Dependencies.py +1380 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Distutils.py +1 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Inline.py +372 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/IpythonMagic.py +572 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Tests/TestCythonizeArgsParser.py +482 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Tests/TestInline.py +112 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Tests/TestRecythonize.py +212 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Tests/__init__.py +1 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__init__.py +14 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/BuildExecutable.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/Cythonize.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/Dependencies.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/Distutils.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/Inline.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/IpythonMagic.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/AutoDocTransforms.py +318 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/CmdLine.py +251 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Dataclass.py +839 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/DebugFlags.py +21 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Errors.py +300 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/FusedNode.py +1015 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/StringEncoding.py +392 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/UtilityCode.py +266 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Version.py +9 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/__pycache__/AutoDocTransforms.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/DFA.py +149 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__init__.py +35 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/Actions.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/Lexicons.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/Regexps.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/Transitions.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__init__.py +4 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__pycache__/_looper.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__pycache__/_tempita.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__pycache__/compat3.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/_tempita.py +1091 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/projection.py +528 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality_subset.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_group.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_harmonic_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_laplacian_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_load_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_second_order_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_subgraph.cpython-311.pyc +0 -0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/BuildExecutable.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Compile a Python script into an executable that embeds CPython.
|
| 3 |
+
Requires CPython to be built as a shared library ('libpythonX.Y').
|
| 4 |
+
|
| 5 |
+
Basic usage:
|
| 6 |
+
|
| 7 |
+
python -m Cython.Build.BuildExecutable [ARGS] somefile.py
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from __future__ import absolute_import
|
| 11 |
+
|
| 12 |
+
DEBUG = True
|
| 13 |
+
|
| 14 |
+
import sys
|
| 15 |
+
import os
|
| 16 |
+
if sys.version_info < (3, 9):
|
| 17 |
+
from distutils import sysconfig as _sysconfig
|
| 18 |
+
|
| 19 |
+
class sysconfig(object):
|
| 20 |
+
|
| 21 |
+
@staticmethod
|
| 22 |
+
def get_path(name):
|
| 23 |
+
assert name == 'include'
|
| 24 |
+
return _sysconfig.get_python_inc()
|
| 25 |
+
|
| 26 |
+
get_config_var = staticmethod(_sysconfig.get_config_var)
|
| 27 |
+
else:
|
| 28 |
+
# sysconfig can be trusted from cpython >= 3.8.7
|
| 29 |
+
import sysconfig
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def get_config_var(name, default=''):
|
| 33 |
+
return sysconfig.get_config_var(name) or default
|
| 34 |
+
|
| 35 |
+
INCDIR = sysconfig.get_path('include')
|
| 36 |
+
LIBDIR1 = get_config_var('LIBDIR')
|
| 37 |
+
LIBDIR2 = get_config_var('LIBPL')
|
| 38 |
+
PYLIB = get_config_var('LIBRARY')
|
| 39 |
+
PYLIB_DYN = get_config_var('LDLIBRARY')
|
| 40 |
+
if PYLIB_DYN == PYLIB:
|
| 41 |
+
# no shared library
|
| 42 |
+
PYLIB_DYN = ''
|
| 43 |
+
else:
|
| 44 |
+
PYLIB_DYN = os.path.splitext(PYLIB_DYN[3:])[0] # 'lib(XYZ).so' -> XYZ
|
| 45 |
+
|
| 46 |
+
CC = get_config_var('CC', os.environ.get('CC', ''))
|
| 47 |
+
CFLAGS = get_config_var('CFLAGS') + ' ' + os.environ.get('CFLAGS', '')
|
| 48 |
+
LINKCC = get_config_var('LINKCC', os.environ.get('LINKCC', CC))
|
| 49 |
+
LINKFORSHARED = get_config_var('LINKFORSHARED')
|
| 50 |
+
LIBS = get_config_var('LIBS')
|
| 51 |
+
SYSLIBS = get_config_var('SYSLIBS')
|
| 52 |
+
EXE_EXT = sysconfig.get_config_var('EXE')
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def _debug(msg, *args):
    # Write a diagnostic line to stderr when the module-level DEBUG flag is set.
    # *args, if given, are %-interpolated into msg.
    if DEBUG:
        if args:
            msg = msg % args
        sys.stderr.write(msg + '\n')
def dump_config():
    """Report the discovered build configuration to stderr (via _debug)."""
    for label, value in (
            ('INCDIR', INCDIR),
            ('LIBDIR1', LIBDIR1),
            ('LIBDIR2', LIBDIR2),
            ('PYLIB', PYLIB),
            ('PYLIB_DYN', PYLIB_DYN),
            ('CC', CC),
            ('CFLAGS', CFLAGS),
            ('LINKCC', LINKCC),
            ('LINKFORSHARED', LINKFORSHARED),
            ('LIBS', LIBS),
            ('SYSLIBS', SYSLIBS),
            ('EXE_EXT', EXE_EXT),
    ):
        _debug('%s: %s', label, value)
def _parse_args(args):
    """Split a command line into (input_file, cython_args, program_args).

    Leading options (and the value following -X/--directive) are collected
    for Cython; the first free-standing argument is the input file, and
    everything after it is left for the built program.

    Raises ValueError if no input file is present.
    """
    cy_args = []
    last_arg = None
    for i, arg in enumerate(args):
        if arg.startswith('-'):
            cy_args.append(arg)
        elif last_arg in ('-X', '--directive'):
            # value of an option that takes a separate argument
            cy_args.append(arg)
        else:
            input_file = arg
            args = args[i+1:]
            break
        last_arg = arg
    else:
        raise ValueError('no input file provided')

    return input_file, cy_args, args
def runcmd(cmd, shell=True):
    """Run *cmd* (a list of argument strings); exit the process on failure.

    With shell=True the arguments are joined into one shell command line.
    """
    if shell:
        cmd = ' '.join(cmd)
        _debug(cmd)
    else:
        _debug(' '.join(cmd))

    import subprocess
    returncode = subprocess.call(cmd, shell=shell)

    if returncode:
        # propagate the child's failure status
        sys.exit(returncode)
def clink(basename):
    """Link basename.o into an executable against the Python library."""
    # Prefer the shared library if one exists, otherwise link the static one.
    runcmd([LINKCC, '-o', basename + EXE_EXT, basename+'.o', '-L'+LIBDIR1, '-L'+LIBDIR2]
           + [PYLIB_DYN and ('-l'+PYLIB_DYN) or os.path.join(LIBDIR1, PYLIB)]
           + LIBS.split() + SYSLIBS.split() + LINKFORSHARED.split())
def ccompile(basename):
    """Compile basename.c into basename.o using the configured C compiler."""
    runcmd([CC, '-c', '-o', basename+'.o', basename+'.c', '-I' + INCDIR] + CFLAGS.split())
def cycompile(input_file, options=()):
    """Run Cython over *input_file* with --embed; exit(1) on compile errors."""
    from ..Compiler import Version, CmdLine, Main
    options, sources = CmdLine.parse_command_line(list(options or ()) + ['--embed', input_file])
    _debug('Using Cython %s to compile %s', Version.version, input_file)
    result = Main.compile(sources, options)
    if result.num_errors > 0:
        sys.exit(1)
def exec_file(program_name, args=()):
    """Execute the built program directly (not through a shell), passing *args*."""
    runcmd([os.path.abspath(program_name)] + list(args), shell=False)
def build(input_file, compiler_args=(), force=False):
    """
    Build an executable program from a Cython module.

    Returns the name of the executable file.
    """
    basename = os.path.splitext(input_file)[0]
    exe_file = basename + EXE_EXT
    # Guard against clobbering the input (e.g. no EXE extension on this platform).
    if not force and os.path.abspath(exe_file) == os.path.abspath(input_file):
        raise ValueError("Input and output file names are the same, refusing to overwrite")
    # Skip the rebuild when the executable is newer than the source.
    if (not force and os.path.exists(exe_file) and os.path.exists(input_file)
            and os.path.getmtime(input_file) <= os.path.getmtime(exe_file)):
        _debug("File is up to date, not regenerating %s", exe_file)
        return exe_file
    cycompile(input_file, compiler_args)   # .py/.pyx -> .c
    ccompile(basename)                     # .c -> .o
    clink(basename)                        # .o -> executable
    return exe_file
def build_and_run(args):
    """
    Build an executable program from a Cython module and run it.

    Arguments after the module name will be passed verbatimly to the program.
    """
    program_name, args = _build(args)
    exec_file(program_name, args)
def _build(args):
    # Parse the command line, build the executable, and return
    # (program_name, remaining_args_for_the_program).
    input_file, cy_args, args = _parse_args(args)
    program_name = build(input_file, cy_args)
    return program_name, args
if __name__ == '__main__':
    # Build only; use build_and_run() to also execute the result.
    _build(sys.argv[1:])
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Cythonize.py
ADDED
|
@@ -0,0 +1,255 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import, print_function
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import shutil
|
| 5 |
+
import tempfile
|
| 6 |
+
|
| 7 |
+
from .Dependencies import cythonize, extended_iglob
|
| 8 |
+
from ..Utils import is_package_dir
|
| 9 |
+
from ..Compiler import Options
|
| 10 |
+
|
| 11 |
+
try:
|
| 12 |
+
import multiprocessing
|
| 13 |
+
parallel_compiles = int(multiprocessing.cpu_count() * 1.5)
|
| 14 |
+
except ImportError:
|
| 15 |
+
multiprocessing = None
|
| 16 |
+
parallel_compiles = 0
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class _FakePool(object):
    """Drop-in stand-in for multiprocessing.Pool that runs jobs serially.

    Used when a real pool cannot be created (e.g. OSError on fork).
    """

    def map_async(self, func, args):
        try:
            from itertools import imap  # Python 2 lazy map
        except ImportError:
            imap=map
        # Drain the iterator so every job actually executes.
        for _ in imap(func, args):
            pass

    def close(self):
        pass

    def terminate(self):
        pass

    def join(self):
        pass
def find_package_base(path):
    # Walk upwards past package directories and return
    # (base_dir, package_path), where package_path is the '/'-joined
    # path of path relative to the first non-package ancestor.
    base_dir, package_path = os.path.split(path)
    while is_package_dir(base_dir):
        base_dir, parent = os.path.split(base_dir)
        package_path = '%s/%s' % (parent, package_path)
    return base_dir, package_path
def cython_compile(path_pattern, options):
    # Expand a glob pattern and compile every matching path.
    all_paths = map(os.path.abspath, extended_iglob(path_pattern))
    _cython_compile_files(all_paths, options)
def _cython_compile_files(all_paths, options):
    """Cythonize all given paths and, if requested, build the extensions.

    Builds run in a multiprocessing pool when options.parallel > 1; the
    pool is terminated on error and joined on success.
    """
    pool = None
    try:
        for path in all_paths:
            if options.build_inplace:
                # Walk up to the first non-package directory so that
                # 'build_ext -i' drops the extension next to its source.
                base_dir = path
                while not os.path.isdir(base_dir) or is_package_dir(base_dir):
                    base_dir = os.path.dirname(base_dir)
            else:
                base_dir = None

            if os.path.isdir(path):
                # recursively compiling a package
                paths = [os.path.join(path, '**', '*.{py,pyx}')]
            else:
                # assume it's a file(-like thing)
                paths = [path]

            ext_modules = cythonize(
                paths,
                nthreads=options.parallel,
                exclude_failures=options.keep_going,
                exclude=options.excludes,
                compiler_directives=options.directives,
                compile_time_env=options.compile_time_env,
                force=options.force,
                quiet=options.quiet,
                depfile=options.depfile,
                language=options.language,
                **options.options)

            if ext_modules and options.build:
                if len(ext_modules) > 1 and options.parallel > 1:
                    if pool is None:
                        try:
                            pool = multiprocessing.Pool(options.parallel)
                        except OSError:
                            # e.g. no fork support; fall back to serial builds
                            pool = _FakePool()
                    pool.map_async(run_distutils, [
                        (base_dir, [ext]) for ext in ext_modules])
                else:
                    run_distutils((base_dir, ext_modules))
    except:
        if pool is not None:
            pool.terminate()
        raise
    else:
        if pool is not None:
            pool.close()
            pool.join()
def run_distutils(args):
    """Build the given extension modules in place with distutils/setuptools.

    *args* is a (base_dir, ext_modules) pair; when base_dir is set, the
    build runs from there with a temporary build directory that is removed
    afterwards.
    """
    try:
        from distutils.core import setup
    except ImportError:
        # distutils was removed in Python 3.12; try setuptools instead
        try:
            from setuptools import setup
        except ImportError:
            raise ImportError("'distutils' is not available. Please install 'setuptools' for binary builds.")

    base_dir, ext_modules = args
    script_args = ['build_ext', '-i']
    cwd = os.getcwd()
    temp_dir = None
    try:
        if base_dir:
            os.chdir(base_dir)
            temp_dir = tempfile.mkdtemp(dir=base_dir)
            script_args.extend(['--build-temp', temp_dir])
        setup(
            script_name='setup.py',
            script_args=script_args,
            ext_modules=ext_modules,
        )
    finally:
        if base_dir:
            os.chdir(cwd)
            if temp_dir and os.path.isdir(temp_dir):
                shutil.rmtree(temp_dir)
def create_args_parser():
    """Create and return the ArgumentParser for the cythonize command line.

    Positional sources are collected in 'sources'; unknown interspersed
    positionals are recovered later by parse_args_raw().
    """
    from argparse import ArgumentParser, RawDescriptionHelpFormatter
    from ..Compiler.CmdLine import ParseDirectivesAction, ParseOptionsAction, ParseCompileTimeEnvAction

    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        epilog="""\
Environment variables:
  CYTHON_FORCE_REGEN: if set to 1, forces cythonize to regenerate the output files regardless
    of modification times and changes.
  Environment variables accepted by setuptools are supported to configure the C compiler and build:
  https://setuptools.pypa.io/en/latest/userguide/ext_modules.html#compiler-and-linker-options"""
    )

    parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
                        dest='directives', default={}, type=str,
                        action=ParseDirectivesAction,
                        help='set a compiler directive')
    parser.add_argument('-E', '--compile-time-env', metavar='NAME=VALUE,...',
                        dest='compile_time_env', default={}, type=str,
                        action=ParseCompileTimeEnvAction,
                        help='set a compile time environment variable')
    parser.add_argument('-s', '--option', metavar='NAME=VALUE',
                        dest='options', default={}, type=str,
                        action=ParseOptionsAction,
                        help='set a cythonize option')
    parser.add_argument('-2', dest='language_level', action='store_const', const=2, default=None,
                        help='use Python 2 syntax mode by default')
    parser.add_argument('-3', dest='language_level', action='store_const', const=3,
                        help='use Python 3 syntax mode by default')
    parser.add_argument('--3str', dest='language_level', action='store_const', const='3str',
                        help='use Python 3 syntax mode by default')
    parser.add_argument('-+', '--cplus', dest='language', action='store_const', const='c++', default=None,
                        help='Compile as C++ rather than C')
    parser.add_argument('-a', '--annotate', action='store_const', const='default', dest='annotate',
                        help='Produce a colorized HTML version of the source.')
    parser.add_argument('--annotate-fullc', action='store_const', const='fullc', dest='annotate',
                        help='Produce a colorized HTML version of the source '
                             'which includes entire generated C/C++-code.')
    parser.add_argument('-x', '--exclude', metavar='PATTERN', dest='excludes',
                        action='append', default=[],
                        help='exclude certain file patterns from the compilation')

    parser.add_argument('-b', '--build', dest='build', action='store_true', default=None,
                        help='build extension modules using distutils/setuptools')
    parser.add_argument('-i', '--inplace', dest='build_inplace', action='store_true', default=None,
                        help='build extension modules in place using distutils/setuptools (implies -b)')
    # BUG FIX: the original wrote "... % parallel_compiles or 1", which parses
    # as "('...' % parallel_compiles) or 1" — the "or 1" fallback could never
    # apply because a non-empty string is always truthy.  Parenthesize so a
    # zero job count (no multiprocessing) advertises a default of 1.
    parser.add_argument('-j', '--parallel', dest='parallel', metavar='N',
                        type=int, default=parallel_compiles,
                        help=('run builds in N parallel jobs (default: %d)' %
                              (parallel_compiles or 1)))
    parser.add_argument('-f', '--force', dest='force', action='store_true', default=None,
                        help='force recompilation')
    parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', default=None,
                        help='be less verbose during compilation')

    parser.add_argument('--lenient', dest='lenient', action='store_true', default=None,
                        help='increase Python compatibility by ignoring some compile time errors')
    parser.add_argument('-k', '--keep-going', dest='keep_going', action='store_true', default=None,
                        help='compile as much as possible, ignore compilation failures')
    parser.add_argument('--no-docstrings', dest='no_docstrings', action='store_true', default=None,
                        help='strip docstrings')
    parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
    parser.add_argument('sources', nargs='*')
    return parser
def parse_args_raw(parser, args):
    """Parse *args* with *parser*, tolerating interspersed positionals.

    Returns (options, sources); 'sources' is removed from the options
    namespace.  Unknown dash-options are a hard parser error.
    """
    options, unknown = parser.parse_known_args(args)
    sources = options.sources
    # if positional arguments were interspersed
    # some of them are in unknown
    for option in unknown:
        if option.startswith('-'):
            parser.error("unknown option "+option)
        else:
            sources.append(option)
    del options.sources
    return (options, sources)
def parse_args(args):
    """Parse the cythonize command line and apply global Options side effects.

    Returns (options, source_paths).  Errors out if no sources were given.
    """
    parser = create_args_parser()
    options, args = parse_args_raw(parser, args)

    if not args:
        parser.error("no source files provided")
    if options.build_inplace:
        # -i implies -b
        options.build = True
    if multiprocessing is None:
        options.parallel = 0
    if options.language_level:
        assert options.language_level in (2, 3, '3str')
        options.options['language_level'] = options.language_level

    if options.lenient:
        # increase Python compatibility by ignoring compile time errors
        Options.error_on_unknown_names = False
        Options.error_on_uninitialized = False

    if options.annotate:
        Options.annotate = options.annotate

    if options.no_docstrings:
        Options.docstrings = False

    return options, args
def main(args=None):
    """Command-line entry point: expand source globs and compile them all."""
    options, paths = parse_args(args)

    all_paths = []
    for path in paths:
        expanded_path = [os.path.abspath(p) for p in extended_iglob(path)]
        if not expanded_path:
            import sys
            print("{}: No such file or directory: '{}'".format(sys.argv[0], path), file=sys.stderr)
            sys.exit(1)
        all_paths.extend(expanded_path)
    _cython_compile_files(all_paths, options)
if __name__ == '__main__':
    main()
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Dependencies.py
ADDED
|
@@ -0,0 +1,1380 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import, print_function
|
| 2 |
+
|
| 3 |
+
import cython
|
| 4 |
+
from .. import __version__
|
| 5 |
+
|
| 6 |
+
import collections
|
| 7 |
+
import contextlib
|
| 8 |
+
import hashlib
|
| 9 |
+
import os
|
| 10 |
+
import shutil
|
| 11 |
+
import subprocess
|
| 12 |
+
import re, sys, time
|
| 13 |
+
from glob import iglob
|
| 14 |
+
from io import open as io_open
|
| 15 |
+
from os.path import relpath as _relpath
|
| 16 |
+
import zipfile
|
| 17 |
+
|
| 18 |
+
try:
|
| 19 |
+
from collections.abc import Iterable
|
| 20 |
+
except ImportError:
|
| 21 |
+
from collections import Iterable
|
| 22 |
+
|
| 23 |
+
try:
|
| 24 |
+
import gzip
|
| 25 |
+
gzip_open = gzip.open
|
| 26 |
+
gzip_ext = '.gz'
|
| 27 |
+
except ImportError:
|
| 28 |
+
gzip_open = open
|
| 29 |
+
gzip_ext = ''
|
| 30 |
+
|
| 31 |
+
try:
|
| 32 |
+
import zlib
|
| 33 |
+
zipfile_compression_mode = zipfile.ZIP_DEFLATED
|
| 34 |
+
except ImportError:
|
| 35 |
+
zipfile_compression_mode = zipfile.ZIP_STORED
|
| 36 |
+
|
| 37 |
+
try:
|
| 38 |
+
import pythran
|
| 39 |
+
except:
|
| 40 |
+
pythran = None
|
| 41 |
+
|
| 42 |
+
from .. import Utils
|
| 43 |
+
from ..Utils import (cached_function, cached_method, path_exists,
|
| 44 |
+
safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, write_depfile)
|
| 45 |
+
from ..Compiler import Errors
|
| 46 |
+
from ..Compiler.Main import Context
|
| 47 |
+
from ..Compiler.Options import (CompilationOptions, default_options,
|
| 48 |
+
get_directive_defaults)
|
| 49 |
+
|
| 50 |
+
join_path = cached_function(os.path.join)
|
| 51 |
+
copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
|
| 52 |
+
safe_makedirs_once = cached_function(safe_makedirs)
|
| 53 |
+
|
| 54 |
+
if sys.version_info[0] < 3:
    # stupid Py2 distutils enforces str type in list of sources
    _fs_encoding = sys.getfilesystemencoding()
    if _fs_encoding is None:
        _fs_encoding = sys.getdefaultencoding()
    def encode_filename_in_py2(filename):
        # Py2: distutils requires byte strings for source file names,
        # so encode unicode names using the filesystem encoding.
        if not isinstance(filename, bytes):
            return filename.encode(_fs_encoding)
        return filename
else:
    def encode_filename_in_py2(filename):
        # Py3: file names are already str; nothing to do.
        return filename
    basestring = str
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def _make_relative(file_paths, base=None):
|
| 70 |
+
if not base:
|
| 71 |
+
base = os.getcwd()
|
| 72 |
+
if base[-1] != os.path.sep:
|
| 73 |
+
base += os.path.sep
|
| 74 |
+
return [_relpath(path, base) if path.startswith(base) else path
|
| 75 |
+
for path in file_paths]
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def extended_iglob(pattern):
    """Yield paths matching *pattern*, a glob extended with two features:

    ``{a,b,...}`` alternation (each alternative expanded in turn) and
    ``**/`` recursive directory matching (results de-duplicated).
    Plain patterns fall through to ``glob.iglob``.
    """
    if '{' in pattern:
        m = re.match('(.*){([^}]+)}(.*)', pattern)
        if m:
            before, switch, after = m.groups()
            # Expand each comma-separated alternative and recurse.
            for case in switch.split(','):
                for path in extended_iglob(before + case + after):
                    yield path
            return

    # We always accept '/' and also '\' on Windows,
    # because '/' is generally common for relative paths.
    if '**/' in pattern or os.sep == '\\' and '**\\' in pattern:
        seen = set()
        first, rest = re.split(r'\*\*[%s]' % ('/\\\\' if os.sep == '\\' else '/'), pattern, 1)
        if first:
            first = iglob(first + os.sep)
        else:
            first = ['']
        # '**' matches zero or more directory levels: try the remainder both
        # directly under each root and one level deeper (recursively).
        for root in first:
            for path in extended_iglob(join_path(root, rest)):
                if path not in seen:
                    seen.add(path)
                    yield path
            for path in extended_iglob(join_path(root, '*', '**', rest)):
                if path not in seen:
                    seen.add(path)
                    yield path
    else:
        for path in iglob(pattern):
            yield path
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def nonempty(it, error_msg="expected non-empty iterator"):
    """Pass through every item of *it*, raising ValueError if it yields nothing.

    Note the check can only fire once the wrapped iterator is exhausted,
    i.e. when this generator itself finishes.
    """
    produced_anything = False
    for item in it:
        produced_anything = True
        yield item
    if not produced_anything:
        raise ValueError(error_msg)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
@cached_function
def file_hash(filename):
    """Return a SHA1 hex digest of the file's path and contents.

    The normalized path (length-prefixed) is mixed into the hash so that
    identical contents at different locations hash differently.  Results
    are cached per filename for the lifetime of the process.
    """
    path = os.path.normpath(filename)
    prefix = ('%d:%s' % (len(path), path)).encode("UTF-8")
    m = hashlib.sha1(prefix)
    with open(path, 'rb') as f:
        # Read in ~64K chunks to bound memory usage on large files.
        data = f.read(65000)
        while data:
            m.update(data)
            data = f.read(65000)
    return m.hexdigest()
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def update_pythran_extension(ext):
    """Add Pythran's build requirements (includes, flags, libraries) to *ext*
    and switch the extension to C++ compilation.

    Raises RuntimeError if the optional 'pythran' package is not installed.
    """
    if pythran is None:
        raise RuntimeError("You first need to install Pythran to use the np_pythran directive.")
    try:
        pythran_ext = pythran.config.make_extension(python=True)
    except TypeError:  # older pythran version only
        pythran_ext = pythran.config.make_extension()

    ext.include_dirs.extend(pythran_ext['include_dirs'])
    ext.extra_compile_args.extend(pythran_ext['extra_compile_args'])
    ext.extra_link_args.extend(pythran_ext['extra_link_args'])
    ext.define_macros.extend(pythran_ext['define_macros'])
    ext.undef_macros.extend(pythran_ext['undef_macros'])
    ext.library_dirs.extend(pythran_ext['library_dirs'])
    ext.libraries.extend(pythran_ext['libraries'])
    ext.language = 'c++'

    # These options are not compatible with the way normal Cython extensions work
    for bad_option in ["-fwhole-program", "-fvisibility=hidden"]:
        try:
            ext.extra_compile_args.remove(bad_option)
        except ValueError:
            pass
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def parse_list(s):
    """
    Parse a "# distutils: ..." option value into a list of strings.

    >>> parse_list("")
    []
    >>> parse_list("a")
    ['a']
    >>> parse_list("a b c")
    ['a', 'b', 'c']
    >>> parse_list("[a, b, c]")
    ['a', 'b', 'c']
    >>> parse_list('a " " b')
    ['a', ' ', 'b']
    >>> parse_list('[a, ",a", "a,", ",", ]')
    ['a', ',a', 'a,', ',']
    """
    # Bracketed lists are comma-separated; bare lists are whitespace-separated.
    if len(s) >= 2 and s[0] == '[' and s[-1] == ']':
        s = s[1:-1]
        delimiter = ','
    else:
        delimiter = ' '
    # Replace string literals with labels first so that delimiters inside
    # quotes do not split items; unquote() maps labels back afterwards.
    s, literals = strip_string_literals(s)
    def unquote(literal):
        literal = literal.strip()
        if literal[0] in "'\"":
            return literals[literal[1:-1]]
        else:
            return literal
    return [unquote(item) for item in s.split(delimiter) if item.strip()]
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
# Sentinels describing how a distutils option propagates from dependencies
# (e.g. cimported .pxd files) to the modules that use them:
transitive_str = object()    # inherited only if the includer does not set it
transitive_list = object()   # merged (deduplicated, order-preserving)
bool_or = object()           # OR-ed together across dependencies

# Recognised "# distutils: <key> = <value>" options and the type or
# propagation rule used to parse and merge each of them.
distutils_settings = {
    'name': str,
    'sources': list,
    'define_macros': list,
    'undef_macros': list,
    'libraries': transitive_list,
    'library_dirs': transitive_list,
    'runtime_library_dirs': transitive_list,
    'include_dirs': transitive_list,
    'extra_objects': list,
    'extra_compile_args': transitive_list,
    'extra_link_args': transitive_list,
    'export_symbols': list,
    'depends': transitive_list,
    'language': transitive_str,
    'np_pythran': bool_or
}
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def _legacy_strtobool(val):
|
| 212 |
+
# Used to be "distutils.util.strtobool", adapted for deprecation warnings.
|
| 213 |
+
if val == "True":
|
| 214 |
+
return True
|
| 215 |
+
elif val == "False":
|
| 216 |
+
return False
|
| 217 |
+
|
| 218 |
+
import warnings
|
| 219 |
+
warnings.warn("The 'np_python' option requires 'True' or 'False'", category=DeprecationWarning)
|
| 220 |
+
val = val.lower()
|
| 221 |
+
if val in ('y', 'yes', 't', 'true', 'on', '1'):
|
| 222 |
+
return True
|
| 223 |
+
elif val in ('n', 'no', 'f', 'false', 'off', '0'):
|
| 224 |
+
return False
|
| 225 |
+
else:
|
| 226 |
+
raise ValueError("invalid truth value %r" % (val,))
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
def line_iter(source):
    """Iterate over the lines of *source*.

    *source* may be a string (split lazily on '\\n' without building a
    list) or any iterable of lines, which is passed through unchanged.
    """
    if isinstance(source, basestring):
        start = 0
        while True:
            end = source.find('\n', start)
            if end == -1:
                # Last (possibly empty) line, without a trailing newline.
                yield source[start:]
                return
            yield source[start:end]
            start = end+1
    else:
        for line in source:
            yield line
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
class DistutilsInfo(object):
    """Distutils build settings extracted from "# distutils: ..." comments.

    Built either from a source file/string (parsing the leading comment
    block) or from an existing Extension instance (*exn*).  Knows how to
    merge settings transitively across dependencies, substitute aliases,
    and apply the result to a distutils Extension.
    """

    def __init__(self, source=None, exn=None):
        # self.values maps option names (keys of distutils_settings) to
        # their parsed values.
        self.values = {}
        if source is not None:
            for line in line_iter(source):
                line = line.lstrip()
                if not line:
                    continue
                if line[0] != '#':
                    # Options must appear in the leading comment block, before
                    # the first code line.
                    break
                line = line[1:].lstrip()
                kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None)
                if kind is not None:
                    key, _, value = [s.strip() for s in line[len(kind):].partition('=')]
                    type = distutils_settings.get(key, None)
                    # "cython:" lines may carry directives too; only keep known keys.
                    if line.startswith("cython:") and type is None: continue
                    if type in (list, transitive_list):
                        value = parse_list(value)
                        if key == 'define_macros':
                            # distutils expects (name, value-or-None) tuples.
                            value = [tuple(macro.split('=', 1))
                                     if '=' in macro else (macro, None)
                                     for macro in value]
                    if type is bool_or:
                        value = _legacy_strtobool(value)
                    self.values[key] = value
        elif exn is not None:
            for key in distutils_settings:
                if key in ('name', 'sources','np_pythran'):
                    continue
                value = getattr(exn, key, None)
                if value:
                    self.values[key] = value

    def merge(self, other):
        # Merge *other* (a dependency's settings) into self, in place,
        # following each key's propagation rule; returns self.
        if other is None:
            return self
        for key, value in other.values.items():
            type = distutils_settings[key]
            if type is transitive_str and key not in self.values:
                self.values[key] = value
            elif type is transitive_list:
                if key in self.values:
                    # Change a *copy* of the list (Trac #845)
                    all = self.values[key][:]
                    for v in value:
                        if v not in all:
                            all.append(v)
                    value = all
                self.values[key] = value
            elif type is bool_or:
                self.values[key] = self.values.get(key, False) | value
        return self

    def subs(self, aliases):
        # Return a new DistutilsInfo with values replaced through the
        # *aliases* mapping; list-valued aliases are spliced into list
        # settings.  Returns self unchanged if aliases is None.
        if aliases is None:
            return self
        resolved = DistutilsInfo()
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                new_value_list = []
                for v in value:
                    if v in aliases:
                        v = aliases[v]
                    if isinstance(v, list):
                        new_value_list += v
                    else:
                        new_value_list.append(v)
                value = new_value_list
            else:
                if value in aliases:
                    value = aliases[value]
            resolved.values[key] = value
        return resolved

    def apply(self, extension):
        # Copy the collected settings onto a distutils Extension instance,
        # appending to (not replacing) its existing list-valued attributes.
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                value = getattr(extension, key) + list(value)
            setattr(extension, key, value)
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t,
               single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t,
               hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t,
               k=cython.Py_ssize_t, counter=cython.Py_ssize_t, quote_len=cython.Py_ssize_t)
def strip_string_literals(code, prefix='__Pyx_L'):
    """
    Normalizes every string literal to be of the form '__Pyx_Lxxx',
    returning the normalized code and a mapping of labels to
    string literals.

    Comment bodies are replaced by labels as well.  The scanner tracks the
    next '#', "'" and '"' positions and walks the code once.
    """
    new_code = []
    literals = {}
    counter = 0
    start = q = 0
    in_quote = False
    hash_mark = single_q = double_q = -1
    code_len = len(code)
    quote_type = None
    quote_len = -1

    while True:
        # Refresh the cached positions of the next '#', "'" and '"'
        # at or beyond the scan position q.
        if hash_mark < q:
            hash_mark = code.find('#', q)
        if single_q < q:
            single_q = code.find("'", q)
        if double_q < q:
            double_q = code.find('"', q)
        q = min(single_q, double_q)
        if q == -1:
            q = max(single_q, double_q)

        # We're done.
        if q == -1 and hash_mark == -1:
            new_code.append(code[start:])
            break

        # Try to close the quote.
        elif in_quote:
            if code[q-1] == u'\\':
                # Count preceding backslashes; an even count means the
                # quote character itself is escaped, so skip it.
                k = 2
                while q >= k and code[q-k] == u'\\':
                    k += 1
                if k % 2 == 0:
                    q += 1
                    continue
            if code[q] == quote_type and (
                    quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])):
                counter += 1
                label = "%s%s_" % (prefix, counter)
                literals[label] = code[start+quote_len:q]
                full_quote = code[q:q+quote_len]
                new_code.append(full_quote)
                new_code.append(label)
                new_code.append(full_quote)
                q += quote_len
                in_quote = False
                start = q
            else:
                q += 1

        # Process comment.
        elif -1 != hash_mark and (hash_mark < q or q == -1):
            new_code.append(code[start:hash_mark+1])
            end = code.find('\n', hash_mark)
            counter += 1
            label = "%s%s_" % (prefix, counter)
            if end == -1:
                end_or_none = None
            else:
                end_or_none = end
            # The comment body (after '#', up to end of line) becomes a label.
            literals[label] = code[hash_mark+1:end_or_none]
            new_code.append(label)
            if end == -1:
                break
            start = q = end

        # Open the quote.
        else:
            if code_len >= q+3 and (code[q] == code[q+1] == code[q+2]):
                quote_len = 3
            else:
                quote_len = 1
            in_quote = True
            quote_type = code[q]
            new_code.append(code[start:q])
            start = q
            q += quote_len

    return "".join(new_code), literals
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
# We need to allow spaces to allow for conditional compilation like
# IF ...:
#     cimport ...
# Per line, matches one of: "from X cimport", "cimport X[, Y...]",
# "cdef extern from '<file>'", and "include '<file>'".
dependency_regex = re.compile(r"(?:^\s*from +([0-9a-zA-Z_.]+) +cimport)|"
                              r"(?:^\s*cimport +([0-9a-zA-Z_.]+(?: *, *[0-9a-zA-Z_.]+)*))|"
                              r"(?:^\s*cdef +extern +from +['\"]([^'\"]+)['\"])|"
                              r"(?:^\s*include +['\"]([^'\"]+)['\"])", re.M)
# Captures the names following a "from X cimport" match: either a
# parenthesised (possibly multi-line) list or a single-line name list.
dependency_after_from_regex = re.compile(
    r"(?:^\s+\(([0-9a-zA-Z_., ]*)\)[#\n])|"
    r"(?:^\s+([0-9a-zA-Z_., ]*)[#\n])",
    re.M)
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
def normalize_existing(base_path, rel_paths):
    """Normalize *rel_paths* against the directory containing *base_path*.

    Thin wrapper around the cached normalize_existing0(): converts the
    path collection into a hashable, duplicate-free tuple so the cache
    can key on it.
    """
    base_dir = os.path.dirname(base_path)
    unique_paths = tuple(set(rel_paths))
    return normalize_existing0(base_dir, unique_paths)
|
| 435 |
+
|
| 436 |
+
|
| 437 |
+
@cached_function
def normalize_existing0(base_dir, rel_paths):
    """
    Given some base directory ``base_dir`` and a list of path names
    ``rel_paths``, normalize each relative path name ``rel`` by
    replacing it by ``os.path.join(base, rel)`` if that file exists.

    Return a couple ``(normalized, needed_base)`` where ``normalized``
    if the list of normalized file names and ``needed_base`` is
    ``base_dir`` if we actually needed ``base_dir``. If no paths were
    changed (for example, if all paths were already absolute), then
    ``needed_base`` is ``None``.

    Results are cached, so ``rel_paths`` must be hashable (a tuple).
    """
    normalized = []
    needed_base = None
    for rel in rel_paths:
        if os.path.isabs(rel):
            normalized.append(rel)
            continue
        path = join_path(base_dir, rel)
        if path_exists(path):
            normalized.append(os.path.normpath(path))
            needed_base = base_dir
        else:
            # Keep unresolvable paths as given.
            normalized.append(rel)
    return (normalized, needed_base)
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
def resolve_depends(depends, include_dirs):
    """Resolve each 'depends' entry against *include_dirs*.

    Unresolvable entries (including system-style ``<...>`` ones) are
    silently dropped.
    """
    # Tuple is hashable, so the cached resolve_depend() can key on it.
    dirs = tuple(include_dirs)
    candidates = (resolve_depend(dep, dirs) for dep in depends)
    return [path for path in candidates if path is not None]
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
@cached_function
def resolve_depend(depend, include_dirs):
    """Resolve a single 'depends' entry against *include_dirs*.

    System-style dependencies written as ``<name>`` are ignored (None).
    Returns the normalized path of the first existing match, or None.
    """
    if depend[0] == '<' and depend[-1] == '>':
        return None
    for dir in include_dirs:
        path = join_path(dir, depend)
        if path_exists(path):
            return os.path.normpath(path)
    return None
|
| 484 |
+
|
| 485 |
+
|
| 486 |
+
@cached_function
def package(filename):
    """Return the package path of *filename* as a tuple of package names.

    Walks upward from the file's directory while each directory is a
    package directory; e.g. a file in pkg/sub/ yields ('pkg', 'sub').
    """
    dir = os.path.dirname(os.path.abspath(str(filename)))
    if dir != filename and is_package_dir(dir):
        return package(dir) + (os.path.basename(dir),)
    else:
        return ()
|
| 493 |
+
|
| 494 |
+
|
| 495 |
+
@cached_function
def fully_qualified_name(filename):
    """Return the dotted module name for *filename*, including its package path."""
    module = os.path.splitext(os.path.basename(filename))[0]
    return '.'.join(package(filename) + (module,))
|
| 499 |
+
|
| 500 |
+
|
| 501 |
+
@cached_function
def parse_dependencies(source_filename):
    """Extract the dependencies of a Cython source file.

    Returns ``(cimports, includes, externs, distutils_info)``: the
    cimported module names, textually included file names, "cdef extern
    from" header names, and the DistutilsInfo parsed from the file's
    leading comments.
    """
    # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    with Utils.open_source_file(source_filename, error_handling='ignore') as fh:
        source = fh.read()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    # Normalize line continuations and tabs so the regexes below match.
    source = source.replace('\\\n', ' ').replace('\t', ' ')

    # TODO: pure mode
    cimports = []
    includes = []
    externs = []
    for m in dependency_regex.finditer(source):
        cimport_from, cimport_list, extern, include = m.groups()
        if cimport_from:
            cimports.append(cimport_from)
            m_after_from = dependency_after_from_regex.search(source, pos=m.end())
            if m_after_from:
                multiline, one_line = m_after_from.groups()
                subimports = multiline or one_line
                cimports.extend("{0}.{1}".format(cimport_from, s.strip())
                                for s in subimports.split(','))

        elif cimport_list:
            cimports.extend(x.strip() for x in cimport_list.split(","))
        elif extern:
            # File names were replaced by labels; map back to the literal.
            externs.append(literals[extern])
        else:
            includes.append(literals[include])
    return cimports, includes, externs, distutils_info
|
| 534 |
+
|
| 535 |
+
|
| 536 |
+
class DependencyTree(object):
    """Tracks the (transitive) dependencies of Cython source files.

    Wraps the module-level parsing helpers with per-instance caching and
    resolves cimports/includes through the compiler *context*.
    """

    def __init__(self, context, quiet=False):
        self.context = context
        self.quiet = quiet
        self._transitive_cache = {}

    def parse_dependencies(self, source_filename):
        # Normalize real paths so the module-level cache keys agree.
        if path_exists(source_filename):
            source_filename = os.path.normpath(source_filename)
        return parse_dependencies(source_filename)

    @cached_method
    def included_files(self, filename):
        # This is messy because included files are textually included, resolving
        # cimports (but not includes) relative to the including file.
        all = set()
        for include in self.parse_dependencies(filename)[1]:
            include_path = join_path(os.path.dirname(filename), include)
            if not path_exists(include_path):
                include_path = self.context.find_include_file(include, source_file_path=filename)
            if include_path:
                if '.' + os.path.sep in include_path:
                    include_path = os.path.normpath(include_path)
                all.add(include_path)
                all.update(self.included_files(include_path))
            elif not self.quiet:
                print(u"Unable to locate '%s' referenced from '%s'" % (filename, include))
        return all

    @cached_method
    def cimports_externs_incdirs(self, filename):
        # This is really ugly. Nested cimports are resolved with respect to the
        # includer, but includes are resolved with respect to the includee.
        cimports, includes, externs = self.parse_dependencies(filename)[:3]
        cimports = set(cimports)
        externs = set(externs)
        incdirs = set()
        for include in self.included_files(filename):
            included_cimports, included_externs, included_incdirs = self.cimports_externs_incdirs(include)
            cimports.update(included_cimports)
            externs.update(included_externs)
            incdirs.update(included_incdirs)
        externs, incdir = normalize_existing(filename, externs)
        if incdir:
            incdirs.add(incdir)
        return tuple(cimports), externs, incdirs

    def cimports(self, filename):
        return self.cimports_externs_incdirs(filename)[0]

    def package(self, filename):
        return package(filename)

    def fully_qualified_name(self, filename):
        return fully_qualified_name(filename)

    @cached_method
    def find_pxd(self, module, filename=None):
        """Locate the .pxd file for *module*, resolving relative imports
        against *filename*.  Returns None if it cannot be found."""
        is_relative = module[0] == '.'
        if is_relative and not filename:
            raise NotImplementedError("New relative imports.")
        if filename is not None:
            module_path = module.split('.')
            if is_relative:
                module_path.pop(0)  # just explicitly relative
            package_path = list(self.package(filename))
            while module_path and not module_path[0]:
                # Each empty component steps one package level up.
                try:
                    package_path.pop()
                except IndexError:
                    return None  # FIXME: error?
                module_path.pop(0)
            relative = '.'.join(package_path + module_path)
            pxd = self.context.find_pxd_file(relative, source_file_path=filename)
            if pxd:
                return pxd
        if is_relative:
            return None  # FIXME: error?
        return self.context.find_pxd_file(module, source_file_path=filename)

    @cached_method
    def cimported_files(self, filename):
        """Return the tuple of .pxd files this file depends on via cimports
        (including its own matching .pxd, if any)."""
        filename_root, filename_ext = os.path.splitext(filename)
        if filename_ext in ('.pyx', '.py') and path_exists(filename_root + '.pxd'):
            pxd_list = [filename_root + '.pxd']
        else:
            pxd_list = []
        # Cimports generates all possible combinations package.module
        # when imported as from package cimport module.
        for module in self.cimports(filename):
            if module[:7] == 'cython.' or module == 'cython':
                continue
            pxd_file = self.find_pxd(module, filename)
            if pxd_file is not None:
                pxd_list.append(pxd_file)
        return tuple(pxd_list)

    @cached_method
    def immediate_dependencies(self, filename):
        # The file depends on itself plus everything it cimports or includes.
        # (Fixed: this line was corrupted to "all_deps = (unknown)".)
        all_deps = set([filename])
        all_deps.update(self.cimported_files(filename))
        all_deps.update(self.included_files(filename))
        return all_deps

    def all_dependencies(self, filename):
        return self.transitive_merge(filename, self.immediate_dependencies, set.union)

    @cached_method
    def timestamp(self, filename):
        return os.path.getmtime(filename)

    def extract_timestamp(self, filename):
        return self.timestamp(filename), filename

    def newest_dependency(self, filename):
        return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)])

    def transitive_fingerprint(self, filename, module, compilation_options):
        r"""
        Return a fingerprint of a cython file that is about to be cythonized.

        Fingerprints are looked up in future compilations. If the fingerprint
        is found, the cythonization can be skipped. The fingerprint must
        incorporate everything that has an influence on the generated code.
        """
        try:
            m = hashlib.sha1(__version__.encode('UTF-8'))
            m.update(file_hash(filename).encode('UTF-8'))
            for x in sorted(self.all_dependencies(filename)):
                if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'):
                    m.update(file_hash(x).encode('UTF-8'))
            # Include the module attributes that change the compilation result
            # in the fingerprint. We do not iterate over module.__dict__ and
            # include almost everything here as users might extend Extension
            # with arbitrary (random) attributes that would lead to cache
            # misses.
            m.update(str((
                module.language,
                getattr(module, 'py_limited_api', False),
                getattr(module, 'np_pythran', False)
            )).encode('UTF-8'))

            m.update(compilation_options.get_fingerprint().encode('UTF-8'))
            return m.hexdigest()
        except IOError:
            return None

    def distutils_info0(self, filename):
        """Build the DistutilsInfo for a single file, folding in extern
        header dependencies and the include dirs needed to find them."""
        info = self.parse_dependencies(filename)[3]
        kwds = info.values
        cimports, externs, incdirs = self.cimports_externs_incdirs(filename)
        basedir = os.getcwd()
        # Add dependencies on "cdef extern from ..." files
        if externs:
            externs = _make_relative(externs, basedir)
            if 'depends' in kwds:
                kwds['depends'] = list(set(kwds['depends']).union(externs))
            else:
                kwds['depends'] = list(externs)
        # Add include_dirs to ensure that the C compiler will find the
        # "cdef extern from ..." files
        if incdirs:
            include_dirs = list(kwds.get('include_dirs', []))
            for inc in _make_relative(incdirs, basedir):
                if inc not in include_dirs:
                    include_dirs.append(inc)
            kwds['include_dirs'] = include_dirs
        return info

    def distutils_info(self, filename, aliases=None, base=None):
        return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge)
                .subs(aliases)
                .merge(base))

    def transitive_merge(self, node, extract, merge):
        # One memo dict per (extract, merge) pair, shared across calls.
        try:
            seen = self._transitive_cache[extract, merge]
        except KeyError:
            seen = self._transitive_cache[extract, merge] = {}
        return self.transitive_merge_helper(
            node, extract, merge, seen, {}, self.cimported_files)[0]

    def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing):
        # Depth-first merge over the dependency graph; *stack* maps nodes on
        # the current path to their depth so cycles can be detected and the
        # memoization (*seen*) skipped while inside a cycle.
        if node in seen:
            return seen[node], None
        deps = extract(node)
        if node in stack:
            return deps, node
        try:
            stack[node] = len(stack)
            loop = None
            for next in outgoing(node):
                sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing)
                if sub_loop is not None:
                    # Remember the outermost (shallowest) loop head only.
                    if loop is not None and stack[loop] < stack[sub_loop]:
                        pass
                    else:
                        loop = sub_loop
                deps = merge(deps, sub_deps)
            if loop == node:
                loop = None
            if loop is None:
                seen[node] = deps
            return deps, loop
        finally:
            del stack[node]
|
| 743 |
+
|
| 744 |
+
|
| 745 |
+
# Process-wide DependencyTree singleton, created lazily below.
_dep_tree = None

def create_dependency_tree(ctx=None, quiet=False):
    """Return the shared DependencyTree, creating it on first use.

    Note: *ctx* and *quiet* only take effect on the first call; later
    calls return the already-created tree unchanged.
    """
    global _dep_tree
    if _dep_tree is None:
        if ctx is None:
            ctx = Context(["."], get_directive_defaults(),
                          options=CompilationOptions(default_options))
        _dep_tree = DependencyTree(ctx, quiet=quiet)
    return _dep_tree
|
| 755 |
+
|
| 756 |
+
|
| 757 |
+
# If this changes, change also docs/src/reference/compilation.rst
|
| 758 |
+
# which mentions this function
|
| 759 |
+
# If this changes, change also docs/src/reference/compilation.rst
# which mentions this function
def default_create_extension(template, kwds):
    """Default create_extension() hook used by cythonize().

    Builds a new extension of the same class as *template* from the
    keyword dict *kwds*, resolving and merging 'depends' entries first.
    Returns ``(extension, metadata)``.
    """
    if 'depends' in kwds:
        search_dirs = kwds.get('include_dirs', []) + ["."]
        resolved = resolve_depends(kwds['depends'], search_dirs)
        kwds['depends'] = sorted(set(resolved + template.depends))

    extension_class = template.__class__
    extension = extension_class(**kwds)
    metadata = dict(distutils=kwds, module_name=kwds['name'])
    return (extension, metadata)
|
| 769 |
+
|
| 770 |
+
|
| 771 |
+
# This may be useful for advanced users?
def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
                          exclude_failures=False):
    """Expand *patterns* (glob strings and/or Extension objects) into a list
    of distutils/setuptools Extension instances with their Cython metadata.

    Returns ``(module_list, module_metadata)``: the resolved extensions and a
    dict mapping module name to the metadata produced by the (possibly
    user-supplied) ``create_extension`` hook.
    """
    if language is not None:
        print('Warning: passing language={0!r} to cythonize() is deprecated. '
              'Instead, put "# distutils: language={0}" in your .pyx or .pxd file(s)'.format(language))
    if exclude is None:
        exclude = []
    if patterns is None:
        return [], {}
    elif isinstance(patterns, basestring) or not isinstance(patterns, Iterable):
        # A single pattern (or a lone Extension) is treated as a one-element list.
        patterns = [patterns]

    from distutils.extension import Extension
    if 'setuptools' in sys.modules:
        # Support setuptools Extension instances as well.
        extension_classes = (
            Extension,  # should normally be the same as 'setuptools.extension._Extension'
            sys.modules['setuptools.extension']._Extension,
            sys.modules['setuptools'].Extension,
        )
    else:
        extension_classes = (Extension,)

    # Modules named explicitly via Extension objects win over glob matches.
    explicit_modules = {m.name for m in patterns if isinstance(m, extension_classes)}
    deps = create_dependency_tree(ctx, quiet=quiet)

    to_exclude = set()
    if not isinstance(exclude, list):
        exclude = [exclude]
    for pattern in exclude:
        to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))

    module_list = []
    module_metadata = {}

    # if no create_extension() function is defined, use a simple
    # default function.
    create_extension = ctx.options.create_extension or default_create_extension

    seen = set()
    for pattern in patterns:
        if not isinstance(pattern, extension_classes):
            pattern = encode_filename_in_py2(pattern)
        if isinstance(pattern, str):
            filepattern = pattern
            template = Extension(pattern, [])  # Fake Extension without sources
            name = '*'
            base = None
            ext_language = language
        elif isinstance(pattern, extension_classes):
            cython_sources = [s for s in pattern.sources
                              if os.path.splitext(s)[1] in ('.py', '.pyx')]
            if cython_sources:
                # Only the first Cython source is globbed; extras are warned about.
                filepattern = cython_sources[0]
                if len(cython_sources) > 1:
                    print(u"Warning: Multiple cython sources found for extension '%s': %s\n"
                          u"See https://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
                          u"for sharing declarations among Cython files." % (pattern.name, cython_sources))
            else:
                # ignore non-cython modules
                module_list.append(pattern)
                continue
            template = pattern
            name = template.name
            base = DistutilsInfo(exn=template)
            ext_language = None  # do not override whatever the Extension says
        else:
            msg = str("pattern is not of type str nor subclass of Extension (%s)"
                      " but of type %s and class %s" % (repr(Extension),
                                                        type(pattern),
                                                        pattern.__class__))
            raise TypeError(msg)

        for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern):
            if os.path.abspath(file) in to_exclude:
                continue
            module_name = deps.fully_qualified_name(file)
            if '*' in name:
                # Glob-derived modules must not shadow explicitly listed ones.
                if module_name in explicit_modules:
                    continue
            elif name:
                module_name = name

            Utils.raise_error_if_module_name_forbidden(module_name)

            if module_name not in seen:
                try:
                    kwds = deps.distutils_info(file, aliases, base).values
                except Exception:
                    if exclude_failures:
                        continue
                    raise
                if base is not None:
                    # Fill in Extension-level settings not overridden per file.
                    for key, value in base.values.items():
                        if key not in kwds:
                            kwds[key] = value

                kwds['name'] = module_name

                # The Cython source goes first; the template's other sources follow.
                sources = [file] + [m for m in template.sources if m != filepattern]
                if 'sources' in kwds:
                    # allow users to add .c files etc.
                    for source in kwds['sources']:
                        source = encode_filename_in_py2(source)
                        if source not in sources:
                            sources.append(source)
                kwds['sources'] = sources

                if ext_language and 'language' not in kwds:
                    kwds['language'] = ext_language

                np_pythran = kwds.pop('np_pythran', False)

                # Create the new extension
                m, metadata = create_extension(template, kwds)
                m.np_pythran = np_pythran or getattr(m, 'np_pythran', False)
                if m.np_pythran:
                    update_pythran_extension(m)
                module_list.append(m)

                # Store metadata (this will be written as JSON in the
                # generated C file but otherwise has no purpose)
                module_metadata[module_name] = metadata

                if file not in m.sources:
                    # Old setuptools unconditionally replaces .pyx with .c/.cpp
                    target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c')
                    try:
                        m.sources.remove(target_file)
                    except ValueError:
                        # never seen this in the wild, but probably better to warn about this unexpected case
                        print(u"Warning: Cython source file not found in sources list, adding %s" % file)
                    m.sources.insert(0, file)
                # NOTE(review): records the pattern's Extension name (often '*'),
                # not module_name, even though the guard above checks module_name —
                # looks asymmetric; confirm this is the intended de-duplication key.
                seen.add(name)
    return module_list, module_metadata
|
| 907 |
+
|
| 908 |
+
|
| 909 |
+
# This is the user-exposed entry point.
def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=None, language=None,
              exclude_failures=False, show_all_warnings=False, **options):
    """
    Compile a set of source modules into C/C++ files and return a list of distutils
    Extension objects for them.

    :param module_list: As module list, pass either a glob pattern, a list of glob
                        patterns or a list of Extension objects.  The latter
                        allows you to configure the extensions separately
                        through the normal distutils options.
                        You can also pass Extension objects that have
                        glob patterns as their sources. Then, cythonize
                        will resolve the pattern and create a
                        copy of the Extension for every matching file.

    :param exclude: When passing glob patterns as ``module_list``, you can exclude certain
                    module names explicitly by passing them into the ``exclude`` option.

    :param nthreads: The number of concurrent builds for parallel compilation
                     (requires the ``multiprocessing`` module).

    :param aliases: If you want to use compiler directives like ``# distutils: ...`` but
                    can only know at compile time (when running the ``setup.py``) which values
                    to use, you can use aliases and pass a dictionary mapping those aliases
                    to Python strings when calling :func:`cythonize`. As an example, say you
                    want to use the compiler
                    directive ``# distutils: include_dirs = ../static_libs/include/``
                    but this path isn't always fixed and you want to find it when running
                    the ``setup.py``. You can then do ``# distutils: include_dirs = MY_HEADERS``,
                    find the value of ``MY_HEADERS`` in the ``setup.py``, put it in a python
                    variable called ``foo`` as a string, and then call
                    ``cythonize(..., aliases={'MY_HEADERS': foo})``.

    :param quiet: If True, Cython won't print error, warning, or status messages during the
                  compilation.

    :param force: Forces the recompilation of the Cython modules, even if the timestamps
                  don't indicate that a recompilation is necessary.

    :param language: To globally enable C++ mode, you can pass ``language='c++'``. Otherwise, this
                     will be determined at a per-file level based on compiler directives.  This
                     affects only modules found based on file names.  Extension instances passed
                     into :func:`cythonize` will not be changed. It is recommended to rather
                     use the compiler directive ``# distutils: language = c++`` than this option.

    :param exclude_failures: For a broad 'try to compile' mode that ignores compilation
                             failures and simply excludes the failed extensions,
                             pass ``exclude_failures=True``. Note that this only
                             really makes sense for compiling ``.py`` files which can also
                             be used without compilation.

    :param show_all_warnings: By default, not all Cython warnings are printed.
                              Set to true to show all warnings.

    :param annotate: If ``True``, will produce a HTML file for each of the ``.pyx`` or ``.py``
                     files compiled. The HTML file gives an indication
                     of how much Python interaction there is in
                     each of the source code lines, compared to plain C code.
                     It also allows you to see the C/C++ code
                     generated for each line of Cython code. This report is invaluable when
                     optimizing a function for speed,
                     and for determining when to :ref:`release the GIL <nogil>`:
                     in general, a ``nogil`` block may contain only "white" code.
                     See examples in :ref:`determining_where_to_add_types` or
                     :ref:`primes`.


    :param annotate-fullc: If ``True`` will produce a colorized HTML version of
                           the source which includes entire generated C/C++-code.


    :param compiler_directives: Allow to set compiler directives in the ``setup.py`` like this:
                                ``compiler_directives={'embedsignature': True}``.
                                See :ref:`compiler-directives`.

    :param depfile: produce depfiles for the sources if True.
    """
    if exclude is None:
        exclude = []
    if 'include_path' not in options:
        options['include_path'] = ['.']
    if 'common_utility_include_dir' in options:
        safe_makedirs(options['common_utility_include_dir'])

    depfile = options.pop('depfile', None)

    # pythran is the optionally-imported module-level name; a separate options
    # object is prepared for numpy/pythran-accelerated modules when available.
    if pythran is None:
        pythran_options = None
    else:
        pythran_options = CompilationOptions(**options)
        pythran_options.cplus = True
        pythran_options.np_pythran = True

    if force is None:
        force = os.environ.get("CYTHON_FORCE_REGEN") == "1"  # allow global overrides for build systems

    c_options = CompilationOptions(**options)
    cpp_options = CompilationOptions(**options); cpp_options.cplus = True
    ctx = Context.from_options(c_options)
    # NOTE: from here on, the name "options" is rebound — first to the plain C
    # options object, and below (per source file) to c/cpp/pythran options.
    options = c_options
    module_list, module_metadata = create_extension_list(
        module_list,
        exclude=exclude,
        ctx=ctx,
        quiet=quiet,
        exclude_failures=exclude_failures,
        language=language,
        aliases=aliases)

    fix_windows_unicode_modules(module_list)

    deps = create_dependency_tree(ctx, quiet=quiet)
    build_dir = getattr(options, 'build_dir', None)

    def copy_to_build_dir(filepath, root=os.getcwd()):
        # Mirror a cwd-relative file into the out-of-place build directory.
        filepath_abs = os.path.abspath(filepath)
        if os.path.isabs(filepath):
            filepath = filepath_abs
        if filepath_abs.startswith(root):
            # distutil extension depends are relative to cwd
            mod_dir = join_path(build_dir,
                                os.path.dirname(_relpath(filepath, root)))
            copy_once_if_newer(filepath_abs, mod_dir)

    modules_by_cfile = collections.defaultdict(list)
    to_compile = []
    for m in module_list:
        if build_dir:
            for dep in m.depends:
                copy_to_build_dir(dep)

        cy_sources = [
            source for source in m.sources
            if os.path.splitext(source)[1] in ('.pyx', '.py')]
        if len(cy_sources) == 1:
            # normal "special" case: believe the Extension module name to allow user overrides
            full_module_name = m.name
        else:
            # infer FQMN from source files
            full_module_name = None

        new_sources = []
        for source in m.sources:
            base, ext = os.path.splitext(source)
            if ext in ('.pyx', '.py'):
                if m.np_pythran:
                    c_file = base + '.cpp'
                    options = pythran_options
                elif m.language == 'c++':
                    c_file = base + '.cpp'
                    options = cpp_options
                else:
                    c_file = base + '.c'
                    options = c_options

                # setup for out of place build directory if enabled
                if build_dir:
                    if os.path.isabs(c_file):
                        c_file = os.path.splitdrive(c_file)[1]
                        c_file = c_file.split(os.sep, 1)[1]
                    c_file = os.path.join(build_dir, c_file)
                    dir = os.path.dirname(c_file)
                    safe_makedirs_once(dir)

                # write out the depfile, if requested
                if depfile:
                    dependencies = deps.all_dependencies(source)
                    write_depfile(c_file, source, dependencies)

                # Missing files and those generated by other Cython versions should always be recreated.
                if Utils.file_generated_by_this_cython(c_file):
                    c_timestamp = os.path.getmtime(c_file)
                else:
                    c_timestamp = -1

                # Priority goes first to modified files, second to direct
                # dependents, and finally to indirect dependents.
                if c_timestamp < deps.timestamp(source):
                    dep_timestamp, dep = deps.timestamp(source), source
                    priority = 0
                else:
                    dep_timestamp, dep = deps.newest_dependency(source)
                    priority = 2 - (dep in deps.immediate_dependencies(source))
                if force or c_timestamp < dep_timestamp:
                    if not quiet and not force:
                        if source == dep:
                            print(u"Compiling %s because it changed." % Utils.decode_filename(source))
                        else:
                            print(u"Compiling %s because it depends on %s." % (
                                Utils.decode_filename(source),
                                Utils.decode_filename(dep),
                            ))
                    if not force and options.cache:
                        fingerprint = deps.transitive_fingerprint(source, m, options)
                    else:
                        fingerprint = None
                    to_compile.append((
                        priority, source, c_file, fingerprint, quiet,
                        options, not exclude_failures, module_metadata.get(m.name),
                        full_module_name, show_all_warnings))
                new_sources.append(c_file)
                modules_by_cfile[c_file].append(m)
            else:
                new_sources.append(source)
                if build_dir:
                    copy_to_build_dir(source)
        m.sources = new_sources

    if options.cache:
        if not os.path.exists(options.cache):
            os.makedirs(options.cache)
    to_compile.sort()
    # Drop "priority" component of "to_compile" entries and add a
    # simple progress indicator.
    N = len(to_compile)
    progress_fmt = "[{0:%d}/{1}] " % len(str(N))
    for i in range(N):
        progress = progress_fmt.format(i+1, N)
        to_compile[i] = to_compile[i][1:] + (progress,)

    if N <= 1:
        nthreads = 0
    if nthreads:
        import multiprocessing
        pool = multiprocessing.Pool(
            nthreads, initializer=_init_multiprocessing_helper)
        # This is a bit more involved than it should be, because KeyboardInterrupts
        # break the multiprocessing workers when using a normal pool.map().
        # See, for example:
        # https://noswap.com/blog/python-multiprocessing-keyboardinterrupt
        try:
            result = pool.map_async(cythonize_one_helper, to_compile, chunksize=1)
            pool.close()
            while not result.ready():
                try:
                    result.get(99999)  # seconds
                except multiprocessing.TimeoutError:
                    pass
        except KeyboardInterrupt:
            pool.terminate()
            raise
        pool.join()
    else:
        for args in to_compile:
            cythonize_one(*args)

    if exclude_failures:
        failed_modules = set()
        for c_file, modules in modules_by_cfile.items():
            if not os.path.exists(c_file):
                failed_modules.update(modules)
            elif os.path.getsize(c_file) < 200:
                # A tiny output file may be a stub starting with "#error".
                f = io_open(c_file, 'r', encoding='iso8859-1')
                try:
                    if f.read(len('#error ')) == '#error ':
                        # dead compilation result
                        failed_modules.update(modules)
                finally:
                    f.close()
        if failed_modules:
            for module in failed_modules:
                module_list.remove(module)
            print(u"Failed compilations: %s" % ', '.join(sorted([
                module.name for module in failed_modules])))

    if options.cache:
        cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100))
    # cythonize() is often followed by the (non-Python-buffered)
    # compiler output, flush now to avoid interleaving output.
    sys.stdout.flush()
    return module_list
|
| 1181 |
+
|
| 1182 |
+
|
| 1183 |
+
def fix_windows_unicode_modules(module_list):
    """Work around https://bugs.python.org/issue39432 in place.

    distutils on Windows for Python 3.5 .. 3.8.1 generates broken
    ``export_symbols`` for non-ASCII module names.  Cython already defines
    ``PyInit`` with the correct linkage, so the ``PyInit_<name>`` symbol is
    filtered out of ``export_symbols`` for affected modules.  No-op on other
    platforms/versions.
    """
    if sys.platform != "win32":
        return
    if sys.version_info < (3, 5) or sys.version_info >= (3, 8, 2):
        return

    def make_filtered_list(ignored_symbol, old_entries):
        class FilteredExportSymbols(list):
            # export_symbols for unicode filenames cause link errors on Windows.
            # Cython doesn't need them (it already defines PyInit with the
            # correct linkage), so this temporary class stops them from being
            # generated by pretending the symbol is already present.
            def __contains__(self, val):
                # so distutils doesn't "helpfully" add PyInit_<name>
                return val == ignored_symbol or list.__contains__(self, val)

        result = FilteredExportSymbols(old_entries)
        if old_entries:
            result.extend(entry for entry in old_entries if entry != ignored_symbol)
        return result

    for module in module_list:
        # ASCII names are unaffected; skip them.  (TODO: m.name.isascii() in Py3.7+)
        try:
            module.name.encode("ascii")
            continue
        except UnicodeEncodeError:
            pass
        module.export_symbols = make_filtered_list(
            "PyInit_" + module.name.rsplit(".", 1)[-1],
            module.export_symbols,
        )
|
| 1216 |
+
|
| 1217 |
+
|
| 1218 |
+
if os.environ.get('XML_RESULTS'):
    # Test-runner integration: when XML_RESULTS names a directory, wrap each
    # decorated call and write a JUnit-style XML file per module recording
    # success/failure and wall-clock time.
    compile_result_dir = os.environ['XML_RESULTS']
    def record_results(func):
        def with_record(*args):
            t = time.time()
            success = True
            try:
                try:
                    func(*args)
                except:
                    # Bare except: any failure is recorded in the XML and
                    # swallowed (the wrapper returns None instead of re-raising).
                    success = False
            finally:
                t = time.time() - t
                module = fully_qualified_name(args[0])
                name = "cythonize." + module
                failures = 1 - success  # bool arithmetic: 0 on success, 1 on failure
                if success:
                    failure_item = ""
                else:
                    failure_item = "failure"
                output = open(os.path.join(compile_result_dir, name + ".xml"), "w")
                output.write("""
                    <?xml version="1.0" ?>
                    <testsuite name="%(name)s" errors="0" failures="%(failures)s" tests="1" time="%(t)s">
                    <testcase classname="%(name)s" name="cythonize">
                    %(failure_item)s
                    </testcase>
                    </testsuite>
                """.strip() % locals())
                output.close()
        return with_record
else:
    # XML result recording not requested: the decorator is a no-op.
    def record_results(func):
        return func
|
| 1252 |
+
|
| 1253 |
+
|
| 1254 |
+
# TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results
def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
                  raise_on_failure=True, embedded_metadata=None,
                  full_module_name=None, show_all_warnings=False,
                  progress=""):
    """Compile a single .pyx/.py file to *c_file*, using the fingerprint
    cache when available.

    When *fingerprint* is set, a previously cached result (gzip for a single
    artifact, zip for several) is reused instead of compiling, and a fresh
    compilation result is stored back into ``options.cache``.  On failure,
    raises CompileError if *raise_on_failure*, otherwise removes the stale
    output file so it is not mistaken for a valid result.
    """
    from ..Compiler.Main import compile_single, default_options
    from ..Compiler.Errors import CompileError, PyrexError

    if fingerprint:
        if not os.path.exists(options.cache):
            safe_makedirs(options.cache)
        # Cython-generated c files are highly compressible.
        # (E.g. a compression ratio of about 10 for Sage).
        fingerprint_file_base = join_path(
            options.cache, "%s-%s" % (os.path.basename(c_file), fingerprint))
        gz_fingerprint_file = fingerprint_file_base + gzip_ext
        zip_fingerprint_file = fingerprint_file_base + '.zip'
        if os.path.exists(gz_fingerprint_file) or os.path.exists(zip_fingerprint_file):
            if not quiet:
                print(u"%sFound compiled %s in cache" % (progress, pyx_file))
            if os.path.exists(gz_fingerprint_file):
                # Touch the cache entry so LRU-style cleanup sees it as fresh.
                os.utime(gz_fingerprint_file, None)
                with contextlib.closing(gzip_open(gz_fingerprint_file, 'rb')) as g:
                    with contextlib.closing(open(c_file, 'wb')) as f:
                        shutil.copyfileobj(g, f)
            else:
                os.utime(zip_fingerprint_file, None)
                dirname = os.path.dirname(c_file)
                with contextlib.closing(zipfile.ZipFile(zip_fingerprint_file)) as z:
                    for artifact in z.namelist():
                        z.extract(artifact, os.path.join(dirname, artifact))
            return
    if not quiet:
        print(u"%sCythonizing %s" % (progress, Utils.decode_filename(pyx_file)))
    if options is None:
        options = CompilationOptions(default_options)
    options.output_file = c_file
    options.embedded_metadata = embedded_metadata

    # Temporarily lower the warning threshold when all warnings are requested.
    old_warning_level = Errors.LEVEL
    if show_all_warnings:
        Errors.LEVEL = 0

    any_failures = 0
    try:
        result = compile_single(pyx_file, options, full_module_name=full_module_name)
        if result.num_errors > 0:
            any_failures = 1
    except (EnvironmentError, PyrexError) as e:
        sys.stderr.write('%s\n' % e)
        any_failures = 1
        # XXX
        import traceback
        traceback.print_exc()
    except Exception:
        if raise_on_failure:
            raise
        import traceback
        traceback.print_exc()
        any_failures = 1
    finally:
        if show_all_warnings:
            Errors.LEVEL = old_warning_level

    if any_failures:
        if raise_on_failure:
            raise CompileError(None, pyx_file)
        elif os.path.exists(c_file):
            # Remove the stale/partial output so it is not reused.
            os.remove(c_file)
    elif fingerprint:
        # Store the new result in the cache: gzip for a lone C file, zip when
        # header/api/interface files were generated as well.
        artifacts = list(filter(None, [
            getattr(result, attr, None)
            for attr in ('c_file', 'h_file', 'api_file', 'i_file')]))
        if len(artifacts) == 1:
            fingerprint_file = gz_fingerprint_file
            with contextlib.closing(open(c_file, 'rb')) as f:
                with contextlib.closing(gzip_open(fingerprint_file + '.tmp', 'wb')) as g:
                    shutil.copyfileobj(f, g)
        else:
            fingerprint_file = zip_fingerprint_file
            with contextlib.closing(zipfile.ZipFile(
                    fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
                for artifact in artifacts:
                    zip.write(artifact, os.path.basename(artifact))
        # Atomic publish of the finished cache entry.
        os.rename(fingerprint_file + '.tmp', fingerprint_file)
|
| 1340 |
+
|
| 1341 |
+
|
| 1342 |
+
def cythonize_one_helper(m):
    """Multiprocessing entry point: unpack one ``to_compile`` tuple and run it.

    The traceback is printed here, in the worker, because exceptions do not
    always survive the round-trip through the pool intact.
    """
    try:
        return cythonize_one(*m)
    except Exception:
        import traceback
        traceback.print_exc()
        raise
|
| 1349 |
+
|
| 1350 |
+
|
| 1351 |
+
def _init_multiprocessing_helper():
|
| 1352 |
+
# KeyboardInterrupt kills workers, so don't let them get it
|
| 1353 |
+
import signal
|
| 1354 |
+
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
| 1355 |
+
|
| 1356 |
+
|
| 1357 |
+
def cleanup_cache(cache, target_size, ratio=.85):
    """Trim the cythonize() cache directory towards *target_size* bytes.

    A fast ``du -s -k`` probe is attempted first; when it succeeds and the
    cache is already under budget, nothing is touched.  Otherwise every
    entry is stat()ed and files are unlinked — iterating the (atime, size,
    path) tuples in reverse sorted order, i.e. newest access time first
    (NOTE(review): confirm this ordering is intended) — until the total
    drops below ``target_size * ratio``.
    """
    try:
        du = subprocess.Popen(['du', '-s', '-k', os.path.abspath(cache)], stdout=subprocess.PIPE)
        du_output, _ = du.communicate()
        if du.wait() == 0:
            total_size = 1024 * int(du_output.strip().split()[0])
            if total_size < target_size:
                return
    except (OSError, ValueError):
        # No usable `du` (e.g. Windows) or unparsable output: fall through to
        # the portable stat()-based scan below.
        pass
    total_size = 0
    entries = []
    for entry_name in os.listdir(cache):
        entry_path = join_path(cache, entry_name)
        stat_info = os.stat(entry_path)
        total_size += stat_info.st_size
        entries.append((stat_info.st_atime, stat_info.st_size, entry_path))
    if total_size > target_size:
        for _atime, entry_size, entry_path in reversed(sorted(entries)):
            os.unlink(entry_path)
            total_size -= entry_size
            if total_size < target_size * ratio:
                break
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Distutils.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from Cython.Distutils.build_ext import build_ext
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Inline.py
ADDED
|
@@ -0,0 +1,372 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import hashlib
|
| 4 |
+
import inspect
|
| 5 |
+
import os
|
| 6 |
+
import re
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
from distutils.core import Distribution, Extension
|
| 10 |
+
from distutils.command.build_ext import build_ext
|
| 11 |
+
|
| 12 |
+
import Cython
|
| 13 |
+
from ..Compiler.Main import Context
|
| 14 |
+
from ..Compiler.Options import (default_options, CompilationOptions,
|
| 15 |
+
get_directive_defaults)
|
| 16 |
+
|
| 17 |
+
from ..Compiler.Visitor import CythonTransform, EnvTransform
|
| 18 |
+
from ..Compiler.ParseTreeTransforms import SkipDeclarations
|
| 19 |
+
from ..Compiler.TreeFragment import parse_from_strings
|
| 20 |
+
from ..Compiler.StringEncoding import _unicode
|
| 21 |
+
from .Dependencies import strip_string_literals, cythonize, cached_function
|
| 22 |
+
from ..Compiler import Pipeline
|
| 23 |
+
from ..Utils import get_cython_cache_dir
|
| 24 |
+
import cython as cython_module
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
# True on Python 3 interpreters; kept as a module-level flag for callers.
IS_PY3 = sys.version_info >= (3,)

# A utility function to convert user-supplied ASCII strings to unicode
# (a no-op on Python 3).
if IS_PY3:
    to_unicode = lambda x: x
else:
    def to_unicode(s):
        return s.decode('ascii') if isinstance(s, bytes) else s
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
if sys.version_info < (3, 5):
    # Old interpreters: the (since-removed) `imp` module handles this directly.
    import imp

    def load_dynamic(name, module_path):
        """Load and return the extension module *name* from *module_path*."""
        return imp.load_dynamic(name, module_path)
else:
    import importlib.util
    from importlib.machinery import ExtensionFileLoader

    def load_dynamic(name, path):
        """Load and return the extension module *name* from *path* via importlib."""
        loader = ExtensionFileLoader(name, path)
        spec = importlib.util.spec_from_file_location(name, loader=loader)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class UnboundSymbols(EnvTransform, SkipDeclarations):
    """Tree visitor that collects the names a code fragment uses but does
    not define in any enclosing scope."""
    def __init__(self):
        # NOTE(review): super(EnvTransform, self) starts the MRO walk *after*
        # EnvTransform, so EnvTransform.__init__ itself is skipped — this looks
        # intentional; confirm before "fixing" it to super(UnboundSymbols, ...).
        super(EnvTransform, self).__init__(context=None)
        # Names that failed scope lookup during the traversal.
        self.unbound = set()
    def visit_NameNode(self, node):
        # Record the name if the current environment cannot resolve it.
        if not self.current_env().lookup(node.name):
            self.unbound.add(node.name)
        return node
    def __call__(self, node):
        # Run the transform over the tree, then return the accumulated names.
        super(UnboundSymbols, self).__call__(node)
        return self.unbound
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
@cached_function
def unbound_symbols(code, context=None):
    """Return a tuple of names used by *code* that neither the fragment
    itself nor the Python builtins define."""
    code = to_unicode(code)
    if context is None:
        context = Context([], get_directive_defaults(),
                          options=CompilationOptions(default_options))
    from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
    tree = parse_from_strings('(tree fragment)', code)
    # Run the normal 'pyx' pipeline only up to (and including) declaration
    # analysis; later phases are unnecessary for deciding what is unbound.
    for phase in Pipeline.create_pipeline(context, 'pyx'):
        if phase is None:
            continue
        tree = phase(tree)
        if isinstance(phase, AnalyseDeclarationsTransform):
            break
    try:
        import builtins
    except ImportError:
        # Python 2 fallback.
        import __builtin__ as builtins
    return tuple(UnboundSymbols()(tree) - set(dir(builtins)))
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def unsafe_type(arg, context=None):
    """Like safe_type(), but additionally maps Python ints to C 'long'
    (unsafe because large ints can overflow a C long)."""
    if type(arg) is int:
        return 'long'
    return safe_type(arg, context)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def safe_type(arg, context=None):
    """Return a Cython type declaration string that can safely hold *arg*.

    Builtin containers map to their own names, numeric scalars to the
    matching C type, numpy arrays to a buffer declaration, and anything
    else to the first extension type found in *arg*'s MRO via *context*
    (falling back to plain 'object').
    """
    kind = type(arg)
    if kind in (list, tuple, dict, str):
        return kind.__name__
    if kind is complex:
        return 'double complex'
    if kind is float:
        return 'double'
    if kind is bool:
        return 'bint'
    numpy_mod = sys.modules.get('numpy')
    if numpy_mod is not None and isinstance(arg, numpy_mod.ndarray):
        return 'numpy.ndarray[numpy.%s_t, ndim=%s]' % (arg.dtype.name, arg.ndim)
    for base_type in kind.__mro__:
        if base_type.__module__ in ('__builtin__', 'builtins'):
            return 'object'
        module = context.find_module(base_type.__module__, need_pxd=False)
        if module:
            entry = module.lookup(base_type.__name__)
            if entry.is_type:
                return '%s.%s' % (base_type.__module__, base_type.__name__)
    return 'object'
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def _get_build_extension():
|
| 122 |
+
dist = Distribution()
|
| 123 |
+
# Ensure the build respects distutils configuration by parsing
|
| 124 |
+
# the configuration files
|
| 125 |
+
config_files = dist.find_config_files()
|
| 126 |
+
dist.parse_config_files(config_files)
|
| 127 |
+
build_extension = build_ext(dist)
|
| 128 |
+
build_extension.finalize_options()
|
| 129 |
+
return build_extension
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
@cached_function
def _create_context(cython_include_dirs):
    """Build (and memoize, via @cached_function) a compiler Context for the
    given tuple of Cython include directories."""
    include_dirs = list(cython_include_dirs)
    directives = get_directive_defaults()
    return Context(include_dirs, directives,
                   options=CompilationOptions(default_options))
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
# Cache used by cython_inline(): maps a code string to its unbound-symbol
# tuple, and (code, arg_sigs, key_hash) triples to compiled __invoke functions.
_cython_inline_cache = {}
# Default compilation context, searching only the current directory.
_cython_inline_default_context = _create_context(('.',))
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def _populate_unbound(kwds, unbound_symbols, locals=None, globals=None):
|
| 146 |
+
for symbol in unbound_symbols:
|
| 147 |
+
if symbol not in kwds:
|
| 148 |
+
if locals is None or globals is None:
|
| 149 |
+
calling_frame = inspect.currentframe().f_back.f_back.f_back
|
| 150 |
+
if locals is None:
|
| 151 |
+
locals = calling_frame.f_locals
|
| 152 |
+
if globals is None:
|
| 153 |
+
globals = calling_frame.f_globals
|
| 154 |
+
if not isinstance(locals, dict):
|
| 155 |
+
# FrameLocalsProxy is stricter than dict on how it looks up keys
|
| 156 |
+
# and this means our "EncodedStrings" don't match the keys in locals.
|
| 157 |
+
# Therefore copy to a dict.
|
| 158 |
+
locals = dict(locals)
|
| 159 |
+
if symbol in locals:
|
| 160 |
+
kwds[symbol] = locals[symbol]
|
| 161 |
+
elif symbol in globals:
|
| 162 |
+
kwds[symbol] = globals[symbol]
|
| 163 |
+
else:
|
| 164 |
+
print("Couldn't find %r" % symbol)
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def _inline_key(orig_code, arg_sigs, language_level):
    """Derive a stable hex digest identifying one inline compilation unit
    (code text, argument signatures, interpreter and Cython version)."""
    key_parts = (orig_code, arg_sigs, sys.version_info, sys.executable,
                 language_level, Cython.__version__)
    digest = hashlib.sha1(_unicode(key_parts).encode('utf-8'))
    return digest.hexdigest()
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def cython_inline(code, get_type=unsafe_type,
                  lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
                  cython_include_dirs=None, cython_compiler_directives=None,
                  force=False, quiet=False, locals=None, globals=None, language_level=None, **kwds):
    """Compile *code* as the body of a Cython function and execute it.

    Unbound names in *code* are resolved from **kwds**, then from *locals*
    and *globals* (defaulting to the caller's frame), typed via *get_type*,
    and passed in as function arguments.  Compiled modules are cached on
    disk under *lib_dir* and in-process in _cython_inline_cache.
    Returns the locals() dict of the generated function body.
    """

    if get_type is None:
        get_type = lambda x: 'object'
    ctx = _create_context(tuple(cython_include_dirs)) if cython_include_dirs else _cython_inline_default_context

    cython_compiler_directives = dict(cython_compiler_directives) if cython_compiler_directives else {}
    if language_level is None and 'language_level' not in cython_compiler_directives:
        language_level = '3str'
    if language_level is not None:
        cython_compiler_directives['language_level'] = language_level

    key_hash = None

    # Fast path if this has been called in this session.
    _unbound_symbols = _cython_inline_cache.get(code)
    if _unbound_symbols is not None:
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
        args = sorted(kwds.items())
        arg_sigs = tuple([(get_type(value, ctx), arg) for arg, value in args])
        key_hash = _inline_key(code, arg_sigs, language_level)
        invoke = _cython_inline_cache.get((code, arg_sigs, key_hash))
        if invoke is not None:
            arg_list = [arg[1] for arg in args]
            return invoke(*arg_list)

    # Slow path: normalise the code, extract unbound symbols and compile.
    orig_code = code
    code = to_unicode(code)
    code, literals = strip_string_literals(code)
    code = strip_common_indent(code)
    if locals is None:
        locals = inspect.currentframe().f_back.f_back.f_locals
    if globals is None:
        globals = inspect.currentframe().f_back.f_back.f_globals
    try:
        _cython_inline_cache[orig_code] = _unbound_symbols = unbound_symbols(code)
        _populate_unbound(kwds, _unbound_symbols, locals, globals)
    except AssertionError:
        if not quiet:
            # Parsing from strings not fully supported (e.g. cimports).
            print("Could not parse code as a string (to extract unbound symbols).")

    # A passed-in Cython shadow module becomes a cimport, not an argument.
    cimports = []
    for name, arg in list(kwds.items()):
        if arg is cython_module:
            cimports.append('\ncimport cython as %s' % name)
            del kwds[name]
    arg_names = sorted(kwds)
    arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
    if key_hash is None:
        key_hash = _inline_key(orig_code, arg_sigs, language_level)
    module_name = "_cython_inline_" + key_hash

    if module_name in sys.modules:
        # Already imported in this process.
        module = sys.modules[module_name]

    else:
        build_extension = None
        if cython_inline.so_ext is None:
            # Figure out and cache current extension suffix
            build_extension = _get_build_extension()
            cython_inline.so_ext = build_extension.get_ext_filename('')

        lib_dir = os.path.abspath(lib_dir)
        module_path = os.path.join(lib_dir, module_name + cython_inline.so_ext)

        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)
        if force or not os.path.isfile(module_path):
            # Generate, cythonize and compile a fresh extension module.
            cflags = []
            define_macros = []
            c_include_dirs = []
            qualified = re.compile(r'([.\w]+)[.]')
            for type, _ in arg_sigs:
                m = qualified.match(type)
                if m:
                    # Qualified argument types imply a cimport of their package.
                    cimports.append('\ncimport %s' % m.groups()[0])
                    # one special case
                    if m.groups()[0] == 'numpy':
                        import numpy
                        c_include_dirs.append(numpy.get_include())
                        define_macros.append(("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION"))
                        # cflags.append('-Wno-unused')
            module_body, func_body = extract_func_code(code)
            params = ', '.join(['%s %s' % a for a in arg_sigs])
            module_code = """
%(module_body)s
%(cimports)s
def __invoke(%(params)s):
%(func_body)s
    return locals()
""" % {'cimports': '\n'.join(cimports),
       'module_body': module_body,
       'params': params,
       'func_body': func_body }
            # Re-insert the string literals stripped out earlier.
            for key, value in literals.items():
                module_code = module_code.replace(key, value)
            pyx_file = os.path.join(lib_dir, module_name + '.pyx')
            fh = open(pyx_file, 'w')
            try:
                fh.write(module_code)
            finally:
                fh.close()
            extension = Extension(
                name=module_name,
                sources=[pyx_file],
                include_dirs=c_include_dirs or None,
                extra_compile_args=cflags or None,
                define_macros=define_macros or None,
            )
            if build_extension is None:
                build_extension = _get_build_extension()
            build_extension.extensions = cythonize(
                [extension],
                include_path=cython_include_dirs or ['.'],
                compiler_directives=cython_compiler_directives,
                quiet=quiet)
            build_extension.build_temp = os.path.dirname(pyx_file)
            build_extension.build_lib = lib_dir
            build_extension.run()

        # On recent Windows/Python, dependent DLLs must be made discoverable
        # explicitly before the extension can be loaded.
        if sys.platform == 'win32' and sys.version_info >= (3, 8):
            with os.add_dll_directory(os.path.abspath(lib_dir)):
                module = load_dynamic(module_name, module_path)
        else:
            module = load_dynamic(module_name, module_path)

    _cython_inline_cache[orig_code, arg_sigs, key_hash] = module.__invoke
    arg_list = [kwds[arg] for arg in arg_names]
    return module.__invoke(*arg_list)
|
| 305 |
+
|
| 306 |
+
|
| 307 |
+
# Cached suffix used by cython_inline above. None should get
# overridden with actual value upon the first cython_inline invocation
cython_inline.so_ext = None

# Find the first non-space character of a line; returns None for all-blank lines.
_find_non_space = re.compile('[^ ]').search
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def strip_common_indent(code):
    """Remove the smallest shared leading indentation from all code lines.

    Blank lines are ignored.  Comment lines neither contribute to the
    minimum indent nor get re-indented: slicing a comment indented less
    than the common indent would cut off part of its text.
    """
    # Local copy of the module-level helper so this function is self-contained.
    find_non_space = re.compile('[^ ]').search
    min_indent = None
    lines = code.splitlines()
    for line in lines:
        match = find_non_space(line)
        if not match:
            continue  # blank line
        indent = match.start()
        if line[indent] == '#':
            continue  # comments don't define the common indent
        if min_indent is None or min_indent > indent:
            min_indent = indent
    for ix, line in enumerate(lines):
        match = find_non_space(line)
        # BUG FIX: this used to test line[indent:indent+1] == '#', reusing the
        # stale 'indent' left over from the first loop, so comment lines could
        # be dedented (and their text corrupted) instead of being skipped.
        if not match or not line or line[match.start()] == '#':
            continue
        lines[ix] = line[min_indent:]
    return '\n'.join(lines)
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimport)|(from .+ import +[*]))')
def extract_func_code(code):
    """Split *code* into (module_level_code, indented_function_body).

    Unindented lines matching cdef extern/class, cimport or star-import
    statements (and their indented continuations) go to the module level;
    everything else becomes the function body, indented by four spaces.
    """
    module_lines = []
    func_lines = []
    target = func_lines
    for line in code.replace('\t', '    ').split('\n'):
        if not line.startswith(' '):
            # An unindented line decides where the following lines go.
            target = module_lines if module_statement.match(line) else func_lines
        target.append(line)
    return '\n'.join(module_lines), '    ' + '\n    '.join(func_lines)
|
| 349 |
+
|
| 350 |
+
|
| 351 |
+
def get_body(source):
    """Return the executable body of a function or lambda source string.

    For a ``lambda``, the expression after the colon is rewritten into an
    explicit ``return`` statement so it can serve as a function body.
    """
    ix = source.index(':')
    # BUG FIX: this used to compare source[:5] == 'lambda', which can never be
    # true (a five-character slice vs a six-character literal), so the lambda
    # branch was dead and lambda bodies lost their implicit return.
    if source.startswith('lambda'):
        return "return %s" % source[ix+1:]
    else:
        return source[ix+1:]
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
# Lots to be done here... It would be especially cool if compiled functions
|
| 360 |
+
# could invoke each other quickly.
|
| 361 |
+
class RuntimeCompiledFunction(object):
    """Wrap a Python function so each call compiles and runs its body
    through cython_inline(), using the function's own globals."""

    def __init__(self, f):
        self._f = f
        self._body = get_body(inspect.getsource(f))

    def __call__(self, *args, **kwds):
        bound_args = inspect.getcallargs(self._f, *args, **kwds)
        # func_globals is the Python 2 spelling of __globals__.
        namespace = self._f.__globals__ if IS_PY3 else self._f.func_globals
        return cython_inline(self._body, locals=namespace, globals=namespace, **bound_args)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/IpythonMagic.py
ADDED
|
@@ -0,0 +1,572 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
=====================
|
| 4 |
+
Cython related magics
|
| 5 |
+
=====================
|
| 6 |
+
|
| 7 |
+
Magic command interface for interactive work with Cython
|
| 8 |
+
|
| 9 |
+
.. note::
|
| 10 |
+
|
| 11 |
+
The ``Cython`` package needs to be installed separately. It
|
| 12 |
+
can be obtained using ``easy_install`` or ``pip``.
|
| 13 |
+
|
| 14 |
+
Usage
|
| 15 |
+
=====
|
| 16 |
+
|
| 17 |
+
To enable the magics below, execute ``%load_ext cython``.
|
| 18 |
+
|
| 19 |
+
``%%cython``
|
| 20 |
+
|
| 21 |
+
{CYTHON_DOC}
|
| 22 |
+
|
| 23 |
+
``%%cython_inline``
|
| 24 |
+
|
| 25 |
+
{CYTHON_INLINE_DOC}
|
| 26 |
+
|
| 27 |
+
``%%cython_pyximport``
|
| 28 |
+
|
| 29 |
+
{CYTHON_PYXIMPORT_DOC}
|
| 30 |
+
|
| 31 |
+
Author:
|
| 32 |
+
* Brian Granger
|
| 33 |
+
|
| 34 |
+
Code moved from IPython and adapted by:
|
| 35 |
+
* Martín Gaitán
|
| 36 |
+
|
| 37 |
+
Parts of this code were taken from Cython.inline.
|
| 38 |
+
"""
|
| 39 |
+
#-----------------------------------------------------------------------------
|
| 40 |
+
# Copyright (C) 2010-2011, IPython Development Team.
|
| 41 |
+
#
|
| 42 |
+
# Distributed under the terms of the Modified BSD License.
|
| 43 |
+
#
|
| 44 |
+
# The full license is in the file ipython-COPYING.rst, distributed with this software.
|
| 45 |
+
#-----------------------------------------------------------------------------
|
| 46 |
+
|
| 47 |
+
from __future__ import absolute_import, print_function
|
| 48 |
+
|
| 49 |
+
import io
|
| 50 |
+
import os
|
| 51 |
+
import re
|
| 52 |
+
import sys
|
| 53 |
+
import time
|
| 54 |
+
import copy
|
| 55 |
+
import distutils.log
|
| 56 |
+
import textwrap
|
| 57 |
+
|
| 58 |
+
IO_ENCODING = sys.getfilesystemencoding()
|
| 59 |
+
IS_PY2 = sys.version_info[0] < 3
|
| 60 |
+
|
| 61 |
+
import hashlib
|
| 62 |
+
from distutils.core import Distribution, Extension
|
| 63 |
+
from distutils.command.build_ext import build_ext
|
| 64 |
+
|
| 65 |
+
from IPython.core import display
|
| 66 |
+
from IPython.core import magic_arguments
|
| 67 |
+
from IPython.core.magic import Magics, magics_class, cell_magic
|
| 68 |
+
try:
|
| 69 |
+
from IPython.paths import get_ipython_cache_dir
|
| 70 |
+
except ImportError:
|
| 71 |
+
# older IPython version
|
| 72 |
+
from IPython.utils.path import get_ipython_cache_dir
|
| 73 |
+
from IPython.utils.text import dedent
|
| 74 |
+
|
| 75 |
+
from ..Shadow import __version__ as cython_version
|
| 76 |
+
from ..Compiler.Errors import CompileError
|
| 77 |
+
from .Inline import cython_inline, load_dynamic
|
| 78 |
+
from .Dependencies import cythonize
|
| 79 |
+
from ..Utils import captured_fd, print_captured
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
# Per-compiler flag sets for profile guided optimisation (PGO):
# 'gen' flags instrument the build to collect a runtime profile,
# 'use' flags recompile using the profile collected in {TEMPDIR}.
PGO_CONFIG = {
    'gcc': {
        'gen': ['-fprofile-generate', '-fprofile-dir={TEMPDIR}'],
        'use': ['-fprofile-use', '-fprofile-correction', '-fprofile-dir={TEMPDIR}'],
    },
    # blind copy from 'configure' script in CPython 3.7
    'icc': {
        'gen': ['-prof-gen'],
        'use': ['-prof-use'],
    }
}
# MinGW is GCC-based, so it takes the same flags.
PGO_CONFIG['mingw32'] = PGO_CONFIG['gcc']
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
if IS_PY2:
    def encode_fs(name):
        """Encode a text path to bytes with the filesystem encoding (Py2)."""
        if isinstance(name, bytes):
            return name
        return name.encode(IO_ENCODING)
else:
    def encode_fs(name):
        """On Python 3, filesystem APIs accept str directly; return unchanged."""
        return name
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
@magics_class
|
| 105 |
+
class CythonMagics(Magics):
|
| 106 |
+
|
| 107 |
+
def __init__(self, shell):
|
| 108 |
+
super(CythonMagics, self).__init__(shell)
|
| 109 |
+
self._reloads = {}
|
| 110 |
+
self._code_cache = {}
|
| 111 |
+
self._pyximport_installed = False
|
| 112 |
+
|
| 113 |
+
def _import_all(self, module):
|
| 114 |
+
mdict = module.__dict__
|
| 115 |
+
if '__all__' in mdict:
|
| 116 |
+
keys = mdict['__all__']
|
| 117 |
+
else:
|
| 118 |
+
keys = [k for k in mdict if not k.startswith('_')]
|
| 119 |
+
|
| 120 |
+
for k in keys:
|
| 121 |
+
try:
|
| 122 |
+
self.shell.push({k: mdict[k]})
|
| 123 |
+
except KeyError:
|
| 124 |
+
msg = "'module' object has no attribute '%s'" % k
|
| 125 |
+
raise AttributeError(msg)
|
| 126 |
+
|
| 127 |
+
@cell_magic
|
| 128 |
+
def cython_inline(self, line, cell):
|
| 129 |
+
"""Compile and run a Cython code cell using Cython.inline.
|
| 130 |
+
|
| 131 |
+
This magic simply passes the body of the cell to Cython.inline
|
| 132 |
+
and returns the result. If the variables `a` and `b` are defined
|
| 133 |
+
in the user's namespace, here is a simple example that returns
|
| 134 |
+
their sum::
|
| 135 |
+
|
| 136 |
+
%%cython_inline
|
| 137 |
+
return a+b
|
| 138 |
+
|
| 139 |
+
For most purposes, we recommend the usage of the `%%cython` magic.
|
| 140 |
+
"""
|
| 141 |
+
locs = self.shell.user_global_ns
|
| 142 |
+
globs = self.shell.user_ns
|
| 143 |
+
return cython_inline(cell, locals=locs, globals=globs)
|
| 144 |
+
|
| 145 |
+
@cell_magic
|
| 146 |
+
def cython_pyximport(self, line, cell):
|
| 147 |
+
"""Compile and import a Cython code cell using pyximport.
|
| 148 |
+
|
| 149 |
+
The contents of the cell are written to a `.pyx` file in the current
|
| 150 |
+
working directory, which is then imported using `pyximport`. This
|
| 151 |
+
magic requires a module name to be passed::
|
| 152 |
+
|
| 153 |
+
%%cython_pyximport modulename
|
| 154 |
+
def f(x):
|
| 155 |
+
return 2.0*x
|
| 156 |
+
|
| 157 |
+
The compiled module is then imported and all of its symbols are
|
| 158 |
+
injected into the user's namespace. For most purposes, we recommend
|
| 159 |
+
the usage of the `%%cython` magic.
|
| 160 |
+
"""
|
| 161 |
+
module_name = line.strip()
|
| 162 |
+
if not module_name:
|
| 163 |
+
raise ValueError('module name must be given')
|
| 164 |
+
fname = module_name + '.pyx'
|
| 165 |
+
with io.open(fname, 'w', encoding='utf-8') as f:
|
| 166 |
+
f.write(cell)
|
| 167 |
+
if 'pyximport' not in sys.modules or not self._pyximport_installed:
|
| 168 |
+
import pyximport
|
| 169 |
+
pyximport.install()
|
| 170 |
+
self._pyximport_installed = True
|
| 171 |
+
if module_name in self._reloads:
|
| 172 |
+
module = self._reloads[module_name]
|
| 173 |
+
# Note: reloading extension modules is not actually supported
|
| 174 |
+
# (requires PEP-489 reinitialisation support).
|
| 175 |
+
# Don't know why this should ever have worked as it reads here.
|
| 176 |
+
# All we really need to do is to update the globals below.
|
| 177 |
+
#reload(module)
|
| 178 |
+
else:
|
| 179 |
+
__import__(module_name)
|
| 180 |
+
module = sys.modules[module_name]
|
| 181 |
+
self._reloads[module_name] = module
|
| 182 |
+
self._import_all(module)
|
| 183 |
+
|
| 184 |
+
@magic_arguments.magic_arguments()
|
| 185 |
+
@magic_arguments.argument(
|
| 186 |
+
'-a', '--annotate', action='store_const', const='default', dest='annotate',
|
| 187 |
+
help="Produce a colorized HTML version of the source."
|
| 188 |
+
)
|
| 189 |
+
@magic_arguments.argument(
|
| 190 |
+
'--annotate-fullc', action='store_const', const='fullc', dest='annotate',
|
| 191 |
+
help="Produce a colorized HTML version of the source "
|
| 192 |
+
"which includes entire generated C/C++-code."
|
| 193 |
+
)
|
| 194 |
+
@magic_arguments.argument(
|
| 195 |
+
'-+', '--cplus', action='store_true', default=False,
|
| 196 |
+
help="Output a C++ rather than C file."
|
| 197 |
+
)
|
| 198 |
+
@magic_arguments.argument(
|
| 199 |
+
'-3', dest='language_level', action='store_const', const=3, default=None,
|
| 200 |
+
help="Select Python 3 syntax."
|
| 201 |
+
)
|
| 202 |
+
@magic_arguments.argument(
|
| 203 |
+
'-2', dest='language_level', action='store_const', const=2, default=None,
|
| 204 |
+
help="Select Python 2 syntax."
|
| 205 |
+
)
|
| 206 |
+
@magic_arguments.argument(
|
| 207 |
+
'-f', '--force', action='store_true', default=False,
|
| 208 |
+
help="Force the compilation of a new module, even if the source has been "
|
| 209 |
+
"previously compiled."
|
| 210 |
+
)
|
| 211 |
+
@magic_arguments.argument(
|
| 212 |
+
'-c', '--compile-args', action='append', default=[],
|
| 213 |
+
help="Extra flags to pass to compiler via the `extra_compile_args` "
|
| 214 |
+
"Extension flag (can be specified multiple times)."
|
| 215 |
+
)
|
| 216 |
+
@magic_arguments.argument(
|
| 217 |
+
'--link-args', action='append', default=[],
|
| 218 |
+
help="Extra flags to pass to linker via the `extra_link_args` "
|
| 219 |
+
"Extension flag (can be specified multiple times)."
|
| 220 |
+
)
|
| 221 |
+
@magic_arguments.argument(
|
| 222 |
+
'-l', '--lib', action='append', default=[],
|
| 223 |
+
help="Add a library to link the extension against (can be specified "
|
| 224 |
+
"multiple times)."
|
| 225 |
+
)
|
| 226 |
+
@magic_arguments.argument(
|
| 227 |
+
'-n', '--name',
|
| 228 |
+
help="Specify a name for the Cython module."
|
| 229 |
+
)
|
| 230 |
+
@magic_arguments.argument(
|
| 231 |
+
'-L', dest='library_dirs', metavar='dir', action='append', default=[],
|
| 232 |
+
help="Add a path to the list of library directories (can be specified "
|
| 233 |
+
"multiple times)."
|
| 234 |
+
)
|
| 235 |
+
@magic_arguments.argument(
|
| 236 |
+
'-I', '--include', action='append', default=[],
|
| 237 |
+
help="Add a path to the list of include directories (can be specified "
|
| 238 |
+
"multiple times)."
|
| 239 |
+
)
|
| 240 |
+
@magic_arguments.argument(
|
| 241 |
+
'-S', '--src', action='append', default=[],
|
| 242 |
+
help="Add a path to the list of src files (can be specified "
|
| 243 |
+
"multiple times)."
|
| 244 |
+
)
|
| 245 |
+
@magic_arguments.argument(
|
| 246 |
+
'--pgo', dest='pgo', action='store_true', default=False,
|
| 247 |
+
help=("Enable profile guided optimisation in the C compiler. "
|
| 248 |
+
"Compiles the cell twice and executes it in between to generate a runtime profile.")
|
| 249 |
+
)
|
| 250 |
+
@magic_arguments.argument(
|
| 251 |
+
'--verbose', dest='quiet', action='store_false', default=True,
|
| 252 |
+
help=("Print debug information like generated .c/.cpp file location "
|
| 253 |
+
"and exact gcc/g++ command invoked.")
|
| 254 |
+
)
|
| 255 |
+
@cell_magic
|
| 256 |
+
def cython(self, line, cell):
|
| 257 |
+
"""Compile and import everything from a Cython code cell.
|
| 258 |
+
|
| 259 |
+
The contents of the cell are written to a `.pyx` file in the
|
| 260 |
+
directory `IPYTHONDIR/cython` using a filename with the hash of the
|
| 261 |
+
code. This file is then cythonized and compiled. The resulting module
|
| 262 |
+
is imported and all of its symbols are injected into the user's
|
| 263 |
+
namespace. The usage is similar to that of `%%cython_pyximport` but
|
| 264 |
+
you don't have to pass a module name::
|
| 265 |
+
|
| 266 |
+
%%cython
|
| 267 |
+
def f(x):
|
| 268 |
+
return 2.0*x
|
| 269 |
+
|
| 270 |
+
To compile OpenMP codes, pass the required `--compile-args`
|
| 271 |
+
and `--link-args`. For example with gcc::
|
| 272 |
+
|
| 273 |
+
%%cython --compile-args=-fopenmp --link-args=-fopenmp
|
| 274 |
+
...
|
| 275 |
+
|
| 276 |
+
To enable profile guided optimisation, pass the ``--pgo`` option.
|
| 277 |
+
Note that the cell itself needs to take care of establishing a suitable
|
| 278 |
+
profile when executed. This can be done by implementing the functions to
|
| 279 |
+
optimise, and then calling them directly in the same cell on some realistic
|
| 280 |
+
training data like this::
|
| 281 |
+
|
| 282 |
+
%%cython --pgo
|
| 283 |
+
def critical_function(data):
|
| 284 |
+
for item in data:
|
| 285 |
+
...
|
| 286 |
+
|
| 287 |
+
# execute function several times to build profile
|
| 288 |
+
from somewhere import some_typical_data
|
| 289 |
+
for _ in range(100):
|
| 290 |
+
critical_function(some_typical_data)
|
| 291 |
+
|
| 292 |
+
In Python 3.5 and later, you can distinguish between the profile and
|
| 293 |
+
non-profile runs as follows::
|
| 294 |
+
|
| 295 |
+
if "_pgo_" in __name__:
|
| 296 |
+
... # execute critical code here
|
| 297 |
+
"""
|
| 298 |
+
args = magic_arguments.parse_argstring(self.cython, line)
|
| 299 |
+
code = cell if cell.endswith('\n') else cell + '\n'
|
| 300 |
+
lib_dir = os.path.join(get_ipython_cache_dir(), 'cython')
|
| 301 |
+
key = (code, line, sys.version_info, sys.executable, cython_version)
|
| 302 |
+
|
| 303 |
+
if not os.path.exists(lib_dir):
|
| 304 |
+
os.makedirs(lib_dir)
|
| 305 |
+
|
| 306 |
+
if args.pgo:
|
| 307 |
+
key += ('pgo',)
|
| 308 |
+
if args.force:
|
| 309 |
+
# Force a new module name by adding the current time to the
|
| 310 |
+
# key which is hashed to determine the module name.
|
| 311 |
+
key += (time.time(),)
|
| 312 |
+
|
| 313 |
+
if args.name:
|
| 314 |
+
module_name = str(args.name) # no-op in Py3
|
| 315 |
+
else:
|
| 316 |
+
module_name = "_cython_magic_" + hashlib.sha1(str(key).encode('utf-8')).hexdigest()
|
| 317 |
+
html_file = os.path.join(lib_dir, module_name + '.html')
|
| 318 |
+
module_path = os.path.join(lib_dir, module_name + self.so_ext)
|
| 319 |
+
|
| 320 |
+
have_module = os.path.isfile(module_path)
|
| 321 |
+
need_cythonize = args.pgo or not have_module
|
| 322 |
+
|
| 323 |
+
if args.annotate:
|
| 324 |
+
if not os.path.isfile(html_file):
|
| 325 |
+
need_cythonize = True
|
| 326 |
+
|
| 327 |
+
extension = None
|
| 328 |
+
if need_cythonize:
|
| 329 |
+
extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
|
| 330 |
+
if extensions is None:
|
| 331 |
+
# Compilation failed and printed error message
|
| 332 |
+
return None
|
| 333 |
+
assert len(extensions) == 1
|
| 334 |
+
extension = extensions[0]
|
| 335 |
+
self._code_cache[key] = module_name
|
| 336 |
+
|
| 337 |
+
if args.pgo:
|
| 338 |
+
self._profile_pgo_wrapper(extension, lib_dir)
|
| 339 |
+
|
| 340 |
+
def print_compiler_output(stdout, stderr, where):
|
| 341 |
+
# On windows, errors are printed to stdout, we redirect both to sys.stderr.
|
| 342 |
+
print_captured(stdout, where, u"Content of stdout:\n")
|
| 343 |
+
print_captured(stderr, where, u"Content of stderr:\n")
|
| 344 |
+
|
| 345 |
+
get_stderr = get_stdout = None
|
| 346 |
+
try:
|
| 347 |
+
with captured_fd(1) as get_stdout:
|
| 348 |
+
with captured_fd(2) as get_stderr:
|
| 349 |
+
self._build_extension(
|
| 350 |
+
extension, lib_dir, pgo_step_name='use' if args.pgo else None, quiet=args.quiet)
|
| 351 |
+
except (distutils.errors.CompileError, distutils.errors.LinkError):
|
| 352 |
+
# Build failed, print error message from compiler/linker
|
| 353 |
+
print_compiler_output(get_stdout(), get_stderr(), sys.stderr)
|
| 354 |
+
return None
|
| 355 |
+
|
| 356 |
+
# Build seems ok, but we might still want to show any warnings that occurred
|
| 357 |
+
print_compiler_output(get_stdout(), get_stderr(), sys.stdout)
|
| 358 |
+
|
| 359 |
+
module = load_dynamic(module_name, module_path)
|
| 360 |
+
self._import_all(module)
|
| 361 |
+
|
| 362 |
+
if args.annotate:
|
| 363 |
+
try:
|
| 364 |
+
with io.open(html_file, encoding='utf-8') as f:
|
| 365 |
+
annotated_html = f.read()
|
| 366 |
+
except IOError as e:
|
| 367 |
+
# File could not be opened. Most likely the user has a version
|
| 368 |
+
# of Cython before 0.15.1 (when `cythonize` learned the
|
| 369 |
+
# `force` keyword argument) and has already compiled this
|
| 370 |
+
# exact source without annotation.
|
| 371 |
+
print('Cython completed successfully but the annotated '
|
| 372 |
+
'source could not be read.', file=sys.stderr)
|
| 373 |
+
print(e, file=sys.stderr)
|
| 374 |
+
else:
|
| 375 |
+
return display.HTML(self.clean_annotated_html(annotated_html))
|
| 376 |
+
|
| 377 |
+
    def _profile_pgo_wrapper(self, extension, lib_dir):
        """
        Generate a .c file for a separate extension module that calls the
        module init function of the original module. This makes sure that the
        PGO profiler sees the correct .o file of the final module, but it still
        allows us to import the module under a different name for profiling,
        before recompiling it into the PGO optimised module. Overwriting and
        reimporting the same shared library is not portable.
        """
        extension = copy.copy(extension)  # shallow copy, do not modify sources in place!
        module_name = extension.name
        pgo_module_name = '_pgo_' + module_name
        pgo_wrapper_c_file = os.path.join(lib_dir, pgo_module_name + '.c')
        # Write a minimal C shim module: on Py2 it runs the original module's
        # init function and aliases the resulting module object under the
        # '_pgo_' name; on Py3 it simply delegates to the original PyInit.
        with io.open(pgo_wrapper_c_file, 'w', encoding='utf-8') as f:
            f.write(textwrap.dedent(u"""
            #include "Python.h"
            #if PY_MAJOR_VERSION < 3
            extern PyMODINIT_FUNC init%(module_name)s(void);
            PyMODINIT_FUNC init%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC init%(pgo_module_name)s(void) {
                PyObject *sys_modules;
                init%(module_name)s(); if (PyErr_Occurred()) return;
                sys_modules = PyImport_GetModuleDict(); /* borrowed, no exception, "never" fails */
                if (sys_modules) {
                    PyObject *module = PyDict_GetItemString(sys_modules, "%(module_name)s"); if (!module) return;
                    PyDict_SetItemString(sys_modules, "%(pgo_module_name)s", module);
                    Py_DECREF(module);
                }
            }
            #else
            extern PyMODINIT_FUNC PyInit_%(module_name)s(void);
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void); /*proto*/
            PyMODINIT_FUNC PyInit_%(pgo_module_name)s(void) {
                return PyInit_%(module_name)s();
            }
            #endif
            """ % {'module_name': module_name, 'pgo_module_name': pgo_module_name}))

        extension.sources = extension.sources + [pgo_wrapper_c_file]  # do not modify in place!
        extension.name = pgo_module_name

        # Build with the 'gen' PGO step so the compiler emits profile data.
        self._build_extension(extension, lib_dir, pgo_step_name='gen')

        # import and execute module code to generate profile
        so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
        load_dynamic(pgo_module_name, so_module_path)
|
| 423 |
+
|
| 424 |
+
def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
|
| 425 |
+
pyx_file = os.path.join(lib_dir, module_name + '.pyx')
|
| 426 |
+
pyx_file = encode_fs(pyx_file)
|
| 427 |
+
|
| 428 |
+
c_include_dirs = args.include
|
| 429 |
+
c_src_files = list(map(str, args.src))
|
| 430 |
+
if 'numpy' in code:
|
| 431 |
+
import numpy
|
| 432 |
+
c_include_dirs.append(numpy.get_include())
|
| 433 |
+
with io.open(pyx_file, 'w', encoding='utf-8') as f:
|
| 434 |
+
f.write(code)
|
| 435 |
+
extension = Extension(
|
| 436 |
+
name=module_name,
|
| 437 |
+
sources=[pyx_file] + c_src_files,
|
| 438 |
+
include_dirs=c_include_dirs,
|
| 439 |
+
library_dirs=args.library_dirs,
|
| 440 |
+
extra_compile_args=args.compile_args,
|
| 441 |
+
extra_link_args=args.link_args,
|
| 442 |
+
libraries=args.lib,
|
| 443 |
+
language='c++' if args.cplus else 'c',
|
| 444 |
+
)
|
| 445 |
+
try:
|
| 446 |
+
opts = dict(
|
| 447 |
+
quiet=quiet,
|
| 448 |
+
annotate=args.annotate,
|
| 449 |
+
force=True,
|
| 450 |
+
language_level=min(3, sys.version_info[0]),
|
| 451 |
+
)
|
| 452 |
+
if args.language_level is not None:
|
| 453 |
+
assert args.language_level in (2, 3)
|
| 454 |
+
opts['language_level'] = args.language_level
|
| 455 |
+
return cythonize([extension], **opts)
|
| 456 |
+
except CompileError:
|
| 457 |
+
return None
|
| 458 |
+
|
| 459 |
+
def _build_extension(self, extension, lib_dir, temp_dir=None, pgo_step_name=None, quiet=True):
|
| 460 |
+
build_extension = self._get_build_extension(
|
| 461 |
+
extension, lib_dir=lib_dir, temp_dir=temp_dir, pgo_step_name=pgo_step_name)
|
| 462 |
+
old_threshold = None
|
| 463 |
+
try:
|
| 464 |
+
if not quiet:
|
| 465 |
+
old_threshold = distutils.log.set_threshold(distutils.log.DEBUG)
|
| 466 |
+
build_extension.run()
|
| 467 |
+
finally:
|
| 468 |
+
if not quiet and old_threshold is not None:
|
| 469 |
+
distutils.log.set_threshold(old_threshold)
|
| 470 |
+
|
| 471 |
+
def _add_pgo_flags(self, build_extension, step_name, temp_dir):
|
| 472 |
+
compiler_type = build_extension.compiler.compiler_type
|
| 473 |
+
if compiler_type == 'unix':
|
| 474 |
+
compiler_cmd = build_extension.compiler.compiler_so
|
| 475 |
+
# TODO: we could try to call "[cmd] --version" for better insights
|
| 476 |
+
if not compiler_cmd:
|
| 477 |
+
pass
|
| 478 |
+
elif 'clang' in compiler_cmd or 'clang' in compiler_cmd[0]:
|
| 479 |
+
compiler_type = 'clang'
|
| 480 |
+
elif 'icc' in compiler_cmd or 'icc' in compiler_cmd[0]:
|
| 481 |
+
compiler_type = 'icc'
|
| 482 |
+
elif 'gcc' in compiler_cmd or 'gcc' in compiler_cmd[0]:
|
| 483 |
+
compiler_type = 'gcc'
|
| 484 |
+
elif 'g++' in compiler_cmd or 'g++' in compiler_cmd[0]:
|
| 485 |
+
compiler_type = 'gcc'
|
| 486 |
+
config = PGO_CONFIG.get(compiler_type)
|
| 487 |
+
orig_flags = []
|
| 488 |
+
if config and step_name in config:
|
| 489 |
+
flags = [f.format(TEMPDIR=temp_dir) for f in config[step_name]]
|
| 490 |
+
for extension in build_extension.extensions:
|
| 491 |
+
orig_flags.append((extension.extra_compile_args, extension.extra_link_args))
|
| 492 |
+
extension.extra_compile_args = extension.extra_compile_args + flags
|
| 493 |
+
extension.extra_link_args = extension.extra_link_args + flags
|
| 494 |
+
else:
|
| 495 |
+
print("No PGO %s configuration known for C compiler type '%s'" % (step_name, compiler_type),
|
| 496 |
+
file=sys.stderr)
|
| 497 |
+
return orig_flags
|
| 498 |
+
|
| 499 |
+
@property
|
| 500 |
+
def so_ext(self):
|
| 501 |
+
"""The extension suffix for compiled modules."""
|
| 502 |
+
try:
|
| 503 |
+
return self._so_ext
|
| 504 |
+
except AttributeError:
|
| 505 |
+
self._so_ext = self._get_build_extension().get_ext_filename('')
|
| 506 |
+
return self._so_ext
|
| 507 |
+
|
| 508 |
+
def _clear_distutils_mkpath_cache(self):
|
| 509 |
+
"""clear distutils mkpath cache
|
| 510 |
+
|
| 511 |
+
prevents distutils from skipping re-creation of dirs that have been removed
|
| 512 |
+
"""
|
| 513 |
+
try:
|
| 514 |
+
from distutils.dir_util import _path_created
|
| 515 |
+
except ImportError:
|
| 516 |
+
pass
|
| 517 |
+
else:
|
| 518 |
+
_path_created.clear()
|
| 519 |
+
|
| 520 |
+
    def _get_build_extension(self, extension=None, lib_dir=None, temp_dir=None,
                             pgo_step_name=None, _build_ext=build_ext):
        """Create and configure a distutils ``build_ext`` command object.

        Parses the user's distutils config files (minus any local setup.cfg),
        points the command at *lib_dir* / *temp_dir*, and — when
        *pgo_step_name* is given — swaps in a ``build_ext`` subclass that adds
        the PGO flags for that step before building.  ``_build_ext`` is a
        default-argument binding of the module-level ``build_ext`` class.
        """
        # Make sure distutils re-creates output dirs that were removed.
        self._clear_distutils_mkpath_cache()
        dist = Distribution()
        config_files = dist.find_config_files()
        try:
            # A setup.cfg in the current working directory belongs to some
            # unrelated project; keep it out of this in-memory build.
            config_files.remove('setup.cfg')
        except ValueError:
            pass
        dist.parse_config_files(config_files)

        if not temp_dir:
            temp_dir = lib_dir
        # Bind to a local so the nested class below can close over it.
        add_pgo_flags = self._add_pgo_flags

        if pgo_step_name:
            base_build_ext = _build_ext
            # Deliberately shadows the name '_build_ext' so the construction
            # below transparently picks up the PGO-aware subclass.
            class _build_ext(_build_ext):
                def build_extensions(self):
                    add_pgo_flags(self, pgo_step_name, temp_dir)
                    base_build_ext.build_extensions(self)

        build_extension = _build_ext(dist)
        build_extension.finalize_options()
        if temp_dir:
            temp_dir = encode_fs(temp_dir)
            build_extension.build_temp = temp_dir
        if lib_dir:
            lib_dir = encode_fs(lib_dir)
            build_extension.build_lib = lib_dir
        if extension is not None:
            build_extension.extensions = [extension]
        return build_extension
|
| 553 |
+
|
| 554 |
+
@staticmethod
|
| 555 |
+
def clean_annotated_html(html):
|
| 556 |
+
"""Clean up the annotated HTML source.
|
| 557 |
+
|
| 558 |
+
Strips the link to the generated C or C++ file, which we do not
|
| 559 |
+
present to the user.
|
| 560 |
+
"""
|
| 561 |
+
r = re.compile('<p>Raw output: <a href="(.*)">(.*)</a>')
|
| 562 |
+
html = '\n'.join(l for l in html.splitlines() if not r.match(l))
|
| 563 |
+
return html
|
| 564 |
+
|
| 565 |
+
# The module docstring is assembled at import time from the magics' own
# docstrings, so the documented options always stay in sync with the code.
__doc__ = __doc__.format(
    # rST doesn't see the -+ flag as part of an option list, so we
    # hide it from the module-level docstring.
    CYTHON_DOC=dedent(CythonMagics.cython.__doc__
                      .replace('-+, --cplus', '--cplus    ')),
    CYTHON_INLINE_DOC=dedent(CythonMagics.cython_inline.__doc__),
    CYTHON_PYXIMPORT_DOC=dedent(CythonMagics.cython_pyximport.__doc__),
)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Tests/TestCythonizeArgsParser.py
ADDED
|
@@ -0,0 +1,482 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from Cython.Build.Cythonize import (
|
| 2 |
+
create_args_parser, parse_args_raw, parse_args,
|
| 3 |
+
parallel_compiles
|
| 4 |
+
)
|
| 5 |
+
|
| 6 |
+
from Cython.Compiler import Options
|
| 7 |
+
from Cython.Compiler.Tests.Utils import backup_Options, restore_Options, check_global_options
|
| 8 |
+
|
| 9 |
+
from unittest import TestCase
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
try:
|
| 13 |
+
from StringIO import StringIO
|
| 14 |
+
except ImportError:
|
| 15 |
+
from io import StringIO # doesn't accept 'str' in Py2
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class TestCythonizeArgsParser(TestCase):
    """Tests for the ``cythonize`` command-line argument parser.

    Each test feeds an argv list through ``parse_args_raw`` and checks both
    the affected option and — via ``are_default`` — that no other option was
    accidentally changed.
    """

    def setUp(self):
        TestCase.setUp(self)
        self.parse_args = lambda x, parser=create_args_parser() : parse_args_raw(parser, x)


    def are_default(self, options, skip):
        # empty containers
        empty_containers = ['directives', 'compile_time_env', 'options', 'excludes']
        are_none = ['language_level', 'annotate', 'build', 'build_inplace', 'force', 'quiet', 'lenient', 'keep_going', 'no_docstrings']
        for opt_name in empty_containers:
            if len(getattr(options, opt_name))!=0 and (opt_name not in skip):
                self.assertEqual(opt_name,"", msg="For option "+opt_name)
                return False
        for opt_name in are_none:
            if (getattr(options, opt_name) is not None) and (opt_name not in skip):
                self.assertEqual(opt_name,"", msg="For option "+opt_name)
                return False
        if options.parallel!=parallel_compiles and ('parallel' not in skip):
            return False
        return True

    # testing directives:
    def test_directive_short(self):
        options, args = self.parse_args(['-X', 'cdivision=True'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['directives']))
        self.assertEqual(options.directives['cdivision'], True)

    def test_directive_long(self):
        options, args = self.parse_args(['--directive', 'cdivision=True'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['directives']))
        self.assertEqual(options.directives['cdivision'], True)

    def test_directive_multiple(self):
        options, args = self.parse_args(['-X', 'cdivision=True', '-X', 'c_string_type=bytes'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['directives']))
        self.assertEqual(options.directives['cdivision'], True)
        self.assertEqual(options.directives['c_string_type'], 'bytes')

    def test_directive_multiple_v2(self):
        options, args = self.parse_args(['-X', 'cdivision=True,c_string_type=bytes'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['directives']))
        self.assertEqual(options.directives['cdivision'], True)
        self.assertEqual(options.directives['c_string_type'], 'bytes')

    def test_directive_value_yes(self):
        options, args = self.parse_args(['-X', 'cdivision=YeS'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['directives']))
        self.assertEqual(options.directives['cdivision'], True)

    def test_directive_value_no(self):
        options, args = self.parse_args(['-X', 'cdivision=no'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['directives']))
        self.assertEqual(options.directives['cdivision'], False)

    def test_directive_value_invalid(self):
        with self.assertRaises(ValueError) as context:
            options, args = self.parse_args(['-X', 'cdivision=sadfasd'])

    def test_directive_key_invalid(self):
        with self.assertRaises(ValueError) as context:
            options, args = self.parse_args(['-X', 'abracadabra'])

    def test_directive_no_value(self):
        with self.assertRaises(ValueError) as context:
            options, args = self.parse_args(['-X', 'cdivision'])

    def test_directives_types(self):
        # Use a list of (key, value) pairs, not a dict: several keys are
        # deliberately tested with multiple values, and duplicate dict keys
        # would silently keep only the last one.
        directives = [
            ('auto_pickle', True),
            ('c_string_type', 'bytearray'),
            ('c_string_type', 'bytes'),
            ('c_string_type', 'str'),
            ('c_string_type', 'bytearray'),
            ('c_string_type', 'unicode'),
            ('c_string_encoding', 'ascii'),
            ('language_level', 2),
            ('language_level', 3),
            ('language_level', '3str'),
            ('set_initial_path', 'my_initial_path'),
        ]
        for key, value in directives:
            cmd = '{key}={value}'.format(key=key, value=str(value))
            options, args = self.parse_args(['-X', cmd])
            self.assertFalse(args)
            self.assertTrue(self.are_default(options, ['directives']), msg = "Error for option: "+cmd)
            self.assertEqual(options.directives[key], value, msg = "Error for option: "+cmd)

    def test_directives_wrong(self):
        # List of pairs for the same reason as in test_directives_types:
        # a dict would drop the first 'auto_pickle' case.
        directives = [
            ('auto_pickle', 42),  # for bool type
            ('auto_pickle', 'NONONO'),  # for bool type
            ('c_string_type', 'bites'),
            #('c_string_encoding', 'a'),
            #('language_level', 4),
        ]
        for key, value in directives:
            cmd = '{key}={value}'.format(key=key, value=str(value))
            with self.assertRaises(ValueError, msg = "Error for option: "+cmd) as context:
                options, args = self.parse_args(['-X', cmd])

    def test_compile_time_env_short(self):
        options, args = self.parse_args(['-E', 'MYSIZE=10'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['compile_time_env']))
        self.assertEqual(options.compile_time_env['MYSIZE'], 10)

    def test_compile_time_env_long(self):
        options, args = self.parse_args(['--compile-time-env', 'MYSIZE=10'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['compile_time_env']))
        self.assertEqual(options.compile_time_env['MYSIZE'], 10)

    def test_compile_time_env_multiple(self):
        options, args = self.parse_args(['-E', 'MYSIZE=10', '-E', 'ARRSIZE=11'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['compile_time_env']))
        self.assertEqual(options.compile_time_env['MYSIZE'], 10)
        self.assertEqual(options.compile_time_env['ARRSIZE'], 11)

    def test_compile_time_env_multiple_v2(self):
        options, args = self.parse_args(['-E', 'MYSIZE=10,ARRSIZE=11'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['compile_time_env']))
        self.assertEqual(options.compile_time_env['MYSIZE'], 10)
        self.assertEqual(options.compile_time_env['ARRSIZE'], 11)

    #testing options
    def test_option_short(self):
        options, args = self.parse_args(['-s', 'docstrings=True'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], True)

    def test_option_long(self):
        options, args = self.parse_args(['--option', 'docstrings=True'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], True)

    def test_option_multiple(self):
        options, args = self.parse_args(['-s', 'docstrings=True', '-s', 'buffer_max_dims=8'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], True)
        self.assertEqual(options.options['buffer_max_dims'], True)  # really?

    def test_option_multiple_v2(self):
        options, args = self.parse_args(['-s', 'docstrings=True,buffer_max_dims=8'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], True)
        self.assertEqual(options.options['buffer_max_dims'], True)  # really?

    def test_option_value_yes(self):
        options, args = self.parse_args(['-s', 'docstrings=YeS'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], True)

    def test_option_value_4242(self):
        options, args = self.parse_args(['-s', 'docstrings=4242'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], True)

    def test_option_value_0(self):
        options, args = self.parse_args(['-s', 'docstrings=0'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], False)

    def test_option_value_emptystr(self):
        options, args = self.parse_args(['-s', 'docstrings='])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], True)

    def test_option_value_a_str(self):
        options, args = self.parse_args(['-s', 'docstrings=BB'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], True)

    def test_option_value_no(self):
        options, args = self.parse_args(['-s', 'docstrings=nO'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], False)

    def test_option_no_value(self):
        options, args = self.parse_args(['-s', 'docstrings'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['docstrings'], True)

    def test_option_any_key(self):
        options, args = self.parse_args(['-s', 'abracadabra'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['options']))
        self.assertEqual(options.options['abracadabra'], True)

    def test_language_level_2(self):
        options, args = self.parse_args(['-2'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['language_level']))
        self.assertEqual(options.language_level, 2)

    def test_language_level_3(self):
        options, args = self.parse_args(['-3'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['language_level']))
        self.assertEqual(options.language_level, 3)

    def test_language_level_3str(self):
        options, args = self.parse_args(['--3str'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['language_level']))
        self.assertEqual(options.language_level, '3str')

    def test_annotate_short(self):
        options, args = self.parse_args(['-a'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['annotate']))
        self.assertEqual(options.annotate, 'default')

    def test_annotate_long(self):
        options, args = self.parse_args(['--annotate'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['annotate']))
        self.assertEqual(options.annotate, 'default')

    def test_annotate_fullc(self):
        options, args = self.parse_args(['--annotate-fullc'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['annotate']))
        self.assertEqual(options.annotate, 'fullc')

    def test_annotate_and_positional(self):
        options, args = self.parse_args(['-a', 'foo.pyx'])
        self.assertEqual(args, ['foo.pyx'])
        self.assertTrue(self.are_default(options, ['annotate']))
        self.assertEqual(options.annotate, 'default')

    def test_annotate_and_optional(self):
        options, args = self.parse_args(['-a', '--3str'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['annotate', 'language_level']))
        self.assertEqual(options.annotate, 'default')
        self.assertEqual(options.language_level, '3str')

    def test_exclude_short(self):
        options, args = self.parse_args(['-x', '*.pyx'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['excludes']))
        self.assertTrue('*.pyx' in options.excludes)

    def test_exclude_long(self):
        options, args = self.parse_args(['--exclude', '*.pyx'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['excludes']))
        self.assertTrue('*.pyx' in options.excludes)

    def test_exclude_multiple(self):
        options, args = self.parse_args(['--exclude', '*.pyx', '--exclude', '*.py', ])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['excludes']))
        self.assertEqual(options.excludes, ['*.pyx', '*.py'])

    def test_build_short(self):
        options, args = self.parse_args(['-b'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['build']))
        self.assertEqual(options.build, True)

    def test_build_long(self):
        options, args = self.parse_args(['--build'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['build']))
        self.assertEqual(options.build, True)

    def test_inplace_short(self):
        options, args = self.parse_args(['-i'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['build_inplace']))
        self.assertEqual(options.build_inplace, True)

    def test_inplace_long(self):
        options, args = self.parse_args(['--inplace'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['build_inplace']))
        self.assertEqual(options.build_inplace, True)

    def test_parallel_short(self):
        options, args = self.parse_args(['-j', '42'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['parallel']))
        self.assertEqual(options.parallel, 42)

    def test_parallel_long(self):
        options, args = self.parse_args(['--parallel', '42'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['parallel']))
        self.assertEqual(options.parallel, 42)

    def test_force_short(self):
        options, args = self.parse_args(['-f'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['force']))
        self.assertEqual(options.force, True)

    def test_force_long(self):
        options, args = self.parse_args(['--force'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['force']))
        self.assertEqual(options.force, True)

    def test_quite_short(self):
        options, args = self.parse_args(['-q'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['quiet']))
        self.assertEqual(options.quiet, True)

    def test_quite_long(self):
        options, args = self.parse_args(['--quiet'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['quiet']))
        self.assertEqual(options.quiet, True)

    def test_lenient_long(self):
        options, args = self.parse_args(['--lenient'])
        self.assertTrue(self.are_default(options, ['lenient']))
        self.assertFalse(args)
        self.assertEqual(options.lenient, True)

    def test_keep_going_short(self):
        options, args = self.parse_args(['-k'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['keep_going']))
        self.assertEqual(options.keep_going, True)

    def test_keep_going_long(self):
        options, args = self.parse_args(['--keep-going'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['keep_going']))
        self.assertEqual(options.keep_going, True)

    def test_no_docstrings_long(self):
        options, args = self.parse_args(['--no-docstrings'])
        self.assertFalse(args)
        self.assertTrue(self.are_default(options, ['no_docstrings']))
        self.assertEqual(options.no_docstrings, True)

    def test_file_name(self):
        options, args = self.parse_args(['file1.pyx', 'file2.pyx'])
        self.assertEqual(len(args), 2)
        self.assertEqual(args[0], 'file1.pyx')
        self.assertEqual(args[1], 'file2.pyx')
        self.assertTrue(self.are_default(options, []))

    def test_option_first(self):
        options, args = self.parse_args(['-i', 'file.pyx'])
        self.assertEqual(args, ['file.pyx'])
        self.assertEqual(options.build_inplace, True)
        self.assertTrue(self.are_default(options, ['build_inplace']))

    def test_file_inbetween(self):
        options, args = self.parse_args(['-i', 'file.pyx', '-a'])
        self.assertEqual(args, ['file.pyx'])
        self.assertEqual(options.build_inplace, True)
        self.assertEqual(options.annotate, 'default')
        self.assertTrue(self.are_default(options, ['build_inplace', 'annotate']))

    def test_option_trailing(self):
        options, args = self.parse_args(['file.pyx', '-i'])
        self.assertEqual(args, ['file.pyx'])
        self.assertEqual(options.build_inplace, True)
        self.assertTrue(self.are_default(options, ['build_inplace']))

    def test_interspersed_positional(self):
        options, sources = self.parse_args([
            'file1.pyx', '-a',
            'file2.pyx'
        ])
        self.assertEqual(sources, ['file1.pyx', 'file2.pyx'])
        self.assertEqual(options.annotate, 'default')
        self.assertTrue(self.are_default(options, ['annotate']))

    def test_interspersed_positional2(self):
        options, sources = self.parse_args([
            'file1.pyx', '-a',
            'file2.pyx', '-a', 'file3.pyx'
        ])
        self.assertEqual(sources, ['file1.pyx', 'file2.pyx', 'file3.pyx'])
        self.assertEqual(options.annotate, 'default')
        self.assertTrue(self.are_default(options, ['annotate']))

    def test_interspersed_positional3(self):
        options, sources = self.parse_args([
            '-f', 'f1', 'f2', '-a',
            'f3', 'f4', '-a', 'f5'
        ])
        self.assertEqual(sources, ['f1', 'f2', 'f3', 'f4', 'f5'])
        self.assertEqual(options.annotate, 'default')
        self.assertEqual(options.force, True)
        self.assertTrue(self.are_default(options, ['annotate', 'force']))

    def test_wrong_option(self):
        old_stderr = sys.stderr
        stderr = sys.stderr = StringIO()
        try:
            self.assertRaises(SystemExit, self.parse_args,
                              ['--unknown-option']
                              )
        finally:
            sys.stderr = old_stderr
        self.assertTrue(stderr.getvalue())
|
| 442 |
+
|
| 443 |
+
|
| 444 |
+
class TestParseArgs(TestCase):
|
| 445 |
+
def setUp(self):
|
| 446 |
+
self._options_backup = backup_Options()
|
| 447 |
+
|
| 448 |
+
def tearDown(self):
|
| 449 |
+
restore_Options(self._options_backup)
|
| 450 |
+
|
| 451 |
+
def check_default_global_options(self, white_list=[]):
|
| 452 |
+
self.assertEqual(check_global_options(self._options_backup, white_list), "")
|
| 453 |
+
|
| 454 |
+
def test_build_set_for_inplace(self):
|
| 455 |
+
options, args = parse_args(['foo.pyx', '-i'])
|
| 456 |
+
self.assertEqual(options.build, True)
|
| 457 |
+
self.check_default_global_options()
|
| 458 |
+
|
| 459 |
+
def test_lenient(self):
|
| 460 |
+
options, sources = parse_args(['foo.pyx', '--lenient'])
|
| 461 |
+
self.assertEqual(sources, ['foo.pyx'])
|
| 462 |
+
self.assertEqual(Options.error_on_unknown_names, False)
|
| 463 |
+
self.assertEqual(Options.error_on_uninitialized, False)
|
| 464 |
+
self.check_default_global_options(['error_on_unknown_names', 'error_on_uninitialized'])
|
| 465 |
+
|
| 466 |
+
def test_annotate(self):
|
| 467 |
+
options, sources = parse_args(['foo.pyx', '--annotate'])
|
| 468 |
+
self.assertEqual(sources, ['foo.pyx'])
|
| 469 |
+
self.assertEqual(Options.annotate, 'default')
|
| 470 |
+
self.check_default_global_options(['annotate'])
|
| 471 |
+
|
| 472 |
+
def test_annotate_fullc(self):
|
| 473 |
+
options, sources = parse_args(['foo.pyx', '--annotate-fullc'])
|
| 474 |
+
self.assertEqual(sources, ['foo.pyx'])
|
| 475 |
+
self.assertEqual(Options.annotate, 'fullc')
|
| 476 |
+
self.check_default_global_options(['annotate'])
|
| 477 |
+
|
| 478 |
+
def test_no_docstrings(self):
|
| 479 |
+
options, sources = parse_args(['foo.pyx', '--no-docstrings'])
|
| 480 |
+
self.assertEqual(sources, ['foo.pyx'])
|
| 481 |
+
self.assertEqual(Options.docstrings, False)
|
| 482 |
+
self.check_default_global_options(['docstrings'])
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Tests/TestInline.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import tempfile
|
| 3 |
+
import unittest
|
| 4 |
+
from Cython.Shadow import inline
|
| 5 |
+
from Cython.Build.Inline import safe_type
|
| 6 |
+
from Cython.TestUtils import CythonTest
|
| 7 |
+
|
| 8 |
+
try:
|
| 9 |
+
import numpy
|
| 10 |
+
has_numpy = True
|
| 11 |
+
except:
|
| 12 |
+
has_numpy = False
|
| 13 |
+
|
| 14 |
+
test_kwds = dict(force=True, quiet=True)
|
| 15 |
+
|
| 16 |
+
global_value = 100
|
| 17 |
+
|
| 18 |
+
class TestInline(CythonTest):
|
| 19 |
+
def setUp(self):
|
| 20 |
+
CythonTest.setUp(self)
|
| 21 |
+
self._call_kwds = dict(test_kwds)
|
| 22 |
+
if os.path.isdir('TEST_TMP'):
|
| 23 |
+
lib_dir = os.path.join('TEST_TMP','inline')
|
| 24 |
+
else:
|
| 25 |
+
lib_dir = tempfile.mkdtemp(prefix='cython_inline_')
|
| 26 |
+
self._call_kwds['lib_dir'] = lib_dir
|
| 27 |
+
|
| 28 |
+
def test_simple(self):
|
| 29 |
+
self.assertEqual(inline("return 1+2", **self._call_kwds), 3)
|
| 30 |
+
|
| 31 |
+
def test_types(self):
|
| 32 |
+
self.assertEqual(inline("""
|
| 33 |
+
cimport cython
|
| 34 |
+
return cython.typeof(a), cython.typeof(b)
|
| 35 |
+
""", a=1.0, b=[], **self._call_kwds), ('double', 'list object'))
|
| 36 |
+
|
| 37 |
+
def test_locals(self):
|
| 38 |
+
a = 1
|
| 39 |
+
b = 2
|
| 40 |
+
self.assertEqual(inline("return a+b", **self._call_kwds), 3)
|
| 41 |
+
|
| 42 |
+
def test_globals(self):
|
| 43 |
+
self.assertEqual(inline("return global_value + 1", **self._call_kwds), global_value + 1)
|
| 44 |
+
|
| 45 |
+
def test_no_return(self):
|
| 46 |
+
self.assertEqual(inline("""
|
| 47 |
+
a = 1
|
| 48 |
+
cdef double b = 2
|
| 49 |
+
cdef c = []
|
| 50 |
+
""", **self._call_kwds), dict(a=1, b=2.0, c=[]))
|
| 51 |
+
|
| 52 |
+
def test_def_node(self):
|
| 53 |
+
foo = inline("def foo(x): return x * x", **self._call_kwds)['foo']
|
| 54 |
+
self.assertEqual(foo(7), 49)
|
| 55 |
+
|
| 56 |
+
def test_class_ref(self):
|
| 57 |
+
class Type(object):
|
| 58 |
+
pass
|
| 59 |
+
tp = inline("Type")['Type']
|
| 60 |
+
self.assertEqual(tp, Type)
|
| 61 |
+
|
| 62 |
+
def test_pure(self):
|
| 63 |
+
import cython as cy
|
| 64 |
+
b = inline("""
|
| 65 |
+
b = cy.declare(float, a)
|
| 66 |
+
c = cy.declare(cy.pointer(cy.float), &b)
|
| 67 |
+
return b
|
| 68 |
+
""", a=3, **self._call_kwds)
|
| 69 |
+
self.assertEqual(type(b), float)
|
| 70 |
+
|
| 71 |
+
def test_compiler_directives(self):
|
| 72 |
+
self.assertEqual(
|
| 73 |
+
inline('return sum(x)',
|
| 74 |
+
x=[1, 2, 3],
|
| 75 |
+
cython_compiler_directives={'boundscheck': False}),
|
| 76 |
+
6
|
| 77 |
+
)
|
| 78 |
+
|
| 79 |
+
def test_lang_version(self):
|
| 80 |
+
# GH-3419. Caching for inline code didn't always respect compiler directives.
|
| 81 |
+
inline_divcode = "def f(int a, int b): return a/b"
|
| 82 |
+
self.assertEqual(
|
| 83 |
+
inline(inline_divcode, language_level=2)['f'](5,2),
|
| 84 |
+
2
|
| 85 |
+
)
|
| 86 |
+
self.assertEqual(
|
| 87 |
+
inline(inline_divcode, language_level=3)['f'](5,2),
|
| 88 |
+
2.5
|
| 89 |
+
)
|
| 90 |
+
self.assertEqual(
|
| 91 |
+
inline(inline_divcode, language_level=2)['f'](5,2),
|
| 92 |
+
2
|
| 93 |
+
)
|
| 94 |
+
|
| 95 |
+
def test_repeated_use(self):
|
| 96 |
+
inline_mulcode = "def f(int a, int b): return a * b"
|
| 97 |
+
self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)
|
| 98 |
+
self.assertEqual(inline(inline_mulcode)['f'](5, 3), 15)
|
| 99 |
+
self.assertEqual(inline(inline_mulcode)['f'](6, 2), 12)
|
| 100 |
+
self.assertEqual(inline(inline_mulcode)['f'](5, 2), 10)
|
| 101 |
+
|
| 102 |
+
f = inline(inline_mulcode)['f']
|
| 103 |
+
self.assertEqual(f(5, 2), 10)
|
| 104 |
+
self.assertEqual(f(5, 3), 15)
|
| 105 |
+
|
| 106 |
+
@unittest.skipIf(not has_numpy, "NumPy is not available")
|
| 107 |
+
def test_numpy(self):
|
| 108 |
+
import numpy
|
| 109 |
+
a = numpy.ndarray((10, 20))
|
| 110 |
+
a[0,0] = 10
|
| 111 |
+
self.assertEqual(safe_type(a), 'numpy.ndarray[numpy.float64_t, ndim=2]')
|
| 112 |
+
self.assertEqual(inline("return a[0,0]", a=a, **self._call_kwds), 10.0)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Tests/TestRecythonize.py
ADDED
|
@@ -0,0 +1,212 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import shutil
|
| 2 |
+
import os
|
| 3 |
+
import tempfile
|
| 4 |
+
import time
|
| 5 |
+
|
| 6 |
+
import Cython.Build.Dependencies
|
| 7 |
+
import Cython.Utils
|
| 8 |
+
from Cython.TestUtils import CythonTest
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def fresh_cythonize(*args, **kwargs):
|
| 12 |
+
Cython.Utils.clear_function_caches()
|
| 13 |
+
Cython.Build.Dependencies._dep_tree = None # discard method caches
|
| 14 |
+
Cython.Build.Dependencies.cythonize(*args, **kwargs)
|
| 15 |
+
|
| 16 |
+
class TestRecythonize(CythonTest):
|
| 17 |
+
|
| 18 |
+
def setUp(self):
|
| 19 |
+
CythonTest.setUp(self)
|
| 20 |
+
self.temp_dir = (
|
| 21 |
+
tempfile.mkdtemp(
|
| 22 |
+
prefix='recythonize-test',
|
| 23 |
+
dir='TEST_TMP' if os.path.isdir('TEST_TMP') else None
|
| 24 |
+
)
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
def tearDown(self):
|
| 28 |
+
CythonTest.tearDown(self)
|
| 29 |
+
shutil.rmtree(self.temp_dir)
|
| 30 |
+
|
| 31 |
+
def test_recythonize_pyx_on_pxd_change(self):
|
| 32 |
+
|
| 33 |
+
src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 34 |
+
|
| 35 |
+
a_pxd = os.path.join(src_dir, 'a.pxd')
|
| 36 |
+
a_pyx = os.path.join(src_dir, 'a.pyx')
|
| 37 |
+
a_c = os.path.join(src_dir, 'a.c')
|
| 38 |
+
dep_tree = Cython.Build.Dependencies.create_dependency_tree()
|
| 39 |
+
|
| 40 |
+
with open(a_pxd, 'w') as f:
|
| 41 |
+
f.write('cdef int value\n')
|
| 42 |
+
|
| 43 |
+
with open(a_pyx, 'w') as f:
|
| 44 |
+
f.write('value = 1\n')
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# The dependencies for "a.pyx" are "a.pxd" and "a.pyx".
|
| 48 |
+
self.assertEqual({a_pxd, a_pyx}, dep_tree.all_dependencies(a_pyx))
|
| 49 |
+
|
| 50 |
+
# Cythonize to create a.c
|
| 51 |
+
fresh_cythonize(a_pyx)
|
| 52 |
+
|
| 53 |
+
# Sleep to address coarse time-stamp precision.
|
| 54 |
+
time.sleep(1)
|
| 55 |
+
|
| 56 |
+
with open(a_c) as f:
|
| 57 |
+
a_c_contents1 = f.read()
|
| 58 |
+
|
| 59 |
+
with open(a_pxd, 'w') as f:
|
| 60 |
+
f.write('cdef double value\n')
|
| 61 |
+
|
| 62 |
+
fresh_cythonize(a_pyx)
|
| 63 |
+
|
| 64 |
+
with open(a_c) as f:
|
| 65 |
+
a_c_contents2 = f.read()
|
| 66 |
+
|
| 67 |
+
self.assertTrue("__pyx_v_1a_value = 1;" in a_c_contents1)
|
| 68 |
+
self.assertFalse("__pyx_v_1a_value = 1;" in a_c_contents2)
|
| 69 |
+
self.assertTrue("__pyx_v_1a_value = 1.0;" in a_c_contents2)
|
| 70 |
+
self.assertFalse("__pyx_v_1a_value = 1.0;" in a_c_contents1)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def test_recythonize_py_on_pxd_change(self):
|
| 74 |
+
|
| 75 |
+
src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 76 |
+
|
| 77 |
+
a_pxd = os.path.join(src_dir, 'a.pxd')
|
| 78 |
+
a_py = os.path.join(src_dir, 'a.py')
|
| 79 |
+
a_c = os.path.join(src_dir, 'a.c')
|
| 80 |
+
dep_tree = Cython.Build.Dependencies.create_dependency_tree()
|
| 81 |
+
|
| 82 |
+
with open(a_pxd, 'w') as f:
|
| 83 |
+
f.write('cdef int value\n')
|
| 84 |
+
|
| 85 |
+
with open(a_py, 'w') as f:
|
| 86 |
+
f.write('value = 1\n')
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
# The dependencies for "a.py" are "a.pxd" and "a.py".
|
| 90 |
+
self.assertEqual({a_pxd, a_py}, dep_tree.all_dependencies(a_py))
|
| 91 |
+
|
| 92 |
+
# Cythonize to create a.c
|
| 93 |
+
fresh_cythonize(a_py)
|
| 94 |
+
|
| 95 |
+
# Sleep to address coarse time-stamp precision.
|
| 96 |
+
time.sleep(1)
|
| 97 |
+
|
| 98 |
+
with open(a_c) as f:
|
| 99 |
+
a_c_contents1 = f.read()
|
| 100 |
+
|
| 101 |
+
with open(a_pxd, 'w') as f:
|
| 102 |
+
f.write('cdef double value\n')
|
| 103 |
+
|
| 104 |
+
fresh_cythonize(a_py)
|
| 105 |
+
|
| 106 |
+
with open(a_c) as f:
|
| 107 |
+
a_c_contents2 = f.read()
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
self.assertTrue("__pyx_v_1a_value = 1;" in a_c_contents1)
|
| 111 |
+
self.assertFalse("__pyx_v_1a_value = 1;" in a_c_contents2)
|
| 112 |
+
self.assertTrue("__pyx_v_1a_value = 1.0;" in a_c_contents2)
|
| 113 |
+
self.assertFalse("__pyx_v_1a_value = 1.0;" in a_c_contents1)
|
| 114 |
+
|
| 115 |
+
def test_recythonize_pyx_on_dep_pxd_change(self):
|
| 116 |
+
src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 117 |
+
|
| 118 |
+
a_pxd = os.path.join(src_dir, 'a.pxd')
|
| 119 |
+
a_pyx = os.path.join(src_dir, 'a.pyx')
|
| 120 |
+
b_pyx = os.path.join(src_dir, 'b.pyx')
|
| 121 |
+
b_c = os.path.join(src_dir, 'b.c')
|
| 122 |
+
dep_tree = Cython.Build.Dependencies.create_dependency_tree()
|
| 123 |
+
|
| 124 |
+
with open(a_pxd, 'w') as f:
|
| 125 |
+
f.write('cdef int value\n')
|
| 126 |
+
|
| 127 |
+
with open(a_pyx, 'w') as f:
|
| 128 |
+
f.write('value = 1\n')
|
| 129 |
+
|
| 130 |
+
with open(b_pyx, 'w') as f:
|
| 131 |
+
f.write('cimport a\n' + 'a.value = 2\n')
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
# The dependencies for "b.pyx" are "a.pxd" and "b.pyx".
|
| 135 |
+
self.assertEqual({a_pxd, b_pyx}, dep_tree.all_dependencies(b_pyx))
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
# Cythonize to create b.c
|
| 139 |
+
fresh_cythonize([a_pyx, b_pyx])
|
| 140 |
+
|
| 141 |
+
# Sleep to address coarse time-stamp precision.
|
| 142 |
+
time.sleep(1)
|
| 143 |
+
|
| 144 |
+
with open(b_c) as f:
|
| 145 |
+
b_c_contents1 = f.read()
|
| 146 |
+
|
| 147 |
+
with open(a_pxd, 'w') as f:
|
| 148 |
+
f.write('cdef double value\n')
|
| 149 |
+
|
| 150 |
+
fresh_cythonize([a_pyx, b_pyx])
|
| 151 |
+
|
| 152 |
+
with open(b_c) as f:
|
| 153 |
+
b_c_contents2 = f.read()
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
self.assertTrue("__pyx_v_1a_value = 2;" in b_c_contents1)
|
| 158 |
+
self.assertFalse("__pyx_v_1a_value = 2;" in b_c_contents2)
|
| 159 |
+
self.assertTrue("__pyx_v_1a_value = 2.0;" in b_c_contents2)
|
| 160 |
+
self.assertFalse("__pyx_v_1a_value = 2.0;" in b_c_contents1)
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def test_recythonize_py_on_dep_pxd_change(self):
|
| 165 |
+
|
| 166 |
+
src_dir = tempfile.mkdtemp(prefix='src', dir=self.temp_dir)
|
| 167 |
+
|
| 168 |
+
a_pxd = os.path.join(src_dir, 'a.pxd')
|
| 169 |
+
a_pyx = os.path.join(src_dir, 'a.pyx')
|
| 170 |
+
b_pxd = os.path.join(src_dir, 'b.pxd')
|
| 171 |
+
b_py = os.path.join(src_dir, 'b.py')
|
| 172 |
+
b_c = os.path.join(src_dir, 'b.c')
|
| 173 |
+
dep_tree = Cython.Build.Dependencies.create_dependency_tree()
|
| 174 |
+
|
| 175 |
+
with open(a_pxd, 'w') as f:
|
| 176 |
+
f.write('cdef int value\n')
|
| 177 |
+
|
| 178 |
+
with open(a_pyx, 'w') as f:
|
| 179 |
+
f.write('value = 1\n')
|
| 180 |
+
|
| 181 |
+
with open(b_pxd, 'w') as f:
|
| 182 |
+
f.write('cimport a\n')
|
| 183 |
+
|
| 184 |
+
with open(b_py, 'w') as f:
|
| 185 |
+
f.write('a.value = 2\n')
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
# The dependencies for b.py are "a.pxd", "b.pxd" and "b.py".
|
| 189 |
+
self.assertEqual({a_pxd, b_pxd, b_py}, dep_tree.all_dependencies(b_py))
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
# Cythonize to create b.c
|
| 193 |
+
fresh_cythonize([a_pyx, b_py])
|
| 194 |
+
|
| 195 |
+
# Sleep to address coarse time-stamp precision.
|
| 196 |
+
time.sleep(1)
|
| 197 |
+
|
| 198 |
+
with open(b_c) as f:
|
| 199 |
+
b_c_contents1 = f.read()
|
| 200 |
+
|
| 201 |
+
with open(a_pxd, 'w') as f:
|
| 202 |
+
f.write('cdef double value\n')
|
| 203 |
+
|
| 204 |
+
fresh_cythonize([a_pyx, b_py])
|
| 205 |
+
|
| 206 |
+
with open(b_c) as f:
|
| 207 |
+
b_c_contents2 = f.read()
|
| 208 |
+
|
| 209 |
+
self.assertTrue("__pyx_v_1a_value = 2;" in b_c_contents1)
|
| 210 |
+
self.assertFalse("__pyx_v_1a_value = 2;" in b_c_contents2)
|
| 211 |
+
self.assertTrue("__pyx_v_1a_value = 2.0;" in b_c_contents2)
|
| 212 |
+
self.assertFalse("__pyx_v_1a_value = 2.0;" in b_c_contents1)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/Tests/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# empty file
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__init__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .Dependencies import cythonize
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
if sys.version_info < (3, 7):
|
| 5 |
+
from .Distutils import build_ext
|
| 6 |
+
del sys
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def __getattr__(name):
|
| 10 |
+
if name == 'build_ext':
|
| 11 |
+
# Lazy import, fails if distutils is not available (in Python 3.12+).
|
| 12 |
+
from .Distutils import build_ext
|
| 13 |
+
return build_ext
|
| 14 |
+
raise AttributeError("module '%s' has no attribute '%s'" % (__name__, name))
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/BuildExecutable.cpython-311.pyc
ADDED
|
Binary file (9.59 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/Cythonize.cpython-311.pyc
ADDED
|
Binary file (13.8 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/Dependencies.cpython-311.pyc
ADDED
|
Binary file (72 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/Distutils.cpython-311.pyc
ADDED
|
Binary file (288 Bytes). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/Inline.cpython-311.pyc
ADDED
|
Binary file (20.2 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/IpythonMagic.cpython-311.pyc
ADDED
|
Binary file (28.1 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Build/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (785 Bytes). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/AutoDocTransforms.py
ADDED
|
@@ -0,0 +1,318 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import, print_function
|
| 2 |
+
|
| 3 |
+
from .Visitor import CythonTransform
|
| 4 |
+
from .StringEncoding import EncodedString
|
| 5 |
+
from . import Options
|
| 6 |
+
from . import PyrexTypes
|
| 7 |
+
from ..CodeWriter import ExpressionWriter
|
| 8 |
+
from .Errors import warning
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class AnnotationWriter(ExpressionWriter):
|
| 12 |
+
"""
|
| 13 |
+
A Cython code writer for Python expressions in argument/variable annotations.
|
| 14 |
+
"""
|
| 15 |
+
def __init__(self, description=None):
|
| 16 |
+
"""description is optional. If specified it is used in
|
| 17 |
+
warning messages for the nodes that don't convert to string properly.
|
| 18 |
+
If not specified then no messages are generated.
|
| 19 |
+
"""
|
| 20 |
+
ExpressionWriter.__init__(self)
|
| 21 |
+
self.description = description
|
| 22 |
+
self.incomplete = False
|
| 23 |
+
|
| 24 |
+
def visit_Node(self, node):
|
| 25 |
+
self.put(u"<???>")
|
| 26 |
+
self.incomplete = True
|
| 27 |
+
if self.description:
|
| 28 |
+
warning(node.pos,
|
| 29 |
+
"Failed to convert code to string representation in {0}".format(
|
| 30 |
+
self.description), level=1)
|
| 31 |
+
|
| 32 |
+
def visit_LambdaNode(self, node):
|
| 33 |
+
# XXX Should we do better?
|
| 34 |
+
self.put("<lambda>")
|
| 35 |
+
self.incomplete = True
|
| 36 |
+
if self.description:
|
| 37 |
+
warning(node.pos,
|
| 38 |
+
"Failed to convert lambda to string representation in {0}".format(
|
| 39 |
+
self.description), level=1)
|
| 40 |
+
|
| 41 |
+
def visit_UnicodeNode(self, node):
|
| 42 |
+
# Discard Unicode prefix in annotations. Any tool looking at them
|
| 43 |
+
# would probably expect Py3 string semantics.
|
| 44 |
+
self.emit_string(node, "")
|
| 45 |
+
|
| 46 |
+
def visit_AnnotationNode(self, node):
|
| 47 |
+
self.put(node.string.unicode_value)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class EmbedSignature(CythonTransform):
|
| 51 |
+
|
| 52 |
+
def __init__(self, context):
|
| 53 |
+
super(EmbedSignature, self).__init__(context)
|
| 54 |
+
self.class_name = None
|
| 55 |
+
self.class_node = None
|
| 56 |
+
|
| 57 |
+
def _fmt_expr(self, node):
|
| 58 |
+
writer = ExpressionWriter()
|
| 59 |
+
result = writer.write(node)
|
| 60 |
+
# print(type(node).__name__, '-->', result)
|
| 61 |
+
return result
|
| 62 |
+
|
| 63 |
+
def _fmt_annotation(self, node):
|
| 64 |
+
writer = AnnotationWriter()
|
| 65 |
+
result = writer.write(node)
|
| 66 |
+
# print(type(node).__name__, '-->', result)
|
| 67 |
+
return result
|
| 68 |
+
|
| 69 |
+
def _setup_format(self):
|
| 70 |
+
signature_format = self.current_directives['embedsignature.format']
|
| 71 |
+
self.is_format_c = signature_format == 'c'
|
| 72 |
+
self.is_format_python = signature_format == 'python'
|
| 73 |
+
self.is_format_clinic = signature_format == 'clinic'
|
| 74 |
+
|
| 75 |
+
def _fmt_arg(self, arg):
|
| 76 |
+
arg_doc = arg.name
|
| 77 |
+
annotation = None
|
| 78 |
+
defaultval = None
|
| 79 |
+
if arg.is_self_arg:
|
| 80 |
+
if self.is_format_clinic:
|
| 81 |
+
arg_doc = '$self'
|
| 82 |
+
elif arg.is_type_arg:
|
| 83 |
+
if self.is_format_clinic:
|
| 84 |
+
arg_doc = '$type'
|
| 85 |
+
elif self.is_format_c:
|
| 86 |
+
if arg.type is not PyrexTypes.py_object_type:
|
| 87 |
+
arg_doc = arg.type.declaration_code(arg.name, for_display=1)
|
| 88 |
+
elif self.is_format_python:
|
| 89 |
+
if not arg.annotation:
|
| 90 |
+
annotation = self._fmt_type(arg.type)
|
| 91 |
+
if arg.annotation:
|
| 92 |
+
if not self.is_format_clinic:
|
| 93 |
+
annotation = self._fmt_annotation(arg.annotation)
|
| 94 |
+
if arg.default:
|
| 95 |
+
defaultval = self._fmt_expr(arg.default)
|
| 96 |
+
if annotation:
|
| 97 |
+
arg_doc = arg_doc + (': %s' % annotation)
|
| 98 |
+
if defaultval:
|
| 99 |
+
arg_doc = arg_doc + (' = %s' % defaultval)
|
| 100 |
+
elif defaultval:
|
| 101 |
+
arg_doc = arg_doc + ('=%s' % defaultval)
|
| 102 |
+
return arg_doc
|
| 103 |
+
|
| 104 |
+
def _fmt_star_arg(self, arg):
|
| 105 |
+
arg_doc = arg.name
|
| 106 |
+
if arg.annotation:
|
| 107 |
+
if not self.is_format_clinic:
|
| 108 |
+
annotation = self._fmt_annotation(arg.annotation)
|
| 109 |
+
arg_doc = arg_doc + (': %s' % annotation)
|
| 110 |
+
return arg_doc
|
| 111 |
+
|
| 112 |
+
def _fmt_arglist(self, args,
|
| 113 |
+
npoargs=0, npargs=0, pargs=None,
|
| 114 |
+
nkargs=0, kargs=None,
|
| 115 |
+
hide_self=False):
|
| 116 |
+
arglist = []
|
| 117 |
+
for arg in args:
|
| 118 |
+
if not hide_self or not arg.entry.is_self_arg:
|
| 119 |
+
arg_doc = self._fmt_arg(arg)
|
| 120 |
+
arglist.append(arg_doc)
|
| 121 |
+
if pargs:
|
| 122 |
+
arg_doc = self._fmt_star_arg(pargs)
|
| 123 |
+
arglist.insert(npargs + npoargs, '*%s' % arg_doc)
|
| 124 |
+
elif nkargs:
|
| 125 |
+
arglist.insert(npargs + npoargs, '*')
|
| 126 |
+
if npoargs:
|
| 127 |
+
arglist.insert(npoargs, '/')
|
| 128 |
+
if kargs:
|
| 129 |
+
arg_doc = self._fmt_star_arg(kargs)
|
| 130 |
+
arglist.append('**%s' % arg_doc)
|
| 131 |
+
return arglist
|
| 132 |
+
|
| 133 |
+
def _fmt_type(self, type):
|
| 134 |
+
if type is PyrexTypes.py_object_type:
|
| 135 |
+
return None
|
| 136 |
+
elif self.is_format_c:
|
| 137 |
+
code = type.declaration_code("", for_display=1)
|
| 138 |
+
return code
|
| 139 |
+
elif self.is_format_python:
|
| 140 |
+
annotation = None
|
| 141 |
+
if type.is_string:
|
| 142 |
+
annotation = self.current_directives['c_string_type']
|
| 143 |
+
elif type.is_numeric:
|
| 144 |
+
annotation = type.py_type_name()
|
| 145 |
+
if annotation is None:
|
| 146 |
+
code = type.declaration_code('', for_display=1)
|
| 147 |
+
annotation = code.replace(' ', '_').replace('*', 'p')
|
| 148 |
+
return annotation
|
| 149 |
+
return None
|
| 150 |
+
|
| 151 |
+
def _fmt_signature(self, cls_name, func_name, args,
|
| 152 |
+
npoargs=0, npargs=0, pargs=None,
|
| 153 |
+
nkargs=0, kargs=None,
|
| 154 |
+
return_expr=None, return_type=None,
|
| 155 |
+
hide_self=False):
|
| 156 |
+
arglist = self._fmt_arglist(
|
| 157 |
+
args, npoargs, npargs, pargs, nkargs, kargs,
|
| 158 |
+
hide_self=hide_self,
|
| 159 |
+
)
|
| 160 |
+
arglist_doc = ', '.join(arglist)
|
| 161 |
+
func_doc = '%s(%s)' % (func_name, arglist_doc)
|
| 162 |
+
if self.is_format_c and cls_name:
|
| 163 |
+
func_doc = '%s.%s' % (cls_name, func_doc)
|
| 164 |
+
if not self.is_format_clinic:
|
| 165 |
+
ret_doc = None
|
| 166 |
+
if return_expr:
|
| 167 |
+
ret_doc = self._fmt_annotation(return_expr)
|
| 168 |
+
elif return_type:
|
| 169 |
+
ret_doc = self._fmt_type(return_type)
|
| 170 |
+
if ret_doc:
|
| 171 |
+
func_doc = '%s -> %s' % (func_doc, ret_doc)
|
| 172 |
+
return func_doc
|
| 173 |
+
|
| 174 |
+
def _embed_signature(self, signature, node_doc):
|
| 175 |
+
if self.is_format_clinic and self.current_directives['binding']:
|
| 176 |
+
return node_doc
|
| 177 |
+
if node_doc:
|
| 178 |
+
if self.is_format_clinic:
|
| 179 |
+
docfmt = "%s\n--\n\n%s"
|
| 180 |
+
else:
|
| 181 |
+
docfmt = "%s\n%s"
|
| 182 |
+
return docfmt % (signature, node_doc)
|
| 183 |
+
else:
|
| 184 |
+
if self.is_format_clinic:
|
| 185 |
+
docfmt = "%s\n--\n\n"
|
| 186 |
+
else:
|
| 187 |
+
docfmt = "%s"
|
| 188 |
+
return docfmt % signature
|
| 189 |
+
|
| 190 |
+
def __call__(self, node):
|
| 191 |
+
if not Options.docstrings:
|
| 192 |
+
return node
|
| 193 |
+
else:
|
| 194 |
+
return super(EmbedSignature, self).__call__(node)
|
| 195 |
+
|
| 196 |
+
def visit_ClassDefNode(self, node):
|
| 197 |
+
oldname = self.class_name
|
| 198 |
+
oldclass = self.class_node
|
| 199 |
+
self.class_node = node
|
| 200 |
+
try:
|
| 201 |
+
# PyClassDefNode
|
| 202 |
+
self.class_name = node.name
|
| 203 |
+
except AttributeError:
|
| 204 |
+
# CClassDefNode
|
| 205 |
+
self.class_name = node.class_name
|
| 206 |
+
self.visitchildren(node)
|
| 207 |
+
self.class_name = oldname
|
| 208 |
+
self.class_node = oldclass
|
| 209 |
+
return node
|
| 210 |
+
|
| 211 |
+
def visit_LambdaNode(self, node):
|
| 212 |
+
# lambda expressions so not have signature or inner functions
|
| 213 |
+
return node
|
| 214 |
+
|
| 215 |
+
def visit_DefNode(self, node):
|
| 216 |
+
if not self.current_directives['embedsignature']:
|
| 217 |
+
return node
|
| 218 |
+
self._setup_format()
|
| 219 |
+
|
| 220 |
+
is_constructor = False
|
| 221 |
+
hide_self = False
|
| 222 |
+
if node.entry.is_special:
|
| 223 |
+
is_constructor = self.class_node and node.name == '__init__'
|
| 224 |
+
if not is_constructor:
|
| 225 |
+
return node
|
| 226 |
+
class_name = None
|
| 227 |
+
func_name = node.name
|
| 228 |
+
if self.is_format_c:
|
| 229 |
+
func_name = self.class_name
|
| 230 |
+
hide_self = True
|
| 231 |
+
else:
|
| 232 |
+
class_name, func_name = self.class_name, node.name
|
| 233 |
+
|
| 234 |
+
npoargs = getattr(node, 'num_posonly_args', 0)
|
| 235 |
+
nkargs = getattr(node, 'num_kwonly_args', 0)
|
| 236 |
+
npargs = len(node.args) - nkargs - npoargs
|
| 237 |
+
signature = self._fmt_signature(
|
| 238 |
+
class_name, func_name, node.args,
|
| 239 |
+
npoargs, npargs, node.star_arg,
|
| 240 |
+
nkargs, node.starstar_arg,
|
| 241 |
+
return_expr=node.return_type_annotation,
|
| 242 |
+
return_type=None, hide_self=hide_self)
|
| 243 |
+
if signature:
|
| 244 |
+
if is_constructor and self.is_format_c:
|
| 245 |
+
doc_holder = self.class_node.entry.type.scope
|
| 246 |
+
else:
|
| 247 |
+
doc_holder = node.entry
|
| 248 |
+
if doc_holder.doc is not None:
|
| 249 |
+
old_doc = doc_holder.doc
|
| 250 |
+
elif not is_constructor and getattr(node, 'py_func', None) is not None:
|
| 251 |
+
old_doc = node.py_func.entry.doc
|
| 252 |
+
else:
|
| 253 |
+
old_doc = None
|
| 254 |
+
new_doc = self._embed_signature(signature, old_doc)
|
| 255 |
+
doc_holder.doc = EncodedString(new_doc)
|
| 256 |
+
if not is_constructor and getattr(node, 'py_func', None) is not None:
|
| 257 |
+
node.py_func.entry.doc = EncodedString(new_doc)
|
| 258 |
+
return node
|
| 259 |
+
|
| 260 |
+
def visit_CFuncDefNode(self, node):
|
| 261 |
+
if not node.overridable: # not cpdef FOO(...):
|
| 262 |
+
return node
|
| 263 |
+
if not self.current_directives['embedsignature']:
|
| 264 |
+
return node
|
| 265 |
+
self._setup_format()
|
| 266 |
+
|
| 267 |
+
signature = self._fmt_signature(
|
| 268 |
+
self.class_name, node.declarator.base.name,
|
| 269 |
+
node.declarator.args,
|
| 270 |
+
return_type=node.return_type)
|
| 271 |
+
if signature:
|
| 272 |
+
if node.entry.doc is not None:
|
| 273 |
+
old_doc = node.entry.doc
|
| 274 |
+
elif getattr(node, 'py_func', None) is not None:
|
| 275 |
+
old_doc = node.py_func.entry.doc
|
| 276 |
+
else:
|
| 277 |
+
old_doc = None
|
| 278 |
+
new_doc = self._embed_signature(signature, old_doc)
|
| 279 |
+
node.entry.doc = EncodedString(new_doc)
|
| 280 |
+
py_func = getattr(node, 'py_func', None)
|
| 281 |
+
if py_func is not None:
|
| 282 |
+
py_func.entry.doc = EncodedString(new_doc)
|
| 283 |
+
return node
|
| 284 |
+
|
| 285 |
+
def visit_PropertyNode(self, node):
|
| 286 |
+
if not self.current_directives['embedsignature']:
|
| 287 |
+
return node
|
| 288 |
+
self._setup_format()
|
| 289 |
+
|
| 290 |
+
entry = node.entry
|
| 291 |
+
body = node.body
|
| 292 |
+
prop_name = entry.name
|
| 293 |
+
type_name = None
|
| 294 |
+
if entry.visibility == 'public':
|
| 295 |
+
if self.is_format_c:
|
| 296 |
+
# property synthesised from a cdef public attribute
|
| 297 |
+
type_name = entry.type.declaration_code("", for_display=1)
|
| 298 |
+
if not entry.type.is_pyobject:
|
| 299 |
+
type_name = "'%s'" % type_name
|
| 300 |
+
elif entry.type.is_extension_type:
|
| 301 |
+
type_name = entry.type.module_name + '.' + type_name
|
| 302 |
+
elif self.is_format_python:
|
| 303 |
+
type_name = self._fmt_type(entry.type)
|
| 304 |
+
if type_name is None:
|
| 305 |
+
for stat in body.stats:
|
| 306 |
+
if stat.name != '__get__':
|
| 307 |
+
continue
|
| 308 |
+
if self.is_format_c:
|
| 309 |
+
prop_name = '%s.%s' % (self.class_name, prop_name)
|
| 310 |
+
ret_annotation = stat.return_type_annotation
|
| 311 |
+
if ret_annotation:
|
| 312 |
+
type_name = self._fmt_annotation(ret_annotation)
|
| 313 |
+
if type_name is not None :
|
| 314 |
+
signature = '%s: %s' % (prop_name, type_name)
|
| 315 |
+
new_doc = self._embed_signature(signature, entry.doc)
|
| 316 |
+
if not self.is_format_clinic:
|
| 317 |
+
entry.doc = EncodedString(new_doc)
|
| 318 |
+
return node
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/CmdLine.py
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Cython - Command Line Parsing
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
from __future__ import absolute_import
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
from argparse import ArgumentParser, Action, SUPPRESS
|
| 10 |
+
from . import Options
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
if sys.version_info < (3, 3):
|
| 14 |
+
# TODO: This workaround can be removed in Cython 3.1
|
| 15 |
+
FileNotFoundError = IOError
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class ParseDirectivesAction(Action):
|
| 19 |
+
def __call__(self, parser, namespace, values, option_string=None):
|
| 20 |
+
old_directives = dict(getattr(namespace, self.dest,
|
| 21 |
+
Options.get_directive_defaults()))
|
| 22 |
+
directives = Options.parse_directive_list(
|
| 23 |
+
values, relaxed_bool=True, current_settings=old_directives)
|
| 24 |
+
setattr(namespace, self.dest, directives)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class ParseOptionsAction(Action):
|
| 28 |
+
def __call__(self, parser, namespace, values, option_string=None):
|
| 29 |
+
options = dict(getattr(namespace, self.dest, {}))
|
| 30 |
+
for opt in values.split(','):
|
| 31 |
+
if '=' in opt:
|
| 32 |
+
n, v = opt.split('=', 1)
|
| 33 |
+
v = v.lower() not in ('false', 'f', '0', 'no')
|
| 34 |
+
else:
|
| 35 |
+
n, v = opt, True
|
| 36 |
+
options[n] = v
|
| 37 |
+
setattr(namespace, self.dest, options)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class ParseCompileTimeEnvAction(Action):
|
| 41 |
+
def __call__(self, parser, namespace, values, option_string=None):
|
| 42 |
+
old_env = dict(getattr(namespace, self.dest, {}))
|
| 43 |
+
new_env = Options.parse_compile_time_env(values, current_settings=old_env)
|
| 44 |
+
setattr(namespace, self.dest, new_env)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class ActivateAllWarningsAction(Action):
|
| 48 |
+
def __call__(self, parser, namespace, values, option_string=None):
|
| 49 |
+
directives = getattr(namespace, 'compiler_directives', {})
|
| 50 |
+
directives.update(Options.extra_warnings)
|
| 51 |
+
namespace.compiler_directives = directives
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class SetLenientAction(Action):
|
| 55 |
+
def __call__(self, parser, namespace, values, option_string=None):
|
| 56 |
+
namespace.error_on_unknown_names = False
|
| 57 |
+
namespace.error_on_uninitialized = False
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class SetGDBDebugAction(Action):
|
| 61 |
+
def __call__(self, parser, namespace, values, option_string=None):
|
| 62 |
+
namespace.gdb_debug = True
|
| 63 |
+
namespace.output_dir = os.curdir
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class SetGDBDebugOutputAction(Action):
|
| 67 |
+
def __call__(self, parser, namespace, values, option_string=None):
|
| 68 |
+
namespace.gdb_debug = True
|
| 69 |
+
namespace.output_dir = values
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class SetAnnotateCoverageAction(Action):
|
| 73 |
+
def __call__(self, parser, namespace, values, option_string=None):
|
| 74 |
+
namespace.annotate = True
|
| 75 |
+
namespace.annotate_coverage_xml = values
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def create_cython_argparser():
|
| 79 |
+
description = "Cython (https://cython.org/) is a compiler for code written in the "\
|
| 80 |
+
"Cython language. Cython is based on Pyrex by Greg Ewing."
|
| 81 |
+
|
| 82 |
+
parser = ArgumentParser(description=description, argument_default=SUPPRESS)
|
| 83 |
+
|
| 84 |
+
parser.add_argument("-V", "--version", dest='show_version', action='store_const', const=1,
|
| 85 |
+
help='Display version number of cython compiler')
|
| 86 |
+
parser.add_argument("-l", "--create-listing", dest='use_listing_file', action='store_const', const=1,
|
| 87 |
+
help='Write error messages to a listing file')
|
| 88 |
+
parser.add_argument("-I", "--include-dir", dest='include_path', action='append',
|
| 89 |
+
help='Search for include files in named directory '
|
| 90 |
+
'(multiple include directories are allowed).')
|
| 91 |
+
parser.add_argument("-o", "--output-file", dest='output_file', action='store', type=str,
|
| 92 |
+
help='Specify name of generated C file')
|
| 93 |
+
parser.add_argument("-t", "--timestamps", dest='timestamps', action='store_const', const=1,
|
| 94 |
+
help='Only compile newer source files')
|
| 95 |
+
parser.add_argument("-f", "--force", dest='timestamps', action='store_const', const=0,
|
| 96 |
+
help='Compile all source files (overrides implied -t)')
|
| 97 |
+
parser.add_argument("-v", "--verbose", dest='verbose', action='count',
|
| 98 |
+
help='Be verbose, print file names on multiple compilation')
|
| 99 |
+
parser.add_argument("-p", "--embed-positions", dest='embed_pos_in_docstring', action='store_const', const=1,
|
| 100 |
+
help='If specified, the positions in Cython files of each '
|
| 101 |
+
'function definition is embedded in its docstring.')
|
| 102 |
+
parser.add_argument("--cleanup", dest='generate_cleanup_code', action='store', type=int,
|
| 103 |
+
help='Release interned objects on python exit, for memory debugging. '
|
| 104 |
+
'Level indicates aggressiveness, default 0 releases nothing.')
|
| 105 |
+
parser.add_argument("-w", "--working", dest='working_path', action='store', type=str,
|
| 106 |
+
help='Sets the working directory for Cython (the directory modules are searched from)')
|
| 107 |
+
parser.add_argument("--gdb", action=SetGDBDebugAction, nargs=0,
|
| 108 |
+
help='Output debug information for cygdb')
|
| 109 |
+
parser.add_argument("--gdb-outdir", action=SetGDBDebugOutputAction, type=str,
|
| 110 |
+
help='Specify gdb debug information output directory. Implies --gdb.')
|
| 111 |
+
parser.add_argument("-D", "--no-docstrings", dest='docstrings', action='store_false',
|
| 112 |
+
help='Strip docstrings from the compiled module.')
|
| 113 |
+
parser.add_argument('-a', '--annotate', action='store_const', const='default', dest='annotate',
|
| 114 |
+
help='Produce a colorized HTML version of the source.')
|
| 115 |
+
parser.add_argument('--annotate-fullc', action='store_const', const='fullc', dest='annotate',
|
| 116 |
+
help='Produce a colorized HTML version of the source '
|
| 117 |
+
'which includes entire generated C/C++-code.')
|
| 118 |
+
parser.add_argument("--annotate-coverage", dest='annotate_coverage_xml', action=SetAnnotateCoverageAction, type=str,
|
| 119 |
+
help='Annotate and include coverage information from cov.xml.')
|
| 120 |
+
parser.add_argument("--line-directives", dest='emit_linenums', action='store_true',
|
| 121 |
+
help='Produce #line directives pointing to the .pyx source')
|
| 122 |
+
parser.add_argument("-+", "--cplus", dest='cplus', action='store_const', const=1,
|
| 123 |
+
help='Output a C++ rather than C file.')
|
| 124 |
+
parser.add_argument('--embed', action='store_const', const='main',
|
| 125 |
+
help='Generate a main() function that embeds the Python interpreter. '
|
| 126 |
+
'Pass --embed=<method_name> for a name other than main().')
|
| 127 |
+
parser.add_argument('-2', dest='language_level', action='store_const', const=2,
|
| 128 |
+
help='Compile based on Python-2 syntax and code semantics.')
|
| 129 |
+
parser.add_argument('-3', dest='language_level', action='store_const', const=3,
|
| 130 |
+
help='Compile based on Python-3 syntax and code semantics.')
|
| 131 |
+
parser.add_argument('--3str', dest='language_level', action='store_const', const='3str',
|
| 132 |
+
help='Compile based on Python-3 syntax and code semantics without '
|
| 133 |
+
'assuming unicode by default for string literals under Python 2.')
|
| 134 |
+
parser.add_argument("--lenient", action=SetLenientAction, nargs=0,
|
| 135 |
+
help='Change some compile time errors to runtime errors to '
|
| 136 |
+
'improve Python compatibility')
|
| 137 |
+
parser.add_argument("--capi-reexport-cincludes", dest='capi_reexport_cincludes', action='store_true',
|
| 138 |
+
help='Add cincluded headers to any auto-generated header files.')
|
| 139 |
+
parser.add_argument("--fast-fail", dest='fast_fail', action='store_true',
|
| 140 |
+
help='Abort the compilation on the first error')
|
| 141 |
+
parser.add_argument("-Werror", "--warning-errors", dest='warning_errors', action='store_true',
|
| 142 |
+
help='Make all warnings into errors')
|
| 143 |
+
parser.add_argument("-Wextra", "--warning-extra", action=ActivateAllWarningsAction, nargs=0,
|
| 144 |
+
help='Enable extra warnings')
|
| 145 |
+
|
| 146 |
+
parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
|
| 147 |
+
dest='compiler_directives', type=str,
|
| 148 |
+
action=ParseDirectivesAction,
|
| 149 |
+
help='Overrides a compiler directive')
|
| 150 |
+
parser.add_argument('-E', '--compile-time-env', metavar='NAME=VALUE,...',
|
| 151 |
+
dest='compile_time_env', type=str,
|
| 152 |
+
action=ParseCompileTimeEnvAction,
|
| 153 |
+
help='Provides compile time env like DEF would do.')
|
| 154 |
+
parser.add_argument("--module-name",
|
| 155 |
+
dest='module_name', type=str, action='store',
|
| 156 |
+
help='Fully qualified module name. If not given, is '
|
| 157 |
+
'deduced from the import path if source file is in '
|
| 158 |
+
'a package, or equals the filename otherwise.')
|
| 159 |
+
parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
|
| 160 |
+
parser.add_argument('sources', nargs='*', default=[])
|
| 161 |
+
|
| 162 |
+
# TODO: add help
|
| 163 |
+
parser.add_argument("-z", "--pre-import", dest='pre_import', action='store', type=str, help=SUPPRESS)
|
| 164 |
+
parser.add_argument("--convert-range", dest='convert_range', action='store_true', help=SUPPRESS)
|
| 165 |
+
parser.add_argument("--no-c-in-traceback", dest='c_line_in_traceback', action='store_false', help=SUPPRESS)
|
| 166 |
+
parser.add_argument("--cimport-from-pyx", dest='cimport_from_pyx', action='store_true', help=SUPPRESS)
|
| 167 |
+
parser.add_argument("--old-style-globals", dest='old_style_globals', action='store_true', help=SUPPRESS)
|
| 168 |
+
|
| 169 |
+
# debug stuff:
|
| 170 |
+
from . import DebugFlags
|
| 171 |
+
for name in vars(DebugFlags):
|
| 172 |
+
if name.startswith("debug"):
|
| 173 |
+
option_name = name.replace('_', '-')
|
| 174 |
+
parser.add_argument("--" + option_name, action='store_true', help=SUPPRESS)
|
| 175 |
+
|
| 176 |
+
return parser
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
def parse_command_line_raw(parser, args):
|
| 180 |
+
# special handling for --embed and --embed=xxxx as they aren't correctly parsed
|
| 181 |
+
def filter_out_embed_options(args):
|
| 182 |
+
with_embed, without_embed = [], []
|
| 183 |
+
for x in args:
|
| 184 |
+
if x == '--embed' or x.startswith('--embed='):
|
| 185 |
+
with_embed.append(x)
|
| 186 |
+
else:
|
| 187 |
+
without_embed.append(x)
|
| 188 |
+
return with_embed, without_embed
|
| 189 |
+
|
| 190 |
+
with_embed, args_without_embed = filter_out_embed_options(args)
|
| 191 |
+
|
| 192 |
+
arguments, unknown = parser.parse_known_args(args_without_embed)
|
| 193 |
+
|
| 194 |
+
sources = arguments.sources
|
| 195 |
+
del arguments.sources
|
| 196 |
+
|
| 197 |
+
# unknown can be either debug, embed or input files or really unknown
|
| 198 |
+
for option in unknown:
|
| 199 |
+
if option.startswith('-'):
|
| 200 |
+
parser.error("unknown option " + option)
|
| 201 |
+
else:
|
| 202 |
+
sources.append(option)
|
| 203 |
+
|
| 204 |
+
# embed-stuff must be handled extra:
|
| 205 |
+
for x in with_embed:
|
| 206 |
+
if x == '--embed':
|
| 207 |
+
name = 'main' # default value
|
| 208 |
+
else:
|
| 209 |
+
name = x[len('--embed='):]
|
| 210 |
+
setattr(arguments, 'embed', name)
|
| 211 |
+
|
| 212 |
+
return arguments, sources
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def parse_command_line(args):
|
| 216 |
+
parser = create_cython_argparser()
|
| 217 |
+
arguments, sources = parse_command_line_raw(parser, args)
|
| 218 |
+
|
| 219 |
+
work_dir = getattr(arguments, 'working_path', '')
|
| 220 |
+
for source in sources:
|
| 221 |
+
if work_dir and not os.path.isabs(source):
|
| 222 |
+
source = os.path.join(work_dir, source)
|
| 223 |
+
if not os.path.exists(source):
|
| 224 |
+
import errno
|
| 225 |
+
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), source)
|
| 226 |
+
|
| 227 |
+
options = Options.CompilationOptions(Options.default_options)
|
| 228 |
+
for name, value in vars(arguments).items():
|
| 229 |
+
if name.startswith('debug'):
|
| 230 |
+
from . import DebugFlags
|
| 231 |
+
if name in dir(DebugFlags):
|
| 232 |
+
setattr(DebugFlags, name, value)
|
| 233 |
+
else:
|
| 234 |
+
parser.error("Unknown debug flag: %s\n" % name)
|
| 235 |
+
elif hasattr(Options, name):
|
| 236 |
+
setattr(Options, name, value)
|
| 237 |
+
else:
|
| 238 |
+
setattr(options, name, value)
|
| 239 |
+
|
| 240 |
+
if options.use_listing_file and len(sources) > 1:
|
| 241 |
+
parser.error("cython: Only one source file allowed when using -o\n")
|
| 242 |
+
if len(sources) == 0 and not options.show_version:
|
| 243 |
+
parser.error("cython: Need at least one source file\n")
|
| 244 |
+
if Options.embed and len(sources) > 1:
|
| 245 |
+
parser.error("cython: Only one source file allowed when using --embed\n")
|
| 246 |
+
if options.module_name:
|
| 247 |
+
if options.timestamps:
|
| 248 |
+
parser.error("cython: Cannot use --module-name with --timestamps\n")
|
| 249 |
+
if len(sources) > 1:
|
| 250 |
+
parser.error("cython: Only one source file allowed when using --module-name\n")
|
| 251 |
+
return options, sources
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Dataclass.py
ADDED
|
@@ -0,0 +1,839 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# functions to transform a c class into a dataclass
|
| 2 |
+
|
| 3 |
+
from collections import OrderedDict
|
| 4 |
+
from textwrap import dedent
|
| 5 |
+
import operator
|
| 6 |
+
|
| 7 |
+
from . import ExprNodes
|
| 8 |
+
from . import Nodes
|
| 9 |
+
from . import PyrexTypes
|
| 10 |
+
from . import Builtin
|
| 11 |
+
from . import Naming
|
| 12 |
+
from .Errors import error, warning
|
| 13 |
+
from .Code import UtilityCode, TempitaUtilityCode, PyxCodeWriter
|
| 14 |
+
from .Visitor import VisitorTransform
|
| 15 |
+
from .StringEncoding import EncodedString
|
| 16 |
+
from .TreeFragment import TreeFragment
|
| 17 |
+
from .ParseTreeTransforms import NormalizeTree, SkipDeclarations
|
| 18 |
+
from .Options import copy_inherited_directives
|
| 19 |
+
|
| 20 |
+
_dataclass_loader_utilitycode = None
|
| 21 |
+
|
| 22 |
+
def make_dataclasses_module_callnode(pos):
|
| 23 |
+
global _dataclass_loader_utilitycode
|
| 24 |
+
if not _dataclass_loader_utilitycode:
|
| 25 |
+
python_utility_code = UtilityCode.load_cached("Dataclasses_fallback", "Dataclasses.py")
|
| 26 |
+
python_utility_code = EncodedString(python_utility_code.impl)
|
| 27 |
+
_dataclass_loader_utilitycode = TempitaUtilityCode.load(
|
| 28 |
+
"SpecificModuleLoader", "Dataclasses.c",
|
| 29 |
+
context={'cname': "dataclasses", 'py_code': python_utility_code.as_c_string_literal()})
|
| 30 |
+
return ExprNodes.PythonCapiCallNode(
|
| 31 |
+
pos, "__Pyx_Load_dataclasses_Module",
|
| 32 |
+
PyrexTypes.CFuncType(PyrexTypes.py_object_type, []),
|
| 33 |
+
utility_code=_dataclass_loader_utilitycode,
|
| 34 |
+
args=[],
|
| 35 |
+
)
|
| 36 |
+
|
| 37 |
+
def make_dataclass_call_helper(pos, callable, kwds):
|
| 38 |
+
utility_code = UtilityCode.load_cached("DataclassesCallHelper", "Dataclasses.c")
|
| 39 |
+
func_type = PyrexTypes.CFuncType(
|
| 40 |
+
PyrexTypes.py_object_type, [
|
| 41 |
+
PyrexTypes.CFuncTypeArg("callable", PyrexTypes.py_object_type, None),
|
| 42 |
+
PyrexTypes.CFuncTypeArg("kwds", PyrexTypes.py_object_type, None)
|
| 43 |
+
],
|
| 44 |
+
)
|
| 45 |
+
return ExprNodes.PythonCapiCallNode(
|
| 46 |
+
pos,
|
| 47 |
+
function_name="__Pyx_DataclassesCallHelper",
|
| 48 |
+
func_type=func_type,
|
| 49 |
+
utility_code=utility_code,
|
| 50 |
+
args=[callable, kwds],
|
| 51 |
+
)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class RemoveAssignmentsToNames(VisitorTransform, SkipDeclarations):
|
| 55 |
+
"""
|
| 56 |
+
Cython (and Python) normally treats
|
| 57 |
+
|
| 58 |
+
class A:
|
| 59 |
+
x = 1
|
| 60 |
+
|
| 61 |
+
as generating a class attribute. However for dataclasses the `= 1` should be interpreted as
|
| 62 |
+
a default value to initialize an instance attribute with.
|
| 63 |
+
This transform therefore removes the `x=1` assignment so that the class attribute isn't
|
| 64 |
+
generated, while recording what it has removed so that it can be used in the initialization.
|
| 65 |
+
"""
|
| 66 |
+
def __init__(self, names):
|
| 67 |
+
super(RemoveAssignmentsToNames, self).__init__()
|
| 68 |
+
self.names = names
|
| 69 |
+
self.removed_assignments = {}
|
| 70 |
+
|
| 71 |
+
def visit_CClassNode(self, node):
|
| 72 |
+
self.visitchildren(node)
|
| 73 |
+
return node
|
| 74 |
+
|
| 75 |
+
def visit_PyClassNode(self, node):
|
| 76 |
+
return node # go no further
|
| 77 |
+
|
| 78 |
+
def visit_FuncDefNode(self, node):
|
| 79 |
+
return node # go no further
|
| 80 |
+
|
| 81 |
+
def visit_SingleAssignmentNode(self, node):
|
| 82 |
+
if node.lhs.is_name and node.lhs.name in self.names:
|
| 83 |
+
if node.lhs.name in self.removed_assignments:
|
| 84 |
+
warning(node.pos, ("Multiple assignments for '%s' in dataclass; "
|
| 85 |
+
"using most recent") % node.lhs.name, 1)
|
| 86 |
+
self.removed_assignments[node.lhs.name] = node.rhs
|
| 87 |
+
return []
|
| 88 |
+
return node
|
| 89 |
+
|
| 90 |
+
# I believe cascaded assignment is always a syntax error with annotations
|
| 91 |
+
# so there's no need to define visit_CascadedAssignmentNode
|
| 92 |
+
|
| 93 |
+
def visit_Node(self, node):
|
| 94 |
+
self.visitchildren(node)
|
| 95 |
+
return node
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class TemplateCode(object):
|
| 99 |
+
"""
|
| 100 |
+
Adds the ability to keep track of placeholder argument names to PyxCodeWriter.
|
| 101 |
+
|
| 102 |
+
Also adds extra_stats which are nodes bundled at the end when this
|
| 103 |
+
is converted to a tree.
|
| 104 |
+
"""
|
| 105 |
+
_placeholder_count = 0
|
| 106 |
+
|
| 107 |
+
def __init__(self, writer=None, placeholders=None, extra_stats=None):
|
| 108 |
+
self.writer = PyxCodeWriter() if writer is None else writer
|
| 109 |
+
self.placeholders = {} if placeholders is None else placeholders
|
| 110 |
+
self.extra_stats = [] if extra_stats is None else extra_stats
|
| 111 |
+
|
| 112 |
+
def add_code_line(self, code_line):
|
| 113 |
+
self.writer.putln(code_line)
|
| 114 |
+
|
| 115 |
+
def add_code_lines(self, code_lines):
|
| 116 |
+
for line in code_lines:
|
| 117 |
+
self.writer.putln(line)
|
| 118 |
+
|
| 119 |
+
def reset(self):
|
| 120 |
+
# don't attempt to reset placeholders - it really doesn't matter if
|
| 121 |
+
# we have unused placeholders
|
| 122 |
+
self.writer.reset()
|
| 123 |
+
|
| 124 |
+
def empty(self):
|
| 125 |
+
return self.writer.empty()
|
| 126 |
+
|
| 127 |
+
def indenter(self):
|
| 128 |
+
return self.writer.indenter()
|
| 129 |
+
|
| 130 |
+
def new_placeholder(self, field_names, value):
|
| 131 |
+
name = self._new_placeholder_name(field_names)
|
| 132 |
+
self.placeholders[name] = value
|
| 133 |
+
return name
|
| 134 |
+
|
| 135 |
+
def add_extra_statements(self, statements):
|
| 136 |
+
if self.extra_stats is None:
|
| 137 |
+
assert False, "Can only use add_extra_statements on top-level writer"
|
| 138 |
+
self.extra_stats.extend(statements)
|
| 139 |
+
|
| 140 |
+
def _new_placeholder_name(self, field_names):
|
| 141 |
+
while True:
|
| 142 |
+
name = "DATACLASS_PLACEHOLDER_%d" % self._placeholder_count
|
| 143 |
+
if (name not in self.placeholders
|
| 144 |
+
and name not in field_names):
|
| 145 |
+
# make sure name isn't already used and doesn't
|
| 146 |
+
# conflict with a variable name (which is unlikely but possible)
|
| 147 |
+
break
|
| 148 |
+
self._placeholder_count += 1
|
| 149 |
+
return name
|
| 150 |
+
|
| 151 |
+
def generate_tree(self, level='c_class'):
|
| 152 |
+
stat_list_node = TreeFragment(
|
| 153 |
+
self.writer.getvalue(),
|
| 154 |
+
level=level,
|
| 155 |
+
pipeline=[NormalizeTree(None)],
|
| 156 |
+
).substitute(self.placeholders)
|
| 157 |
+
|
| 158 |
+
stat_list_node.stats += self.extra_stats
|
| 159 |
+
return stat_list_node
|
| 160 |
+
|
| 161 |
+
def insertion_point(self):
|
| 162 |
+
new_writer = self.writer.insertion_point()
|
| 163 |
+
return TemplateCode(
|
| 164 |
+
writer=new_writer,
|
| 165 |
+
placeholders=self.placeholders,
|
| 166 |
+
extra_stats=self.extra_stats
|
| 167 |
+
)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class _MISSING_TYPE(object):
|
| 171 |
+
pass
|
| 172 |
+
MISSING = _MISSING_TYPE()
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
class Field(object):
|
| 176 |
+
"""
|
| 177 |
+
Field is based on the dataclasses.field class from the standard library module.
|
| 178 |
+
It is used internally during the generation of Cython dataclasses to keep track
|
| 179 |
+
of the settings for individual attributes.
|
| 180 |
+
|
| 181 |
+
Attributes of this class are stored as nodes so they can be used in code construction
|
| 182 |
+
more readily (i.e. we store BoolNode rather than bool)
|
| 183 |
+
"""
|
| 184 |
+
default = MISSING
|
| 185 |
+
default_factory = MISSING
|
| 186 |
+
private = False
|
| 187 |
+
|
| 188 |
+
literal_keys = ("repr", "hash", "init", "compare", "metadata")
|
| 189 |
+
|
| 190 |
+
# default values are defined by the CPython dataclasses.field
|
| 191 |
+
def __init__(self, pos, default=MISSING, default_factory=MISSING,
|
| 192 |
+
repr=None, hash=None, init=None,
|
| 193 |
+
compare=None, metadata=None,
|
| 194 |
+
is_initvar=False, is_classvar=False,
|
| 195 |
+
**additional_kwds):
|
| 196 |
+
if default is not MISSING:
|
| 197 |
+
self.default = default
|
| 198 |
+
if default_factory is not MISSING:
|
| 199 |
+
self.default_factory = default_factory
|
| 200 |
+
self.repr = repr or ExprNodes.BoolNode(pos, value=True)
|
| 201 |
+
self.hash = hash or ExprNodes.NoneNode(pos)
|
| 202 |
+
self.init = init or ExprNodes.BoolNode(pos, value=True)
|
| 203 |
+
self.compare = compare or ExprNodes.BoolNode(pos, value=True)
|
| 204 |
+
self.metadata = metadata or ExprNodes.NoneNode(pos)
|
| 205 |
+
self.is_initvar = is_initvar
|
| 206 |
+
self.is_classvar = is_classvar
|
| 207 |
+
|
| 208 |
+
for k, v in additional_kwds.items():
|
| 209 |
+
# There should not be any additional keywords!
|
| 210 |
+
error(v.pos, "cython.dataclasses.field() got an unexpected keyword argument '%s'" % k)
|
| 211 |
+
|
| 212 |
+
for field_name in self.literal_keys:
|
| 213 |
+
field_value = getattr(self, field_name)
|
| 214 |
+
if not field_value.is_literal:
|
| 215 |
+
error(field_value.pos,
|
| 216 |
+
"cython.dataclasses.field parameter '%s' must be a literal value" % field_name)
|
| 217 |
+
|
| 218 |
+
def iterate_record_node_arguments(self):
|
| 219 |
+
for key in (self.literal_keys + ('default', 'default_factory')):
|
| 220 |
+
value = getattr(self, key)
|
| 221 |
+
if value is not MISSING:
|
| 222 |
+
yield key, value
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
def process_class_get_fields(node):
|
| 226 |
+
var_entries = node.scope.var_entries
|
| 227 |
+
# order of definition is used in the dataclass
|
| 228 |
+
var_entries = sorted(var_entries, key=operator.attrgetter('pos'))
|
| 229 |
+
var_names = [entry.name for entry in var_entries]
|
| 230 |
+
|
| 231 |
+
# don't treat `x = 1` as an assignment of a class attribute within the dataclass
|
| 232 |
+
transform = RemoveAssignmentsToNames(var_names)
|
| 233 |
+
transform(node)
|
| 234 |
+
default_value_assignments = transform.removed_assignments
|
| 235 |
+
|
| 236 |
+
base_type = node.base_type
|
| 237 |
+
fields = OrderedDict()
|
| 238 |
+
while base_type:
|
| 239 |
+
if base_type.is_external or not base_type.scope.implemented:
|
| 240 |
+
warning(node.pos, "Cannot reliably handle Cython dataclasses with base types "
|
| 241 |
+
"in external modules since it is not possible to tell what fields they have", 2)
|
| 242 |
+
if base_type.dataclass_fields:
|
| 243 |
+
fields = base_type.dataclass_fields.copy()
|
| 244 |
+
break
|
| 245 |
+
base_type = base_type.base_type
|
| 246 |
+
|
| 247 |
+
for entry in var_entries:
|
| 248 |
+
name = entry.name
|
| 249 |
+
is_initvar = entry.declared_with_pytyping_modifier("dataclasses.InitVar")
|
| 250 |
+
# TODO - classvars aren't included in "var_entries" so are missed here
|
| 251 |
+
# and thus this code is never triggered
|
| 252 |
+
is_classvar = entry.declared_with_pytyping_modifier("typing.ClassVar")
|
| 253 |
+
if name in default_value_assignments:
|
| 254 |
+
assignment = default_value_assignments[name]
|
| 255 |
+
if (isinstance(assignment, ExprNodes.CallNode) and (
|
| 256 |
+
assignment.function.as_cython_attribute() == "dataclasses.field" or
|
| 257 |
+
Builtin.exprnode_to_known_standard_library_name(
|
| 258 |
+
assignment.function, node.scope) == "dataclasses.field")):
|
| 259 |
+
# I believe most of this is well-enforced when it's treated as a directive
|
| 260 |
+
# but it doesn't hurt to make sure
|
| 261 |
+
valid_general_call = (isinstance(assignment, ExprNodes.GeneralCallNode)
|
| 262 |
+
and isinstance(assignment.positional_args, ExprNodes.TupleNode)
|
| 263 |
+
and not assignment.positional_args.args
|
| 264 |
+
and (assignment.keyword_args is None or isinstance(assignment.keyword_args, ExprNodes.DictNode)))
|
| 265 |
+
valid_simple_call = (isinstance(assignment, ExprNodes.SimpleCallNode) and not assignment.args)
|
| 266 |
+
if not (valid_general_call or valid_simple_call):
|
| 267 |
+
error(assignment.pos, "Call to 'cython.dataclasses.field' must only consist "
|
| 268 |
+
"of compile-time keyword arguments")
|
| 269 |
+
continue
|
| 270 |
+
keyword_args = assignment.keyword_args.as_python_dict() if valid_general_call and assignment.keyword_args else {}
|
| 271 |
+
if 'default' in keyword_args and 'default_factory' in keyword_args:
|
| 272 |
+
error(assignment.pos, "cannot specify both default and default_factory")
|
| 273 |
+
continue
|
| 274 |
+
field = Field(node.pos, **keyword_args)
|
| 275 |
+
else:
|
| 276 |
+
if assignment.type in [Builtin.list_type, Builtin.dict_type, Builtin.set_type]:
|
| 277 |
+
# The standard library module generates a TypeError at runtime
|
| 278 |
+
# in this situation.
|
| 279 |
+
# Error message is copied from CPython
|
| 280 |
+
error(assignment.pos, "mutable default <class '{0}'> for field {1} is not allowed: "
|
| 281 |
+
"use default_factory".format(assignment.type.name, name))
|
| 282 |
+
|
| 283 |
+
field = Field(node.pos, default=assignment)
|
| 284 |
+
else:
|
| 285 |
+
field = Field(node.pos)
|
| 286 |
+
field.is_initvar = is_initvar
|
| 287 |
+
field.is_classvar = is_classvar
|
| 288 |
+
if entry.visibility == "private":
|
| 289 |
+
field.private = True
|
| 290 |
+
fields[name] = field
|
| 291 |
+
node.entry.type.dataclass_fields = fields
|
| 292 |
+
return fields
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform):
    """
    Implement @cython.dataclasses.dataclass on a cdef class.

    Synthesizes ``__dataclass_params__``, ``__dataclass_fields__`` and the
    requested generated methods (``__init__``, ``__repr__``, comparisons,
    ``__hash__``) and appends them to the class body.

    node -- the class definition node being decorated
    dataclass_args -- (positional_args, keyword_args) passed to the decorator,
        or None
    analyse_decs_transform -- transform used to analyse the freshly created
        statements in the class scope
    """
    # default argument values from https://docs.python.org/3/library/dataclasses.html
    kwargs = dict(init=True, repr=True, eq=True,
                  order=False, unsafe_hash=False,
                  frozen=False, kw_only=False)
    if dataclass_args is not None:
        if dataclass_args[0]:
            error(node.pos, "cython.dataclasses.dataclass takes no positional arguments")
        for k, v in dataclass_args[1].items():
            if k not in kwargs:
                error(node.pos,
                      "cython.dataclasses.dataclass() got an unexpected keyword argument '%s'" % k)
            if not isinstance(v, ExprNodes.BoolNode):
                # only compile-time True/False flags are supported here
                error(node.pos,
                      "Arguments passed to cython.dataclasses.dataclass must be True or False")
            kwargs[k] = v.value

    kw_only = kwargs['kw_only']

    fields = process_class_get_fields(node)

    dataclass_module = make_dataclasses_module_callnode(node.pos)

    # create __dataclass_params__ attribute. I try to use the exact
    # `_DataclassParams` class defined in the standard library module if at all possible
    # for maximum duck-typing compatibility.
    dataclass_params_func = ExprNodes.AttributeNode(node.pos, obj=dataclass_module,
                                                    attribute=EncodedString("_DataclassParams"))
    dataclass_params_keywords = ExprNodes.DictNode.from_pairs(
        node.pos,
        [ (ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(k)),
           ExprNodes.BoolNode(node.pos, value=v))
          for k, v in kwargs.items() ] +
        # extra parameters that newer versions of _DataclassParams expect;
        # NOTE(review): 'kw_only' appears both here and in kwargs above —
        # presumably harmless duplication, but worth confirming upstream
        [ (ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(k)),
           ExprNodes.BoolNode(node.pos, value=v))
          for k, v in [('kw_only', kw_only), ('match_args', False),
                       ('slots', False), ('weakref_slot', False)]
        ])
    dataclass_params = make_dataclass_call_helper(
        node.pos, dataclass_params_func, dataclass_params_keywords)
    dataclass_params_assignment = Nodes.SingleAssignmentNode(
        node.pos,
        lhs = ExprNodes.NameNode(node.pos, name=EncodedString("__dataclass_params__")),
        rhs = dataclass_params)

    dataclass_fields_stats = _set_up_dataclass_fields(node, fields, dataclass_module)

    stats = Nodes.StatListNode(node.pos,
                               stats=[dataclass_params_assignment] + dataclass_fields_stats)

    # generate the requested special methods as template source code
    code = TemplateCode()
    generate_init_code(code, kwargs['init'], node, fields, kw_only)
    generate_repr_code(code, kwargs['repr'], node, fields)
    generate_eq_code(code, kwargs['eq'], node, fields)
    generate_order_code(code, kwargs['order'], node, fields)
    generate_hash_code(code, kwargs['unsafe_hash'], kwargs['eq'], kwargs['frozen'], node, fields)

    stats.stats += code.generate_tree().stats

    # turn off annotation typing, so all arguments to __init__ are accepted as
    # generic objects and thus can accept _HAS_DEFAULT_FACTORY.
    # Type conversion comes later
    comp_directives = Nodes.CompilerDirectivesNode(node.pos,
        directives=copy_inherited_directives(node.scope.directives, annotation_typing=False),
        body=stats)

    comp_directives.analyse_declarations(node.scope)
    # probably already in this scope, but it doesn't hurt to make sure
    analyse_decs_transform.enter_scope(node, node.scope)
    analyse_decs_transform.visit(comp_directives)
    analyse_decs_transform.exit_scope()

    node.body.stats.append(comp_directives)
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
def generate_init_code(code, init, node, fields, kw_only):
    """
    Notes on CPython generated "__init__":
    * Implemented in `_init_fn`.
    * The use of the `dataclasses._HAS_DEFAULT_FACTORY` sentinel value as
      the default argument for fields that need constructing with a factory
      function is copied from the CPython implementation. (`None` isn't
      suitable because it could also be a value for the user to pass.)
      There's no real reason why it needs importing from the dataclasses module
      though - it could equally be a value generated by Cython when the module loads.
    * seen_default and the associated error message are copied directly from Python
    * Call to user-defined __post_init__ function (if it exists) is copied from
      CPython.

    Cython behaviour deviates a little here (to be decided if this is right...)
    Because the class variable from the assignment does not exist Cython fields will
    return None (or whatever their type default is) if not initialized while Python
    dataclasses will fall back to looking up the class variable.

    code -- TemplateCode accumulating the generated method source
    init -- the dataclass ``init`` flag; nothing is generated when False
    kw_only -- when True, all generated arguments become keyword-only
    """
    # respect a user-supplied __init__ (same rule as CPython dataclasses)
    if not init or node.scope.lookup_here("__init__"):
        return

    # selfname behaviour copied from the cpython module
    selfname = "__dataclass_self__" if "self" in fields else "self"
    args = [selfname]

    if kw_only:
        args.append("*")

    # the "def __init__(...)" line is written last (once all args are known),
    # so remember where the function header must go
    function_start_point = code.insertion_point()
    code = code.insertion_point()

    # create a temp to get _HAS_DEFAULT_FACTORY
    dataclass_module = make_dataclasses_module_callnode(node.pos)
    has_default_factory = ExprNodes.AttributeNode(
        node.pos,
        obj=dataclass_module,
        attribute=EncodedString("_HAS_DEFAULT_FACTORY")
    )

    default_factory_placeholder = code.new_placeholder(fields, has_default_factory)

    seen_default = False
    for name, field in fields.items():
        entry = node.scope.lookup(name)
        if entry.annotation:
            annotation = u": %s" % entry.annotation.string.value
        else:
            annotation = u""
        assignment = u''
        if field.default is not MISSING or field.default_factory is not MISSING:
            seen_default = True
            if field.default_factory is not MISSING:
                ph_name = default_factory_placeholder
            else:
                ph_name = code.new_placeholder(fields, field.default)  # 'default' should be a node
            assignment = u" = %s" % ph_name
        elif seen_default and not kw_only and field.init.value:
            # positional-argument ordering rule, copied from CPython
            error(entry.pos, ("non-default argument '%s' follows default argument "
                              "in dataclass __init__") % name)
            code.reset()
            return

        if field.init.value:
            args.append(u"%s%s%s" % (name, annotation, assignment))

        if field.is_initvar:
            continue  # init-only pseudo-fields are never stored on the instance
        elif field.default_factory is MISSING:
            if field.init.value:
                code.add_code_line(u"    %s.%s = %s" % (selfname, name, name))
            elif assignment:
                # not an argument to the function, but is still initialized
                code.add_code_line(u"    %s.%s%s" % (selfname, name, assignment))
        else:
            ph_name = code.new_placeholder(fields, field.default_factory)
            if field.init.value:
                # close to:
                # def __init__(self, name=_PLACEHOLDER_VALUE):
                #     self.name = name_default_factory() if name is _PLACEHOLDER_VALUE else name
                code.add_code_line(u"    %s.%s = %s() if %s is %s else %s" % (
                    selfname, name, ph_name, name, default_factory_placeholder, name))
            else:
                # still need to use the default factory to initialize
                code.add_code_line(u"    %s.%s = %s()" % (
                    selfname, name, ph_name))

    if node.scope.lookup("__post_init__"):
        # only init-only variables are forwarded, matching CPython
        post_init_vars = ", ".join(name for name, field in fields.items()
                                   if field.is_initvar)
        code.add_code_line("    %s.__post_init__(%s)" % (selfname, post_init_vars))

    if code.empty():
        code.add_code_line("    pass")

    # now that the full argument list is known, emit the function header
    args = u", ".join(args)
    function_start_point.add_code_line(u"def __init__(%s):" % args)
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def generate_repr_code(code, repr, node, fields):
    """
    The core of the CPython implementation is just:
    ['return self.__class__.__qualname__ + f"(' +
                      ', '.join([f"{f.name}={{self.{f.name}!r}}"
                                 for f in fields]) +
                      ')"'],

    The only notable difference here is self.__class__.__qualname__ -> type(self).__name__
    which is because Cython currently supports Python 2.

    However, it also has some guards for recursive repr invocations. In the standard
    library implementation they're done with a wrapper decorator that captures a set
    (with the set keyed by id and thread). Here we create a set as a thread local
    variable and key only by id.
    """
    if not repr or node.scope.lookup("__repr__"):
        return

    # The recursive guard is likely a little costly, so skip it if possible.
    # is_gc_simple defines where it can contain recursive objects
    needs_recursive_guard = False
    for name in fields.keys():
        entry = node.scope.lookup(name)
        type_ = entry.type
        if type_.is_memoryviewslice:
            # recursion can only come from the element type
            type_ = type_.dtype
        if not type_.is_pyobject:
            continue  # no GC
        if not type_.is_gc_simple:
            needs_recursive_guard = True
            break

    if needs_recursive_guard:
        # one guard set per thread, shared by all instances of the class
        code.add_code_line("__pyx_recursive_repr_guard = __import__('threading').local()")
        code.add_code_line("__pyx_recursive_repr_guard.running = set()")
    code.add_code_line("def __repr__(self):")
    if needs_recursive_guard:
        code.add_code_line("    key = id(self)")
        code.add_code_line("    guard_set = self.__pyx_recursive_repr_guard.running")
        code.add_code_line("    if key in guard_set: return '...'")
        code.add_code_line("    guard_set.add(key)")
        code.add_code_line("    try:")
    strs = [u"%s={self.%s!r}" % (name, name)
            for name, field in fields.items()
            if field.repr.value and not field.is_initvar]
    format_string = u", ".join(strs)

    # NOTE: the body lines use the deeper indent so that the same text is
    # valid both directly inside the function and inside the try block above
    code.add_code_line(u'        name = getattr(type(self), "__qualname__", type(self).__name__)')
    code.add_code_line(u"        return f'{name}(%s)'" % format_string)
    if needs_recursive_guard:
        code.add_code_line("    finally:")
        code.add_code_line("        guard_set.remove(key)")
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
def generate_cmp_code(code, op, funcname, node, fields):
    """
    Generate one rich-comparison method (*funcname*, using operator *op*)
    for the dataclass and append its source to *code*.

    Mirrors CPython dataclasses behaviour: the comparison only applies to
    instances of exactly the same class (otherwise ``NotImplemented`` is
    returned), and fields are compared in declaration order, like a tuple.
    """
    # a user-defined method takes precedence over the generated one
    if node.scope.lookup_here(funcname):
        return

    names = [name for name, field in fields.items() if (field.compare.value and not field.is_initvar)]

    # Fix: the "if"/"return NotImplemented" pair used to be a single
    # implicitly-concatenated string literal, emitting both statements on one
    # generated source line; split into separate list items for clarity.
    code.add_code_lines([
        "def %s(self, other):" % funcname,
        "    if other.__class__ is not self.__class__:",
        "        return NotImplemented",
        # cast once so the per-field accesses below go through the cdef class
        "    cdef %s other_cast" % node.class_name,
        "    other_cast = <%s>other" % node.class_name,
    ])

    # The Python implementation of dataclasses.py does a tuple comparison
    # (roughly):
    #     return self._attributes_to_tuple() {op} other._attributes_to_tuple()
    #
    # For the Cython implementation a tuple comparison isn't an option because
    # not all attributes can be converted to Python objects and stored in a tuple
    #
    # TODO - better diagnostics of whether the types support comparison before
    # generating the code. Plus, do we want to convert C structs to dicts and
    # compare them that way (I think not, but it might be in demand)?
    op_without_equals = op.replace('=', '')

    for name in names:
        if op != '==':
            # tuple comparison rules - early elements take precedence
            code.add_code_line("    if self.%s %s other_cast.%s: return True" % (
                name, op_without_equals, name))
        code.add_code_line("    if self.%s != other_cast.%s: return False" % (
            name, name))
    if "=" in op:
        code.add_code_line("    return True")  # "() == ()" is True
    else:
        code.add_code_line("    return False")
|
| 563 |
+
|
| 564 |
+
|
| 565 |
+
def generate_eq_code(code, eq, node, fields):
    """Emit an ``__eq__`` method for the dataclass unless ``eq`` is disabled."""
    if eq:
        generate_cmp_code(code, "==", "__eq__", node, fields)
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
def generate_order_code(code, order, node, fields):
    """Emit the ordering methods (__lt__/__le__/__gt__/__ge__) when ``order`` is set."""
    if not order:
        return

    comparison_methods = (
        ("<", "__lt__"),
        ("<=", "__le__"),
        (">", "__gt__"),
        (">=", "__ge__"),
    )
    for operator, method_name in comparison_methods:
        generate_cmp_code(code, operator, method_name, node, fields)
|
| 580 |
+
|
| 581 |
+
|
| 582 |
+
def generate_hash_code(code, unsafe_hash, eq, frozen, node, fields):
    """
    Copied from CPython implementation - the intention is to follow this as far as
    is possible:
    #    +------------------- unsafe_hash= parameter
    #    |       +----------- eq= parameter
    #    |       |       +--- frozen= parameter
    #    |       |       |
    #    v       v       v    |        |        |
    #                         |   no   |  yes   |  <--- class has explicitly defined __hash__
    # +=======+=======+=======+========+========+
    # | False | False | False |        |        | No __eq__, use the base class __hash__
    # +-------+-------+-------+--------+--------+
    # | False | False | True  |        |        | No __eq__, use the base class __hash__
    # +-------+-------+-------+--------+--------+
    # | False | True  | False | None   |        | <-- the default, not hashable
    # +-------+-------+-------+--------+--------+
    # | False | True  | True  | add    |        | Frozen, so hashable, allows override
    # +-------+-------+-------+--------+--------+
    # | True  | False | False | add    | raise  | Has no __eq__, but hashable
    # +-------+-------+-------+--------+--------+
    # | True  | False | True  | add    | raise  | Has no __eq__, but hashable
    # +-------+-------+-------+--------+--------+
    # | True  | True  | False | add    | raise  | Not frozen, but hashable
    # +-------+-------+-------+--------+--------+
    # | True  | True  | True  | add    | raise  | Frozen, so hashable
    # +=======+=======+=======+========+========+
    # For boxes that are blank, __hash__ is untouched and therefore
    # inherited from the base class. If the base is object, then
    # id-based hashing is used.

    The Python implementation creates a tuple of all the fields, then hashes them.
    This implementation creates a tuple of all the hashes of all the fields and hashes that.
    The reason for this slight difference is to avoid to-Python conversions for anything
    that Cython knows how to hash directly (It doesn't look like this currently applies to
    anything though...).
    """

    hash_entry = node.scope.lookup_here("__hash__")
    if hash_entry:
        # TODO ideally assignment of __hash__ to None shouldn't trigger this
        # but difficult to get the right information here
        if unsafe_hash:
            # error message taken from CPython dataclasses module
            error(node.pos, "Cannot overwrite attribute __hash__ in class %s" % node.class_name)
        return

    if not unsafe_hash:
        if not eq:
            # blank box in the table above: inherit the base class __hash__
            return
        if not frozen:
            # eq=True without frozen: the default dataclass is unhashable,
            # which CPython expresses by assigning __hash__ = None
            code.add_extra_statements([
                Nodes.SingleAssignmentNode(
                    node.pos,
                    lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__hash__")),
                    rhs=ExprNodes.NoneNode(node.pos),
                )
            ])
            return

    # fields participate in the hash if field(hash=...) says so, falling
    # back to the field's "compare" flag when hash is unset (None)
    names = [
        name for name, field in fields.items()
        if not field.is_initvar and (
            field.compare.value if field.hash.value is None else field.hash.value)
    ]

    # make a tuple of the hashes
    hash_tuple_items = u", ".join(u"self.%s" % name for name in names)
    if hash_tuple_items:
        hash_tuple_items += u","  # ensure that one arg form is a tuple

    # if we're here we want to generate a hash
    code.add_code_lines([
        "def __hash__(self):",
        "    return hash((%s))" % hash_tuple_items,
    ])
|
| 658 |
+
|
| 659 |
+
|
| 660 |
+
def get_field_type(pos, entry):
    """
    sets the .type attribute for a field

    Returns the annotation if possible (since this is what the dataclasses
    module does). If not (for example, attributes defined with cdef) then
    it creates a string fallback.

    pos -- position used for any fallback node created here
    entry -- the scope entry of the attribute being recorded
    """
    if entry.annotation:
        # Right now it doesn't look like cdef classes generate an
        # __annotations__ dict, therefore it's safe to just return
        # entry.annotation
        # (TODO: remove .string if we ditch PEP563)
        return entry.annotation.string
        # If they do in future then we may need to look up into that
        # to duplicating the node. The code below should do this:
        #class_name_node = ExprNodes.NameNode(pos, name=entry.scope.name)
        #annotations = ExprNodes.AttributeNode(
        #    pos, obj=class_name_node,
        #    attribute=EncodedString("__annotations__")
        #)
        #return ExprNodes.IndexNode(
        #    pos, base=annotations,
        #    index=ExprNodes.StringNode(pos, value=entry.name)
        #)
    else:
        # it's slightly unclear what the best option is here - we could
        # try to return PyType_Type. This case should only happen with
        # attributes defined with cdef so Cython is free to make it's own
        # decision
        s = EncodedString(entry.type.declaration_code("", for_display=1))
        return ExprNodes.StringNode(pos, value=s)
|
| 692 |
+
|
| 693 |
+
|
| 694 |
+
class FieldRecordNode(ExprNodes.ExprNode):
    """
    __dataclass_fields__ contains a bunch of field objects recording how each field
    of the dataclass was initialized (mainly corresponding to the arguments passed to
    the "field" function). This node is used for the attributes of these field objects.

    If possible, coerces `arg` to a Python object.
    Otherwise, generates a sensible backup string.
    """
    subexprs = ['arg']

    def __init__(self, pos, arg):
        super(FieldRecordNode, self).__init__(pos, arg=arg)

    def analyse_types(self, env):
        # this node is transparent: it takes its type from the wrapped arg
        self.arg.analyse_types(env)
        self.type = self.arg.type
        return self

    def coerce_to_pyobject(self, env):
        if self.arg.type.can_coerce_to_pyobject(env):
            return self.arg.coerce_to_pyobject(env)
        else:
            # A string representation of the code that gave the field seems like a reasonable
            # fallback. This'll mostly happen for "default" and "default_factory" where the
            # type may be a C-type that can't be converted to Python.
            return self._make_string()

    def _make_string(self):
        # render the wrapped expression back to source text as a StringNode
        from .AutoDocTransforms import AnnotationWriter
        writer = AnnotationWriter(description="Dataclass field")
        string = writer.write(self.arg)
        return ExprNodes.StringNode(self.pos, value=EncodedString(string))

    def generate_evaluation_code(self, code):
        # delegate entirely to the wrapped expression
        return self.arg.generate_evaluation_code(code)
|
| 730 |
+
|
| 731 |
+
|
| 732 |
+
def _set_up_dataclass_fields(node, fields, dataclass_module):
    """
    Build the statements that create ``__dataclass_fields__`` for the class.

    Returns a list of statement nodes: module-level constant assignments for
    non-trivial defaults, the ``__dataclass_fields__`` dict assignment, and
    the per-field name/type/_field_type fix-up statements.
    """
    # For defaults and default_factories containing things like lambda,
    # they're already declared in the class scope, and it creates a big
    # problem if multiple copies are floating around in both the __init__
    # function, and in the __dataclass_fields__ structure.
    # Therefore, create module-level constants holding these values and
    # pass those around instead
    #
    # If possible we use the `Field` class defined in the standard library
    # module so that the information stored here is as close to a regular
    # dataclass as is possible.
    variables_assignment_stats = []
    for name, field in fields.items():
        if field.private:
            continue  # doesn't appear in the public interface
        for attrname in [ "default", "default_factory" ]:
            field_default = getattr(field, attrname)
            if field_default is MISSING or field_default.is_literal or field_default.is_name:
                # some simple cases where we don't need to set up
                # the variable as a module-level constant
                continue
            global_scope = node.scope.global_scope()
            # double-mangle: once with the class name, once with the field name
            module_field_name = global_scope.mangle(
                global_scope.mangle(Naming.dataclass_field_default_cname, node.class_name),
                name)
            # create an entry in the global scope for this variable to live
            field_node = ExprNodes.NameNode(field_default.pos, name=EncodedString(module_field_name))
            field_node.entry = global_scope.declare_var(
                field_node.name, type=field_default.type or PyrexTypes.unspecified_type,
                pos=field_default.pos, cname=field_node.name, is_cdef=True,
                # TODO: do we need to set 'pytyping_modifiers' here?
            )
            # replace the field so that future users just receive the namenode
            setattr(field, attrname, field_node)

            variables_assignment_stats.append(
                Nodes.SingleAssignmentNode(field_default.pos, lhs=field_node, rhs=field_default))

    placeholders = {}
    field_func = ExprNodes.AttributeNode(node.pos, obj=dataclass_module,
                                         attribute=EncodedString("field"))
    dc_fields = ExprNodes.DictNode(node.pos, key_value_pairs=[])
    dc_fields_namevalue_assignments = []

    for name, field in fields.items():
        if field.private:
            continue  # doesn't appear in the public interface
        type_placeholder_name = "PLACEHOLDER_%s" % name
        placeholders[type_placeholder_name] = get_field_type(
            node.pos, node.scope.entries[name]
        )

        # defining these make the fields introspect more like a Python dataclass
        field_type_placeholder_name = "PLACEHOLDER_FIELD_TYPE_%s" % name
        if field.is_initvar:
            placeholders[field_type_placeholder_name] = ExprNodes.AttributeNode(
                node.pos, obj=dataclass_module,
                attribute=EncodedString("_FIELD_INITVAR")
            )
        elif field.is_classvar:
            # TODO - currently this isn't triggered
            placeholders[field_type_placeholder_name] = ExprNodes.AttributeNode(
                node.pos, obj=dataclass_module,
                attribute=EncodedString("_FIELD_CLASSVAR")
            )
        else:
            placeholders[field_type_placeholder_name] = ExprNodes.AttributeNode(
                node.pos, obj=dataclass_module,
                attribute=EncodedString("_FIELD")
            )

        # the recorded field arguments are passed through FieldRecordNode so
        # non-Python-convertible values degrade to a string representation
        dc_field_keywords = ExprNodes.DictNode.from_pairs(
            node.pos,
            [(ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(k)),
              FieldRecordNode(node.pos, arg=v))
             for k, v in field.iterate_record_node_arguments()]

        )
        dc_field_call = make_dataclass_call_helper(
            node.pos, field_func, dc_field_keywords
        )
        dc_fields.key_value_pairs.append(
            ExprNodes.DictItemNode(
                node.pos,
                key=ExprNodes.IdentifierStringNode(node.pos, value=EncodedString(name)),
                value=dc_field_call))
        dc_fields_namevalue_assignments.append(
            dedent(u"""\
                __dataclass_fields__[{0!r}].name = {0!r}
                __dataclass_fields__[{0!r}].type = {1}
                __dataclass_fields__[{0!r}]._field_type = {2}
            """).format(name, type_placeholder_name, field_type_placeholder_name))

    dataclass_fields_assignment = \
        Nodes.SingleAssignmentNode(node.pos,
                                   lhs = ExprNodes.NameNode(node.pos,
                                                            name=EncodedString("__dataclass_fields__")),
                                   rhs = dc_fields)

    # parse the accumulated fix-up source and substitute the placeholder nodes
    dc_fields_namevalue_assignments = u"\n".join(dc_fields_namevalue_assignments)
    dc_fields_namevalue_assignments = TreeFragment(dc_fields_namevalue_assignments,
                                                   level="c_class",
                                                   pipeline=[NormalizeTree(None)])
    dc_fields_namevalue_assignments = dc_fields_namevalue_assignments.substitute(placeholders)

    return (variables_assignment_stats
            + [dataclass_fields_assignment]
            + dc_fields_namevalue_assignments.stats)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/DebugFlags.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Debug switches for the Cython compiler itself.
# Can be enabled at the command line with --debug-xxx.

debug_disposal_code = 0
debug_temp_alloc = 0
debug_coercion = 0

# Write comments into the C code that show where temporary variables
# are allocated and released.
debug_temp_code_comments = 0

# Write a call trace of the code generation phase into the C code.
debug_trace_code_generation = 0

# Do not replace exceptions with user-friendly error messages.
debug_no_exception_intercept = 0

# Print a message each time a new stage in the pipeline is entered.
debug_verbose_pipeline = 0

# Raise an exception when an error is encountered.
debug_exception_on_error = 0
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Errors.py
ADDED
|
@@ -0,0 +1,300 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
#   Errors
#

from __future__ import absolute_import

# Python 2 fallback: treat both str and unicode as "string" via basestring
try:
    from __builtin__ import basestring as any_string_type
except ImportError:
    any_string_type = (bytes, str)

import sys
from contextlib import contextmanager

# thread-local storage for per-thread error state; degrade to a plain
# object-like class when the threading module is unavailable
try:
    from threading import local as _threadlocal
except ImportError:
    class _threadlocal(object): pass

threadlocal = _threadlocal()

from ..Utils import open_new_file
from . import DebugFlags
from . import Options
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class PyrexError(Exception):
    """Base class for errors raised by the Cython compiler."""
    pass
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class PyrexWarning(Exception):
    """Base class for warnings issued by the Cython compiler."""
    pass
|
| 33 |
+
|
| 34 |
+
class CannotSpecialize(PyrexError):
    """Raised when a fused/generic construct cannot be specialized."""
    pass
|
| 36 |
+
|
| 37 |
+
def context(position):
    """Return a short source excerpt around *position*, framed by dashed rules.

    *position* is a (source_descriptor, line, column) tuple; the descriptor
    must provide ``get_lines()``.
    """
    source = position[0]
    assert not (isinstance(source, any_string_type)), (
        "Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
    try:
        lines = source.get_lines()
    except UnicodeDecodeError:
        # the file could not be decoded - show a placeholder instead
        excerpt = u"[unprintable code]\n"
    else:
        # up to five lines of context before the error line, plus a caret
        start = max(0, position[1] - 6)
        snippet = u''.join(lines[start:position[1]])
        excerpt = u'...\n%s%s^\n' % (snippet, u' ' * position[2])
    return u'%s\n%s%s\n' % (u'-' * 60, excerpt, u'-' * 60)
|
| 51 |
+
|
| 52 |
+
def format_position(position):
    """Render *position* as ``"<file>:<line>:<col>: "``; empty string when falsy."""
    if not position:
        return u''
    descriptor, line, col = position[0], position[1], position[2]
    return u"%s:%d:%d: " % (descriptor.get_error_description(), line, col)
|
| 57 |
+
|
| 58 |
+
def format_error(message, position):
    """Prefix *message* with a source excerpt and position string when known."""
    if not position:
        return message
    pos_str = format_position(position)
    excerpt = context(position)
    return u'\nError compiling Cython file:\n%s\n%s%s' % (excerpt, pos_str, message or u'')
|
| 64 |
+
|
| 65 |
+
class CompileError(PyrexError):
    """A user-facing compilation error bound to an optional source position."""

    def __init__(self, position = None, message = u""):
        # position: (source_descriptor, line, column) tuple or None
        self.position = position
        # the raw message, without the position/excerpt decoration
        self.message_only = message
        self.formatted_message = format_error(message, position)
        # set to True once this error has been shown to the user
        self.reported = False
        Exception.__init__(self, self.formatted_message)
        # Python Exception subclass pickling is broken,
        # see https://bugs.python.org/issue1692335
        self.args = (position, message)

    def __str__(self):
        return self.formatted_message
|
| 79 |
+
|
| 80 |
+
class CompileWarning(PyrexWarning):
    """A compiler warning bound to an optional source position."""

    def __init__(self, position = None, message = ""):
        # position: (source_descriptor, line, column) tuple or None
        self.position = position
        Exception.__init__(self, format_position(position) + message)
|
| 85 |
+
|
| 86 |
+
class InternalError(Exception):
    """Raised for compiler bugs; should never be triggered by user code."""

    def __init__(self, message):
        self.message_only = message
        Exception.__init__(self, u"Internal compiler error: %s" % message)
class AbortError(Exception):
    """Thrown to stop the compilation immediately."""

    def __init__(self, message):
        self.message_only = message
        Exception.__init__(self, u"Abort error: %s" % message)
class CompilerCrash(CompileError):
    """Raised when an unexpected exception escapes a compiler transform.

    Wraps the causing exception and (optionally) its traceback into a
    readable crash report attached to a source position.
    """

    def __init__(self, pos, context, message, cause, stacktrace=None):
        message = u'\n' + message if message else u'\n'
        self.message_only = message
        if context:
            message = u"Compiler crash in %s%s" % (context, message)
        if stacktrace:
            import traceback
            message += (
                u'\n\nCompiler crash traceback from this point on:\n' +
                u''.join(traceback.format_tb(stacktrace)))
        if cause:
            if not stacktrace:
                message += u'\n'
            message += u'%s: %s' % (cause.__class__.__name__, cause)
        CompileError.__init__(self, pos, message)
        # Exception-subclass pickling requires args to round-trip,
        # see https://bugs.python.org/issue1692335
        self.args = (pos, context, message, cause, stacktrace)
class NoElementTreeInstalledException(PyrexError):
    """Raised when options.gdb_debug is enabled but no ElementTree
    implementation could be found.
    """
def open_listing_file(path, echo_to_stderr=True):
    """Begin a new error listing and reset the per-thread error counter.

    If *path* is None no listing file is opened.  When *echo_to_stderr* is
    true, errors are additionally echoed to ``sys.stderr``.
    """
    if path is None:
        threadlocal.cython_errors_listing_file = None
    else:
        threadlocal.cython_errors_listing_file = open_new_file(path)
    threadlocal.cython_errors_echo_file = sys.stderr if echo_to_stderr else None
    threadlocal.cython_errors_count = 0
def close_listing_file():
    """Close and forget the current thread's listing file, if any."""
    if threadlocal.cython_errors_listing_file:
        threadlocal.cython_errors_listing_file.close()
        threadlocal.cython_errors_listing_file = None
def report_error(err, use_stack=True):
    """Record a CompileError.

    If errors are currently being held (see hold_errors()) and *use_stack*
    is true, the error is appended to the innermost held-errors list.
    Otherwise it is written to the listing and echo files and the per-thread
    error counter is incremented.  Raises AbortError when Options.fast_fail
    is enabled.
    """
    error_stack = threadlocal.cython_errors_stack
    if error_stack and use_stack:
        error_stack[-1].append(err)
        return
    # See Main.py for why dual reporting occurs. Quick fix for now.
    if err.reported:
        return
    err.reported = True
    try:
        line = u"%s\n" % err
    except UnicodeEncodeError:
        # Fall back when the exception message itself cannot be rendered.
        line = format_error(
            getattr(err, 'message_only', "[unprintable exception message]"),
            getattr(err, 'position', None)) + u'\n'
    for out in (threadlocal.cython_errors_listing_file,
                threadlocal.cython_errors_echo_file):
        if out:
            try:
                out.write(line)
            except UnicodeEncodeError:
                out.write(line.encode('ASCII', 'replace'))
    threadlocal.cython_errors_count += 1
    if Options.fast_fail:
        raise AbortError("fatal errors")
def error(position, message):
    """Create, report and return a CompileError for *position*.

    Raises InternalError when no position is available, since a positionless
    error indicates a compiler bug rather than a user error.
    """
    if position is None:
        raise InternalError(message)
    err = CompileError(position, message)
    if DebugFlags.debug_exception_on_error:
        raise Exception(err)  # debug
    report_error(err)
    return err
LEVEL = 1 # warn about all errors level 1 or higher
|
| 186 |
+
|
| 187 |
+
def _write_file_encode(file, line):
|
| 188 |
+
try:
|
| 189 |
+
file.write(line)
|
| 190 |
+
except UnicodeEncodeError:
|
| 191 |
+
file.write(line.encode('ascii', 'replace'))
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def performance_hint(position, message, env):
    """Emit a performance hint if the 'show_performance_hints' directive is
    enabled in *env*; return the CompileWarning (or None when disabled)."""
    if not env.directives['show_performance_hints']:
        return
    warn = CompileWarning(position, message)
    line = "performance hint: %s\n" % warn
    for out in (threadlocal.cython_errors_listing_file,
                threadlocal.cython_errors_echo_file):
        if out:
            _write_file_encode(out, line)
    return warn
def message(position, message, level=1):
    """Emit an informational note if *level* >= LEVEL; return the warning."""
    if level < LEVEL:
        return
    warn = CompileWarning(position, message)
    line = u"note: %s\n" % warn
    for out in (threadlocal.cython_errors_listing_file,
                threadlocal.cython_errors_echo_file):
        if out:
            _write_file_encode(out, line)
    return warn
def warning(position, message, level=0):
    """Emit a compiler warning if *level* >= LEVEL.

    Under Options.warning_errors the warning is escalated to an error
    (provided a position is available).  Returns the CompileWarning or the
    CompileError produced by the escalation.
    """
    if level < LEVEL:
        return
    if Options.warning_errors and position:
        return error(position, message)
    warn = CompileWarning(position, message)
    line = u"warning: %s\n" % warn
    for out in (threadlocal.cython_errors_listing_file,
                threadlocal.cython_errors_echo_file):
        if out:
            _write_file_encode(out, line)
    return warn
def warn_once(position, message, level=0):
    """Like warning(), but emits each distinct *message* at most once per
    thread (tracked in threadlocal.cython_errors_warn_once_seen)."""
    if level < LEVEL:
        return
    warn_once_seen = threadlocal.cython_errors_warn_once_seen
    if message in warn_once_seen:
        return
    warn = CompileWarning(position, message)
    line = u"warning: %s\n" % warn
    for out in (threadlocal.cython_errors_listing_file,
                threadlocal.cython_errors_echo_file):
        if out:
            _write_file_encode(out, line)
    warn_once_seen.add(message)
    return warn
# These functions can be used to momentarily suppress errors.
|
| 257 |
+
|
| 258 |
+
def hold_errors():
    """Push a fresh list onto the per-thread error stack so that subsequent
    errors are held instead of reported; return that list."""
    errors = []
    threadlocal.cython_errors_stack.append(errors)
    return errors
def release_errors(ignore=False):
    """Pop the innermost held-errors list; report its errors unless *ignore*
    is set."""
    held = threadlocal.cython_errors_stack.pop()
    if not ignore:
        for err in held:
            report_error(err)
def held_errors():
    """Return the innermost held-errors list."""
    return threadlocal.cython_errors_stack[-1]
@contextmanager
def local_errors(ignore=False):
    """Context-manager equivalent of hold_errors()/release_errors().

    Yields the held-errors list; on exit the held errors are reported
    unless *ignore* is set.
    """
    errors = hold_errors()
    try:
        yield errors
    finally:
        release_errors(ignore=ignore)
# Keep all global state in thread local storage to support parallel cythonisation in distutils.
|
| 287 |
+
|
| 288 |
+
def init_thread():
    """Initialise per-thread error state.

    All global error state lives in thread-local storage to support
    parallel cythonization in distutils.
    """
    threadlocal.cython_errors_count = 0
    threadlocal.cython_errors_listing_file = None
    threadlocal.cython_errors_echo_file = None
    threadlocal.cython_errors_warn_once_seen = set()
    threadlocal.cython_errors_stack = []
def reset():
    """Clear the warn-once cache and any held errors for this thread."""
    threadlocal.cython_errors_warn_once_seen.clear()
    del threadlocal.cython_errors_stack[:]
def get_errors_count():
    """Return the number of errors reported on this thread so far."""
    return threadlocal.cython_errors_count
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/FusedNode.py
ADDED
|
@@ -0,0 +1,1015 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import copy
|
| 4 |
+
|
| 5 |
+
from . import (ExprNodes, PyrexTypes, MemoryView,
|
| 6 |
+
ParseTreeTransforms, StringEncoding, Errors,
|
| 7 |
+
Naming)
|
| 8 |
+
from .ExprNodes import CloneNode, ProxyNode, TupleNode
|
| 9 |
+
from .Nodes import FuncDefNode, CFuncDefNode, StatListNode, DefNode
|
| 10 |
+
from ..Utils import OrderedSet
|
| 11 |
+
from .Errors import error, CannotSpecialize
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class FusedCFuncDefNode(StatListNode):
|
| 15 |
+
"""
|
| 16 |
+
This node replaces a function with fused arguments. It deep-copies the
|
| 17 |
+
function for every permutation of fused types, and allocates a new local
|
| 18 |
+
scope for it. It keeps track of the original function in self.node, and
|
| 19 |
+
the entry of the original function in the symbol table is given the
|
| 20 |
+
'fused_cfunction' attribute which points back to us.
|
| 21 |
+
Then when a function lookup occurs (to e.g. call it), the call can be
|
| 22 |
+
dispatched to the right function.
|
| 23 |
+
|
| 24 |
+
node FuncDefNode the original function
|
| 25 |
+
nodes [FuncDefNode] list of copies of node with different specific types
|
| 26 |
+
py_func DefNode the fused python function subscriptable from
|
| 27 |
+
Python space
|
| 28 |
+
__signatures__ A DictNode mapping signature specialization strings
|
| 29 |
+
to PyCFunction nodes
|
| 30 |
+
resulting_fused_function PyCFunction for the fused DefNode that delegates
|
| 31 |
+
to specializations
|
| 32 |
+
fused_func_assignment Assignment of the fused function to the function name
|
| 33 |
+
defaults_tuple TupleNode of defaults (letting PyCFunctionNode build
|
| 34 |
+
defaults would result in many different tuples)
|
| 35 |
+
specialized_pycfuncs List of synthesized pycfunction nodes for the
|
| 36 |
+
specializations
|
| 37 |
+
code_object CodeObjectNode shared by all specializations and the
|
| 38 |
+
fused function
|
| 39 |
+
|
| 40 |
+
fused_compound_types All fused (compound) types (e.g. floating[:])
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
__signatures__ = None
|
| 44 |
+
resulting_fused_function = None
|
| 45 |
+
fused_func_assignment = None
|
| 46 |
+
defaults_tuple = None
|
| 47 |
+
decorators = None
|
| 48 |
+
|
| 49 |
+
child_attrs = StatListNode.child_attrs + [
|
| 50 |
+
'__signatures__', 'resulting_fused_function', 'fused_func_assignment']
|
| 51 |
+
|
| 52 |
+
def __init__(self, node, env):
|
| 53 |
+
super(FusedCFuncDefNode, self).__init__(node.pos)
|
| 54 |
+
|
| 55 |
+
self.nodes = []
|
| 56 |
+
self.node = node
|
| 57 |
+
|
| 58 |
+
is_def = isinstance(self.node, DefNode)
|
| 59 |
+
if is_def:
|
| 60 |
+
# self.node.decorators = []
|
| 61 |
+
self.copy_def(env)
|
| 62 |
+
else:
|
| 63 |
+
self.copy_cdef(env)
|
| 64 |
+
|
| 65 |
+
# Perform some sanity checks. If anything fails, it's a bug
|
| 66 |
+
for n in self.nodes:
|
| 67 |
+
assert not n.entry.type.is_fused
|
| 68 |
+
assert not n.local_scope.return_type.is_fused
|
| 69 |
+
if node.return_type.is_fused:
|
| 70 |
+
assert not n.return_type.is_fused
|
| 71 |
+
|
| 72 |
+
if not is_def and n.cfunc_declarator.optional_arg_count:
|
| 73 |
+
assert n.type.op_arg_struct
|
| 74 |
+
|
| 75 |
+
node.entry.fused_cfunction = self
|
| 76 |
+
# Copy the nodes as AnalyseDeclarationsTransform will prepend
|
| 77 |
+
# self.py_func to self.stats, as we only want specialized
|
| 78 |
+
# CFuncDefNodes in self.nodes
|
| 79 |
+
self.stats = self.nodes[:]
|
| 80 |
+
|
| 81 |
+
def copy_def(self, env):
|
| 82 |
+
"""
|
| 83 |
+
Create a copy of the original def or lambda function for specialized
|
| 84 |
+
versions.
|
| 85 |
+
"""
|
| 86 |
+
fused_compound_types = PyrexTypes.unique(
|
| 87 |
+
[arg.type for arg in self.node.args if arg.type.is_fused])
|
| 88 |
+
fused_types = self._get_fused_base_types(fused_compound_types)
|
| 89 |
+
permutations = PyrexTypes.get_all_specialized_permutations(fused_types)
|
| 90 |
+
|
| 91 |
+
self.fused_compound_types = fused_compound_types
|
| 92 |
+
|
| 93 |
+
if self.node.entry in env.pyfunc_entries:
|
| 94 |
+
env.pyfunc_entries.remove(self.node.entry)
|
| 95 |
+
|
| 96 |
+
for cname, fused_to_specific in permutations:
|
| 97 |
+
copied_node = copy.deepcopy(self.node)
|
| 98 |
+
# keep signature object identity for special casing in DefNode.analyse_declarations()
|
| 99 |
+
copied_node.entry.signature = self.node.entry.signature
|
| 100 |
+
|
| 101 |
+
self._specialize_function_args(copied_node.args, fused_to_specific)
|
| 102 |
+
copied_node.return_type = self.node.return_type.specialize(
|
| 103 |
+
fused_to_specific)
|
| 104 |
+
|
| 105 |
+
copied_node.analyse_declarations(env)
|
| 106 |
+
# copied_node.is_staticmethod = self.node.is_staticmethod
|
| 107 |
+
# copied_node.is_classmethod = self.node.is_classmethod
|
| 108 |
+
self.create_new_local_scope(copied_node, env, fused_to_specific)
|
| 109 |
+
self.specialize_copied_def(copied_node, cname, self.node.entry,
|
| 110 |
+
fused_to_specific, fused_compound_types)
|
| 111 |
+
|
| 112 |
+
PyrexTypes.specialize_entry(copied_node.entry, cname)
|
| 113 |
+
copied_node.entry.used = True
|
| 114 |
+
env.entries[copied_node.entry.name] = copied_node.entry
|
| 115 |
+
|
| 116 |
+
if not self.replace_fused_typechecks(copied_node):
|
| 117 |
+
break
|
| 118 |
+
|
| 119 |
+
self.orig_py_func = self.node
|
| 120 |
+
self.py_func = self.make_fused_cpdef(self.node, env, is_def=True)
|
| 121 |
+
|
| 122 |
+
def copy_cdef(self, env):
|
| 123 |
+
"""
|
| 124 |
+
Create a copy of the original c(p)def function for all specialized
|
| 125 |
+
versions.
|
| 126 |
+
"""
|
| 127 |
+
permutations = self.node.type.get_all_specialized_permutations()
|
| 128 |
+
# print 'Node %s has %d specializations:' % (self.node.entry.name,
|
| 129 |
+
# len(permutations))
|
| 130 |
+
# import pprint; pprint.pprint([d for cname, d in permutations])
|
| 131 |
+
|
| 132 |
+
# Prevent copying of the python function
|
| 133 |
+
self.orig_py_func = orig_py_func = self.node.py_func
|
| 134 |
+
self.node.py_func = None
|
| 135 |
+
if orig_py_func:
|
| 136 |
+
env.pyfunc_entries.remove(orig_py_func.entry)
|
| 137 |
+
|
| 138 |
+
fused_types = self.node.type.get_fused_types()
|
| 139 |
+
self.fused_compound_types = fused_types
|
| 140 |
+
|
| 141 |
+
new_cfunc_entries = []
|
| 142 |
+
for cname, fused_to_specific in permutations:
|
| 143 |
+
copied_node = copy.deepcopy(self.node)
|
| 144 |
+
|
| 145 |
+
# Make the types in our CFuncType specific.
|
| 146 |
+
try:
|
| 147 |
+
type = copied_node.type.specialize(fused_to_specific)
|
| 148 |
+
except CannotSpecialize:
|
| 149 |
+
# unlike for the argument types, specializing the return type can fail
|
| 150 |
+
error(copied_node.pos, "Return type is a fused type that cannot "
|
| 151 |
+
"be determined from the function arguments")
|
| 152 |
+
self.py_func = None # this is just to let the compiler exit gracefully
|
| 153 |
+
return
|
| 154 |
+
entry = copied_node.entry
|
| 155 |
+
type.specialize_entry(entry, cname)
|
| 156 |
+
|
| 157 |
+
# Reuse existing Entries (e.g. from .pxd files).
|
| 158 |
+
for i, orig_entry in enumerate(env.cfunc_entries):
|
| 159 |
+
if entry.cname == orig_entry.cname and type.same_as_resolved_type(orig_entry.type):
|
| 160 |
+
copied_node.entry = env.cfunc_entries[i]
|
| 161 |
+
if not copied_node.entry.func_cname:
|
| 162 |
+
copied_node.entry.func_cname = entry.func_cname
|
| 163 |
+
entry = copied_node.entry
|
| 164 |
+
type = entry.type
|
| 165 |
+
break
|
| 166 |
+
else:
|
| 167 |
+
new_cfunc_entries.append(entry)
|
| 168 |
+
|
| 169 |
+
copied_node.type = type
|
| 170 |
+
entry.type, type.entry = type, entry
|
| 171 |
+
|
| 172 |
+
entry.used = (entry.used or
|
| 173 |
+
self.node.entry.defined_in_pxd or
|
| 174 |
+
env.is_c_class_scope or
|
| 175 |
+
entry.is_cmethod)
|
| 176 |
+
|
| 177 |
+
if self.node.cfunc_declarator.optional_arg_count:
|
| 178 |
+
self.node.cfunc_declarator.declare_optional_arg_struct(
|
| 179 |
+
type, env, fused_cname=cname)
|
| 180 |
+
|
| 181 |
+
copied_node.return_type = type.return_type
|
| 182 |
+
self.create_new_local_scope(copied_node, env, fused_to_specific)
|
| 183 |
+
|
| 184 |
+
# Make the argument types in the CFuncDeclarator specific
|
| 185 |
+
self._specialize_function_args(copied_node.cfunc_declarator.args,
|
| 186 |
+
fused_to_specific)
|
| 187 |
+
|
| 188 |
+
# If a cpdef, declare all specialized cpdefs (this
|
| 189 |
+
# also calls analyse_declarations)
|
| 190 |
+
copied_node.declare_cpdef_wrapper(env)
|
| 191 |
+
if copied_node.py_func:
|
| 192 |
+
env.pyfunc_entries.remove(copied_node.py_func.entry)
|
| 193 |
+
|
| 194 |
+
self.specialize_copied_def(
|
| 195 |
+
copied_node.py_func, cname, self.node.entry.as_variable,
|
| 196 |
+
fused_to_specific, fused_types)
|
| 197 |
+
|
| 198 |
+
if not self.replace_fused_typechecks(copied_node):
|
| 199 |
+
break
|
| 200 |
+
|
| 201 |
+
# replace old entry with new entries
|
| 202 |
+
if self.node.entry in env.cfunc_entries:
|
| 203 |
+
cindex = env.cfunc_entries.index(self.node.entry)
|
| 204 |
+
env.cfunc_entries[cindex:cindex+1] = new_cfunc_entries
|
| 205 |
+
else:
|
| 206 |
+
env.cfunc_entries.extend(new_cfunc_entries)
|
| 207 |
+
|
| 208 |
+
if orig_py_func:
|
| 209 |
+
self.py_func = self.make_fused_cpdef(orig_py_func, env,
|
| 210 |
+
is_def=False)
|
| 211 |
+
else:
|
| 212 |
+
self.py_func = orig_py_func
|
| 213 |
+
|
| 214 |
+
def _get_fused_base_types(self, fused_compound_types):
|
| 215 |
+
"""
|
| 216 |
+
Get a list of unique basic fused types, from a list of
|
| 217 |
+
(possibly) compound fused types.
|
| 218 |
+
"""
|
| 219 |
+
base_types = []
|
| 220 |
+
seen = set()
|
| 221 |
+
for fused_type in fused_compound_types:
|
| 222 |
+
fused_type.get_fused_types(result=base_types, seen=seen)
|
| 223 |
+
return base_types
|
| 224 |
+
|
| 225 |
+
def _specialize_function_args(self, args, fused_to_specific):
|
| 226 |
+
for arg in args:
|
| 227 |
+
if arg.type.is_fused:
|
| 228 |
+
arg.type = arg.type.specialize(fused_to_specific)
|
| 229 |
+
if arg.type.is_memoryviewslice:
|
| 230 |
+
arg.type.validate_memslice_dtype(arg.pos)
|
| 231 |
+
if arg.annotation:
|
| 232 |
+
# TODO might be nice if annotations were specialized instead?
|
| 233 |
+
# (Or might be hard to do reliably)
|
| 234 |
+
arg.annotation.untyped = True
|
| 235 |
+
|
| 236 |
+
def create_new_local_scope(self, node, env, f2s):
|
| 237 |
+
"""
|
| 238 |
+
Create a new local scope for the copied node and append it to
|
| 239 |
+
self.nodes. A new local scope is needed because the arguments with the
|
| 240 |
+
fused types are already in the local scope, and we need the specialized
|
| 241 |
+
entries created after analyse_declarations on each specialized version
|
| 242 |
+
of the (CFunc)DefNode.
|
| 243 |
+
f2s is a dict mapping each fused type to its specialized version
|
| 244 |
+
"""
|
| 245 |
+
node.create_local_scope(env)
|
| 246 |
+
node.local_scope.fused_to_specific = f2s
|
| 247 |
+
|
| 248 |
+
# This is copied from the original function, set it to false to
|
| 249 |
+
# stop recursion
|
| 250 |
+
node.has_fused_arguments = False
|
| 251 |
+
self.nodes.append(node)
|
| 252 |
+
|
| 253 |
+
def specialize_copied_def(self, node, cname, py_entry, f2s, fused_compound_types):
|
| 254 |
+
"""Specialize the copy of a DefNode given the copied node,
|
| 255 |
+
the specialization cname and the original DefNode entry"""
|
| 256 |
+
fused_types = self._get_fused_base_types(fused_compound_types)
|
| 257 |
+
type_strings = [
|
| 258 |
+
PyrexTypes.specialization_signature_string(fused_type, f2s)
|
| 259 |
+
for fused_type in fused_types
|
| 260 |
+
]
|
| 261 |
+
|
| 262 |
+
node.specialized_signature_string = '|'.join(type_strings)
|
| 263 |
+
|
| 264 |
+
node.entry.pymethdef_cname = PyrexTypes.get_fused_cname(
|
| 265 |
+
cname, node.entry.pymethdef_cname)
|
| 266 |
+
node.entry.doc = py_entry.doc
|
| 267 |
+
node.entry.doc_cname = py_entry.doc_cname
|
| 268 |
+
|
| 269 |
+
def replace_fused_typechecks(self, copied_node):
|
| 270 |
+
"""
|
| 271 |
+
Branch-prune fused type checks like
|
| 272 |
+
|
| 273 |
+
if fused_t is int:
|
| 274 |
+
...
|
| 275 |
+
|
| 276 |
+
Returns whether an error was issued and whether we should stop in
|
| 277 |
+
in order to prevent a flood of errors.
|
| 278 |
+
"""
|
| 279 |
+
num_errors = Errors.get_errors_count()
|
| 280 |
+
transform = ParseTreeTransforms.ReplaceFusedTypeChecks(
|
| 281 |
+
copied_node.local_scope)
|
| 282 |
+
transform(copied_node)
|
| 283 |
+
|
| 284 |
+
if Errors.get_errors_count() > num_errors:
|
| 285 |
+
return False
|
| 286 |
+
|
| 287 |
+
return True
|
| 288 |
+
|
| 289 |
+
def _fused_instance_checks(self, normal_types, pyx_code, env):
|
| 290 |
+
"""
|
| 291 |
+
Generate Cython code for instance checks, matching an object to
|
| 292 |
+
specialized types.
|
| 293 |
+
"""
|
| 294 |
+
for specialized_type in normal_types:
|
| 295 |
+
# all_numeric = all_numeric and specialized_type.is_numeric
|
| 296 |
+
py_type_name = specialized_type.py_type_name()
|
| 297 |
+
if py_type_name == 'int':
|
| 298 |
+
# Support Python 2 long
|
| 299 |
+
py_type_name = '(int, long)'
|
| 300 |
+
pyx_code.context.update(
|
| 301 |
+
py_type_name=py_type_name,
|
| 302 |
+
specialized_type_name=specialized_type.specialization_string,
|
| 303 |
+
)
|
| 304 |
+
pyx_code.put_chunk(
|
| 305 |
+
u"""
|
| 306 |
+
if isinstance(arg, {{py_type_name}}):
|
| 307 |
+
dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'; break
|
| 308 |
+
""")
|
| 309 |
+
|
| 310 |
+
def _dtype_name(self, dtype):
|
| 311 |
+
name = str(dtype).replace('_', '__').replace(' ', '_')
|
| 312 |
+
if dtype.is_typedef:
|
| 313 |
+
name = Naming.fused_dtype_prefix + name
|
| 314 |
+
return name
|
| 315 |
+
|
| 316 |
+
def _dtype_type(self, dtype):
|
| 317 |
+
if dtype.is_typedef:
|
| 318 |
+
return self._dtype_name(dtype)
|
| 319 |
+
return str(dtype)
|
| 320 |
+
|
| 321 |
+
def _sizeof_dtype(self, dtype):
|
| 322 |
+
if dtype.is_pyobject:
|
| 323 |
+
return 'sizeof(void *)'
|
| 324 |
+
else:
|
| 325 |
+
return "sizeof(%s)" % self._dtype_type(dtype)
|
| 326 |
+
|
| 327 |
+
def _buffer_check_numpy_dtype_setup_cases(self, pyx_code):
|
| 328 |
+
"Setup some common cases to match dtypes against specializations"
|
| 329 |
+
with pyx_code.indenter("if kind in u'iu':"):
|
| 330 |
+
pyx_code.putln("pass")
|
| 331 |
+
pyx_code.named_insertion_point("dtype_int")
|
| 332 |
+
|
| 333 |
+
with pyx_code.indenter("elif kind == u'f':"):
|
| 334 |
+
pyx_code.putln("pass")
|
| 335 |
+
pyx_code.named_insertion_point("dtype_float")
|
| 336 |
+
|
| 337 |
+
with pyx_code.indenter("elif kind == u'c':"):
|
| 338 |
+
pyx_code.putln("pass")
|
| 339 |
+
pyx_code.named_insertion_point("dtype_complex")
|
| 340 |
+
|
| 341 |
+
with pyx_code.indenter("elif kind == u'O':"):
|
| 342 |
+
pyx_code.putln("pass")
|
| 343 |
+
pyx_code.named_insertion_point("dtype_object")
|
| 344 |
+
|
| 345 |
+
match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'"
|
| 346 |
+
no_match = "dest_sig[{{dest_sig_idx}}] = None"
|
| 347 |
+
def _buffer_check_numpy_dtype(self, pyx_code, specialized_buffer_types, pythran_types):
|
| 348 |
+
"""
|
| 349 |
+
Match a numpy dtype object to the individual specializations.
|
| 350 |
+
"""
|
| 351 |
+
self._buffer_check_numpy_dtype_setup_cases(pyx_code)
|
| 352 |
+
|
| 353 |
+
for specialized_type in pythran_types+specialized_buffer_types:
|
| 354 |
+
final_type = specialized_type
|
| 355 |
+
if specialized_type.is_pythran_expr:
|
| 356 |
+
specialized_type = specialized_type.org_buffer
|
| 357 |
+
dtype = specialized_type.dtype
|
| 358 |
+
pyx_code.context.update(
|
| 359 |
+
itemsize_match=self._sizeof_dtype(dtype) + " == itemsize",
|
| 360 |
+
signed_match="not (%s_is_signed ^ dtype_signed)" % self._dtype_name(dtype),
|
| 361 |
+
dtype=dtype,
|
| 362 |
+
specialized_type_name=final_type.specialization_string)
|
| 363 |
+
|
| 364 |
+
dtypes = [
|
| 365 |
+
(dtype.is_int, pyx_code.dtype_int),
|
| 366 |
+
(dtype.is_float, pyx_code.dtype_float),
|
| 367 |
+
(dtype.is_complex, pyx_code.dtype_complex)
|
| 368 |
+
]
|
| 369 |
+
|
| 370 |
+
for dtype_category, codewriter in dtypes:
|
| 371 |
+
if not dtype_category:
|
| 372 |
+
continue
|
| 373 |
+
cond = '{{itemsize_match}} and (<Py_ssize_t>arg.ndim) == %d' % (
|
| 374 |
+
specialized_type.ndim,)
|
| 375 |
+
if dtype.is_int:
|
| 376 |
+
cond += ' and {{signed_match}}'
|
| 377 |
+
|
| 378 |
+
if final_type.is_pythran_expr:
|
| 379 |
+
cond += ' and arg_is_pythran_compatible'
|
| 380 |
+
|
| 381 |
+
with codewriter.indenter("if %s:" % cond):
|
| 382 |
+
#codewriter.putln("print 'buffer match found based on numpy dtype'")
|
| 383 |
+
codewriter.putln(self.match)
|
| 384 |
+
codewriter.putln("break")
|
| 385 |
+
|
| 386 |
+
def _buffer_parse_format_string_check(self, pyx_code, decl_code,
|
| 387 |
+
specialized_type, env):
|
| 388 |
+
"""
|
| 389 |
+
For each specialized type, try to coerce the object to a memoryview
|
| 390 |
+
slice of that type. This means obtaining a buffer and parsing the
|
| 391 |
+
format string.
|
| 392 |
+
TODO: separate buffer acquisition from format parsing
|
| 393 |
+
"""
|
| 394 |
+
dtype = specialized_type.dtype
|
| 395 |
+
if specialized_type.is_buffer:
|
| 396 |
+
axes = [('direct', 'strided')] * specialized_type.ndim
|
| 397 |
+
else:
|
| 398 |
+
axes = specialized_type.axes
|
| 399 |
+
|
| 400 |
+
memslice_type = PyrexTypes.MemoryViewSliceType(dtype, axes)
|
| 401 |
+
memslice_type.create_from_py_utility_code(env)
|
| 402 |
+
pyx_code.context.update(
|
| 403 |
+
coerce_from_py_func=memslice_type.from_py_function,
|
| 404 |
+
dtype=dtype)
|
| 405 |
+
decl_code.putln(
|
| 406 |
+
"{{memviewslice_cname}} {{coerce_from_py_func}}(object, int)")
|
| 407 |
+
|
| 408 |
+
pyx_code.context.update(
|
| 409 |
+
specialized_type_name=specialized_type.specialization_string,
|
| 410 |
+
sizeof_dtype=self._sizeof_dtype(dtype),
|
| 411 |
+
ndim_dtype=specialized_type.ndim,
|
| 412 |
+
dtype_is_struct_obj=int(dtype.is_struct or dtype.is_pyobject))
|
| 413 |
+
|
| 414 |
+
# use the memoryview object to check itemsize and ndim.
|
| 415 |
+
# In principle it could check more, but these are the easiest to do quickly
|
| 416 |
+
pyx_code.put_chunk(
|
| 417 |
+
u"""
|
| 418 |
+
# try {{dtype}}
|
| 419 |
+
if (((itemsize == -1 and arg_as_memoryview.itemsize == {{sizeof_dtype}})
|
| 420 |
+
or itemsize == {{sizeof_dtype}})
|
| 421 |
+
and arg_as_memoryview.ndim == {{ndim_dtype}}):
|
| 422 |
+
{{if dtype_is_struct_obj}}
|
| 423 |
+
if __PYX_IS_PYPY2:
|
| 424 |
+
# I wasn't able to diagnose why, but PyPy2 fails to convert a
|
| 425 |
+
# memoryview to a Cython memoryview in this case
|
| 426 |
+
memslice = {{coerce_from_py_func}}(arg, 0)
|
| 427 |
+
else:
|
| 428 |
+
{{else}}
|
| 429 |
+
if True:
|
| 430 |
+
{{endif}}
|
| 431 |
+
memslice = {{coerce_from_py_func}}(arg_as_memoryview, 0)
|
| 432 |
+
if memslice.memview:
|
| 433 |
+
__PYX_XCLEAR_MEMVIEW(&memslice, 1)
|
| 434 |
+
# print 'found a match for the buffer through format parsing'
|
| 435 |
+
%s
|
| 436 |
+
break
|
| 437 |
+
else:
|
| 438 |
+
__pyx_PyErr_Clear()
|
| 439 |
+
""" % self.match)
|
| 440 |
+
|
| 441 |
+
def _buffer_checks(self, buffer_types, pythran_types, pyx_code, decl_code, accept_none, env):
|
| 442 |
+
"""
|
| 443 |
+
Generate Cython code to match objects to buffer specializations.
|
| 444 |
+
First try to get a numpy dtype object and match it against the individual
|
| 445 |
+
specializations. If that fails, try naively to coerce the object
|
| 446 |
+
to each specialization, which obtains the buffer each time and tries
|
| 447 |
+
to match the format string.
|
| 448 |
+
"""
|
| 449 |
+
# The first thing to find a match in this loop breaks out of the loop
|
| 450 |
+
pyx_code.put_chunk(
|
| 451 |
+
u"""
|
| 452 |
+
""" + (u"arg_is_pythran_compatible = False" if pythran_types else u"") + u"""
|
| 453 |
+
if ndarray is not None:
|
| 454 |
+
if isinstance(arg, ndarray):
|
| 455 |
+
dtype = arg.dtype
|
| 456 |
+
""" + (u"arg_is_pythran_compatible = True" if pythran_types else u"") + u"""
|
| 457 |
+
elif __pyx_memoryview_check(arg):
|
| 458 |
+
arg_base = arg.base
|
| 459 |
+
if isinstance(arg_base, ndarray):
|
| 460 |
+
dtype = arg_base.dtype
|
| 461 |
+
else:
|
| 462 |
+
dtype = None
|
| 463 |
+
else:
|
| 464 |
+
dtype = None
|
| 465 |
+
|
| 466 |
+
itemsize = -1
|
| 467 |
+
if dtype is not None:
|
| 468 |
+
itemsize = dtype.itemsize
|
| 469 |
+
kind = ord(dtype.kind)
|
| 470 |
+
dtype_signed = kind == u'i'
|
| 471 |
+
""")
|
| 472 |
+
pyx_code.indent(2)
|
| 473 |
+
if pythran_types:
|
| 474 |
+
pyx_code.put_chunk(
|
| 475 |
+
u"""
|
| 476 |
+
# Pythran only supports the endianness of the current compiler
|
| 477 |
+
byteorder = dtype.byteorder
|
| 478 |
+
if byteorder == "<" and not __Pyx_Is_Little_Endian():
|
| 479 |
+
arg_is_pythran_compatible = False
|
| 480 |
+
elif byteorder == ">" and __Pyx_Is_Little_Endian():
|
| 481 |
+
arg_is_pythran_compatible = False
|
| 482 |
+
if arg_is_pythran_compatible:
|
| 483 |
+
cur_stride = itemsize
|
| 484 |
+
shape = arg.shape
|
| 485 |
+
strides = arg.strides
|
| 486 |
+
for i in range(arg.ndim-1, -1, -1):
|
| 487 |
+
if (<Py_ssize_t>strides[i]) != cur_stride:
|
| 488 |
+
arg_is_pythran_compatible = False
|
| 489 |
+
break
|
| 490 |
+
cur_stride *= <Py_ssize_t> shape[i]
|
| 491 |
+
else:
|
| 492 |
+
arg_is_pythran_compatible = not (arg.flags.f_contiguous and (<Py_ssize_t>arg.ndim) > 1)
|
| 493 |
+
""")
|
| 494 |
+
pyx_code.named_insertion_point("numpy_dtype_checks")
|
| 495 |
+
self._buffer_check_numpy_dtype(pyx_code, buffer_types, pythran_types)
|
| 496 |
+
pyx_code.dedent(2)
|
| 497 |
+
|
| 498 |
+
if accept_none:
|
| 499 |
+
# If None is acceptable, then Cython <3.0 matched None with the
|
| 500 |
+
# first type. This behaviour isn't ideal, but keep it for backwards
|
| 501 |
+
# compatibility. Better behaviour would be to see if subsequent
|
| 502 |
+
# arguments give a stronger match.
|
| 503 |
+
pyx_code.context.update(
|
| 504 |
+
specialized_type_name=buffer_types[0].specialization_string
|
| 505 |
+
)
|
| 506 |
+
pyx_code.put_chunk(
|
| 507 |
+
"""
|
| 508 |
+
if arg is None:
|
| 509 |
+
%s
|
| 510 |
+
break
|
| 511 |
+
""" % self.match)
|
| 512 |
+
|
| 513 |
+
# creating a Cython memoryview from a Python memoryview avoids the
|
| 514 |
+
# need to get the buffer multiple times, and we can
|
| 515 |
+
# also use it to check itemsizes etc
|
| 516 |
+
pyx_code.put_chunk(
|
| 517 |
+
"""
|
| 518 |
+
try:
|
| 519 |
+
arg_as_memoryview = memoryview(arg)
|
| 520 |
+
except (ValueError, TypeError):
|
| 521 |
+
pass
|
| 522 |
+
""")
|
| 523 |
+
with pyx_code.indenter("else:"):
|
| 524 |
+
for specialized_type in buffer_types:
|
| 525 |
+
self._buffer_parse_format_string_check(
|
| 526 |
+
pyx_code, decl_code, specialized_type, env)
|
| 527 |
+
|
| 528 |
+
def _buffer_declarations(self, pyx_code, decl_code, all_buffer_types, pythran_types):
|
| 529 |
+
"""
|
| 530 |
+
If we have any buffer specializations, write out some variable
|
| 531 |
+
declarations and imports.
|
| 532 |
+
"""
|
| 533 |
+
decl_code.put_chunk(
|
| 534 |
+
u"""
|
| 535 |
+
ctypedef struct {{memviewslice_cname}}:
|
| 536 |
+
void *memview
|
| 537 |
+
|
| 538 |
+
void __PYX_XCLEAR_MEMVIEW({{memviewslice_cname}} *, int have_gil)
|
| 539 |
+
bint __pyx_memoryview_check(object)
|
| 540 |
+
bint __PYX_IS_PYPY2 "(CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION == 2)"
|
| 541 |
+
""")
|
| 542 |
+
|
| 543 |
+
pyx_code.local_variable_declarations.put_chunk(
|
| 544 |
+
u"""
|
| 545 |
+
cdef {{memviewslice_cname}} memslice
|
| 546 |
+
cdef Py_ssize_t itemsize
|
| 547 |
+
cdef bint dtype_signed
|
| 548 |
+
cdef Py_UCS4 kind
|
| 549 |
+
|
| 550 |
+
itemsize = -1
|
| 551 |
+
""")
|
| 552 |
+
|
| 553 |
+
if pythran_types:
|
| 554 |
+
pyx_code.local_variable_declarations.put_chunk(u"""
|
| 555 |
+
cdef bint arg_is_pythran_compatible
|
| 556 |
+
cdef Py_ssize_t cur_stride
|
| 557 |
+
""")
|
| 558 |
+
|
| 559 |
+
pyx_code.imports.put_chunk(
|
| 560 |
+
u"""
|
| 561 |
+
cdef type ndarray
|
| 562 |
+
ndarray = __Pyx_ImportNumPyArrayTypeIfAvailable()
|
| 563 |
+
""")
|
| 564 |
+
|
| 565 |
+
pyx_code.imports.put_chunk(
|
| 566 |
+
u"""
|
| 567 |
+
cdef memoryview arg_as_memoryview
|
| 568 |
+
"""
|
| 569 |
+
)
|
| 570 |
+
|
| 571 |
+
seen_typedefs = set()
|
| 572 |
+
seen_int_dtypes = set()
|
| 573 |
+
for buffer_type in all_buffer_types:
|
| 574 |
+
dtype = buffer_type.dtype
|
| 575 |
+
dtype_name = self._dtype_name(dtype)
|
| 576 |
+
if dtype.is_typedef:
|
| 577 |
+
if dtype_name not in seen_typedefs:
|
| 578 |
+
seen_typedefs.add(dtype_name)
|
| 579 |
+
decl_code.putln(
|
| 580 |
+
'ctypedef %s %s "%s"' % (dtype.resolve(), dtype_name,
|
| 581 |
+
dtype.empty_declaration_code()))
|
| 582 |
+
|
| 583 |
+
if buffer_type.dtype.is_int:
|
| 584 |
+
if str(dtype) not in seen_int_dtypes:
|
| 585 |
+
seen_int_dtypes.add(str(dtype))
|
| 586 |
+
pyx_code.context.update(dtype_name=dtype_name,
|
| 587 |
+
dtype_type=self._dtype_type(dtype))
|
| 588 |
+
pyx_code.local_variable_declarations.put_chunk(
|
| 589 |
+
u"""
|
| 590 |
+
cdef bint {{dtype_name}}_is_signed
|
| 591 |
+
{{dtype_name}}_is_signed = not (<{{dtype_type}}> -1 > 0)
|
| 592 |
+
""")
|
| 593 |
+
|
| 594 |
+
def _split_fused_types(self, arg):
|
| 595 |
+
"""
|
| 596 |
+
Specialize fused types and split into normal types and buffer types.
|
| 597 |
+
"""
|
| 598 |
+
specialized_types = PyrexTypes.get_specialized_types(arg.type)
|
| 599 |
+
|
| 600 |
+
# Prefer long over int, etc by sorting (see type classes in PyrexTypes.py)
|
| 601 |
+
specialized_types.sort()
|
| 602 |
+
|
| 603 |
+
seen_py_type_names = set()
|
| 604 |
+
normal_types, buffer_types, pythran_types = [], [], []
|
| 605 |
+
has_object_fallback = False
|
| 606 |
+
for specialized_type in specialized_types:
|
| 607 |
+
py_type_name = specialized_type.py_type_name()
|
| 608 |
+
if py_type_name:
|
| 609 |
+
if py_type_name in seen_py_type_names:
|
| 610 |
+
continue
|
| 611 |
+
seen_py_type_names.add(py_type_name)
|
| 612 |
+
if py_type_name == 'object':
|
| 613 |
+
has_object_fallback = True
|
| 614 |
+
else:
|
| 615 |
+
normal_types.append(specialized_type)
|
| 616 |
+
elif specialized_type.is_pythran_expr:
|
| 617 |
+
pythran_types.append(specialized_type)
|
| 618 |
+
elif specialized_type.is_buffer or specialized_type.is_memoryviewslice:
|
| 619 |
+
buffer_types.append(specialized_type)
|
| 620 |
+
|
| 621 |
+
return normal_types, buffer_types, pythran_types, has_object_fallback
|
| 622 |
+
|
| 623 |
+
def _unpack_argument(self, pyx_code):
|
| 624 |
+
pyx_code.put_chunk(
|
| 625 |
+
u"""
|
| 626 |
+
# PROCESSING ARGUMENT {{arg_tuple_idx}}
|
| 627 |
+
if {{arg_tuple_idx}} < len(<tuple>args):
|
| 628 |
+
arg = (<tuple>args)[{{arg_tuple_idx}}]
|
| 629 |
+
elif kwargs is not None and '{{arg.name}}' in <dict>kwargs:
|
| 630 |
+
arg = (<dict>kwargs)['{{arg.name}}']
|
| 631 |
+
else:
|
| 632 |
+
{{if arg.default}}
|
| 633 |
+
arg = (<tuple>defaults)[{{default_idx}}]
|
| 634 |
+
{{else}}
|
| 635 |
+
{{if arg_tuple_idx < min_positional_args}}
|
| 636 |
+
raise TypeError("Expected at least %d argument%s, got %d" % (
|
| 637 |
+
{{min_positional_args}}, {{'"s"' if min_positional_args != 1 else '""'}}, len(<tuple>args)))
|
| 638 |
+
{{else}}
|
| 639 |
+
raise TypeError("Missing keyword-only argument: '%s'" % "{{arg.default}}")
|
| 640 |
+
{{endif}}
|
| 641 |
+
{{endif}}
|
| 642 |
+
""")
|
| 643 |
+
|
| 644 |
+
def _fused_signature_index(self, pyx_code):
|
| 645 |
+
"""
|
| 646 |
+
Generate Cython code for constructing a persistent nested dictionary index of
|
| 647 |
+
fused type specialization signatures.
|
| 648 |
+
"""
|
| 649 |
+
pyx_code.put_chunk(
|
| 650 |
+
u"""
|
| 651 |
+
if not _fused_sigindex:
|
| 652 |
+
for sig in <dict> signatures:
|
| 653 |
+
sigindex_node = <dict> _fused_sigindex
|
| 654 |
+
*sig_series, last_type = sig.strip('()').split('|')
|
| 655 |
+
for sig_type in sig_series:
|
| 656 |
+
if sig_type not in sigindex_node:
|
| 657 |
+
sigindex_node[sig_type] = sigindex_node = {}
|
| 658 |
+
else:
|
| 659 |
+
sigindex_node = <dict> sigindex_node[sig_type]
|
| 660 |
+
sigindex_node[last_type] = sig
|
| 661 |
+
"""
|
| 662 |
+
)
|
| 663 |
+
|
| 664 |
+
def make_fused_cpdef(self, orig_py_func, env, is_def):
|
| 665 |
+
"""
|
| 666 |
+
This creates the function that is indexable from Python and does
|
| 667 |
+
runtime dispatch based on the argument types. The function gets the
|
| 668 |
+
arg tuple and kwargs dict (or None) and the defaults tuple
|
| 669 |
+
as arguments from the Binding Fused Function's tp_call.
|
| 670 |
+
"""
|
| 671 |
+
from . import TreeFragment, Code, UtilityCode
|
| 672 |
+
|
| 673 |
+
fused_types = self._get_fused_base_types([
|
| 674 |
+
arg.type for arg in self.node.args if arg.type.is_fused])
|
| 675 |
+
|
| 676 |
+
context = {
|
| 677 |
+
'memviewslice_cname': MemoryView.memviewslice_cname,
|
| 678 |
+
'func_args': self.node.args,
|
| 679 |
+
'n_fused': len(fused_types),
|
| 680 |
+
'min_positional_args':
|
| 681 |
+
self.node.num_required_args - self.node.num_required_kw_args
|
| 682 |
+
if is_def else
|
| 683 |
+
sum(1 for arg in self.node.args if arg.default is None),
|
| 684 |
+
'name': orig_py_func.entry.name,
|
| 685 |
+
}
|
| 686 |
+
|
| 687 |
+
pyx_code = Code.PyxCodeWriter(context=context)
|
| 688 |
+
decl_code = Code.PyxCodeWriter(context=context)
|
| 689 |
+
decl_code.put_chunk(
|
| 690 |
+
u"""
|
| 691 |
+
cdef extern from *:
|
| 692 |
+
void __pyx_PyErr_Clear "PyErr_Clear" ()
|
| 693 |
+
type __Pyx_ImportNumPyArrayTypeIfAvailable()
|
| 694 |
+
int __Pyx_Is_Little_Endian()
|
| 695 |
+
""")
|
| 696 |
+
decl_code.indent()
|
| 697 |
+
|
| 698 |
+
pyx_code.put_chunk(
|
| 699 |
+
u"""
|
| 700 |
+
def __pyx_fused_cpdef(signatures, args, kwargs, defaults, _fused_sigindex={}):
|
| 701 |
+
# FIXME: use a typed signature - currently fails badly because
|
| 702 |
+
# default arguments inherit the types we specify here!
|
| 703 |
+
|
| 704 |
+
cdef list search_list
|
| 705 |
+
cdef dict sigindex_node
|
| 706 |
+
|
| 707 |
+
dest_sig = [None] * {{n_fused}}
|
| 708 |
+
|
| 709 |
+
if kwargs is not None and not kwargs:
|
| 710 |
+
kwargs = None
|
| 711 |
+
|
| 712 |
+
cdef Py_ssize_t i
|
| 713 |
+
|
| 714 |
+
# instance check body
|
| 715 |
+
""")
|
| 716 |
+
|
| 717 |
+
pyx_code.indent() # indent following code to function body
|
| 718 |
+
pyx_code.named_insertion_point("imports")
|
| 719 |
+
pyx_code.named_insertion_point("func_defs")
|
| 720 |
+
pyx_code.named_insertion_point("local_variable_declarations")
|
| 721 |
+
|
| 722 |
+
fused_index = 0
|
| 723 |
+
default_idx = 0
|
| 724 |
+
all_buffer_types = OrderedSet()
|
| 725 |
+
seen_fused_types = set()
|
| 726 |
+
for i, arg in enumerate(self.node.args):
|
| 727 |
+
if arg.type.is_fused:
|
| 728 |
+
arg_fused_types = arg.type.get_fused_types()
|
| 729 |
+
if len(arg_fused_types) > 1:
|
| 730 |
+
raise NotImplementedError("Determination of more than one fused base "
|
| 731 |
+
"type per argument is not implemented.")
|
| 732 |
+
fused_type = arg_fused_types[0]
|
| 733 |
+
|
| 734 |
+
if arg.type.is_fused and fused_type not in seen_fused_types:
|
| 735 |
+
seen_fused_types.add(fused_type)
|
| 736 |
+
|
| 737 |
+
context.update(
|
| 738 |
+
arg_tuple_idx=i,
|
| 739 |
+
arg=arg,
|
| 740 |
+
dest_sig_idx=fused_index,
|
| 741 |
+
default_idx=default_idx,
|
| 742 |
+
)
|
| 743 |
+
|
| 744 |
+
normal_types, buffer_types, pythran_types, has_object_fallback = self._split_fused_types(arg)
|
| 745 |
+
self._unpack_argument(pyx_code)
|
| 746 |
+
|
| 747 |
+
# 'unrolled' loop, first match breaks out of it
|
| 748 |
+
with pyx_code.indenter("while 1:"):
|
| 749 |
+
if normal_types:
|
| 750 |
+
self._fused_instance_checks(normal_types, pyx_code, env)
|
| 751 |
+
if buffer_types or pythran_types:
|
| 752 |
+
env.use_utility_code(Code.UtilityCode.load_cached("IsLittleEndian", "ModuleSetupCode.c"))
|
| 753 |
+
self._buffer_checks(
|
| 754 |
+
buffer_types, pythran_types, pyx_code, decl_code,
|
| 755 |
+
arg.accept_none, env)
|
| 756 |
+
if has_object_fallback:
|
| 757 |
+
pyx_code.context.update(specialized_type_name='object')
|
| 758 |
+
pyx_code.putln(self.match)
|
| 759 |
+
else:
|
| 760 |
+
pyx_code.putln(self.no_match)
|
| 761 |
+
pyx_code.putln("break")
|
| 762 |
+
|
| 763 |
+
fused_index += 1
|
| 764 |
+
all_buffer_types.update(buffer_types)
|
| 765 |
+
all_buffer_types.update(ty.org_buffer for ty in pythran_types)
|
| 766 |
+
|
| 767 |
+
if arg.default:
|
| 768 |
+
default_idx += 1
|
| 769 |
+
|
| 770 |
+
if all_buffer_types:
|
| 771 |
+
self._buffer_declarations(pyx_code, decl_code, all_buffer_types, pythran_types)
|
| 772 |
+
env.use_utility_code(Code.UtilityCode.load_cached("Import", "ImportExport.c"))
|
| 773 |
+
env.use_utility_code(Code.UtilityCode.load_cached("ImportNumPyArray", "ImportExport.c"))
|
| 774 |
+
|
| 775 |
+
self._fused_signature_index(pyx_code)
|
| 776 |
+
|
| 777 |
+
pyx_code.put_chunk(
|
| 778 |
+
u"""
|
| 779 |
+
sigindex_matches = []
|
| 780 |
+
sigindex_candidates = [_fused_sigindex]
|
| 781 |
+
|
| 782 |
+
for dst_type in dest_sig:
|
| 783 |
+
found_matches = []
|
| 784 |
+
found_candidates = []
|
| 785 |
+
# Make two separate lists: One for signature sub-trees
|
| 786 |
+
# with at least one definite match, and another for
|
| 787 |
+
# signature sub-trees with only ambiguous matches
|
| 788 |
+
# (where `dest_sig[i] is None`).
|
| 789 |
+
if dst_type is None:
|
| 790 |
+
for sn in sigindex_matches:
|
| 791 |
+
found_matches.extend((<dict> sn).values())
|
| 792 |
+
for sn in sigindex_candidates:
|
| 793 |
+
found_candidates.extend((<dict> sn).values())
|
| 794 |
+
else:
|
| 795 |
+
for search_list in (sigindex_matches, sigindex_candidates):
|
| 796 |
+
for sn in search_list:
|
| 797 |
+
type_match = (<dict> sn).get(dst_type)
|
| 798 |
+
if type_match is not None:
|
| 799 |
+
found_matches.append(type_match)
|
| 800 |
+
sigindex_matches = found_matches
|
| 801 |
+
sigindex_candidates = found_candidates
|
| 802 |
+
if not (found_matches or found_candidates):
|
| 803 |
+
break
|
| 804 |
+
|
| 805 |
+
candidates = sigindex_matches
|
| 806 |
+
|
| 807 |
+
if not candidates:
|
| 808 |
+
raise TypeError("No matching signature found")
|
| 809 |
+
elif len(candidates) > 1:
|
| 810 |
+
raise TypeError("Function call with ambiguous argument types")
|
| 811 |
+
else:
|
| 812 |
+
return (<dict>signatures)[candidates[0]]
|
| 813 |
+
""")
|
| 814 |
+
|
| 815 |
+
fragment_code = pyx_code.getvalue()
|
| 816 |
+
# print decl_code.getvalue()
|
| 817 |
+
# print fragment_code
|
| 818 |
+
from .Optimize import ConstantFolding
|
| 819 |
+
fragment = TreeFragment.TreeFragment(
|
| 820 |
+
fragment_code, level='module', pipeline=[ConstantFolding()])
|
| 821 |
+
ast = TreeFragment.SetPosTransform(self.node.pos)(fragment.root)
|
| 822 |
+
UtilityCode.declare_declarations_in_scope(
|
| 823 |
+
decl_code.getvalue(), env.global_scope())
|
| 824 |
+
ast.scope = env
|
| 825 |
+
# FIXME: for static methods of cdef classes, we build the wrong signature here: first arg becomes 'self'
|
| 826 |
+
ast.analyse_declarations(env)
|
| 827 |
+
py_func = ast.stats[-1] # the DefNode
|
| 828 |
+
self.fragment_scope = ast.scope
|
| 829 |
+
|
| 830 |
+
if isinstance(self.node, DefNode):
|
| 831 |
+
py_func.specialized_cpdefs = self.nodes[:]
|
| 832 |
+
else:
|
| 833 |
+
py_func.specialized_cpdefs = [n.py_func for n in self.nodes]
|
| 834 |
+
|
| 835 |
+
return py_func
|
| 836 |
+
|
| 837 |
+
def update_fused_defnode_entry(self, env):
|
| 838 |
+
copy_attributes = (
|
| 839 |
+
'name', 'pos', 'cname', 'func_cname', 'pyfunc_cname',
|
| 840 |
+
'pymethdef_cname', 'doc', 'doc_cname', 'is_member',
|
| 841 |
+
'scope'
|
| 842 |
+
)
|
| 843 |
+
|
| 844 |
+
entry = self.py_func.entry
|
| 845 |
+
|
| 846 |
+
for attr in copy_attributes:
|
| 847 |
+
setattr(entry, attr,
|
| 848 |
+
getattr(self.orig_py_func.entry, attr))
|
| 849 |
+
|
| 850 |
+
self.py_func.name = self.orig_py_func.name
|
| 851 |
+
self.py_func.doc = self.orig_py_func.doc
|
| 852 |
+
|
| 853 |
+
env.entries.pop('__pyx_fused_cpdef', None)
|
| 854 |
+
if isinstance(self.node, DefNode):
|
| 855 |
+
env.entries[entry.name] = entry
|
| 856 |
+
else:
|
| 857 |
+
env.entries[entry.name].as_variable = entry
|
| 858 |
+
|
| 859 |
+
env.pyfunc_entries.append(entry)
|
| 860 |
+
|
| 861 |
+
self.py_func.entry.fused_cfunction = self
|
| 862 |
+
for node in self.nodes:
|
| 863 |
+
if isinstance(self.node, DefNode):
|
| 864 |
+
node.fused_py_func = self.py_func
|
| 865 |
+
else:
|
| 866 |
+
node.py_func.fused_py_func = self.py_func
|
| 867 |
+
node.entry.as_variable = entry
|
| 868 |
+
|
| 869 |
+
self.synthesize_defnodes()
|
| 870 |
+
self.stats.append(self.__signatures__)
|
| 871 |
+
|
| 872 |
+
def analyse_expressions(self, env):
|
| 873 |
+
"""
|
| 874 |
+
Analyse the expressions. Take care to only evaluate default arguments
|
| 875 |
+
once and clone the result for all specializations
|
| 876 |
+
"""
|
| 877 |
+
for fused_compound_type in self.fused_compound_types:
|
| 878 |
+
for fused_type in fused_compound_type.get_fused_types():
|
| 879 |
+
for specialization_type in fused_type.types:
|
| 880 |
+
if specialization_type.is_complex:
|
| 881 |
+
specialization_type.create_declaration_utility_code(env)
|
| 882 |
+
|
| 883 |
+
if self.py_func:
|
| 884 |
+
self.__signatures__ = self.__signatures__.analyse_expressions(env)
|
| 885 |
+
self.py_func = self.py_func.analyse_expressions(env)
|
| 886 |
+
self.resulting_fused_function = self.resulting_fused_function.analyse_expressions(env)
|
| 887 |
+
self.fused_func_assignment = self.fused_func_assignment.analyse_expressions(env)
|
| 888 |
+
|
| 889 |
+
self.defaults = defaults = []
|
| 890 |
+
|
| 891 |
+
for arg in self.node.args:
|
| 892 |
+
if arg.default:
|
| 893 |
+
arg.default = arg.default.analyse_expressions(env)
|
| 894 |
+
if arg.default.is_literal:
|
| 895 |
+
defaults.append(copy.copy(arg.default))
|
| 896 |
+
else:
|
| 897 |
+
# coerce the argument to temp since CloneNode really requires a temp
|
| 898 |
+
defaults.append(ProxyNode(arg.default.coerce_to_temp(env)))
|
| 899 |
+
else:
|
| 900 |
+
defaults.append(None)
|
| 901 |
+
|
| 902 |
+
for i, stat in enumerate(self.stats):
|
| 903 |
+
stat = self.stats[i] = stat.analyse_expressions(env)
|
| 904 |
+
if isinstance(stat, FuncDefNode) and stat is not self.py_func:
|
| 905 |
+
# the dispatcher specifically doesn't want its defaults overriding
|
| 906 |
+
for arg, default in zip(stat.args, defaults):
|
| 907 |
+
if default is not None:
|
| 908 |
+
if default.is_literal:
|
| 909 |
+
arg.default = default.coerce_to(arg.type, env)
|
| 910 |
+
else:
|
| 911 |
+
arg.default = CloneNode(default).analyse_expressions(env).coerce_to(arg.type, env)
|
| 912 |
+
|
| 913 |
+
if self.py_func:
|
| 914 |
+
args = [CloneNode(default) for default in defaults if default]
|
| 915 |
+
self.defaults_tuple = TupleNode(self.pos, args=args)
|
| 916 |
+
self.defaults_tuple = self.defaults_tuple.analyse_types(env, skip_children=True).coerce_to_pyobject(env)
|
| 917 |
+
self.defaults_tuple = ProxyNode(self.defaults_tuple)
|
| 918 |
+
self.code_object = ProxyNode(self.specialized_pycfuncs[0].code_object)
|
| 919 |
+
|
| 920 |
+
fused_func = self.resulting_fused_function.arg
|
| 921 |
+
fused_func.defaults_tuple = CloneNode(self.defaults_tuple)
|
| 922 |
+
fused_func.code_object = CloneNode(self.code_object)
|
| 923 |
+
|
| 924 |
+
for i, pycfunc in enumerate(self.specialized_pycfuncs):
|
| 925 |
+
pycfunc.code_object = CloneNode(self.code_object)
|
| 926 |
+
pycfunc = self.specialized_pycfuncs[i] = pycfunc.analyse_types(env)
|
| 927 |
+
pycfunc.defaults_tuple = CloneNode(self.defaults_tuple)
|
| 928 |
+
return self
|
| 929 |
+
|
| 930 |
+
def synthesize_defnodes(self):
|
| 931 |
+
"""
|
| 932 |
+
Create the __signatures__ dict of PyCFunctionNode specializations.
|
| 933 |
+
"""
|
| 934 |
+
if isinstance(self.nodes[0], CFuncDefNode):
|
| 935 |
+
nodes = [node.py_func for node in self.nodes]
|
| 936 |
+
else:
|
| 937 |
+
nodes = self.nodes
|
| 938 |
+
|
| 939 |
+
# For the moment, fused functions do not support METH_FASTCALL
|
| 940 |
+
for node in nodes:
|
| 941 |
+
node.entry.signature.use_fastcall = False
|
| 942 |
+
|
| 943 |
+
signatures = [StringEncoding.EncodedString(node.specialized_signature_string)
|
| 944 |
+
for node in nodes]
|
| 945 |
+
keys = [ExprNodes.StringNode(node.pos, value=sig)
|
| 946 |
+
for node, sig in zip(nodes, signatures)]
|
| 947 |
+
values = [ExprNodes.PyCFunctionNode.from_defnode(node, binding=True)
|
| 948 |
+
for node in nodes]
|
| 949 |
+
|
| 950 |
+
self.__signatures__ = ExprNodes.DictNode.from_pairs(self.pos, zip(keys, values))
|
| 951 |
+
|
| 952 |
+
self.specialized_pycfuncs = values
|
| 953 |
+
for pycfuncnode in values:
|
| 954 |
+
pycfuncnode.is_specialization = True
|
| 955 |
+
|
| 956 |
+
def generate_function_definitions(self, env, code):
|
| 957 |
+
if self.py_func:
|
| 958 |
+
self.py_func.pymethdef_required = True
|
| 959 |
+
self.fused_func_assignment.generate_function_definitions(env, code)
|
| 960 |
+
|
| 961 |
+
from . import Options
|
| 962 |
+
for stat in self.stats:
|
| 963 |
+
if isinstance(stat, FuncDefNode) and (
|
| 964 |
+
stat.entry.used or
|
| 965 |
+
(Options.cimport_from_pyx and not stat.entry.visibility == 'extern')):
|
| 966 |
+
code.mark_pos(stat.pos)
|
| 967 |
+
stat.generate_function_definitions(env, code)
|
| 968 |
+
|
| 969 |
+
def generate_execution_code(self, code):
|
| 970 |
+
# Note: all def function specialization are wrapped in PyCFunction
|
| 971 |
+
# nodes in the self.__signatures__ dictnode.
|
| 972 |
+
for default in self.defaults:
|
| 973 |
+
if default is not None:
|
| 974 |
+
default.generate_evaluation_code(code)
|
| 975 |
+
|
| 976 |
+
if self.py_func:
|
| 977 |
+
self.defaults_tuple.generate_evaluation_code(code)
|
| 978 |
+
self.code_object.generate_evaluation_code(code)
|
| 979 |
+
|
| 980 |
+
for stat in self.stats:
|
| 981 |
+
code.mark_pos(stat.pos)
|
| 982 |
+
if isinstance(stat, ExprNodes.ExprNode):
|
| 983 |
+
stat.generate_evaluation_code(code)
|
| 984 |
+
else:
|
| 985 |
+
stat.generate_execution_code(code)
|
| 986 |
+
|
| 987 |
+
if self.__signatures__:
|
| 988 |
+
self.resulting_fused_function.generate_evaluation_code(code)
|
| 989 |
+
|
| 990 |
+
code.putln(
|
| 991 |
+
"((__pyx_FusedFunctionObject *) %s)->__signatures__ = %s;" %
|
| 992 |
+
(self.resulting_fused_function.result(),
|
| 993 |
+
self.__signatures__.result()))
|
| 994 |
+
self.__signatures__.generate_giveref(code)
|
| 995 |
+
self.__signatures__.generate_post_assignment_code(code)
|
| 996 |
+
self.__signatures__.free_temps(code)
|
| 997 |
+
|
| 998 |
+
self.fused_func_assignment.generate_execution_code(code)
|
| 999 |
+
|
| 1000 |
+
# Dispose of results
|
| 1001 |
+
self.resulting_fused_function.generate_disposal_code(code)
|
| 1002 |
+
self.resulting_fused_function.free_temps(code)
|
| 1003 |
+
self.defaults_tuple.generate_disposal_code(code)
|
| 1004 |
+
self.defaults_tuple.free_temps(code)
|
| 1005 |
+
self.code_object.generate_disposal_code(code)
|
| 1006 |
+
self.code_object.free_temps(code)
|
| 1007 |
+
|
| 1008 |
+
for default in self.defaults:
|
| 1009 |
+
if default is not None:
|
| 1010 |
+
default.generate_disposal_code(code)
|
| 1011 |
+
default.free_temps(code)
|
| 1012 |
+
|
| 1013 |
+
def annotate(self, code):
|
| 1014 |
+
for stat in self.stats:
|
| 1015 |
+
stat.annotate(code)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/StringEncoding.py
ADDED
|
@@ -0,0 +1,392 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Cython -- encoding related tools
|
| 3 |
+
#
|
| 4 |
+
|
| 5 |
+
from __future__ import absolute_import
|
| 6 |
+
|
| 7 |
+
import re
|
| 8 |
+
import sys
|
| 9 |
+
|
| 10 |
+
if sys.version_info[0] >= 3:
|
| 11 |
+
_unicode, _str, _bytes, _unichr = str, str, bytes, chr
|
| 12 |
+
IS_PYTHON3 = True
|
| 13 |
+
else:
|
| 14 |
+
_unicode, _str, _bytes, _unichr = unicode, str, str, unichr
|
| 15 |
+
IS_PYTHON3 = False
|
| 16 |
+
|
| 17 |
+
empty_bytes = _bytes()
|
| 18 |
+
empty_unicode = _unicode()
|
| 19 |
+
|
| 20 |
+
join_bytes = empty_bytes.join
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class UnicodeLiteralBuilder(object):
|
| 24 |
+
"""Assemble a unicode string.
|
| 25 |
+
"""
|
| 26 |
+
def __init__(self):
|
| 27 |
+
self.chars = []
|
| 28 |
+
|
| 29 |
+
def append(self, characters):
|
| 30 |
+
if isinstance(characters, _bytes):
|
| 31 |
+
# this came from a Py2 string literal in the parser code
|
| 32 |
+
characters = characters.decode("ASCII")
|
| 33 |
+
assert isinstance(characters, _unicode), str(type(characters))
|
| 34 |
+
self.chars.append(characters)
|
| 35 |
+
|
| 36 |
+
if sys.maxunicode == 65535:
|
| 37 |
+
def append_charval(self, char_number):
|
| 38 |
+
if char_number > 65535:
|
| 39 |
+
# wide Unicode character on narrow platform => replace
|
| 40 |
+
# by surrogate pair
|
| 41 |
+
char_number -= 0x10000
|
| 42 |
+
self.chars.append( _unichr((char_number // 1024) + 0xD800) )
|
| 43 |
+
self.chars.append( _unichr((char_number % 1024) + 0xDC00) )
|
| 44 |
+
else:
|
| 45 |
+
self.chars.append( _unichr(char_number) )
|
| 46 |
+
else:
|
| 47 |
+
def append_charval(self, char_number):
|
| 48 |
+
self.chars.append( _unichr(char_number) )
|
| 49 |
+
|
| 50 |
+
def append_uescape(self, char_number, escape_string):
|
| 51 |
+
self.append_charval(char_number)
|
| 52 |
+
|
| 53 |
+
def getstring(self):
|
| 54 |
+
return EncodedString(u''.join(self.chars))
|
| 55 |
+
|
| 56 |
+
def getstrings(self):
|
| 57 |
+
return (None, self.getstring())
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class BytesLiteralBuilder(object):
    """Assemble a byte string or char value.

    Characters are collected as byte strings (unicode input is first
    encoded with the builder's target encoding) and joined on request.
    """
    def __init__(self, target_encoding):
        self.chars = []
        self.target_encoding = target_encoding

    def append(self, characters):
        """Append a chunk, encoding unicode input to the target encoding."""
        data = characters.encode(self.target_encoding) if isinstance(characters, _unicode) else characters
        assert isinstance(data, _bytes), str(type(data))
        self.chars.append(data)

    def append_charval(self, char_number):
        """Append a single character given by ordinal (0..255)."""
        encoded = _unichr(char_number).encode('ISO-8859-1')
        self.chars.append(encoded)

    def append_uescape(self, char_number, escape_string):
        """For byte strings, a \\uXXXX escape keeps its literal escape text."""
        self.append(escape_string)

    def getstring(self):
        # this *must* return a byte string!
        return bytes_literal(join_bytes(self.chars), self.target_encoding)

    def getchar(self):
        # this *must* return a byte string!
        return self.getstring()

    def getstrings(self):
        """Return (bytes_value, unicode_value); no unicode form here."""
        return (self.getstring(), None)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class StrLiteralBuilder(object):
    """Assemble both a bytes and a unicode representation of a string.

    Every operation is forwarded to an internal BytesLiteralBuilder and
    an internal UnicodeLiteralBuilder in lockstep.
    """
    def __init__(self, target_encoding):
        self._bytes = BytesLiteralBuilder(target_encoding)
        self._unicode = UnicodeLiteralBuilder()

    def append(self, characters):
        """Append a chunk of characters to both representations."""
        self._unicode.append(characters)
        self._bytes.append(characters)

    def append_charval(self, char_number):
        """Append a single character by ordinal to both representations."""
        self._unicode.append_charval(char_number)
        self._bytes.append_charval(char_number)

    def append_uescape(self, char_number, escape_string):
        """Append a \\uXXXX escape: decoded for unicode, literal for bytes."""
        self._unicode.append_charval(char_number)
        self._bytes.append(escape_string)

    def getstrings(self):
        """Return the pair (bytes_value, unicode_value)."""
        return (self._bytes.getstring(), self._unicode.getstring())
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
class EncodedString(_unicode):
    # unicode string subclass to keep track of the original encoding.
    # 'encoding' is None for unicode strings and the source encoding
    # otherwise
    encoding = None

    def __deepcopy__(self, memo):
        # Immutable value object => safe to share across deep copies.
        return self

    def byteencode(self):
        # Re-encode using the original source encoding (only valid for
        # byte-string literals, i.e. encoding is set).
        assert self.encoding is not None
        return self.encode(self.encoding)

    def utf8encode(self):
        # Encode to UTF-8 (only valid for unicode literals, i.e. no
        # source encoding recorded).
        assert self.encoding is None
        return self.encode("UTF-8")

    @property
    def is_unicode(self):
        return self.encoding is None

    def contains_surrogates(self):
        return string_contains_surrogates(self)

    def as_utf8_string(self):
        return bytes_literal(self.utf8encode(), 'utf8')

    def as_c_string_literal(self):
        # first encodes the string then produces a c string literal
        if self.encoding is None:
            s = self.as_utf8_string()
        else:
            s = bytes_literal(self.byteencode(), self.encoding)
        return s.as_c_string_literal()

    if not hasattr(_unicode, "isascii"):
        def isascii(self):
            # not defined for Python3.7+ since the class already has it
            try:
                self.encode("ascii")
            except UnicodeEncodeError:
                return False
            else:
                return True
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def string_contains_surrogates(ustring):
    """
    Check if the unicode string contains surrogate code points
    on a CPython platform with wide (UCS-4) or narrow (UTF-16)
    Unicode, i.e. characters that would be spelled as two
    separate code units on a narrow platform.
    """
    # Code points beyond the BMP can only appear on wide builds; the
    # D800-DFFF range covers explicit surrogate code points.
    return any(
        code > 65535 or 0xD800 <= code <= 0xDFFF
        for code in map(ord, ustring)
    )
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def string_contains_lone_surrogates(ustring):
    """
    Check if the unicode string contains lone surrogate code points
    on a CPython platform with wide (UCS-4) or narrow (UTF-16)
    Unicode, i.e. characters that would be spelled as two
    separate code units on a narrow platform, but that do not form a pair.
    """
    # Tracks whether the previous code unit was a high (leading) surrogate
    # that is still waiting for its low (trailing) partner.
    last_was_start = False
    unicode_uses_surrogate_encoding = sys.maxunicode == 65535
    for c in map(ord, ustring):
        # surrogates tend to be rare
        if c < 0xD800 or c > 0xDFFF:
            # Non-surrogate: a pending high surrogate is now known to be lone.
            if last_was_start:
                return True
        elif not unicode_uses_surrogate_encoding:
            # on 32bit Unicode platforms, there is never a pair
            return True
        elif c <= 0xDBFF:
            # High surrogate: two in a row means the first was lone.
            if last_was_start:
                return True  # lone start
            last_was_start = True
        else:
            # Low surrogate: lone unless it directly follows a high one.
            if not last_was_start:
                return True  # lone end
            last_was_start = False
    # A trailing unmatched high surrogate is also lone.
    return last_was_start
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
class BytesLiteral(_bytes):
    # bytes subclass that is compatible with EncodedString
    # 'encoding' records the source encoding of the literal.
    encoding = None

    def __deepcopy__(self, memo):
        # Immutable value object => safe to share across deep copies.
        return self

    def byteencode(self):
        if IS_PYTHON3:
            return _bytes(self)
        else:
            # fake-recode the string to make it a plain bytes object
            return self.decode('ISO-8859-1').encode('ISO-8859-1')

    def utf8encode(self):
        # Byte literals have no unicode form to encode.
        assert False, "this is not a unicode string: %r" % self

    def __str__(self):
        """Fake-decode the byte string to unicode to support %
        formatting of unicode strings.
        """
        return self.decode('ISO-8859-1')

    is_unicode = False

    def as_c_string_literal(self):
        # Escape special bytes and split over-long literals into adjacent
        # C string literals, then wrap in double quotes.
        value = split_string_literal(escape_byte_string(self))
        return '"%s"' % value

    if not hasattr(_bytes, "isascii"):
        def isascii(self):
            # already defined for Python3.7+
            return True
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def bytes_literal(s, encoding):
    """Wrap a plain bytes object as a BytesLiteral tagged with its
    source encoding."""
    assert isinstance(s, bytes)
    s = BytesLiteral(s)
    s.encoding = encoding
    return s
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
def encoded_string(s, encoding):
    """Wrap a string as an EncodedString, recording its source encoding
    if one is given (None keeps the class default)."""
    assert isinstance(s, (_unicode, bytes))
    s = EncodedString(s)
    if encoding is not None:
        s.encoding = encoding
    return s
|
| 251 |
+
|
| 252 |
+
def encoded_string_or_bytes_literal(s, encoding):
    """Wrap *s* as a BytesLiteral if it is a bytes object, otherwise as
    an EncodedString, tagging it with *encoding* either way."""
    wrap = bytes_literal if isinstance(s, bytes) else encoded_string
    return wrap(s, encoding)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
# Lookup function mapping a simple backslash escape sequence to the
# character it denotes; returns None for unknown sequences (dict.get).
char_from_escape_sequence = {
    r'\a' : u'\a',
    r'\b' : u'\b',
    r'\f' : u'\f',
    r'\n' : u'\n',
    r'\r' : u'\r',
    r'\t' : u'\t',
    r'\v' : u'\v',
    }.get

# Byte sequences needing escaping in C string literals: backslash, the
# '??' pair (presumably to avoid accidental C trigraphs -- confirm),
# the double quote, and all C0 control characters.
_c_special = ('\\', '??', '"') + tuple(map(chr, range(32)))
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def _to_escape_sequence(s):
|
| 273 |
+
if s in '\n\r\t':
|
| 274 |
+
return repr(s)[1:-1]
|
| 275 |
+
elif s == '"':
|
| 276 |
+
return r'\"'
|
| 277 |
+
elif s == '\\':
|
| 278 |
+
return r'\\'
|
| 279 |
+
else:
|
| 280 |
+
# within a character sequence, oct passes much better than hex
|
| 281 |
+
return ''.join(['\\%03o' % ord(c) for c in s])
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
def _build_specials_replacer():
    """Build a function that rewrites each occurrence of a special byte
    sequence (from _c_special) in a byte string to its C escape form.
    """
    subexps = []
    replacements = {}
    for special in _c_special:
        # One bracketed class per character so multi-char specials (e.g.
        # '??') match as a whole sequence; backslashes need regex quoting.
        regexp = ''.join(['[%s]' % c.replace('\\', '\\\\') for c in special])
        subexps.append(regexp)
        replacements[special.encode('ASCII')] = _to_escape_sequence(special).encode('ASCII')
    # Compile one alternation over all specials; operates on bytes.
    sub = re.compile(('(%s)' % '|'.join(subexps)).encode('ASCII')).sub
    def replace_specials(m):
        # Match callback: look up the escaped form of the matched bytes.
        return replacements[m.group(1)]
    def replace(s):
        return sub(replace_specials, s)
    return replace

# Shared replacer used by escape_byte_string().
_replace_specials = _build_specials_replacer()
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
def escape_char(c):
    """Escape a single character for use in a C char literal."""
    if IS_PYTHON3:
        # Input is a single byte on Py3; fake-decode for uniform handling.
        c = c.decode('ISO-8859-1')
    if c in '\n\r\t\\':
        return repr(c)[1:-1]
    elif c == "'":
        return "\\'"
    n = ord(c)
    if n < 32 or n > 127:
        # hex works well for characters
        return "\\x%02X" % n
    else:
        return c
|
| 314 |
+
|
| 315 |
+
def escape_byte_string(s):
    """Escape a byte string so that it can be written into C code.
    Note that this returns a Unicode string instead which, when
    encoded as ISO-8859-1, will result in the correct byte sequence
    being written.
    """
    # First rewrite the C-special bytes (quotes, control chars, ...).
    s = _replace_specials(s)
    try:
        return s.decode("ASCII")  # trial decoding: plain ASCII => done
    except UnicodeDecodeError:
        pass
    # Non-ASCII bytes remain: escape them as octal sequences.
    if IS_PYTHON3:
        s_new = bytearray()
        append, extend = s_new.append, s_new.extend
        for b in s:
            if b >= 128:
                extend(('\\%3o' % b).encode('ASCII'))
            else:
                append(b)
        return s_new.decode('ISO-8859-1')
    else:
        l = []
        append = l.append
        for c in s:
            o = ord(c)
            if o >= 128:
                append('\\%3o' % o)
            else:
                append(c)
        return join_bytes(l).decode('ISO-8859-1')
|
| 345 |
+
|
| 346 |
+
def split_string_literal(s, limit=2000):
    """Split an over-long escaped string into adjacent C string literal
    chunks joined by '""' (the C compiler concatenates them again).

    Chunk boundaries are shifted so that no backslash escape sequence is
    cut in half.
    """
    # MSVC can't handle long string literals.
    if len(s) < limit:
        return s
    pieces = []
    pos = 0
    total = len(s)
    while pos < total:
        cut = pos + limit
        # Don't split inside an escape sequence (escapes are <= 4 chars).
        if total > cut - 4 and '\\' in s[cut - 4:cut]:
            cut -= 4 - s[cut - 4:cut].find('\\')  # just before the backslash
            while s[cut - 1] == '\\':
                cut -= 1
                if cut == pos:
                    # must have been a long line of backslashes
                    cut = pos + limit - (limit % 2) - 4
                    break
        pieces.append(s[pos:cut])
        pos = cut
    return '""'.join(pieces)
|
| 366 |
+
|
| 367 |
+
def encode_pyunicode_string(s):
    """Create Py_UNICODE[] representation of a given unicode string.

    Returns two comma-separated number strings: (utf16, utf32).  On each
    build, one of the two lists is derived from the other; if both end
    up identical, the utf16 part is returned empty.
    """
    # Work on ordinals; append a terminating NUL for the C array.
    s = list(map(ord, s)) + [0]

    if sys.maxunicode >= 0x10000:  # Wide build or Py3.3
        # Input ordinals already are UTF-32; derive UTF-16 code units by
        # splitting non-BMP code points into surrogate pairs.
        utf16, utf32 = [], s
        for code_point in s:
            if code_point >= 0x10000:  # outside of BMP
                high, low = divmod(code_point - 0x10000, 1024)
                utf16.append(high + 0xD800)
                utf16.append(low + 0xDC00)
            else:
                utf16.append(code_point)
    else:
        # Narrow build: input ordinals are UTF-16 code units; derive
        # UTF-32 by recombining adjacent surrogate pairs.
        utf16, utf32 = s, []
        for code_unit in s:
            if 0xDC00 <= code_unit <= 0xDFFF and utf32 and 0xD800 <= utf32[-1] <= 0xDBFF:
                high, low = utf32[-1], code_unit
                utf32[-1] = ((high & 0x3FF) << 10) + (low & 0x3FF) + 0x10000
            else:
                utf32.append(code_unit)

    if utf16 == utf32:
        # No non-BMP characters: the UTF-16 variant adds no information.
        utf16 = []
    return ",".join(map(_unicode, utf16)), ",".join(map(_unicode, utf32))
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/UtilityCode.py
ADDED
|
@@ -0,0 +1,266 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
from .TreeFragment import parse_from_strings, StringParseContext
|
| 4 |
+
from . import Symtab
|
| 5 |
+
from . import Naming
|
| 6 |
+
from . import Code
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class NonManglingModuleScope(Symtab.ModuleScope):
    """Module scope whose name mangling uses a fixed caller-supplied
    prefix for functions/classes/variables, giving utility code
    predictable C names."""

    def __init__(self, prefix, *args, **kw):
        self.prefix = prefix
        self.cython_scope = None
        # 'cpp' is popped before delegating so the base __init__ does not
        # see an unexpected keyword.
        self.cpp = kw.pop('cpp', False)
        Symtab.ModuleScope.__init__(self, *args, **kw)

    def add_imported_entry(self, name, entry, pos):
        # Anything imported into utility code counts as used.
        entry.used = True
        return super(NonManglingModuleScope, self).add_imported_entry(name, entry, pos)

    def mangle(self, prefix, name=None):
        if name:
            if prefix in (Naming.typeobj_prefix, Naming.func_prefix, Naming.var_prefix, Naming.pyfunc_prefix):
                # Functions, classes etc. gets a manually defined prefix easily
                # manually callable instead (the one passed to CythonUtilityCode)
                prefix = self.prefix
            return "%s%s" % (prefix, name)
        else:
            return Symtab.ModuleScope.mangle(self, prefix)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class CythonUtilityCodeContext(StringParseContext):
    """Parse context for utility code: resolves all lookups of the
    utility module itself to one shared NonManglingModuleScope."""
    # Lazily-created shared module scope.
    scope = None

    def find_module(self, module_name, from_module=None, pos=None, need_pxd=True,
                    absolute_fallback=True, relative_import=False):
        if from_module:
            raise AssertionError("Relative imports not supported in utility code.")
        if module_name != self.module_name:
            # Only pre-registered modules (the cython cimport) can be found.
            if module_name not in self.modules:
                raise AssertionError("Only the cython cimport is supported.")
            else:
                return self.modules[module_name]

        if self.scope is None:
            self.scope = NonManglingModuleScope(
                self.prefix, module_name, parent_module=None, context=self, cpp=self.cpp)

        return self.scope
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class CythonUtilityCode(Code.UtilityCodeBase):
    """
    Utility code written in the Cython language itself.

    The @cname decorator can set the cname for a function, method of cdef class.
    Functions decorated with @cname('c_func_name') get the given cname.

    For cdef classes the rules are as follows:
        obj struct      -> <cname>_obj
        obj type ptr    -> <cname>_type
        methods         -> <class_cname>_<method_cname>

    For methods the cname decorator is optional, but without the decorator the
    methods will not be prototyped. See Cython.Compiler.CythonScope and
    tests/run/cythonscope.pyx for examples.
    """

    is_cython_utility = True

    def __init__(self, impl, name="__pyxutil", prefix="", requires=None,
                 file=None, from_scope=None, context=None, compiler_directives=None,
                 outer_module_scope=None):
        # 1) We need to delay the parsing/processing, so that all modules can be
        #    imported without import loops
        # 2) The same utility code object can be used for multiple source files;
        #    while the generated node trees can be altered in the compilation of a
        #    single file.
        # Hence, delay any processing until later.
        context_types = {}
        if context is not None:
            from .PyrexTypes import BaseType
            for key, value in context.items():
                if isinstance(value, BaseType):
                    # Substitute the plain name into the template and keep
                    # the real type aside for later scope injection.
                    context[key] = key
                    context_types[key] = value
            impl = Code.sub_tempita(impl, context, file, name)
        self.impl = impl
        self.name = name
        self.file = file
        self.prefix = prefix
        self.requires = requires or []
        self.from_scope = from_scope
        self.outer_module_scope = outer_module_scope
        self.compiler_directives = compiler_directives
        self.context_types = context_types

    def __eq__(self, other):
        # Equality ignores per-compilation state; see _equality_params().
        if isinstance(other, CythonUtilityCode):
            return self._equality_params() == other._equality_params()
        else:
            return False

    def _equality_params(self):
        # Strip NonManglingModuleScope wrappers so equivalent utility code
        # compares equal across compilations.
        outer_scope = self.outer_module_scope
        while isinstance(outer_scope, NonManglingModuleScope):
            outer_scope = outer_scope.outer_scope
        return self.impl, outer_scope, self.compiler_directives

    def __hash__(self):
        # Hash on the implementation text only; consistent with __eq__
        # (equal objects share impl).
        return hash(self.impl)

    def get_tree(self, entries_only=False, cython_scope=None):
        """Parse the utility code and run it through (a prefix of) the
        compiler pipeline, returning the resulting module node tree."""
        from .AnalysedTreeTransforms import AutoTestDictTransform
        # The AutoTestDictTransform creates the statement "__test__ = {}",
        # which when copied into the main ModuleNode overwrites
        # any __test__ in user code; not desired
        excludes = [AutoTestDictTransform]

        from . import Pipeline, ParseTreeTransforms
        context = CythonUtilityCodeContext(
            self.name, compiler_directives=self.compiler_directives,
            cpp=cython_scope.is_cpp() if cython_scope else False)
        context.prefix = self.prefix
        context.cython_scope = cython_scope
        #context = StringParseContext(self.name)
        tree = parse_from_strings(
            self.name, self.impl, context=context, allow_struct_enum_decorator=True,
            in_utility_code=True)
        pipeline = Pipeline.create_pipeline(context, 'pyx', exclude_classes=excludes)

        if entries_only:
            # Truncate the pipeline after declaration analysis; code
            # generation is not needed to obtain the scope entries.
            p = []
            for t in pipeline:
                p.append(t)
                if isinstance(t, ParseTreeTransforms.AnalyseDeclarationsTransform):
                    break

            pipeline = p

        transform = ParseTreeTransforms.CnameDirectivesTransform(context)
        # InterpretCompilerDirectives already does a cdef declarator check
        #before = ParseTreeTransforms.DecoratorTransform
        before = ParseTreeTransforms.InterpretCompilerDirectives
        pipeline = Pipeline.insert_into_pipeline(pipeline, transform,
                                                 before=before)

        def merge_scope(scope):
            # Build a pipeline step that merges 'scope' into the module scope.
            def merge_scope_transform(module_node):
                module_node.scope.merge_in(scope)
                return module_node
            return merge_scope_transform

        if self.from_scope:
            pipeline = Pipeline.insert_into_pipeline(
                pipeline, merge_scope(self.from_scope),
                before=ParseTreeTransforms.AnalyseDeclarationsTransform)

        for dep in self.requires:
            # Make entries of already-processed Cython dependencies visible.
            if isinstance(dep, CythonUtilityCode) and hasattr(dep, 'tree') and not cython_scope:
                pipeline = Pipeline.insert_into_pipeline(
                    pipeline, merge_scope(dep.tree.scope),
                    before=ParseTreeTransforms.AnalyseDeclarationsTransform)

        if self.outer_module_scope:
            # inject outer module between utility code module and builtin module
            def scope_transform(module_node):
                module_node.scope.outer_scope = self.outer_module_scope
                return module_node

            pipeline = Pipeline.insert_into_pipeline(
                pipeline, scope_transform,
                before=ParseTreeTransforms.AnalyseDeclarationsTransform)

        if self.context_types:
            # inject types into module scope
            def scope_transform(module_node):
                dummy_entry = object()
                for name, type in self.context_types.items():
                    # Restore the old type entry after declaring the type.
                    # We need to access types in the scope, but this shouldn't alter the entry
                    # that is visible from everywhere else
                    old_type_entry = getattr(type, "entry", dummy_entry)
                    entry = module_node.scope.declare_type(name, type, None, visibility='extern')
                    if old_type_entry is not dummy_entry:
                        type.entry = old_type_entry
                    entry.in_cinclude = True
                return module_node

            pipeline = Pipeline.insert_into_pipeline(
                pipeline, scope_transform,
                before=ParseTreeTransforms.AnalyseDeclarationsTransform)

        (err, tree) = Pipeline.run_pipeline(pipeline, tree, printtree=False)
        assert not err, err
        # Cache the processed tree for dependent utility code (see above).
        self.tree = tree
        return tree

    def put_code(self, output):
        # Cython utility code is compiled into the module, not emitted as
        # raw C text; nothing to write here.
        pass

    @classmethod
    def load_as_string(cls, util_code_name, from_file=None, **kwargs):
        """
        Load a utility code as a string. Returns (proto, implementation)
        """
        util = cls.load(util_code_name, from_file, **kwargs)
        return util.proto, util.impl  # keep line numbers => no lstrip()

    def declare_in_scope(self, dest_scope, used=False, cython_scope=None,
                         allowlist=None):
        """
        Declare all entries from the utility code in dest_scope. Code will only
        be included for used entries. If module_name is given, declare the
        type entries with that name.
        """
        tree = self.get_tree(entries_only=True, cython_scope=cython_scope)

        entries = tree.scope.entries
        # Drop module dunders so they don't leak into the target scope.
        entries.pop('__name__')
        entries.pop('__file__')
        entries.pop('__builtins__')
        entries.pop('__doc__')

        for entry in entries.values():
            entry.utility_code_definition = self
            entry.used = used

        original_scope = tree.scope
        dest_scope.merge_in(original_scope, merge_unused=True, allowlist=allowlist)
        tree.scope = dest_scope

        # Recursively declare Cython-level dependencies as well.
        for dep in self.requires:
            if dep.is_cython_utility:
                dep.declare_in_scope(dest_scope, cython_scope=cython_scope)

        return original_scope

    @staticmethod
    def filter_inherited_directives(current_directives):
        """
        Cython utility code should usually only pick up a few directives from the
        environment (those that intentionally control its function) and ignore most
        other compiler directives. This function provides a sensible default list
        of directives to copy.
        """
        from .Options import _directive_defaults
        utility_code_directives = dict(_directive_defaults)
        inherited_directive_names = (
            'binding', 'always_allow_keywords', 'allow_none_for_extension_args',
            'auto_pickle', 'ccomplex',
            'c_string_type', 'c_string_encoding',
            'optimize.inline_defnode_calls', 'optimize.unpack_method_calls',
            'optimize.unpack_method_calls_in_pyinit', 'optimize.use_switch')
        for name in inherited_directive_names:
            if name in current_directives:
                utility_code_directives[name] = current_directives[name]
        return utility_code_directives
|
| 237 |
+
|
| 238 |
+
@staticmethod
|
| 239 |
+
def filter_inherited_directives(current_directives):
|
| 240 |
+
"""
|
| 241 |
+
Cython utility code should usually only pick up a few directives from the
|
| 242 |
+
environment (those that intentionally control its function) and ignore most
|
| 243 |
+
other compiler directives. This function provides a sensible default list
|
| 244 |
+
of directives to copy.
|
| 245 |
+
"""
|
| 246 |
+
from .Options import _directive_defaults
|
| 247 |
+
utility_code_directives = dict(_directive_defaults)
|
| 248 |
+
inherited_directive_names = (
|
| 249 |
+
'binding', 'always_allow_keywords', 'allow_none_for_extension_args',
|
| 250 |
+
'auto_pickle', 'ccomplex',
|
| 251 |
+
'c_string_type', 'c_string_encoding',
|
| 252 |
+
'optimize.inline_defnode_calls', 'optimize.unpack_method_calls',
|
| 253 |
+
'optimize.unpack_method_calls_in_pyinit', 'optimize.use_switch')
|
| 254 |
+
for name in inherited_directive_names:
|
| 255 |
+
if name in current_directives:
|
| 256 |
+
utility_code_directives[name] = current_directives[name]
|
| 257 |
+
return utility_code_directives
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def declare_declarations_in_scope(declaration_string, env, private_type=True,
                                  *args, **kwargs):
    """
    Declare some declarations given as Cython code in declaration_string
    in scope env.
    """
    # NOTE(review): 'private_type' is accepted but not forwarded here --
    # confirm whether that is intentional.
    CythonUtilityCode(declaration_string, *args, **kwargs).declare_in_scope(env)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/Version.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# for backwards compatibility

from __future__ import absolute_import

from .. import __version__ as version

# For 'generated by' header line in C files.

# Normalized string form of the package version.
watermark = str(version)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Compiler/__pycache__/AutoDocTransforms.cpython-311.pyc
ADDED
|
Binary file (14.7 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/DFA.py
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# cython: auto_cpdef=True
|
| 2 |
+
"""
|
| 3 |
+
Python Lexical Analyser
|
| 4 |
+
|
| 5 |
+
Converting NFA to DFA
|
| 6 |
+
"""
|
| 7 |
+
from __future__ import absolute_import
|
| 8 |
+
|
| 9 |
+
from . import Machines
|
| 10 |
+
from .Machines import LOWEST_PRIORITY
|
| 11 |
+
from .Transitions import TransitionMap
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def nfa_to_dfa(old_machine, debug=None):
    """
    Given a nondeterministic Machine, return a new equivalent
    Machine which is deterministic.
    """
    # We build a new machine whose states correspond to sets of states
    # in the old machine. Initially we add a new state corresponding to
    # the epsilon-closure of each initial old state. Then we give transitions
    # to each new state which are the union of all transitions out of any
    # of the corresponding old states. The new state reached on a given
    # character is the one corresponding to the set of states reachable
    # on that character from any of the old states. As new combinations of
    # old states are created, new states are added as needed until closure
    # is reached.
    new_machine = Machines.FastMachine()
    state_map = StateMap(new_machine)

    # Seed the process using the initial states of the old machine.
    # Make the corresponding new states into initial states of the new
    # machine with the same names.
    for (key, old_state) in old_machine.initial_states.items():
        new_state = state_map.old_to_new(epsilon_closure(old_state))
        new_machine.make_initial_state(key, new_state)

    # Tricky bit here: we add things to the end of this list while we're
    # iterating over it. The iteration stops when closure is achieved.
    for new_state in new_machine.states:
        transitions = TransitionMap()
        for old_state in state_map.new_to_old(new_state):
            for event, old_target_states in old_state.transitions.items():
                # Skip epsilon moves (falsy events) and empty target sets.
                if event and old_target_states:
                    transitions.add_set(event, set_epsilon_closure(old_target_states))
        for event, old_states in transitions.items():
            new_machine.add_transitions(new_state, event, state_map.old_to_new(old_states))

    if debug:
        debug.write("\n===== State Mapping =====\n")
        state_map.dump(debug)
    return new_machine
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def set_epsilon_closure(state_set):
    """
    Given a set of states, return the union of the epsilon
    closures of its member states.
    """
    # States are represented as dict keys mapped to 1.
    return {
        closed_state: 1
        for member in state_set
        for closed_state in epsilon_closure(member)
    }
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def epsilon_closure(state):
    """
    Return the set of states reachable from the given state
    by epsilon moves.
    """
    # Cache the result
    result = state.epsilon_closure
    if result is None:
        result = {}
        # Install the (still empty) dict before recursing so that cycles
        # of epsilon moves terminate.
        state.epsilon_closure = result
        add_to_epsilon_closure(result, state)
    return result
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def add_to_epsilon_closure(state_set, state):
    """
    Recursively add to |state_set| states reachable from the given state
    by epsilon moves.
    """
    # The dict doubles as the visited-set: marking before recursing
    # prevents infinite recursion on epsilon cycles.
    if not state_set.get(state, 0):
        state_set[state] = 1
        state_set_2 = state.transitions.get_epsilon()
        if state_set_2:
            for state2 in state_set_2:
                add_to_epsilon_closure(state_set, state2)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class StateMap(object):
    """
    Helper class used by nfa_to_dfa() to map back and forth between
    sets of states from the old machine and states of the new machine.
    """

    def __init__(self, new_machine):
        self.new_machine = new_machine  # Machine
        self.old_to_new_dict = {}       # {(old_state,...) : new_state}
        self.new_to_old_dict = {}       # {id(new_state) : old_state_set}

    def old_to_new(self, old_state_set):
        """
        Return the state of the new machine corresponding to the
        set of old machine states represented by |state_set|. A new
        state will be created if necessary. If any of the old states
        are accepting states, the new state will be an accepting state
        with the highest priority action from the old states.
        """
        key = self.make_key(old_state_set)
        new_state = self.old_to_new_dict.get(key, None)
        if not new_state:
            action = self.highest_priority_action(old_state_set)
            new_state = self.new_machine.new_state(action)
            self.old_to_new_dict[key] = new_state
            self.new_to_old_dict[id(new_state)] = old_state_set
        return new_state

    def highest_priority_action(self, state_set):
        # Pick the action with the strictly highest priority; ties keep
        # the first state encountered.
        best_action = None
        best_priority = LOWEST_PRIORITY
        for state in state_set:
            priority = state.action_priority
            if priority > best_priority:
                best_action = state.action
                best_priority = priority
        return best_action

    def new_to_old(self, new_state):
        """Given a new state, return a set of corresponding old states."""
        return self.new_to_old_dict[id(new_state)]

    def make_key(self, state_set):
        """
        Convert a set of states into a uniquified
        sorted tuple suitable for use as a dictionary key.
        """
        return tuple(sorted(state_set))

    def dump(self, file):
        """Write a human-readable new-state -> old-state-set mapping."""
        from .Transitions import state_set_str

        for new_state in self.new_machine.states:
            old_state_set = self.new_to_old_dict[id(new_state)]
            file.write("   State %s <-- %s\n" % (
                new_state['number'], state_set_str(old_state_set)))
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__init__.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Python Lexical Analyser
|
| 3 |
+
|
| 4 |
+
The Plex module provides lexical analysers with similar capabilities
|
| 5 |
+
to GNU Flex. The following classes and functions are exported;
|
| 6 |
+
see the attached docstrings for more information.
|
| 7 |
+
|
| 8 |
+
Scanner For scanning a character stream under the
|
| 9 |
+
direction of a Lexicon.
|
| 10 |
+
|
| 11 |
+
Lexicon For constructing a lexical definition
|
| 12 |
+
to be used by a Scanner.
|
| 13 |
+
|
| 14 |
+
Str, Any, AnyBut, AnyChar, Seq, Alt, Opt, Rep, Rep1,
|
| 15 |
+
Bol, Eol, Eof, Empty
|
| 16 |
+
|
| 17 |
+
Regular expression constructors, for building pattern
|
| 18 |
+
definitions for a Lexicon.
|
| 19 |
+
|
| 20 |
+
State For defining scanner states when creating a
|
| 21 |
+
Lexicon.
|
| 22 |
+
|
| 23 |
+
TEXT, IGNORE, Begin
|
| 24 |
+
|
| 25 |
+
Actions for associating with patterns when
|
| 26 |
+
creating a Lexicon.
|
| 27 |
+
"""
|
| 28 |
+
# flake8: noqa:F401
|
| 29 |
+
from __future__ import absolute_import
|
| 30 |
+
|
| 31 |
+
from .Actions import TEXT, IGNORE, Begin, Method
|
| 32 |
+
from .Lexicons import Lexicon, State
|
| 33 |
+
from .Regexps import RE, Seq, Alt, Rep1, Empty, Str, Any, AnyBut, AnyChar, Range
|
| 34 |
+
from .Regexps import Opt, Rep, Bol, Eol, Eof, Case, NoCase
|
| 35 |
+
from .Scanners import Scanner
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/Actions.cpython-311.pyc
ADDED
|
Binary file (6.3 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/Lexicons.cpython-311.pyc
ADDED
|
Binary file (8.03 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/Regexps.cpython-311.pyc
ADDED
|
Binary file (23.6 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/Transitions.cpython-311.pyc
ADDED
|
Binary file (9.92 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Plex/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (1.81 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__init__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# The original Tempita implements all of its templating code here.
|
| 2 |
+
# Moved it to _tempita.py to make the compilation portable.
|
| 3 |
+
|
| 4 |
+
from ._tempita import *
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (250 Bytes). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__pycache__/_looper.cpython-311.pyc
ADDED
|
Binary file (7.99 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__pycache__/_tempita.cpython-311.pyc
ADDED
|
Binary file (52.8 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/__pycache__/compat3.cpython-311.pyc
ADDED
|
Binary file (1.84 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Tempita/_tempita.py
ADDED
|
@@ -0,0 +1,1091 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# cython: language_level=3str
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
A small templating language
|
| 5 |
+
|
| 6 |
+
This implements a small templating language. This language implements
|
| 7 |
+
if/elif/else, for/continue/break, expressions, and blocks of Python
|
| 8 |
+
code. The syntax is::
|
| 9 |
+
|
| 10 |
+
{{any expression (function calls etc)}}
|
| 11 |
+
{{any expression | filter}}
|
| 12 |
+
{{for x in y}}...{{endfor}}
|
| 13 |
+
{{if x}}x{{elif y}}y{{else}}z{{endif}}
|
| 14 |
+
{{py:x=1}}
|
| 15 |
+
{{py:
|
| 16 |
+
def foo(bar):
|
| 17 |
+
return 'baz'
|
| 18 |
+
}}
|
| 19 |
+
{{default var = default_value}}
|
| 20 |
+
{{# comment}}
|
| 21 |
+
|
| 22 |
+
You use this with the ``Template`` class or the ``sub`` shortcut.
|
| 23 |
+
The ``Template`` class takes the template string and the name of
|
| 24 |
+
the template (for errors) and a default namespace. Then (like
|
| 25 |
+
``string.Template``) you can call the ``tmpl.substitute(**kw)``
|
| 26 |
+
method to make a substitution (or ``tmpl.substitute(a_dict)``).
|
| 27 |
+
|
| 28 |
+
``sub(content, **kw)`` substitutes the template immediately. You
|
| 29 |
+
can use ``__name='tmpl.html'`` to set the name of the template.
|
| 30 |
+
|
| 31 |
+
If there are syntax errors ``TemplateError`` will be raised.
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
from __future__ import absolute_import
|
| 35 |
+
|
| 36 |
+
import re
|
| 37 |
+
import sys
|
| 38 |
+
import os
|
| 39 |
+
import tokenize
|
| 40 |
+
from io import StringIO
|
| 41 |
+
|
| 42 |
+
from ._looper import looper
|
| 43 |
+
from .compat3 import bytes, unicode_, basestring_, next, is_unicode, coerce_text
|
| 44 |
+
|
| 45 |
+
# Public API of this module.
__all__ = ['TemplateError', 'Template', 'sub', 'bunch']

# Splits the "var in expr" part of a {{for ...}} directive.
in_re = re.compile(r'\s+in\s+')
# Matches a valid Python-style identifier (case-insensitive).
var_re = re.compile(r'^[a-z_][a-z0-9_]*$', re.I)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class TemplateError(Exception):
    """Raised when a template cannot be parsed or evaluated.

    Carries the (line, column) *position* within the template and the
    template *name*, both woven into the string form for diagnostics.
    """

    def __init__(self, message, position, name=None):
        super(TemplateError, self).__init__(message)
        self.position = position
        self.name = name

    def __str__(self):
        text = ' '.join(self.args)
        if self.position:
            text = '%s at line %s column %s' % (
                text, self.position[0], self.position[1])
        if self.name:
            text += ' in %s' % self.name
        return text
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class _TemplateContinue(Exception):
    # Internal control-flow signal: raised for {{continue}} and caught by
    # Template._interpret_for to move to the next loop iteration.
    pass
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class _TemplateBreak(Exception):
    # Internal control-flow signal: raised for {{break}} and caught by
    # Template._interpret_for to terminate the loop.
    pass
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def get_file_template(name, from_template):
    """Default ``get_template`` hook used by ``Template.from_filename``.

    Loads *name* relative to the directory of *from_template*, reusing
    that template's namespace and loader so inheritance chains resolve
    consistently.
    """
    base_dir = os.path.dirname(from_template.name)
    target = os.path.join(base_dir, name)
    factory = from_template.__class__.from_filename
    return factory(target,
                   namespace=from_template.namespace,
                   get_template=from_template.get_template)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
class Template(object):
    """Compiled template: *content* is parsed once at construction and
    rendered by ``substitute()``.  See the module docstring for syntax.
    """

    default_namespace = {
        'start_braces': '{{',
        'end_braces': '}}',
        'looper': looper,
    }

    # Encoding used when coercing between bytes and unicode output.
    default_encoding = 'utf8'
    # Template rendered around the result when no {{inherit}} is given.
    default_inherit = None

    def __init__(self, content, name=None, namespace=None, stacklevel=None,
                 get_template=None, default_inherit=None, line_offset=0,
                 delimiters=None, delimeters=None):
        self.content = content

        # set delimiters
        if delimeters:
            # Legacy misspelled kwarg: still honoured, but deprecated.
            import warnings
            warnings.warn(
                "'delimeters' kwarg is being deprecated in favor of correctly"
                " spelled 'delimiters'. Please adjust your code.",
                DeprecationWarning
            )
            if delimiters is None:
                delimiters = delimeters
        if delimiters is None:
            delimiters = (self.default_namespace['start_braces'],
                          self.default_namespace['end_braces'])
        else:
            #assert len(delimiters) == 2 and all([isinstance(delimiter, basestring)
            #                                     for delimiter in delimiters])
            # Custom delimiters: shadow the class-level namespace with an
            # instance copy so other Template instances are unaffected.
            self.default_namespace = self.__class__.default_namespace.copy()
            self.default_namespace['start_braces'] = delimiters[0]
            self.default_namespace['end_braces'] = delimiters[1]
        self.delimiters = self.delimeters = delimiters  # Keep a legacy read-only copy, but don't use it.

        self._unicode = is_unicode(content)
        if name is None and stacklevel is not None:
            # Derive a template name from the caller's frame for error
            # messages: "<file or module>:<lineno>".
            try:
                caller = sys._getframe(stacklevel)
            except ValueError:
                pass
            else:
                globals = caller.f_globals  # NOTE: shadows the builtin
                lineno = caller.f_lineno
                if '__file__' in globals:
                    name = globals['__file__']
                    if name.endswith('.pyc') or name.endswith('.pyo'):
                        name = name[:-1]
                elif '__name__' in globals:
                    name = globals['__name__']
                else:
                    name = '<string>'
                if lineno:
                    name += ':%s' % lineno
        self.name = name
        self._parsed = parse(content, name=name, line_offset=line_offset, delimiters=self.delimiters)
        if namespace is None:
            namespace = {}
        self.namespace = namespace
        self.get_template = get_template
        if default_inherit is not None:
            self.default_inherit = default_inherit

    def from_filename(cls, filename, namespace=None, encoding=None,
                      default_inherit=None, get_template=get_file_template):
        """Alternate constructor: read template content from *filename*,
        decoding it with *encoding* when given (otherwise kept as bytes)."""
        with open(filename, 'rb') as f:
            c = f.read()
        if encoding:
            c = c.decode(encoding)
        return cls(content=c, name=filename, namespace=namespace,
                   default_inherit=default_inherit, get_template=get_template)

    from_filename = classmethod(from_filename)

    def __repr__(self):
        return '<%s %s name=%r>' % (
            self.__class__.__name__,
            hex(id(self))[2:], self.name)

    def substitute(self, *args, **kw):
        """Render the template.

        Accepts either one dict-like positional argument or keyword
        arguments, never both.  NOTE(review): a positional dict is used
        (and mutated) directly as the rendering namespace.
        """
        if args:
            if kw:
                raise TypeError(
                    "You can only give positional *or* keyword arguments")
            if len(args) > 1:
                raise TypeError(
                    "You can only give one positional argument")
            if not hasattr(args[0], 'items'):
                raise TypeError(
                    "If you pass in a single argument, you must pass in a dictionary-like object (with a .items() method); you gave %r"
                    % (args[0],))
            kw = args[0]
        ns = kw
        ns['__template_name__'] = self.name
        if self.namespace:
            ns.update(self.namespace)
        result, defs, inherit = self._interpret(ns)
        if not inherit:
            inherit = self.default_inherit
        if inherit:
            result = self._interpret_inherit(result, defs, inherit, ns)
        return result

    def _interpret(self, ns):
        """Run the parsed template against *ns*; return
        ``(text, defs, inherit_target_or_None)``."""
        __traceback_hide__ = True
        parts = []
        defs = {}
        self._interpret_codes(self._parsed, ns, out=parts, defs=defs)
        if '__inherit__' in defs:
            inherit = defs.pop('__inherit__')
        else:
            inherit = None
        return ''.join(parts), defs, inherit

    def _interpret_inherit(self, body, defs, inherit_template, ns):
        """Render *inherit_template*, exposing this template's output and
        defs as ``self`` inside the parent."""
        __traceback_hide__ = True
        if not self.get_template:
            raise TemplateError(
                'You cannot use inheritance without passing in get_template',
                position=None, name=self.name)
        templ = self.get_template(inherit_template, self)
        self_ = TemplateObject(self.name)
        for name, value in defs.items():
            setattr(self_, name, value)
        self_.body = body
        ns = ns.copy()
        ns['self'] = self_
        return templ.substitute(ns)

    def _interpret_codes(self, codes, ns, out, defs):
        # Parsed items are either literal strings or directive tuples.
        __traceback_hide__ = True
        for item in codes:
            if isinstance(item, basestring_):
                out.append(item)
            else:
                self._interpret_code(item, ns, out, defs)

    def _interpret_code(self, code, ns, out, defs):
        """Dispatch one parsed directive tuple ``(name, pos, ...)``."""
        __traceback_hide__ = True
        name, pos = code[0], code[1]
        if name == 'py':
            self._exec(code[2], ns, pos)
        elif name == 'continue':
            raise _TemplateContinue()
        elif name == 'break':
            raise _TemplateBreak()
        elif name == 'for':
            vars, expr, content = code[2], code[3], code[4]
            expr = self._eval(expr, ns, pos)
            self._interpret_for(vars, expr, content, ns, out, defs)
        elif name == 'cond':
            parts = code[2:]
            self._interpret_if(parts, ns, out, defs)
        elif name == 'expr':
            # {{expr | filter | ...}}: apply filters left to right.
            parts = code[2].split('|')
            base = self._eval(parts[0], ns, pos)
            for part in parts[1:]:
                func = self._eval(part, ns, pos)
                base = func(base)
            out.append(self._repr(base, pos))
        elif name == 'default':
            # {{default var = expr}}: only assigns when var is unset.
            var, expr = code[2], code[3]
            if var not in ns:
                result = self._eval(expr, ns, pos)
                ns[var] = result
        elif name == 'inherit':
            expr = code[2]
            value = self._eval(expr, ns, pos)
            defs['__inherit__'] = value
        elif name == 'def':
            name = code[2]
            signature = code[3]
            parts = code[4]
            ns[name] = defs[name] = TemplateDef(self, name, signature, body=parts, ns=ns,
                                                pos=pos)
        elif name == 'comment':
            return
        else:
            assert 0, "Unknown code: %r" % name

    def _interpret_for(self, vars, expr, content, ns, out, defs):
        """Execute a {{for}} loop body per item; honours {{continue}} and
        {{break}} via the sentinel exceptions."""
        __traceback_hide__ = True
        for item in expr:
            if len(vars) == 1:
                ns[vars[0]] = item
            else:
                if len(vars) != len(item):
                    raise ValueError(
                        'Need %i items to unpack (got %i items)'
                        % (len(vars), len(item)))
                for name, value in zip(vars, item):
                    ns[name] = value
            try:
                self._interpret_codes(content, ns, out, defs)
            except _TemplateContinue:
                continue
            except _TemplateBreak:
                break

    def _interpret_if(self, parts, ns, out, defs):
        """Evaluate if/elif/else branches; render the first truthy one."""
        __traceback_hide__ = True
        # @@: if/else/else gets through
        for part in parts:
            assert not isinstance(part, basestring_)
            name, pos = part[0], part[1]
            if name == 'else':
                result = True
            else:
                result = self._eval(part[2], ns, pos)
            if result:
                self._interpret_codes(part[3], ns, out, defs)
                break

    def _eval(self, code, ns, pos):
        """eval() an expression in the template namespace, annotating any
        exception message with the template position."""
        __traceback_hide__ = True
        try:
            try:
                value = eval(code, self.default_namespace, ns)
            except SyntaxError as e:
                raise SyntaxError(
                    'invalid syntax in expression: %s' % code)
            return value
        except Exception as e:
            if getattr(e, 'args', None):
                arg0 = e.args[0]
            else:
                arg0 = coerce_text(e)
            e.args = (self._add_line_info(arg0, pos),)
            raise

    def _exec(self, code, ns, pos):
        """exec() a {{py:...}} block, annotating exceptions with position."""
        __traceback_hide__ = True
        try:
            exec(code, self.default_namespace, ns)
        except Exception as e:
            if e.args:
                e.args = (self._add_line_info(e.args[0], pos),)
            else:
                e.args = (self._add_line_info(None, pos),)
            raise

    def _repr(self, value, pos):
        """Coerce an expression result to the template's output type
        (unicode vs bytes), applying default_encoding where needed."""
        __traceback_hide__ = True
        try:
            if value is None:
                return ''
            if self._unicode:
                try:
                    value = unicode_(value)
                except UnicodeDecodeError:
                    value = bytes(value)
            else:
                if not isinstance(value, basestring_):
                    value = coerce_text(value)
                if (is_unicode(value)
                        and self.default_encoding):
                    value = value.encode(self.default_encoding)
        except Exception as e:
            e.args = (self._add_line_info(e.args[0], pos),)
            raise
        else:
            if self._unicode and isinstance(value, bytes):
                if not self.default_encoding:
                    raise UnicodeDecodeError(
                        'Cannot decode bytes value %r into unicode '
                        '(no default_encoding provided)' % value)
                try:
                    value = value.decode(self.default_encoding)
                except UnicodeDecodeError as e:
                    raise UnicodeDecodeError(
                        e.encoding,
                        e.object,
                        e.start,
                        e.end,
                        e.reason + ' in string %r' % value)
            elif not self._unicode and is_unicode(value):
                if not self.default_encoding:
                    raise UnicodeEncodeError(
                        'Cannot encode unicode value %r into bytes '
                        '(no default_encoding provided)' % value)
                value = value.encode(self.default_encoding)
            return value

    def _add_line_info(self, msg, pos):
        # Append "at line L column C [in file NAME]" to an error message.
        msg = "%s at line %s column %s" % (
            msg, pos[0], pos[1])
        if self.name:
            msg += " in file %s" % self.name
        return msg
|
| 376 |
+
|
| 377 |
+
|
| 378 |
+
def sub(content, delimiters=None, **kw):
    """Render *content* immediately with keyword substitutions.

    ``__name`` may be passed among the keywords to name the template for
    error messages; the legacy misspelled ``delimeters`` kwarg is still
    accepted and forwarded.
    """
    template_name = kw.get('__name')
    legacy_delims = kw.pop('delimeters', None)  # for legacy code
    tmpl = Template(content, name=template_name,
                    delimiters=delimiters, delimeters=legacy_delims)
    return tmpl.substitute(kw)
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
def paste_script_template_renderer(content, vars, filename=None):
    """Renderer hook compatible with Paste Script: substitute *vars* into
    *content*, with *filename* used only for error reporting."""
    return Template(content, name=filename).substitute(vars)
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
class bunch(dict):
    """Dictionary whose entries are also reachable as attributes.

    If a ``'default'`` key is present, subscripting with a missing key
    returns that value instead of raising ``KeyError``.
    """

    def __init__(self, **kw):
        for key, val in kw.items():
            setattr(self, key, val)

    def __setattr__(self, name, value):
        # Attributes and items share one store: the dict itself.
        self[name] = value

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __getitem__(self, key):
        if 'default' in self:
            # Fall back to the 'default' entry on a miss.
            try:
                return dict.__getitem__(self, key)
            except KeyError:
                return dict.__getitem__(self, 'default')
        return dict.__getitem__(self, key)

    def __repr__(self):
        pairs = ' '.join(['%s=%r' % (k, v) for k, v in sorted(self.items())])
        return '<%s %s>' % (self.__class__.__name__, pairs)
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
class TemplateDef(object):
    """Callable created for a ``{{def name(signature)}}`` block.

    Calling an instance renders the captured body against the defining
    template's namespace plus the bound call arguments.  It also acts as
    a descriptor so a def attached to a ``TemplateObject`` is bound to
    that object (exposed as ``self`` while rendering).
    """

    def __init__(self, template, func_name, func_signature,
                 body, ns, pos, bound_self=None):
        self._template = template              # owning Template (evaluates defaults)
        self._func_name = func_name
        self._func_signature = func_signature  # (sig_args, var_args, var_kw, defaults)
        self._body = body                      # parsed code items rendered on call
        self._ns = ns                          # namespace captured at definition time
        self._pos = pos                        # (line, column) of the def, for errors
        self._bound_self = bound_self          # set when accessed via an instance

    def __repr__(self):
        return '<tempita function %s(%s) at %s:%s>' % (
            self._func_name, self._func_signature,
            self._template.name, self._pos)

    def __str__(self):
        # Render with no arguments so ``{{name}}`` can emit the body.
        return self()

    def __call__(self, *args, **kw):
        """Render the def's body with *args*/*kw* bound per its signature."""
        values = self._parse_signature(args, kw)
        ns = self._ns.copy()
        ns.update(values)
        if self._bound_self is not None:
            ns['self'] = self._bound_self
        out = []
        subdefs = {}
        self._template._interpret_codes(self._body, ns, out, subdefs)
        return ''.join(out)

    def __get__(self, obj, type=None):
        # Descriptor protocol: accessing through an object yields a copy
        # bound to it, mirroring how instance methods bind ``self``.
        if obj is None:
            return self
        return self.__class__(
            self._template, self._func_name, self._func_signature,
            self._body, self._ns, self._pos, bound_self=obj)

    def _parse_signature(self, args, kw):
        """Map positional *args* and keyword *kw* onto the def's signature.

        Returns a dict of parameter name -> value.  Raises TypeError for
        unexpected, extra, or missing arguments, mirroring Python's own
        call semantics.
        """
        values = {}
        sig_args, var_args, var_kw, defaults = self._func_signature
        extra_kw = {}
        for name, value in kw.items():
            if not var_kw and name not in sig_args:
                raise TypeError(
                    'Unexpected argument %s' % name)
            if name in sig_args:
                # BUG FIX: store under the argument's *name*; the previous
                # code used the sig_args list itself as the key, which is
                # unhashable and raised TypeError for any matching keyword.
                values[name] = value
            else:
                extra_kw[name] = value
        args = list(args)
        sig_args = list(sig_args)
        while args:
            # Skip parameters already filled by keyword arguments.
            while sig_args and sig_args[0] in values:
                sig_args.pop(0)
            if sig_args:
                name = sig_args.pop(0)
                values[name] = args.pop(0)
            elif var_args:
                values[var_args] = tuple(args)
                break
            else:
                raise TypeError(
                    'Extra position arguments: %s'
                    % ', '.join([repr(v) for v in args]))
        for name, value_expr in defaults.items():
            if name not in values:
                # Defaults are template expressions, evaluated lazily in
                # the defining template's namespace.
                values[name] = self._template._eval(
                    value_expr, self._ns, self._pos)
        for name in sig_args:
            if name not in values:
                raise TypeError(
                    'Missing argument: %s' % name)
        if var_kw:
            values[var_kw] = extra_kw
        return values
|
| 495 |
+
|
| 496 |
+
|
| 497 |
+
class TemplateObject(object):
    """Stand-in for a child template during {{inherit}} rendering.

    The parent template sees this object as ``self``; the child's defs
    are attached as attributes and its rendered body as ``self.body``.
    """

    def __init__(self, name):
        self.__name = name  # name-mangled; used only by __repr__
        # ``.get`` gives attribute access that falls back to Empty
        # instead of raising, for optional defs.
        self.get = TemplateObjectGetter(self)

    def __repr__(self):
        return '<%s %s>' % (self.__class__.__name__, self.__name)
|
| 505 |
+
|
| 506 |
+
|
| 507 |
+
class TemplateObjectGetter(object):
    """Proxy whose attribute lookups never raise: any attribute missing
    on the wrapped TemplateObject comes back as ``Empty`` (falsy,
    callable, iterable, renders as '')."""

    def __init__(self, template_obj):
        self.__template_obj = template_obj  # name-mangled backing object

    def __getattr__(self, attr):
        return getattr(self.__template_obj, attr, Empty)

    def __repr__(self):
        return '<%s around %r>' % (self.__class__.__name__, self.__template_obj)
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
class _Empty(object):
    """Null-object singleton: false, empty, and callable (returns itself).

    Used as the fallback for missing attributes in TemplateObjectGetter
    so optional inherited defs can be referenced and even called safely.
    """

    def __call__(self, *args, **kw):
        # Swallow any call signature and stay chainable.
        return self

    def __repr__(self):
        return 'Empty'

    def __str__(self):
        return ''

    def __unicode__(self):
        return u''

    def __bool__(self):
        return False

    def __iter__(self):
        return iter(())

    if sys.version < "3":
        # Python 2 spelling of the truthiness hook.
        __nonzero__ = __bool__


# Export a single shared instance and hide the class itself.
Empty = _Empty()
del _Empty
|
| 543 |
+
|
| 544 |
+
############################################################
|
| 545 |
+
## Lexing and Parsing
|
| 546 |
+
############################################################
|
| 547 |
+
|
| 548 |
+
|
| 549 |
+
def lex(s, name=None, trim_whitespace=True, line_offset=0, delimiters=None):
    """
    Lex a string into chunks:

        >>> lex('hey')
        ['hey']
        >>> lex('hey {{you}}')
        ['hey ', ('you', (1, 7))]
        >>> lex('hey {{')
        Traceback (most recent call last):
            ...
        TemplateError: No }} to finish last expression at line 1 column 7
        >>> lex('hey }}')
        Traceback (most recent call last):
            ...
        TemplateError: }} outside expression at line 1 column 7
        >>> lex('hey {{ {{')
        Traceback (most recent call last):
            ...
        TemplateError: {{ inside expression at line 1 column 10

    """
    if delimiters is None:
        delimiters = ( Template.default_namespace['start_braces'],
                       Template.default_namespace['end_braces'] )
    in_expr = False               # inside {{ ... }} ?
    chunks = []                   # output: strings and (expr, pos) tuples
    last = 0                      # index in s just past the previous delimiter
    last_pos = (line_offset + 1, 1)

    # Single regex matching either delimiter, scanned left to right.
    token_re = re.compile(r'%s|%s' % (re.escape(delimiters[0]),
                                      re.escape(delimiters[1])))
    for match in token_re.finditer(s):
        expr = match.group(0)
        pos = find_position(s, match.end(), last, last_pos)
        # Delimiters must strictly alternate: open only when outside an
        # expression, close only when inside one.
        if expr == delimiters[0] and in_expr:
            raise TemplateError('%s inside expression' % delimiters[0],
                                position=pos,
                                name=name)
        elif expr == delimiters[1] and not in_expr:
            raise TemplateError('%s outside expression' % delimiters[1],
                                position=pos,
                                name=name)
        if expr == delimiters[0]:
            part = s[last:match.start()]
            if part:
                chunks.append(part)
            in_expr = True
        else:
            # Closing delimiter: emit the expression with its start position.
            chunks.append((s[last:match.start()], last_pos))
            in_expr = False
        last = match.end()
        last_pos = pos
    if in_expr:
        raise TemplateError('No %s to finish last expression' % delimiters[1],
                            name=name, position=last_pos)
    part = s[last:]
    if part:
        chunks.append(part)
    if trim_whitespace:
        chunks = trim_lex(chunks)
    return chunks
|
| 611 |
+
|
| 612 |
+
# Chunk contents that open a block or are statements (candidates for
# whitespace trimming when a directive sits alone on a line).
statement_re = re.compile(r'^(?:if |elif |for |def |inherit |default |py:)')
# Directives with no expression that still count as statements.
single_statements = ['else', 'endif', 'endfor', 'enddef', 'continue', 'break']
# Trailing "\n[ \t]*" before a directive / leading "[ \t]*\n" after one.
trail_whitespace_re = re.compile(r'\n\r?[\t ]*$')
lead_whitespace_re = re.compile(r'^[\t ]*\n')
|
| 616 |
+
|
| 617 |
+
|
| 618 |
+
def trim_lex(tokens):
    r"""
    Takes a lexed set of tokens, and removes whitespace when there is
    a directive on a line by itself:

    >>> tokens = lex('{{if x}}\nx\n{{endif}}\ny', trim_whitespace=False)
    >>> tokens
    [('if x', (1, 3)), '\nx\n', ('endif', (3, 3)), '\ny']
    >>> trim_lex(tokens)
    [('if x', (1, 3)), 'x\n', ('endif', (3, 3)), 'y']
    """
    # Index of the last token whose *following* text chunk was trimmed;
    # lets a directive two tokens later treat the intervening blank text
    # as already consumed (prev_ok == 'last').
    last_trim = None
    for i, current in enumerate(tokens):
        if isinstance(current, basestring_):
            # we don't trim this
            continue
        item = current[0]
        # Only statement directives (if/for/def/... or the single-word
        # forms) trigger trimming; {{expr}} substitutions never do.
        if not statement_re.search(item) and item not in single_statements:
            continue
        if not i:
            prev = ''
        else:
            prev = tokens[i - 1]
        if i + 1 >= len(tokens):
            next_chunk = ''
        else:
            next_chunk = tokens[i + 1]
        # A directive sandwiched between two other directives (no text on
        # either side) is left alone.
        if (not isinstance(next_chunk, basestring_)
                or not isinstance(prev, basestring_)):
            continue
        # prev_ok: the directive starts at the beginning of a line
        # (preceding text ends in newline + optional indent).
        prev_ok = not prev or trail_whitespace_re.search(prev)
        if i == 1 and not prev.strip():
            # Whitespace-only text at the very start of the template.
            prev_ok = True
        if last_trim is not None and last_trim + 2 == i and not prev.strip():
            # The chunk between two adjacent directives is pure whitespace
            # and the earlier directive already trimmed into it.
            prev_ok = 'last'
        if (prev_ok
            and (not next_chunk or lead_whitespace_re.search(next_chunk)
                 or (i == len(tokens) - 2 and not next_chunk.strip()))):
            # The directive sits on a line by itself: strip its line.
            if prev:
                if ((i == 1 and not prev.strip())
                        or prev_ok == 'last'):
                    tokens[i - 1] = ''
                else:
                    m = trail_whitespace_re.search(prev)
                    # +1 to leave the leading \n on:
                    prev = prev[:m.start() + 1]
                    tokens[i - 1] = prev
            if next_chunk:
                last_trim = i
                if i == len(tokens) - 2 and not next_chunk.strip():
                    # Whitespace-only tail of the template: drop it.
                    tokens[i + 1] = ''
                else:
                    # Drop the directive line's trailing newline + indent
                    # from the following text chunk.
                    m = lead_whitespace_re.search(next_chunk)
                    next_chunk = next_chunk[m.end():]
                    tokens[i + 1] = next_chunk
    return tokens
|
| 674 |
+
|
| 675 |
+
|
| 676 |
+
def find_position(string, index, last_index, last_pos):
    """Given a string and index, return (line, column).

    Works incrementally: ``last_pos`` is the already-known (line, column)
    of ``last_index``, so only the slice between the two indexes has to
    be scanned.
    """
    newline_count = string.count('\n', last_index, index)
    if newline_count > 0:
        # Crossed at least one newline: the column restarts after the
        # most recent newline inside the scanned window.
        col = index - string.rfind('\n', last_index, index)
    else:
        # Same line as before: just advance the previous column.
        col = last_pos[1] + (index - last_index)
    return (last_pos[0] + newline_count, col)
|
| 684 |
+
|
| 685 |
+
|
| 686 |
+
def parse(s, name=None, line_offset=0, delimiters=None):
    r"""
    Parses a string into a kind of AST

    >>> parse('{{x}}')
    [('expr', (1, 3), 'x')]
    >>> parse('foo')
    ['foo']
    >>> parse('{{if x}}test{{endif}}')
    [('cond', (1, 3), ('if', (1, 3), 'x', ['test']))]
    >>> parse('series->{{for x in y}}x={{x}}{{endfor}}')
    ['series->', ('for', (1, 11), ('x',), 'y', ['x=', ('expr', (1, 27), 'x')])]
    >>> parse('{{for x, y in z:}}{{continue}}{{endfor}}')
    [('for', (1, 3), ('x', 'y'), 'z', [('continue', (1, 21))])]
    >>> parse('{{py:x=1}}')
    [('py', (1, 3), 'x=1')]
    >>> parse('{{if x}}a{{elif y}}b{{else}}c{{endif}}')
    [('cond', (1, 3), ('if', (1, 3), 'x', ['a']), ('elif', (1, 12), 'y', ['b']), ('else', (1, 23), None, ['c']))]

    Some exceptions::

    >>> parse('{{continue}}')
    Traceback (most recent call last):
        ...
    TemplateError: continue outside of for loop at line 1 column 3
    >>> parse('{{if x}}foo')
    Traceback (most recent call last):
        ...
    TemplateError: No {{endif}} at line 1 column 3
    >>> parse('{{else}}')
    Traceback (most recent call last):
        ...
    TemplateError: else outside of an if block at line 1 column 3
    >>> parse('{{if x}}{{for x in y}}{{endif}}{{endfor}}')
    Traceback (most recent call last):
        ...
    TemplateError: Unexpected endif at line 1 column 25
    >>> parse('{{if}}{{endif}}')
    Traceback (most recent call last):
        ...
    TemplateError: if with no expression at line 1 column 3
    >>> parse('{{for x y}}{{endfor}}')
    Traceback (most recent call last):
        ...
    TemplateError: Bad for (no "in") in 'x y' at line 1 column 3
    >>> parse('{{py:x=1\ny=2}}')
    Traceback (most recent call last):
        ...
    TemplateError: Multi-line py blocks must start with a newline at line 1 column 3
    """
    # Fall back to the template class's configured braces.
    if delimiters is None:
        delimiters = (Template.default_namespace['start_braces'],
                      Template.default_namespace['end_braces'])
    # Tokenize first, then repeatedly peel one top-level node off the
    # token stream until it is exhausted.
    remaining = lex(s, name=name, line_offset=line_offset, delimiters=delimiters)
    nodes = []
    while remaining:
        node, remaining = parse_expr(remaining, name)
        nodes.append(node)
    return nodes
|
| 745 |
+
|
| 746 |
+
|
| 747 |
+
def parse_expr(tokens, name, context=()):
    """Parse the first token of *tokens* into one AST node.

    Returns ``(node, remaining_tokens)``.  Plain strings pass through
    unchanged; directive tuples ``(text, pos)`` are either dispatched to
    the matching ``parse_*`` helper or converted to a leaf node here.

    *context* is a tuple of the enclosing construct names (e.g.
    ``('for', 'if')``) and is used to reject directives that are illegal
    where they appear.  Raises TemplateError for malformed or misplaced
    directives.
    """
    if isinstance(tokens[0], basestring_):
        return tokens[0], tokens[1:]
    expr, pos = tokens[0]
    expr = expr.strip()
    if expr.startswith('py:'):
        expr = expr[3:].lstrip(' \t')
        if expr.startswith('\n') or expr.startswith('\r'):
            # Multi-line python block: normalize line endings and make
            # sure the code ends with a newline so it exec's cleanly.
            expr = expr.lstrip('\r\n')
            if '\r' in expr:
                expr = expr.replace('\r\n', '\n')
                expr = expr.replace('\r', '')
            expr += '\n'
        else:
            if '\n' in expr:
                raise TemplateError(
                    'Multi-line py blocks must start with a newline',
                    position=pos, name=name)
        return ('py', pos, expr), tokens[1:]
    elif expr in ('continue', 'break'):
        if 'for' not in context:
            # Bug fix: report the actual directive.  Previously the
            # message always said 'continue', even for {{break}}.
            raise TemplateError(
                '%s outside of for loop' % expr,
                position=pos, name=name)
        return (expr, pos), tokens[1:]
    elif expr.startswith('if '):
        return parse_cond(tokens, name, context)
    elif (expr.startswith('elif ')
            or expr == 'else'):
        raise TemplateError(
            '%s outside of an if block' % expr.split()[0],
            position=pos, name=name)
    elif expr in ('if', 'elif', 'for'):
        raise TemplateError(
            '%s with no expression' % expr,
            position=pos, name=name)
    elif expr in ('endif', 'endfor', 'enddef'):
        raise TemplateError(
            'Unexpected %s' % expr,
            position=pos, name=name)
    elif expr.startswith('for '):
        return parse_for(tokens, name, context)
    elif expr.startswith('default '):
        return parse_default(tokens, name, context)
    elif expr.startswith('inherit '):
        return parse_inherit(tokens, name, context)
    elif expr.startswith('def '):
        return parse_def(tokens, name, context)
    elif expr.startswith('#'):
        # {{# ...}} comment: keep the raw text for position bookkeeping.
        return ('comment', pos, tokens[0][0]), tokens[1:]
    # Anything else is a plain substitution expression.
    return ('expr', pos, tokens[0][0]), tokens[1:]
|
| 798 |
+
|
| 799 |
+
|
| 800 |
+
def parse_cond(tokens, name, context):
    """Parse an if/elif/else chain into a ('cond', pos, *clauses) node."""
    start = tokens[0][1]
    context = context + ('if',)
    clauses = []
    while True:
        if not tokens:
            raise TemplateError(
                'Missing {{endif}}',
                position=start, name=name)
        head = tokens[0]
        if isinstance(head, tuple) and head[0] == 'endif':
            # Chain finished; consume the endif token.
            return ('cond', start) + tuple(clauses), tokens[1:]
        clause, tokens = parse_one_cond(tokens, name, context)
        clauses.append(clause)
|
| 814 |
+
|
| 815 |
+
|
| 816 |
+
def parse_one_cond(tokens, name, context):
    """Parse a single if/elif/else clause and its body.

    Returns ``((kind, pos, condition, body), remaining_tokens)`` where
    the remaining tokens still start with the terminator (endif/elif/else)
    for the caller to dispatch on.
    """
    (directive, pos), tokens = tokens[0], tokens[1:]
    body = []
    # A trailing colon is tolerated for python-ish spelling.
    if directive.endswith(':'):
        directive = directive[:-1]
    if directive.startswith('if '):
        clause = ('if', pos, directive[3:].lstrip(), body)
    elif directive.startswith('elif '):
        clause = ('elif', pos, directive[5:].lstrip(), body)
    elif directive == 'else':
        clause = ('else', pos, None, body)
    else:
        assert 0, "Unexpected token %r at %s" % (directive, pos)
    while True:
        if not tokens:
            raise TemplateError(
                'No {{endif}}',
                position=pos, name=name)
        head = tokens[0]
        terminated = (isinstance(head, tuple)
                      and (head[0] == 'endif'
                           or head[0].startswith('elif ')
                           or head[0] == 'else'))
        if terminated:
            # Leave the terminator on the stream for parse_cond.
            return clause, tokens
        node, tokens = parse_expr(tokens, name, context)
        body.append(node)
|
| 841 |
+
|
| 842 |
+
|
| 843 |
+
def parse_for(tokens, name, context):
    """Parse a for loop into a ('for', pos, vars, expr, body) node."""
    directive, pos = tokens[0]
    tokens = tokens[1:]
    context = ('for',) + context
    body = []
    assert directive.startswith('for '), directive
    if directive.endswith(':'):
        directive = directive[:-1]
    directive = directive[3:].strip()
    match = in_re.search(directive)
    if not match:
        raise TemplateError(
            'Bad for (no "in") in %r' % directive,
            position=pos, name=name)
    var_text = directive[:match.start()]
    if '(' in var_text:
        raise TemplateError(
            'You cannot have () in the variable section of a for loop (%r)'
            % var_text, position=pos, name=name)
    # Split the comma-separated loop variables, dropping empty pieces
    # (e.g. a trailing comma).
    loop_vars = tuple(v.strip() for v in var_text.split(',') if v.strip())
    iter_expr = directive[match.end():]
    while True:
        if not tokens:
            raise TemplateError(
                'No {{endfor}}',
                position=pos, name=name)
        head = tokens[0]
        if isinstance(head, tuple) and head[0] == 'endfor':
            return ('for', pos, loop_vars, iter_expr, body), tokens[1:]
        node, tokens = parse_expr(tokens, name, context)
        body.append(node)
|
| 876 |
+
|
| 877 |
+
|
| 878 |
+
def parse_default(tokens, name, context):
    """Parse {{default var=expr}} into a ('default', pos, var, expr) node."""
    directive, pos = tokens[0]
    assert directive.startswith('default ')
    # Drop the 'default ' keyword, keep the rest verbatim.
    assignment = directive.split(None, 1)[1]
    pieces = assignment.split('=', 1)
    if len(pieces) == 1:
        raise TemplateError(
            "Expression must be {{default var=value}}; no = found in %r" % assignment,
            position=pos, name=name)
    var = pieces[0].strip()
    if ',' in var:
        raise TemplateError(
            "{{default x, y = ...}} is not supported",
            position=pos, name=name)
    if not var_re.search(var):
        raise TemplateError(
            "Not a valid variable name for {{default}}: %r"
            % var, position=pos, name=name)
    value_expr = pieces[1].strip()
    return ('default', pos, var, value_expr), tokens[1:]
|
| 898 |
+
|
| 899 |
+
|
| 900 |
+
def parse_inherit(tokens, name, context):
    """Parse {{inherit expr}} into an ('inherit', pos, expr) node."""
    directive, pos = tokens[0]
    assert directive.startswith('inherit ')
    # Everything after the keyword is the expression naming the parent.
    parent_expr = directive.split(None, 1)[1]
    return ('inherit', pos, parent_expr), tokens[1:]
|
| 905 |
+
|
| 906 |
+
|
| 907 |
+
def parse_def(tokens, name, context):
    """Parse a {{def name(sig)}} block into ('def', pos, name, sig, body)."""
    directive, start = tokens[0]
    tokens = tokens[1:]
    assert directive.startswith('def ')
    header = directive.split(None, 1)[1]
    if header.endswith(':'):
        header = header[:-1]
    if '(' not in header:
        # No argument list at all: empty signature.
        func_name = header
        sig = ((), None, None, {})
    elif not header.endswith(')'):
        raise TemplateError("Function definition doesn't end with ): %s" % header,
                            position=start, name=name)
    else:
        header = header[:-1]
        func_name, sig_text = header.split('(', 1)
        sig = parse_signature(sig_text, name, start)
    context = context + ('def',)
    body = []
    while True:
        if not tokens:
            raise TemplateError(
                'Missing {{enddef}}',
                position=start, name=name)
        head = tokens[0]
        if isinstance(head, tuple) and head[0] == 'enddef':
            return ('def', start, func_name, sig, body), tokens[1:]
        node, tokens = parse_expr(tokens, name, context)
        body.append(node)
|
| 936 |
+
|
| 937 |
+
|
| 938 |
+
def parse_signature(sig_text, name, pos):
    """Parse a {{def ...}} argument list with Python's tokenizer.

    Returns ``(sig_args, var_arg, var_kw, defaults)``: the positional
    argument names, the ``*args`` name (or None), the ``**kwargs`` name
    (or None), and a dict mapping argument name -> default-value source
    text (kept as text; evaluated later at call time).
    """
    tokens = tokenize.generate_tokens(StringIO(sig_text).readline)
    sig_args = []
    var_arg = None
    var_kw = None
    defaults = {}

    def get_token(pos=False):
        # Pull the next token; map exhaustion to an explicit ENDMARKER so
        # the parsing loops below have a uniform terminator.
        try:
            tok_type, tok_string, (srow, scol), (erow, ecol), line = next(tokens)
        except StopIteration:
            return tokenize.ENDMARKER, ''
        if pos:
            # Caller wants source positions (used to slice default exprs).
            return tok_type, tok_string, (srow, scol), (erow, ecol)
        else:
            return tok_type, tok_string
    while 1:
        var_arg_type = None
        tok_type, tok_string = get_token()
        if tok_type == tokenize.ENDMARKER:
            break
        # Optional * / ** prefix before the argument name.
        if tok_type == tokenize.OP and (tok_string == '*' or tok_string == '**'):
            var_arg_type = tok_string
            tok_type, tok_string = get_token()
        if tok_type != tokenize.NAME:
            raise TemplateError('Invalid signature: (%s)' % sig_text,
                                position=pos, name=name)
        var_name = tok_string
        tok_type, tok_string = get_token()
        if tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','):
            # Bare argument (no default): file it by its * / ** prefix.
            if var_arg_type == '*':
                var_arg = var_name
            elif var_arg_type == '**':
                var_kw = var_name
            else:
                sig_args.append(var_name)
            if tok_type == tokenize.ENDMARKER:
                break
            continue
        # *args / **kwargs cannot take a default value.
        if var_arg_type is not None:
            raise TemplateError('Invalid signature: (%s)' % sig_text,
                                position=pos, name=name)
        if tok_type == tokenize.OP and tok_string == '=':
            # Scan the default-value expression, tracking bracket nesting
            # so commas inside (), [], {} don't end the expression.
            nest_type = None
            unnest_type = None
            nest_count = 0
            start_pos = end_pos = None
            parts = []
            while 1:
                tok_type, tok_string, s, e = get_token(True)
                if start_pos is None:
                    start_pos = s
                end_pos = e
                if tok_type == tokenize.ENDMARKER and nest_count:
                    # Ran out of input with unbalanced brackets.
                    raise TemplateError('Invalid signature: (%s)' % sig_text,
                                        position=pos, name=name)
                if (not nest_count and
                        (tok_type == tokenize.ENDMARKER or (tok_type == tokenize.OP and tok_string == ','))):
                    # Top-level comma or end: the default expression is the
                    # exact source slice between start_pos and end_pos.
                    default_expr = isolate_expression(sig_text, start_pos, end_pos)
                    defaults[var_name] = default_expr
                    sig_args.append(var_name)
                    break
                parts.append((tok_type, tok_string))
                if nest_count and tok_type == tokenize.OP and tok_string == nest_type:
                    nest_count += 1
                elif nest_count and tok_type == tokenize.OP and tok_string == unnest_type:
                    nest_count -= 1
                    if not nest_count:
                        nest_type = unnest_type = None
                elif not nest_count and tok_type == tokenize.OP and tok_string in ('(', '[', '{'):
                    nest_type = tok_string
                    nest_count = 1
                    unnest_type = {'(': ')', '[': ']', '{': '}'}[nest_type]
    return sig_args, var_arg, var_kw, defaults
|
| 1012 |
+
|
| 1013 |
+
|
| 1014 |
+
def isolate_expression(string, start_pos, end_pos):
    """Return the slice of *string* between two (row, col) positions.

    Rows are 1-based (tokenize convention); columns are 0-based.
    """
    srow, scol = start_pos
    erow, ecol = end_pos
    # Convert 1-based rows to 0-based line indexes.
    srow -= 1
    erow -= 1
    lines = string.splitlines(True)
    if srow == erow:
        return lines[srow][scol:ecol]
    # Spans multiple lines: tail of the first, whole middle lines, then
    # head of the last (if it exists).
    pieces = [lines[srow][scol:]]
    pieces.extend(lines[srow + 1:erow])
    if erow < len(lines):
        # It'll sometimes give (end_row_past_finish, 0)
        pieces.append(lines[erow][:ecol])
    return ''.join(pieces)
|
| 1028 |
+
|
| 1029 |
+
_fill_command_usage = """\
|
| 1030 |
+
%prog [OPTIONS] TEMPLATE arg=value
|
| 1031 |
+
|
| 1032 |
+
Use py:arg=value to set a Python value; otherwise all values are
|
| 1033 |
+
strings.
|
| 1034 |
+
"""
|
| 1035 |
+
|
| 1036 |
+
|
| 1037 |
+
def fill_command(args=None):
    """Command-line entry point: fill a template file with arg=value pairs.

    *args* defaults to ``sys.argv[1:]``.  ``name=value`` arguments become
    string variables; ``py:name=value`` arguments are evaluated as Python.
    A template name of ``-`` reads from stdin.  Output goes to stdout or
    to the file named by ``-o``/``--output``.
    """
    import sys
    import optparse
    import pkg_resources
    import os
    if args is None:
        args = sys.argv[1:]
    dist = pkg_resources.get_distribution('Paste')
    parser = optparse.OptionParser(
        version=coerce_text(dist),
        usage=_fill_command_usage)
    parser.add_option(
        '-o', '--output',
        dest='output',
        metavar="FILENAME",
        help="File to write output to (default stdout)")
    parser.add_option(
        '--env',
        dest='use_env',
        action='store_true',
        help="Put the environment in as top-level variables")
    options, args = parser.parse_args(args)
    if len(args) < 1:
        print('You must give a template filename')
        sys.exit(2)
    template_name = args[0]
    args = args[1:]
    template_vars = {}
    if options.use_env:
        template_vars.update(os.environ)
    for value in args:
        if '=' not in value:
            print('Bad argument: %r' % value)
            sys.exit(2)
        name, value = value.split('=', 1)
        if name.startswith('py:'):
            # Bug fix: strip the 'py:' prefix.  The original used
            # name[:3], which *kept* 'py:' and stored the value under
            # the wrong key.
            name = name[3:]
            # NOTE(security): eval of a command-line value is intentional
            # here (documented py: feature), but never feed this untrusted
            # input.
            value = eval(value)
        template_vars[name] = value
    if template_name == '-':
        template_content = sys.stdin.read()
        template_name = '<stdin>'
    else:
        with open(template_name, 'rb') as f:
            template_content = f.read()
    template = Template(template_content, name=template_name)
    result = template.substitute(template_vars)
    if options.output:
        with open(options.output, 'wb') as f:
            f.write(result)
    else:
        sys.stdout.write(result)
|
| 1089 |
+
|
| 1090 |
+
# Allow the module to be run directly as a template-filling command.
if __name__ == '__main__':
    fill_command()
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/projection.py
ADDED
|
@@ -0,0 +1,528 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""One-mode (unipartite) projections of bipartite graphs."""
|
| 2 |
+
import networkx as nx
|
| 3 |
+
from networkx.exception import NetworkXAlgorithmError
|
| 4 |
+
from networkx.utils import not_implemented_for
|
| 5 |
+
|
| 6 |
+
__all__ = [
|
| 7 |
+
"projected_graph",
|
| 8 |
+
"weighted_projected_graph",
|
| 9 |
+
"collaboration_weighted_projected_graph",
|
| 10 |
+
"overlap_weighted_projected_graph",
|
| 11 |
+
"generic_weighted_projected_graph",
|
| 12 |
+
]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
@nx._dispatch(graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True)
def projected_graph(B, nodes, multigraph=False):
    r"""Returns the projection of B onto one of its node sets.

    Returns the graph G that is the projection of the bipartite graph B
    onto the specified nodes. They retain their attributes and are connected
    in G if they have a common neighbor in B.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    multigraph: bool (default=False)
        If True return a multigraph where the multiple edges represent multiple
        shared neighbors. The edge key in the multigraph is assigned to the
        label of the neighbor.

    Returns
    -------
    Graph : NetworkX graph or multigraph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(4)
    >>> G = bipartite.projected_graph(B, [1, 3])
    >>> list(G)
    [1, 3]
    >>> list(G.edges())
    [(1, 3)]

    If nodes `a`, and `b` are connected through both nodes 1 and 2 then
    building a multigraph results in two edges in the projection onto
    [`a`, `b`]:

    >>> B = nx.Graph()
    >>> B.add_edges_from([("a", 1), ("b", 1), ("a", 2), ("b", 2)])
    >>> G = bipartite.projected_graph(B, ["a", "b"], multigraph=True)
    >>> print([sorted((u, v)) for u, v in G.edges()])
    [['a', 'b'], ['a', 'b']]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    Returns a simple graph that is the projection of the bipartite graph B
    onto the set of nodes given in list nodes. If multigraph=True then
    a multigraph is returned with an edge for every shared neighbor.

    Directed graphs are allowed as input. The output will also then
    be a directed graph with edges if there is a directed path between
    the nodes.

    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    collaboration_weighted_projected_graph,
    overlap_weighted_projected_graph,
    generic_weighted_projected_graph
    """
    if B.is_multigraph():
        raise nx.NetworkXError("not defined for multigraphs")
    directed = B.is_directed()
    # Pick the output graph class matching B's directedness and the
    # requested multigraph-ness.
    if directed:
        G = nx.MultiDiGraph() if multigraph else nx.DiGraph()
    else:
        G = nx.MultiGraph() if multigraph else nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        # Distance-2 neighbors of u (same side of the bipartition).
        second_nbrs = {w for nbr in B[u] for w in B[nbr] if w != u}
        if not multigraph:
            G.add_edges_from((u, w) for w in second_nbrs)
            continue
        for w in second_nbrs:
            # One parallel edge per shared "top" neighbor, keyed by its
            # label; in the directed case shared neighbors are successors
            # of u that are predecessors of w.
            if directed:
                shared = set(B[u]) & set(B.pred[w])
            else:
                shared = set(B[u]) & set(B[w])
            for key in shared:
                if not G.has_edge(u, w, key):
                    G.add_edge(u, w, key=key)
    return G
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
@not_implemented_for("multigraph")
@nx._dispatch(graphs="B")
def weighted_projected_graph(B, nodes, ratio=False):
    r"""Returns a weighted projection of B onto one of its node sets.

    The weighted projected graph is the projection of the bipartite
    network B onto the specified nodes with weights representing the
    number of shared neighbors or the ratio between actual shared
    neighbors and possible shared neighbors if ``ratio is True`` [1]_.
    The nodes retain their attributes and are connected in the resulting
    graph if they have an edge to a common node in the original graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Distinct nodes to project onto (the "bottom" nodes).

    ratio: Bool (default=False)
        If True, edge weight is the ratio between actual shared neighbors
        and maximum possible shared neighbors (i.e., the size of the other
        node set). If False, edges weight is the number of shared neighbors.

    Returns
    -------
    Graph : NetworkX graph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(4)
    >>> G = bipartite.weighted_projected_graph(B, [1, 3])
    >>> list(G)
    [1, 3]
    >>> list(G.edges(data=True))
    [(1, 3, {'weight': 1})]
    >>> G = bipartite.weighted_projected_graph(B, [1, 3], ratio=True)
    >>> list(G.edges(data=True))
    [(1, 3, {'weight': 0.5})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite, or that
    the input nodes are distinct. However, if the length of the input nodes is
    greater than or equal to the nodes in the graph B, an exception is raised.
    If the nodes are not distinct but don't raise this error, the output weights
    will be incorrect.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    collaboration_weighted_projected_graph,
    overlap_weighted_projected_graph,
    generic_weighted_projected_graph
    projected_graph

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
        Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.
    """
    # In the directed case a shared neighbor of u and v is a successor of
    # u that is a predecessor of v; undirected adjacency covers both.
    if B.is_directed():
        pred, G = B.pred, nx.DiGraph()
    else:
        pred, G = B.adj, nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    n_top = len(B) - len(nodes)

    if n_top < 1:
        raise NetworkXAlgorithmError(
            f"the size of the nodes to project onto ({len(nodes)}) is >= the graph size ({len(B)}).\n"
            "They are either not a valid bipartite partition or contain duplicates"
        )

    for u in nodes:
        u_nbrs = set(B[u])
        # All same-side nodes reachable through a shared top neighbor.
        for v in {n for nbr in u_nbrs for n in B[nbr]} - {u}:
            shared = u_nbrs & set(pred[v])
            G.add_edge(u, v, weight=len(shared) / n_top if ratio else len(shared))
    return G
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
@not_implemented_for("multigraph")
@nx._dispatch(graphs="B")
def collaboration_weighted_projected_graph(B, nodes):
    r"""Newman's weighted projection of B onto one of its node sets.

    Projects the bipartite network B onto the given nodes, weighting each
    projected edge according to Newman's collaboration model [1]_:

    .. math::

        w_{u, v} = \sum_k \frac{\delta_{u}^{k} \delta_{v}^{k}}{d_k - 1}

    Here `u` and `v` belong to the bottom node set and `k` ranges over the
    top node set.  `d_k` is the bipartite degree of `k`, and
    `\delta_{u}^{k}` is 1 when `u` and `k` are adjacent in B and 0
    otherwise.

    Node attributes are carried over, and two projected nodes are joined
    whenever they share at least one neighbor in the bipartite graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    Returns
    -------
    Graph : NetworkX graph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(5)
    >>> B.add_edge(1, 5)
    >>> G = bipartite.collaboration_weighted_projected_graph(B, [0, 2, 4, 5])
    >>> list(G)
    [0, 2, 4, 5]
    >>> for edge in sorted(G.edges(data=True)):
    ...     print(edge)
    ...
    (0, 2, {'weight': 0.5})
    (0, 5, {'weight': 0.5})
    (2, 4, {'weight': 1.0})
    (2, 5, {'weight': 0.5})

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    overlap_weighted_projected_graph,
    generic_weighted_projected_graph,
    projected_graph

    References
    ----------
    .. [1] Scientific collaboration networks: II.
        Shortest paths, weighted networks, and centrality,
        M. E. J. Newman, Phys. Rev. E 64, 016132 (2001).
    """
    directed = B.is_directed()
    G = nx.DiGraph() if directed else nx.Graph()
    # In the directed case neighbors of v must be taken from the
    # predecessor view so shared top nodes are counted correctly.
    pred = B.pred if directed else B.adj
    G.graph.update(B.graph)
    G.add_nodes_from((node, B.nodes[node]) for node in nodes)
    for u in nodes:
        u_nbrs = set(B[u])
        # Second neighbors of u (other bottom nodes reachable via a top node).
        candidates = {w for k in u_nbrs for w in B[k] if w != u}
        for v in candidates:
            shared = u_nbrs & set(pred[v])
            # Each shared top node k contributes 1/(d_k - 1); degree-1
            # top nodes contribute nothing (they connect to only one node).
            weight = sum(
                1.0 / (len(B[k]) - 1) for k in shared if len(B[k]) > 1
            )
            G.add_edge(u, v, weight=weight)
    return G
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
@not_implemented_for("multigraph")
@nx._dispatch(graphs="B")
def overlap_weighted_projected_graph(B, nodes, jaccard=True):
    r"""Overlap weighted projection of B onto one of its node sets.

    Projects the bipartite network B onto the given nodes, weighting each
    projected edge by the Jaccard index of the two endpoints' neighborhoods
    in the original bipartite network [1]_:

    .. math::

        w_{v, u} = \frac{|N(u) \cap N(v)|}{|N(u) \cup N(v)|}

    or, when ``jaccard`` is False, by the number of common neighbors
    divided by the smaller of the two bipartite degrees [1]_:

    .. math::

        w_{v, u} = \frac{|N(u) \cap N(v)|}{min(|N(u)|, |N(v)|)}

    Node attributes are carried over, and two projected nodes are joined
    whenever they share at least one neighbor in the bipartite graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    jaccard: Bool (default=True)
        If True, weight edges by the Jaccard index; otherwise weight by the
        overlap coefficient (common neighbors over minimum degree).

    Returns
    -------
    Graph : NetworkX graph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(5)
    >>> nodes = [0, 2, 4]
    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes)
    >>> list(G)
    [0, 2, 4]
    >>> list(G.edges(data=True))
    [(0, 2, {'weight': 0.5}), (2, 4, {'weight': 0.5})]
    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes, jaccard=False)
    >>> list(G.edges(data=True))
    [(0, 2, {'weight': 1.0}), (2, 4, {'weight': 1.0})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    collaboration_weighted_projected_graph,
    generic_weighted_projected_graph,
    projected_graph

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. Analyzing Affiliation
        Networks. In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.

    """
    directed = B.is_directed()
    G = nx.DiGraph() if directed else nx.Graph()
    # Directed graphs need the predecessor view of v so that shared
    # top nodes are found on the incoming side.
    pred = B.pred if directed else B.adj
    G.graph.update(B.graph)
    G.add_nodes_from((node, B.nodes[node]) for node in nodes)
    for u in nodes:
        u_nbrs = set(B[u])
        # All other bottom nodes reachable through one top node.
        candidates = {w for k in u_nbrs for w in B[k]} - {u}
        for v in candidates:
            v_nbrs = set(pred[v])
            shared = len(u_nbrs & v_nbrs)
            if jaccard:
                wt = shared / len(u_nbrs | v_nbrs)
            else:
                wt = shared / min(len(u_nbrs), len(v_nbrs))
            G.add_edge(u, v, weight=wt)
    return G
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
@not_implemented_for("multigraph")
@nx._dispatch(graphs="B", preserve_all_attrs=True)
def generic_weighted_projected_graph(B, nodes, weight_function=None):
    r"""Weighted projection of B with a user-specified weight function.

    Projects the bipartite network B onto the given nodes, computing each
    projected edge's weight with a caller-supplied function.  The function
    receives the graph and the two endpoint nodes, and must return an
    integer or a float.

    Node attributes are carried over, and two projected nodes are joined
    whenever they share at least one neighbor in the original graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    weight_function : function
        This function must accept as parameters the same input graph
        that this function, and two nodes; and return an integer or a float.
        The default function computes the number of shared neighbors.

    Returns
    -------
    Graph : NetworkX graph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> # Define some custom weight functions
    >>> def jaccard(G, u, v):
    ...     unbrs = set(G[u])
    ...     vnbrs = set(G[v])
    ...     return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
    ...
    >>> def my_weight(G, u, v, weight="weight"):
    ...     w = 0
    ...     for nbr in set(G[u]) & set(G[v]):
    ...         w += G[u][nbr].get(weight, 1) + G[v][nbr].get(weight, 1)
    ...     return w
    ...
    >>> # A complete bipartite graph with 4 nodes and 4 edges
    >>> B = nx.complete_bipartite_graph(2, 2)
    >>> # Add some arbitrary weight to the edges
    >>> for i, (u, v) in enumerate(B.edges()):
    ...     B.edges[u, v]["weight"] = i + 1
    ...
    >>> for edge in B.edges(data=True):
    ...     print(edge)
    ...
    (0, 2, {'weight': 1})
    (0, 3, {'weight': 2})
    (1, 2, {'weight': 3})
    (1, 3, {'weight': 4})
    >>> # By default, the weight is the number of shared neighbors
    >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1])
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 2})]
    >>> # To specify a custom weight function use the weight_function parameter
    >>> G = bipartite.generic_weighted_projected_graph(
    ...     B, [0, 1], weight_function=jaccard
    ... )
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 1.0})]
    >>> G = bipartite.generic_weighted_projected_graph(
    ...     B, [0, 1], weight_function=my_weight
    ... )
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 10})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    collaboration_weighted_projected_graph,
    overlap_weighted_projected_graph,
    projected_graph

    """
    directed = B.is_directed()
    G = nx.DiGraph() if directed else nx.Graph()
    pred = B.pred if directed else B.adj
    if weight_function is None:

        def weight_function(G, u, v):
            # set(pred[v]) rather than G[v] so the directed case counts
            # the predecessors of v (the shared top nodes) correctly.
            return len(set(G[u]) & set(pred[v]))

    G.graph.update(B.graph)
    G.add_nodes_from((node, B.nodes[node]) for node in nodes)
    for u in nodes:
        # All other bottom nodes reachable from u through one top node.
        candidates = {w for k in B[u] for w in B[k]} - {u}
        for v in candidates:
            G.add_edge(u, v, weight=weight_function(B, u, v))
    return G
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (237 Bytes). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality_subset.cpython-311.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_group.cpython-311.pyc
ADDED
|
Binary file (16.4 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_harmonic_centrality.cpython-311.pyc
ADDED
|
Binary file (8.58 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_laplacian_centrality.cpython-311.pyc
ADDED
|
Binary file (10.8 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_load_centrality.cpython-311.pyc
ADDED
|
Binary file (16.6 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_second_order_centrality.cpython-311.pyc
ADDED
|
Binary file (5.28 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_subgraph.cpython-311.pyc
ADDED
|
Binary file (5.65 kB). View file
|
|
|