after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def lookup_target(self, name):
# Look up name in this scope only. Declare as Python
# variable if not found.
entry = self.lookup_here(name)
if not entry:
entry = self.lookup_here_unmangled(name)
if entry and entry.is_pyglobal:
self._emit_class_private_warning(entry.pos, name)
if not entry:
entry = self.declare_var(name, py_object_type, None)
return entry
|
def lookup_target(self, name):
# Look up name in this scope only. Declare as Python
# variable if not found.
entry = self.lookup_here(name)
if not entry:
entry = self.declare_var(name, py_object_type, None)
return entry
|
https://github.com/cython/cython/issues/3548
|
(bleeding) ✔ ~/source/other_source/pandas [master {pandas/master}|✚ 2]
jupiter@15:19 ➤ pip install -v .
Non-user install because user site-packages disabled
Created temporary directory: /tmp/pip-ephem-wheel-cache-zcmyaxf3
Created temporary directory: /tmp/pip-req-tracker-dfm6xgbp
Initialized build tracking at /tmp/pip-req-tracker-dfm6xgbp
Created build tracker: /tmp/pip-req-tracker-dfm6xgbp
Entered build tracker: /tmp/pip-req-tracker-dfm6xgbp
Created temporary directory: /tmp/pip-install-_tjupc3v
Processing /home/tcaswell/source/other_source/pandas
Created temporary directory: /tmp/pip-req-build-eh2hlgu3
Added file:///home/tcaswell/source/other_source/pandas to build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Running setup.py (path:/tmp/pip-req-build-eh2hlgu3/setup.py) egg_info for package from file:///home/tcaswell/source/other_source/pandas
Running command python setup.py egg_info
warning: pandas/_libs/groupby.pyx:1101:26: Unreachable code
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
ctypedef struct __Pyx_memviewslice:
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
^
------------------------------------------------------------
__pyxutil:15:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
ctypedef unsigned long long ___pyx_uint64_t "__pyx_t_5numpy_uint64_t"
^
------------------------------------------------------------
__pyxutil:17:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
^
------------------------------------------------------------
__pyxutil:16:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 794, in <module>
setup_package()
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 764, in setup_package
ext_modules=maybe_cythonize(extensions, compiler_directives=directives),
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 537, in maybe_cythonize
return cythonize(extensions, *args, **kwargs)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1105, in cythonize
cythonize_one(*args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1263, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: pandas/_libs/interval.pyx
Compiling pandas/_libs/algos.pyx because it changed.
Compiling pandas/_libs/groupby.pyx because it changed.
Compiling pandas/_libs/hashing.pyx because it changed.
Compiling pandas/_libs/hashtable.pyx because it changed.
Compiling pandas/_libs/index.pyx because it changed.
Compiling pandas/_libs/indexing.pyx because it changed.
Compiling pandas/_libs/internals.pyx because it changed.
Compiling pandas/_libs/interval.pyx because it changed.
Compiling pandas/_libs/join.pyx because it changed.
Compiling pandas/_libs/lib.pyx because it changed.
Compiling pandas/_libs/missing.pyx because it changed.
Compiling pandas/_libs/parsers.pyx because it changed.
Compiling pandas/_libs/reduction.pyx because it changed.
Compiling pandas/_libs/ops.pyx because it changed.
Compiling pandas/_libs/ops_dispatch.pyx because it changed.
Compiling pandas/_libs/properties.pyx because it changed.
Compiling pandas/_libs/reshape.pyx because it changed.
Compiling pandas/_libs/sparse.pyx because it changed.
Compiling pandas/_libs/tslib.pyx because it changed.
Compiling pandas/_libs/tslibs/c_timestamp.pyx because it changed.
Compiling pandas/_libs/tslibs/ccalendar.pyx because it changed.
Compiling pandas/_libs/tslibs/conversion.pyx because it changed.
Compiling pandas/_libs/tslibs/fields.pyx because it changed.
Compiling pandas/_libs/tslibs/frequencies.pyx because it changed.
Compiling pandas/_libs/tslibs/nattype.pyx because it changed.
Compiling pandas/_libs/tslibs/np_datetime.pyx because it changed.
Compiling pandas/_libs/tslibs/offsets.pyx because it changed.
Compiling pandas/_libs/tslibs/parsing.pyx because it changed.
Compiling pandas/_libs/tslibs/period.pyx because it changed.
Compiling pandas/_libs/tslibs/resolution.pyx because it changed.
Compiling pandas/_libs/tslibs/strptime.pyx because it changed.
Compiling pandas/_libs/tslibs/timedeltas.pyx because it changed.
Compiling pandas/_libs/tslibs/timestamps.pyx because it changed.
Compiling pandas/_libs/tslibs/timezones.pyx because it changed.
Compiling pandas/_libs/tslibs/tzconversion.pyx because it changed.
Compiling pandas/_libs/testing.pyx because it changed.
Compiling pandas/_libs/window/aggregations.pyx because it changed.
Compiling pandas/_libs/window/indexers.pyx because it changed.
Compiling pandas/_libs/writers.pyx because it changed.
Compiling pandas/io/sas/sas.pyx because it changed.
[ 1/40] Cythonizing pandas/_libs/algos.pyx
[ 2/40] Cythonizing pandas/_libs/groupby.pyx
[ 3/40] Cythonizing pandas/_libs/hashing.pyx
[ 4/40] Cythonizing pandas/_libs/hashtable.pyx
[ 5/40] Cythonizing pandas/_libs/index.pyx
[ 6/40] Cythonizing pandas/_libs/indexing.pyx
[ 7/40] Cythonizing pandas/_libs/internals.pyx
[ 8/40] Cythonizing pandas/_libs/interval.pyx
Cleaning up...
Removing source in /tmp/pip-req-build-eh2hlgu3
Removed file:///home/tcaswell/source/other_source/pandas from build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Removed build tracker: '/tmp/pip-req-tracker-dfm6xgbp'
ERROR: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
Exception information:
Traceback (most recent call last):
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/cli/base_command.py", line 186, in _main
status = self.run(options, args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/commands/install.py", line 331, in run
resolver.resolve(requirement_set)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 177, in resolve
discovered_reqs.extend(self._resolve_one(requirement_set, req))
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 333, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 282, in _get_abstract_dist_for
abstract_dist = self.preparer.prepare_linked_requirement(req)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 515, in prepare_linked_requirement
abstract_dist = _get_prepared_distribution(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 95, in _get_prepared_distribution
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py", line 40, in prepare_distribution_metadata
self.req.prepare_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 564, in prepare_metadata
self.metadata_directory = self._generate_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 538, in _generate_metadata
return generate_metadata_legacy(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py", line 115, in generate_metadata
call_subprocess(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py", line 242, in call_subprocess
raise InstallationError(exc_msg)
pip._internal.exceptions.InstallationError: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
|
Cython.Compiler.Errors.CompileError
|
def mangle_class_private_name(self, name):
# a few utilitycode names need to specifically be ignored
if name and name.lower().startswith("__pyx_"):
return name
if name and name.startswith("__") and not name.endswith("__"):
name = EncodedString("_%s%s" % (self.class_name.lstrip("_"), name))
return name
|
def mangle_class_private_name(self, name):
# a few utilitycode names need to specifically be ignored
if name and name.lower().startswith("__pyx_"):
return name
return self.mangle_special_name(name)
|
https://github.com/cython/cython/issues/3548
|
(bleeding) ✔ ~/source/other_source/pandas [master {pandas/master}|✚ 2]
jupiter@15:19 ➤ pip install -v .
Non-user install because user site-packages disabled
Created temporary directory: /tmp/pip-ephem-wheel-cache-zcmyaxf3
Created temporary directory: /tmp/pip-req-tracker-dfm6xgbp
Initialized build tracking at /tmp/pip-req-tracker-dfm6xgbp
Created build tracker: /tmp/pip-req-tracker-dfm6xgbp
Entered build tracker: /tmp/pip-req-tracker-dfm6xgbp
Created temporary directory: /tmp/pip-install-_tjupc3v
Processing /home/tcaswell/source/other_source/pandas
Created temporary directory: /tmp/pip-req-build-eh2hlgu3
Added file:///home/tcaswell/source/other_source/pandas to build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Running setup.py (path:/tmp/pip-req-build-eh2hlgu3/setup.py) egg_info for package from file:///home/tcaswell/source/other_source/pandas
Running command python setup.py egg_info
warning: pandas/_libs/groupby.pyx:1101:26: Unreachable code
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
ctypedef struct __Pyx_memviewslice:
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
^
------------------------------------------------------------
__pyxutil:15:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
ctypedef unsigned long long ___pyx_uint64_t "__pyx_t_5numpy_uint64_t"
^
------------------------------------------------------------
__pyxutil:17:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
^
------------------------------------------------------------
__pyxutil:16:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 794, in <module>
setup_package()
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 764, in setup_package
ext_modules=maybe_cythonize(extensions, compiler_directives=directives),
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 537, in maybe_cythonize
return cythonize(extensions, *args, **kwargs)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1105, in cythonize
cythonize_one(*args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1263, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: pandas/_libs/interval.pyx
Compiling pandas/_libs/algos.pyx because it changed.
Compiling pandas/_libs/groupby.pyx because it changed.
Compiling pandas/_libs/hashing.pyx because it changed.
Compiling pandas/_libs/hashtable.pyx because it changed.
Compiling pandas/_libs/index.pyx because it changed.
Compiling pandas/_libs/indexing.pyx because it changed.
Compiling pandas/_libs/internals.pyx because it changed.
Compiling pandas/_libs/interval.pyx because it changed.
Compiling pandas/_libs/join.pyx because it changed.
Compiling pandas/_libs/lib.pyx because it changed.
Compiling pandas/_libs/missing.pyx because it changed.
Compiling pandas/_libs/parsers.pyx because it changed.
Compiling pandas/_libs/reduction.pyx because it changed.
Compiling pandas/_libs/ops.pyx because it changed.
Compiling pandas/_libs/ops_dispatch.pyx because it changed.
Compiling pandas/_libs/properties.pyx because it changed.
Compiling pandas/_libs/reshape.pyx because it changed.
Compiling pandas/_libs/sparse.pyx because it changed.
Compiling pandas/_libs/tslib.pyx because it changed.
Compiling pandas/_libs/tslibs/c_timestamp.pyx because it changed.
Compiling pandas/_libs/tslibs/ccalendar.pyx because it changed.
Compiling pandas/_libs/tslibs/conversion.pyx because it changed.
Compiling pandas/_libs/tslibs/fields.pyx because it changed.
Compiling pandas/_libs/tslibs/frequencies.pyx because it changed.
Compiling pandas/_libs/tslibs/nattype.pyx because it changed.
Compiling pandas/_libs/tslibs/np_datetime.pyx because it changed.
Compiling pandas/_libs/tslibs/offsets.pyx because it changed.
Compiling pandas/_libs/tslibs/parsing.pyx because it changed.
Compiling pandas/_libs/tslibs/period.pyx because it changed.
Compiling pandas/_libs/tslibs/resolution.pyx because it changed.
Compiling pandas/_libs/tslibs/strptime.pyx because it changed.
Compiling pandas/_libs/tslibs/timedeltas.pyx because it changed.
Compiling pandas/_libs/tslibs/timestamps.pyx because it changed.
Compiling pandas/_libs/tslibs/timezones.pyx because it changed.
Compiling pandas/_libs/tslibs/tzconversion.pyx because it changed.
Compiling pandas/_libs/testing.pyx because it changed.
Compiling pandas/_libs/window/aggregations.pyx because it changed.
Compiling pandas/_libs/window/indexers.pyx because it changed.
Compiling pandas/_libs/writers.pyx because it changed.
Compiling pandas/io/sas/sas.pyx because it changed.
[ 1/40] Cythonizing pandas/_libs/algos.pyx
[ 2/40] Cythonizing pandas/_libs/groupby.pyx
[ 3/40] Cythonizing pandas/_libs/hashing.pyx
[ 4/40] Cythonizing pandas/_libs/hashtable.pyx
[ 5/40] Cythonizing pandas/_libs/index.pyx
[ 6/40] Cythonizing pandas/_libs/indexing.pyx
[ 7/40] Cythonizing pandas/_libs/internals.pyx
[ 8/40] Cythonizing pandas/_libs/interval.pyx
Cleaning up...
Removing source in /tmp/pip-req-build-eh2hlgu3
Removed file:///home/tcaswell/source/other_source/pandas from build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Removed build tracker: '/tmp/pip-req-tracker-dfm6xgbp'
ERROR: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
Exception information:
Traceback (most recent call last):
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/cli/base_command.py", line 186, in _main
status = self.run(options, args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/commands/install.py", line 331, in run
resolver.resolve(requirement_set)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 177, in resolve
discovered_reqs.extend(self._resolve_one(requirement_set, req))
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 333, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 282, in _get_abstract_dist_for
abstract_dist = self.preparer.prepare_linked_requirement(req)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 515, in prepare_linked_requirement
abstract_dist = _get_prepared_distribution(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 95, in _get_prepared_distribution
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py", line 40, in prepare_distribution_metadata
self.req.prepare_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 564, in prepare_metadata
self.metadata_directory = self._generate_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 538, in _generate_metadata
return generate_metadata_legacy(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py", line 115, in generate_metadata
call_subprocess(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py", line 242, in call_subprocess
raise InstallationError(exc_msg)
pip._internal.exceptions.InstallationError: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
|
Cython.Compiler.Errors.CompileError
|
def declare_var(
self, name, type, pos, cname=None, visibility="private", api=0, in_pxd=0, is_cdef=0
):
name = self.mangle_class_private_name(name)
if type is unspecified_type:
type = py_object_type
# Add an entry for a class attribute.
entry = Scope.declare_var(
self,
name,
type,
pos,
cname=cname,
visibility=visibility,
api=api,
in_pxd=in_pxd,
is_cdef=is_cdef,
)
entry.is_pyglobal = 1
entry.is_pyclass_attr = 1
return entry
|
def declare_var(
self, name, type, pos, cname=None, visibility="private", api=0, in_pxd=0, is_cdef=0
):
name = self.mangle_special_name(name)
if type is unspecified_type:
type = py_object_type
# Add an entry for a class attribute.
entry = Scope.declare_var(
self,
name,
type,
pos,
cname=cname,
visibility=visibility,
api=api,
in_pxd=in_pxd,
is_cdef=is_cdef,
)
entry.is_pyglobal = 1
entry.is_pyclass_attr = 1
return entry
|
https://github.com/cython/cython/issues/3548
|
(bleeding) ✔ ~/source/other_source/pandas [master {pandas/master}|✚ 2]
jupiter@15:19 ➤ pip install -v .
Non-user install because user site-packages disabled
Created temporary directory: /tmp/pip-ephem-wheel-cache-zcmyaxf3
Created temporary directory: /tmp/pip-req-tracker-dfm6xgbp
Initialized build tracking at /tmp/pip-req-tracker-dfm6xgbp
Created build tracker: /tmp/pip-req-tracker-dfm6xgbp
Entered build tracker: /tmp/pip-req-tracker-dfm6xgbp
Created temporary directory: /tmp/pip-install-_tjupc3v
Processing /home/tcaswell/source/other_source/pandas
Created temporary directory: /tmp/pip-req-build-eh2hlgu3
Added file:///home/tcaswell/source/other_source/pandas to build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Running setup.py (path:/tmp/pip-req-build-eh2hlgu3/setup.py) egg_info for package from file:///home/tcaswell/source/other_source/pandas
Running command python setup.py egg_info
warning: pandas/_libs/groupby.pyx:1101:26: Unreachable code
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
ctypedef struct __Pyx_memviewslice:
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
^
------------------------------------------------------------
__pyxutil:15:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
ctypedef unsigned long long ___pyx_uint64_t "__pyx_t_5numpy_uint64_t"
^
------------------------------------------------------------
__pyxutil:17:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
^
------------------------------------------------------------
__pyxutil:16:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 794, in <module>
setup_package()
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 764, in setup_package
ext_modules=maybe_cythonize(extensions, compiler_directives=directives),
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 537, in maybe_cythonize
return cythonize(extensions, *args, **kwargs)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1105, in cythonize
cythonize_one(*args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1263, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: pandas/_libs/interval.pyx
Compiling pandas/_libs/algos.pyx because it changed.
Compiling pandas/_libs/groupby.pyx because it changed.
Compiling pandas/_libs/hashing.pyx because it changed.
Compiling pandas/_libs/hashtable.pyx because it changed.
Compiling pandas/_libs/index.pyx because it changed.
Compiling pandas/_libs/indexing.pyx because it changed.
Compiling pandas/_libs/internals.pyx because it changed.
Compiling pandas/_libs/interval.pyx because it changed.
Compiling pandas/_libs/join.pyx because it changed.
Compiling pandas/_libs/lib.pyx because it changed.
Compiling pandas/_libs/missing.pyx because it changed.
Compiling pandas/_libs/parsers.pyx because it changed.
Compiling pandas/_libs/reduction.pyx because it changed.
Compiling pandas/_libs/ops.pyx because it changed.
Compiling pandas/_libs/ops_dispatch.pyx because it changed.
Compiling pandas/_libs/properties.pyx because it changed.
Compiling pandas/_libs/reshape.pyx because it changed.
Compiling pandas/_libs/sparse.pyx because it changed.
Compiling pandas/_libs/tslib.pyx because it changed.
Compiling pandas/_libs/tslibs/c_timestamp.pyx because it changed.
Compiling pandas/_libs/tslibs/ccalendar.pyx because it changed.
Compiling pandas/_libs/tslibs/conversion.pyx because it changed.
Compiling pandas/_libs/tslibs/fields.pyx because it changed.
Compiling pandas/_libs/tslibs/frequencies.pyx because it changed.
Compiling pandas/_libs/tslibs/nattype.pyx because it changed.
Compiling pandas/_libs/tslibs/np_datetime.pyx because it changed.
Compiling pandas/_libs/tslibs/offsets.pyx because it changed.
Compiling pandas/_libs/tslibs/parsing.pyx because it changed.
Compiling pandas/_libs/tslibs/period.pyx because it changed.
Compiling pandas/_libs/tslibs/resolution.pyx because it changed.
Compiling pandas/_libs/tslibs/strptime.pyx because it changed.
Compiling pandas/_libs/tslibs/timedeltas.pyx because it changed.
Compiling pandas/_libs/tslibs/timestamps.pyx because it changed.
Compiling pandas/_libs/tslibs/timezones.pyx because it changed.
Compiling pandas/_libs/tslibs/tzconversion.pyx because it changed.
Compiling pandas/_libs/testing.pyx because it changed.
Compiling pandas/_libs/window/aggregations.pyx because it changed.
Compiling pandas/_libs/window/indexers.pyx because it changed.
Compiling pandas/_libs/writers.pyx because it changed.
Compiling pandas/io/sas/sas.pyx because it changed.
[ 1/40] Cythonizing pandas/_libs/algos.pyx
[ 2/40] Cythonizing pandas/_libs/groupby.pyx
[ 3/40] Cythonizing pandas/_libs/hashing.pyx
[ 4/40] Cythonizing pandas/_libs/hashtable.pyx
[ 5/40] Cythonizing pandas/_libs/index.pyx
[ 6/40] Cythonizing pandas/_libs/indexing.pyx
[ 7/40] Cythonizing pandas/_libs/internals.pyx
[ 8/40] Cythonizing pandas/_libs/interval.pyx
Cleaning up...
Removing source in /tmp/pip-req-build-eh2hlgu3
Removed file:///home/tcaswell/source/other_source/pandas from build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Removed build tracker: '/tmp/pip-req-tracker-dfm6xgbp'
ERROR: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
Exception information:
Traceback (most recent call last):
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/cli/base_command.py", line 186, in _main
status = self.run(options, args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/commands/install.py", line 331, in run
resolver.resolve(requirement_set)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 177, in resolve
discovered_reqs.extend(self._resolve_one(requirement_set, req))
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 333, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 282, in _get_abstract_dist_for
abstract_dist = self.preparer.prepare_linked_requirement(req)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 515, in prepare_linked_requirement
abstract_dist = _get_prepared_distribution(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 95, in _get_prepared_distribution
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py", line 40, in prepare_distribution_metadata
self.req.prepare_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 564, in prepare_metadata
self.metadata_directory = self._generate_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 538, in _generate_metadata
return generate_metadata_legacy(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py", line 115, in generate_metadata
call_subprocess(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py", line 242, in call_subprocess
raise InstallationError(exc_msg)
pip._internal.exceptions.InstallationError: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
|
Cython.Compiler.Errors.CompileError
|
def declare_var(
self, name, type, pos, cname=None, visibility="private", api=0, in_pxd=0, is_cdef=0
):
name = self.mangle_class_private_name(name)
if is_cdef:
# Add an entry for an attribute.
if self.defined:
error(
pos,
"C attributes cannot be added in implementation part of"
" extension type defined in a pxd",
)
if not self.is_closure_class_scope and get_special_method_signature(name):
error(pos, "The name '%s' is reserved for a special method." % name)
if not cname:
cname = name
if visibility == "private":
cname = c_safe_identifier(cname)
cname = punycodify_name(cname, Naming.unicode_structmember_prefix)
if type.is_cpp_class and visibility != "extern":
type.check_nullary_constructor(pos)
self.use_utility_code(Code.UtilityCode("#include <new>"))
entry = self.declare(name, cname, type, pos, visibility)
entry.is_variable = 1
self.var_entries.append(entry)
if type.is_memoryviewslice:
self.has_memoryview_attrs = True
elif type.is_cpp_class:
self.has_cpp_class_attrs = True
elif type.is_pyobject and (
self.is_closure_class_scope or name != "__weakref__"
):
self.has_pyobject_attrs = True
if not type.is_builtin_type or not type.scope or type.scope.needs_gc():
self.has_cyclic_pyobject_attrs = True
if visibility not in ("private", "public", "readonly"):
error(pos, "Attribute of extension type cannot be declared %s" % visibility)
if visibility in ("public", "readonly"):
# If the field is an external typedef, we cannot be sure about the type,
# so do conversion ourself rather than rely on the CPython mechanism (through
# a property; made in AnalyseDeclarationsTransform).
entry.needs_property = True
if not self.is_closure_class_scope and name == "__weakref__":
error(pos, "Special attribute __weakref__ cannot be exposed to Python")
if not (type.is_pyobject or type.can_coerce_to_pyobject(self)):
# we're not testing for coercion *from* Python here - that would fail later
error(
pos,
"C attribute of type '%s' cannot be accessed from Python" % type,
)
else:
entry.needs_property = False
return entry
else:
if type is unspecified_type:
type = py_object_type
# Add an entry for a class attribute.
entry = Scope.declare_var(
self,
name,
type,
pos,
cname=cname,
visibility=visibility,
api=api,
in_pxd=in_pxd,
is_cdef=is_cdef,
)
entry.is_member = 1
entry.is_pyglobal = (
1 # xxx: is_pyglobal changes behaviour in so many places that
)
# I keep it in for now. is_member should be enough
# later on
self.namespace_cname = "(PyObject *)%s" % self.parent_type.typeptr_cname
return entry
|
def declare_var(
self, name, type, pos, cname=None, visibility="private", api=0, in_pxd=0, is_cdef=0
):
name = self.mangle_special_name(name)
if is_cdef:
# Add an entry for an attribute.
if self.defined:
error(
pos,
"C attributes cannot be added in implementation part of"
" extension type defined in a pxd",
)
if not self.is_closure_class_scope and get_special_method_signature(name):
error(pos, "The name '%s' is reserved for a special method." % name)
if not cname:
cname = name
if visibility == "private":
cname = c_safe_identifier(cname)
cname = punycodify_name(cname, Naming.unicode_structmember_prefix)
if type.is_cpp_class and visibility != "extern":
type.check_nullary_constructor(pos)
self.use_utility_code(Code.UtilityCode("#include <new>"))
entry = self.declare(name, cname, type, pos, visibility)
entry.is_variable = 1
self.var_entries.append(entry)
if type.is_memoryviewslice:
self.has_memoryview_attrs = True
elif type.is_cpp_class:
self.has_cpp_class_attrs = True
elif type.is_pyobject and (
self.is_closure_class_scope or name != "__weakref__"
):
self.has_pyobject_attrs = True
if not type.is_builtin_type or not type.scope or type.scope.needs_gc():
self.has_cyclic_pyobject_attrs = True
if visibility not in ("private", "public", "readonly"):
error(pos, "Attribute of extension type cannot be declared %s" % visibility)
if visibility in ("public", "readonly"):
# If the field is an external typedef, we cannot be sure about the type,
# so do conversion ourself rather than rely on the CPython mechanism (through
# a property; made in AnalyseDeclarationsTransform).
entry.needs_property = True
if not self.is_closure_class_scope and name == "__weakref__":
error(pos, "Special attribute __weakref__ cannot be exposed to Python")
if not (type.is_pyobject or type.can_coerce_to_pyobject(self)):
# we're not testing for coercion *from* Python here - that would fail later
error(
pos,
"C attribute of type '%s' cannot be accessed from Python" % type,
)
else:
entry.needs_property = False
return entry
else:
if type is unspecified_type:
type = py_object_type
# Add an entry for a class attribute.
entry = Scope.declare_var(
self,
name,
type,
pos,
cname=cname,
visibility=visibility,
api=api,
in_pxd=in_pxd,
is_cdef=is_cdef,
)
entry.is_member = 1
entry.is_pyglobal = (
1 # xxx: is_pyglobal changes behaviour in so many places that
)
# I keep it in for now. is_member should be enough
# later on
self.namespace_cname = "(PyObject *)%s" % self.parent_type.typeptr_cname
return entry
|
https://github.com/cython/cython/issues/3548
|
(bleeding) ✔ ~/source/other_source/pandas [master {pandas/master}|✚ 2]
jupiter@15:19 ➤ pip install -v .
Non-user install because user site-packages disabled
Created temporary directory: /tmp/pip-ephem-wheel-cache-zcmyaxf3
Created temporary directory: /tmp/pip-req-tracker-dfm6xgbp
Initialized build tracking at /tmp/pip-req-tracker-dfm6xgbp
Created build tracker: /tmp/pip-req-tracker-dfm6xgbp
Entered build tracker: /tmp/pip-req-tracker-dfm6xgbp
Created temporary directory: /tmp/pip-install-_tjupc3v
Processing /home/tcaswell/source/other_source/pandas
Created temporary directory: /tmp/pip-req-build-eh2hlgu3
Added file:///home/tcaswell/source/other_source/pandas to build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Running setup.py (path:/tmp/pip-req-build-eh2hlgu3/setup.py) egg_info for package from file:///home/tcaswell/source/other_source/pandas
Running command python setup.py egg_info
warning: pandas/_libs/groupby.pyx:1101:26: Unreachable code
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
ctypedef struct __Pyx_memviewslice:
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
^
------------------------------------------------------------
__pyxutil:15:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
ctypedef unsigned long long ___pyx_uint64_t "__pyx_t_5numpy_uint64_t"
^
------------------------------------------------------------
__pyxutil:17:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
void *memview
void __PYX_XDEC_MEMVIEW(__Pyx_memviewslice *, int have_gil)
bint __pyx_memoryview_check(object)
ctypedef long long ___pyx_int64_t "__pyx_t_5numpy_int64_t"
ctypedef double ___pyx_float64_t "__pyx_t_5numpy_float64_t"
^
------------------------------------------------------------
__pyxutil:16:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):19:37: '___pyx_int64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
cdef char kind
itemsize = -1
cdef bint ___pyx_int64_t_is_signed
___pyx_int64_t_is_signed = not (<___pyx_int64_t> -1 > 0)
cdef bint ___pyx_uint64_t_is_signed
___pyx_uint64_t_is_signed = not (<___pyx_uint64_t> -1 > 0)
^
------------------------------------------------------------
(tree fragment):21:38: '___pyx_uint64_t' is not a type identifier
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
values = [self.right, self.left]
sort_order = np.lexsort(values)
return is_monotonic(sort_order, False)[0]
def get_indexer(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:122:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_int64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_int64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_float64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_float64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: undeclared name not builtin: ___pyx_uint64_t
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: '___pyx_uint64_t' redeclared
Error compiling Cython file:
------------------------------------------------------------
...
raise KeyError(
'indexer does not intersect a unique set of intervals')
old_len = result.data.n
return result.to_array().astype('intp')
def get_indexer_non_unique(self, scalar_t[:] target):
^
------------------------------------------------------------
pandas/_libs/intervaltree.pxi:150:4: Previous declaration is here
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 794, in <module>
setup_package()
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 764, in setup_package
ext_modules=maybe_cythonize(extensions, compiler_directives=directives),
File "/tmp/pip-req-build-eh2hlgu3/setup.py", line 537, in maybe_cythonize
return cythonize(extensions, *args, **kwargs)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1105, in cythonize
cythonize_one(*args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/Cython/Build/Dependencies.py", line 1263, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: pandas/_libs/interval.pyx
Compiling pandas/_libs/algos.pyx because it changed.
Compiling pandas/_libs/groupby.pyx because it changed.
Compiling pandas/_libs/hashing.pyx because it changed.
Compiling pandas/_libs/hashtable.pyx because it changed.
Compiling pandas/_libs/index.pyx because it changed.
Compiling pandas/_libs/indexing.pyx because it changed.
Compiling pandas/_libs/internals.pyx because it changed.
Compiling pandas/_libs/interval.pyx because it changed.
Compiling pandas/_libs/join.pyx because it changed.
Compiling pandas/_libs/lib.pyx because it changed.
Compiling pandas/_libs/missing.pyx because it changed.
Compiling pandas/_libs/parsers.pyx because it changed.
Compiling pandas/_libs/reduction.pyx because it changed.
Compiling pandas/_libs/ops.pyx because it changed.
Compiling pandas/_libs/ops_dispatch.pyx because it changed.
Compiling pandas/_libs/properties.pyx because it changed.
Compiling pandas/_libs/reshape.pyx because it changed.
Compiling pandas/_libs/sparse.pyx because it changed.
Compiling pandas/_libs/tslib.pyx because it changed.
Compiling pandas/_libs/tslibs/c_timestamp.pyx because it changed.
Compiling pandas/_libs/tslibs/ccalendar.pyx because it changed.
Compiling pandas/_libs/tslibs/conversion.pyx because it changed.
Compiling pandas/_libs/tslibs/fields.pyx because it changed.
Compiling pandas/_libs/tslibs/frequencies.pyx because it changed.
Compiling pandas/_libs/tslibs/nattype.pyx because it changed.
Compiling pandas/_libs/tslibs/np_datetime.pyx because it changed.
Compiling pandas/_libs/tslibs/offsets.pyx because it changed.
Compiling pandas/_libs/tslibs/parsing.pyx because it changed.
Compiling pandas/_libs/tslibs/period.pyx because it changed.
Compiling pandas/_libs/tslibs/resolution.pyx because it changed.
Compiling pandas/_libs/tslibs/strptime.pyx because it changed.
Compiling pandas/_libs/tslibs/timedeltas.pyx because it changed.
Compiling pandas/_libs/tslibs/timestamps.pyx because it changed.
Compiling pandas/_libs/tslibs/timezones.pyx because it changed.
Compiling pandas/_libs/tslibs/tzconversion.pyx because it changed.
Compiling pandas/_libs/testing.pyx because it changed.
Compiling pandas/_libs/window/aggregations.pyx because it changed.
Compiling pandas/_libs/window/indexers.pyx because it changed.
Compiling pandas/_libs/writers.pyx because it changed.
Compiling pandas/io/sas/sas.pyx because it changed.
[ 1/40] Cythonizing pandas/_libs/algos.pyx
[ 2/40] Cythonizing pandas/_libs/groupby.pyx
[ 3/40] Cythonizing pandas/_libs/hashing.pyx
[ 4/40] Cythonizing pandas/_libs/hashtable.pyx
[ 5/40] Cythonizing pandas/_libs/index.pyx
[ 6/40] Cythonizing pandas/_libs/indexing.pyx
[ 7/40] Cythonizing pandas/_libs/internals.pyx
[ 8/40] Cythonizing pandas/_libs/interval.pyx
Cleaning up...
Removing source in /tmp/pip-req-build-eh2hlgu3
Removed file:///home/tcaswell/source/other_source/pandas from build tracker '/tmp/pip-req-tracker-dfm6xgbp'
Removed build tracker: '/tmp/pip-req-tracker-dfm6xgbp'
ERROR: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
Exception information:
Traceback (most recent call last):
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/cli/base_command.py", line 186, in _main
status = self.run(options, args)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/commands/install.py", line 331, in run
resolver.resolve(requirement_set)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 177, in resolve
discovered_reqs.extend(self._resolve_one(requirement_set, req))
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 333, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/legacy_resolve.py", line 282, in _get_abstract_dist_for
abstract_dist = self.preparer.prepare_linked_requirement(req)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 515, in prepare_linked_requirement
abstract_dist = _get_prepared_distribution(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/prepare.py", line 95, in _get_prepared_distribution
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py", line 40, in prepare_distribution_metadata
self.req.prepare_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 564, in prepare_metadata
self.metadata_directory = self._generate_metadata()
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/req/req_install.py", line 538, in _generate_metadata
return generate_metadata_legacy(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py", line 115, in generate_metadata
call_subprocess(
File "/home/tcaswell/.virtualenvs/bleeding/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py", line 242, in call_subprocess
raise InstallationError(exc_msg)
pip._internal.exceptions.InstallationError: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.
|
Cython.Compiler.Errors.CompileError
|
def declare_from_annotation(self, env, as_target=False):
"""Implements PEP 526 annotation typing in a fairly relaxed way.
Annotations are ignored for global variables, Python class attributes and already declared variables.
String literals are allowed and ignored.
The ambiguous Python types 'int' and 'long' are ignored and the 'cython.int' form must be used instead.
"""
if not env.directives["annotation_typing"]:
return
if env.is_module_scope or env.is_py_class_scope:
# annotations never create global cdef names and Python classes don't support them anyway
return
name = self.name
if self.entry or env.lookup_here(name) is not None:
# already declared => ignore annotation
return
annotation = self.annotation
if annotation.expr.is_string_literal:
# name: "description" => not a type, but still a declared variable or attribute
atype = None
else:
_, atype = annotation.analyse_type_annotation(env)
if atype is None:
atype = (
unspecified_type
if as_target and env.directives["infer_types"] != False
else py_object_type
)
if atype.is_fused and env.fused_to_specific:
atype = atype.specialize(env.fused_to_specific)
self.entry = env.declare_var(name, atype, self.pos, is_cdef=not as_target)
self.entry.annotation = annotation.expr
|
def declare_from_annotation(self, env, as_target=False):
"""Implements PEP 526 annotation typing in a fairly relaxed way.
Annotations are ignored for global variables, Python class attributes and already declared variables.
String literals are allowed and ignored.
The ambiguous Python types 'int' and 'long' are ignored and the 'cython.int' form must be used instead.
"""
if not env.directives["annotation_typing"]:
return
if env.is_module_scope or env.is_py_class_scope:
# annotations never create global cdef names and Python classes don't support them anyway
return
name = self.name
if self.entry or env.lookup_here(name) is not None:
# already declared => ignore annotation
return
annotation = self.annotation
if annotation.expr.is_string_literal:
# name: "description" => not a type, but still a declared variable or attribute
atype = None
else:
_, atype = annotation.analyse_type_annotation(env)
if atype is None:
atype = (
unspecified_type
if as_target and env.directives["infer_types"] != False
else py_object_type
)
self.entry = env.declare_var(name, atype, self.pos, is_cdef=not as_target)
self.entry.annotation = annotation.expr
|
https://github.com/cython/cython/issues/3142
|
Compiling bug.py because it changed.
[1/1] Cythonizing bug.py
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Traceback (most recent call last):
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/users/augier3pi/Dev/transonic/transonic_cl/cythonize.py", line 11, in <module>
ext_modules=cythonize(path, language_level=3), include_dirs=[np.get_include()]
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1219, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: bug.py
|
Cython.Compiler.Errors.CompileError
|
def analyse_type_annotation(self, env, assigned_value=None):
if self.untyped:
# Already applied as a fused type, not re-evaluating it here.
return None, None
annotation = self.expr
base_type = None
is_ambiguous = False
explicit_pytype = explicit_ctype = False
if annotation.is_dict_literal:
warning(
annotation.pos,
"Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly.",
)
for name, value in annotation.key_value_pairs:
if not name.is_string_literal:
continue
if name.value in ("type", b"type"):
explicit_pytype = True
if not explicit_ctype:
annotation = value
elif name.value in ("ctype", b"ctype"):
explicit_ctype = True
annotation = value
if explicit_pytype and explicit_ctype:
warning(
annotation.pos,
"Duplicate type declarations found in signature annotation",
)
arg_type = annotation.analyse_as_type(env)
if (
annotation.is_name
and not annotation.cython_attribute
and annotation.name in ("int", "long", "float")
):
# Map builtin numeric Python types to C types in safe cases.
if (
assigned_value is not None
and arg_type is not None
and not arg_type.is_pyobject
):
assigned_type = assigned_value.infer_type(env)
if assigned_type and assigned_type.is_pyobject:
# C type seems unsafe, e.g. due to 'None' default value => ignore annotation type
is_ambiguous = True
arg_type = None
# ignore 'int' and require 'cython.int' to avoid unsafe integer declarations
if arg_type in (
PyrexTypes.c_long_type,
PyrexTypes.c_int_type,
PyrexTypes.c_float_type,
):
arg_type = (
PyrexTypes.c_double_type
if annotation.name == "float"
else py_object_type
)
elif arg_type is not None and annotation.is_string_literal:
warning(
annotation.pos,
"Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.",
level=1,
)
if arg_type is not None:
if explicit_pytype and not explicit_ctype and not arg_type.is_pyobject:
warning(
annotation.pos,
"Python type declaration in signature annotation does not refer to a Python type",
)
base_type = Nodes.CAnalysedBaseTypeNode(
annotation.pos, type=arg_type, is_arg=True
)
elif is_ambiguous:
warning(annotation.pos, "Ambiguous types in annotation, ignoring")
else:
warning(annotation.pos, "Unknown type declaration in annotation, ignoring")
return base_type, arg_type
|
def analyse_type_annotation(self, env, assigned_value=None):
annotation = self.expr
base_type = None
is_ambiguous = False
explicit_pytype = explicit_ctype = False
if annotation.is_dict_literal:
warning(
annotation.pos,
"Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly.",
)
for name, value in annotation.key_value_pairs:
if not name.is_string_literal:
continue
if name.value in ("type", b"type"):
explicit_pytype = True
if not explicit_ctype:
annotation = value
elif name.value in ("ctype", b"ctype"):
explicit_ctype = True
annotation = value
if explicit_pytype and explicit_ctype:
warning(
annotation.pos,
"Duplicate type declarations found in signature annotation",
)
arg_type = annotation.analyse_as_type(env)
if (
annotation.is_name
and not annotation.cython_attribute
and annotation.name in ("int", "long", "float")
):
# Map builtin numeric Python types to C types in safe cases.
if (
assigned_value is not None
and arg_type is not None
and not arg_type.is_pyobject
):
assigned_type = assigned_value.infer_type(env)
if assigned_type and assigned_type.is_pyobject:
# C type seems unsafe, e.g. due to 'None' default value => ignore annotation type
is_ambiguous = True
arg_type = None
# ignore 'int' and require 'cython.int' to avoid unsafe integer declarations
if arg_type in (
PyrexTypes.c_long_type,
PyrexTypes.c_int_type,
PyrexTypes.c_float_type,
):
arg_type = (
PyrexTypes.c_double_type
if annotation.name == "float"
else py_object_type
)
elif arg_type is not None and annotation.is_string_literal:
warning(
annotation.pos,
"Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.",
level=1,
)
if arg_type is not None:
if explicit_pytype and not explicit_ctype and not arg_type.is_pyobject:
warning(
annotation.pos,
"Python type declaration in signature annotation does not refer to a Python type",
)
base_type = Nodes.CAnalysedBaseTypeNode(
annotation.pos, type=arg_type, is_arg=True
)
elif is_ambiguous:
warning(annotation.pos, "Ambiguous types in annotation, ignoring")
else:
warning(annotation.pos, "Unknown type declaration in annotation, ignoring")
return base_type, arg_type
|
https://github.com/cython/cython/issues/3142
|
Compiling bug.py because it changed.
[1/1] Cythonizing bug.py
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Traceback (most recent call last):
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/users/augier3pi/Dev/transonic/transonic_cl/cythonize.py", line 11, in <module>
ext_modules=cythonize(path, language_level=3), include_dirs=[np.get_include()]
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1219, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: bug.py
|
Cython.Compiler.Errors.CompileError
|
def _specialize_function_args(self, args, fused_to_specific):
for arg in args:
if arg.type.is_fused:
arg.type = arg.type.specialize(fused_to_specific)
if arg.type.is_memoryviewslice:
arg.type.validate_memslice_dtype(arg.pos)
if arg.annotation:
# TODO might be nice if annotations were specialized instead?
# (Or might be hard to do reliably)
arg.annotation.untyped = True
|
def _specialize_function_args(self, args, fused_to_specific):
for arg in args:
if arg.type.is_fused:
arg.type = arg.type.specialize(fused_to_specific)
if arg.type.is_memoryviewslice:
arg.type.validate_memslice_dtype(arg.pos)
|
https://github.com/cython/cython/issues/3142
|
Compiling bug.py because it changed.
[1/1] Cythonizing bug.py
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Traceback (most recent call last):
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/users/augier3pi/Dev/transonic/transonic_cl/cythonize.py", line 11, in <module>
ext_modules=cythonize(path, language_level=3), include_dirs=[np.get_include()]
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1219, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: bug.py
|
Cython.Compiler.Errors.CompileError
|
def align_argument_type(self, env, arg):
# @cython.locals()
directive_locals = self.directive_locals
orig_type = arg.type
if arg.name in directive_locals:
type_node = directive_locals[arg.name]
other_type = type_node.analyse_as_type(env)
elif (
isinstance(arg, CArgDeclNode)
and arg.annotation
and env.directives["annotation_typing"]
):
type_node = arg.annotation
other_type = arg.inject_type_from_annotations(env)
if other_type is None:
return arg
else:
return arg
if other_type is None:
error(type_node.pos, "Not a type")
elif orig_type is not py_object_type and not orig_type.same_as(other_type):
error(arg.base_type.pos, "Signature does not agree with previous declaration")
error(type_node.pos, "Previous declaration here")
else:
arg.type = other_type
return arg
|
def align_argument_type(self, env, arg):
# @cython.locals()
directive_locals = self.directive_locals
orig_type = arg.type
if arg.name in directive_locals:
type_node = directive_locals[arg.name]
other_type = type_node.analyse_as_type(env)
elif (
isinstance(arg, CArgDeclNode)
and arg.annotation
and env.directives["annotation_typing"]
):
type_node = arg.annotation
other_type = arg.inject_type_from_annotations(env)
if other_type is None:
return arg
else:
return arg
if other_type is None:
error(type_node.pos, "Not a type")
elif other_type.is_fused and any(orig_type.same_as(t) for t in other_type.types):
pass # use specialized rather than fused type
elif orig_type is not py_object_type and not orig_type.same_as(other_type):
error(arg.base_type.pos, "Signature does not agree with previous declaration")
error(type_node.pos, "Previous declaration here")
else:
arg.type = other_type
return arg
|
https://github.com/cython/cython/issues/3142
|
Compiling bug.py because it changed.
[1/1] Cythonizing bug.py
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Traceback (most recent call last):
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/users/augier3pi/Dev/transonic/transonic_cl/cythonize.py", line 11, in <module>
ext_modules=cythonize(path, language_level=3), include_dirs=[np.get_include()]
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1219, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: bug.py
|
Cython.Compiler.Errors.CompileError
|
def visit_FuncDefNode(self, node):
"""
Analyse a function and its body, as that hasn't happened yet. Also
analyse the directive_locals set by @cython.locals().
Then, if we are a function with fused arguments, replace the function
(after it has declared itself in the symbol table!) with a
FusedCFuncDefNode, and analyse its children (which are in turn normal
functions). If we're a normal function, just analyse the body of the
function.
"""
env = self.current_env()
self.seen_vars_stack.append(set())
lenv = node.local_scope
node.declare_arguments(lenv)
# @cython.locals(...)
for var, type_node in node.directive_locals.items():
if not lenv.lookup_here(var): # don't redeclare args
type = type_node.analyse_as_type(lenv)
if type and type.is_fused and lenv.fused_to_specific:
type = type.specialize(lenv.fused_to_specific)
if type:
lenv.declare_var(var, type, type_node.pos)
else:
error(type_node.pos, "Not a type")
if self._handle_fused(node):
node = self._create_fused_function(env, node)
else:
node.body.analyse_declarations(lenv)
self._handle_nogil_cleanup(lenv, node)
self._super_visit_FuncDefNode(node)
self.seen_vars_stack.pop()
return node
|
def visit_FuncDefNode(self, node):
"""
Analyse a function and its body, as that hasn't happened yet. Also
analyse the directive_locals set by @cython.locals().
Then, if we are a function with fused arguments, replace the function
(after it has declared itself in the symbol table!) with a
FusedCFuncDefNode, and analyse its children (which are in turn normal
functions). If we're a normal function, just analyse the body of the
function.
"""
env = self.current_env()
self.seen_vars_stack.append(set())
lenv = node.local_scope
node.declare_arguments(lenv)
# @cython.locals(...)
for var, type_node in node.directive_locals.items():
if not lenv.lookup_here(var): # don't redeclare args
type = type_node.analyse_as_type(lenv)
if type:
lenv.declare_var(var, type, type_node.pos)
else:
error(type_node.pos, "Not a type")
if self._handle_fused(node):
node = self._create_fused_function(env, node)
else:
node.body.analyse_declarations(lenv)
self._handle_nogil_cleanup(lenv, node)
self._super_visit_FuncDefNode(node)
self.seen_vars_stack.pop()
return node
|
https://github.com/cython/cython/issues/3142
|
Compiling bug.py because it changed.
[1/1] Cythonizing bug.py
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:23: Cannot coerce to a type that is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
return arr
^
------------------------------------------------------------
bug.py:5:11: Type is not specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Error compiling Cython file:
------------------------------------------------------------
...
import numpy as np
def func(arg):
arr = np.empty_like(arg)
^
------------------------------------------------------------
bug.py:4:4: Invalid use of fused types, type cannot be specialized
Traceback (most recent call last):
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/users/augier3pi/Dev/transonic/transonic_cl/cythonize.py", line 11, in <module>
ext_modules=cythonize(path, language_level=3), include_dirs=[np.get_include()]
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File "/home/users/augier3pi/.pyenv/versions/3.7.3/lib/python3.7/site-packages/Cython/Build/Dependencies.py", line 1219, in cythonize_one
raise CompileError(None, pyx_file)
Cython.Compiler.Errors.CompileError: bug.py
|
Cython.Compiler.Errors.CompileError
|
def py_operation_function(self, code):
type1, type2 = self.operand1.type, self.operand2.type
if type1 is unicode_type or type2 is unicode_type:
if type1 in (unicode_type, str_type) and type2 in (unicode_type, str_type):
is_unicode_concat = True
elif isinstance(self.operand1, FormattedValueNode) or isinstance(
self.operand2, FormattedValueNode
):
# Assume that even if we don't know the second type, it's going to be a string.
is_unicode_concat = True
else:
# Operation depends on the second type.
is_unicode_concat = False
if is_unicode_concat:
if self.operand1.may_be_none() or self.operand2.may_be_none():
return "__Pyx_PyUnicode_ConcatSafe"
else:
return "__Pyx_PyUnicode_Concat"
return super(AddNode, self).py_operation_function(code)
|
def py_operation_function(self, code):
is_unicode_concat = False
if isinstance(self.operand1, FormattedValueNode) or isinstance(
self.operand2, FormattedValueNode
):
is_unicode_concat = True
else:
type1, type2 = self.operand1.type, self.operand2.type
if type1 is unicode_type or type2 is unicode_type:
is_unicode_concat = type1.is_builtin_type and type2.is_builtin_type
if is_unicode_concat:
if self.operand1.may_be_none() or self.operand2.may_be_none():
return "__Pyx_PyUnicode_ConcatSafe"
else:
return "__Pyx_PyUnicode_Concat"
return super(AddNode, self).py_operation_function(code)
|
https://github.com/cython/cython/issues/3426
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "concat.py", line 11, in concat.appendinline
y += 'ab'
TypeError: must be str, not list
|
TypeError
|
def unpack_source_tree(tree_file, workdir, cython_root):
programs = {
"PYTHON": [sys.executable],
"CYTHON": [sys.executable, os.path.join(cython_root, "cython.py")],
"CYTHONIZE": [sys.executable, os.path.join(cython_root, "cythonize.py")],
}
if workdir is None:
workdir = tempfile.mkdtemp()
header, cur_file = [], None
with open(tree_file) as f:
try:
for line in f:
if line.startswith("#####"):
filename = line.strip().strip("#").strip().replace("/", os.path.sep)
path = os.path.join(workdir, filename)
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
if cur_file is not None:
to_close, cur_file = cur_file, None
to_close.close()
cur_file = open(path, "w")
elif cur_file is not None:
cur_file.write(line)
elif line.strip() and not line.lstrip().startswith("#"):
if line.strip() not in ('"""', "'''"):
command = shlex.split(line)
if not command:
continue
# In Python 3: prog, *args = command
prog, args = command[0], command[1:]
try:
header.append(programs[prog] + args)
except KeyError:
header.append(command)
finally:
if cur_file is not None:
cur_file.close()
return workdir, header
|
def unpack_source_tree(tree_file, dir=None):
if dir is None:
dir = tempfile.mkdtemp()
header = []
cur_file = None
f = open(tree_file)
try:
lines = f.readlines()
finally:
f.close()
del f
try:
for line in lines:
if line[:5] == "#####":
filename = line.strip().strip("#").strip().replace("/", os.path.sep)
path = os.path.join(dir, filename)
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
if cur_file is not None:
f, cur_file = cur_file, None
f.close()
cur_file = open(path, "w")
elif cur_file is not None:
cur_file.write(line)
elif line.strip() and not line.lstrip().startswith("#"):
if line.strip() not in ('"""', "'''"):
header.append(line)
finally:
if cur_file is not None:
cur_file.close()
return dir, "".join(header)
|
https://github.com/cython/cython/issues/3369
|
runTest (__main__.EndToEndTest)
End-to-end fused_cmethods ... /home/user/Data/Projects/Code/Cython 20_2_18/Python2Venv/bin/python setup.py build_ext -i
/bin/sh: /home/user/Data/Projects/Code/Cython: No such file or directory
FAIL
======================================================================
FAIL: runTest (__main__.EndToEndTest)
End-to-end fused_cmethods
----------------------------------------------------------------------
Traceback (most recent call last):
File "runtests.py", line 1799, in runTest
self.assertEqual(0, res, "non-zero exit status")
AssertionError: non-zero exit status
----------------------------------------------------------------------
Ran 25 tests in 15.594s
|
AssertionError
|
def analyse_types(self, env):
if self.doc:
self.doc = self.doc.analyse_types(env)
self.doc = self.doc.coerce_to_pyobject(env)
env.use_utility_code(UtilityCode.load_cached("CreateClass", "ObjectHandling.c"))
return self
|
def analyse_types(self, env):
self.bases = self.bases.analyse_types(env)
if self.doc:
self.doc = self.doc.analyse_types(env)
self.doc = self.doc.coerce_to_pyobject(env)
env.use_utility_code(UtilityCode.load_cached("CreateClass", "ObjectHandling.c"))
return self
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def generate_result_code(self, code):
class_def_node = self.class_def_node
cname = code.intern_identifier(self.name)
if self.doc:
code.put_error_if_neg(
self.pos,
"PyDict_SetItem(%s, %s, %s)"
% (
class_def_node.dict.py_result(),
code.intern_identifier(StringEncoding.EncodedString("__doc__")),
self.doc.py_result(),
),
)
py_mod_name = self.get_py_mod_name(code)
qualname = self.get_py_qualified_name(code)
code.putln(
"%s = __Pyx_CreateClass(%s, %s, %s, %s, %s); %s"
% (
self.result(),
class_def_node.bases.py_result(),
class_def_node.dict.py_result(),
cname,
qualname,
py_mod_name,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
|
def generate_result_code(self, code):
cname = code.intern_identifier(self.name)
if self.doc:
code.put_error_if_neg(
self.pos,
"PyDict_SetItem(%s, %s, %s)"
% (
self.dict.py_result(),
code.intern_identifier(StringEncoding.EncodedString("__doc__")),
self.doc.py_result(),
),
)
py_mod_name = self.get_py_mod_name(code)
qualname = self.get_py_qualified_name(code)
code.putln(
"%s = __Pyx_CreateClass(%s, %s, %s, %s, %s); %s"
% (
self.result(),
self.bases.py_result(),
self.dict.py_result(),
cname,
qualname,
py_mod_name,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def generate_result_code(self, code):
code.globalstate.use_utility_code(
UtilityCode.load_cached("Py3ClassCreate", "ObjectHandling.c")
)
cname = code.intern_identifier(self.name)
class_def_node = self.class_def_node
mkw = class_def_node.mkw.py_result() if class_def_node.mkw else "NULL"
if class_def_node.metaclass:
metaclass = class_def_node.metaclass.py_result()
else:
metaclass = "((PyObject*)&__Pyx_DefaultClassType)"
code.putln(
"%s = __Pyx_Py3ClassCreate(%s, %s, %s, %s, %s, %d, %d); %s"
% (
self.result(),
metaclass,
cname,
class_def_node.bases.py_result(),
class_def_node.dict.py_result(),
mkw,
self.calculate_metaclass,
self.allow_py2_metaclass,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
|
def generate_result_code(self, code):
code.globalstate.use_utility_code(
UtilityCode.load_cached("Py3ClassCreate", "ObjectHandling.c")
)
cname = code.intern_identifier(self.name)
if self.mkw:
mkw = self.mkw.py_result()
else:
mkw = "NULL"
if self.metaclass:
metaclass = self.metaclass.py_result()
else:
metaclass = "((PyObject*)&__Pyx_DefaultClassType)"
code.putln(
"%s = __Pyx_Py3ClassCreate(%s, %s, %s, %s, %s, %d, %d); %s"
% (
self.result(),
metaclass,
cname,
self.bases.py_result(),
self.dict.py_result(),
mkw,
self.calculate_metaclass,
self.allow_py2_metaclass,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def generate_result_code(self, code):
bases = self.class_def_node.bases
mkw = self.class_def_node.mkw
if mkw:
code.globalstate.use_utility_code(
UtilityCode.load_cached("Py3MetaclassGet", "ObjectHandling.c")
)
call = "__Pyx_Py3MetaclassGet(%s, %s)" % (bases.result(), mkw.result())
else:
code.globalstate.use_utility_code(
UtilityCode.load_cached("CalculateMetaclass", "ObjectHandling.c")
)
call = "__Pyx_CalculateMetaclass(NULL, %s)" % (bases.result())
code.putln(
"%s = %s; %s"
% (self.result(), call, code.error_goto_if_null(self.result(), self.pos))
)
code.put_gotref(self.py_result())
|
def generate_result_code(self, code):
if self.mkw:
code.globalstate.use_utility_code(
UtilityCode.load_cached("Py3MetaclassGet", "ObjectHandling.c")
)
call = "__Pyx_Py3MetaclassGet(%s, %s)" % (
self.bases.result(),
self.mkw.result(),
)
else:
code.globalstate.use_utility_code(
UtilityCode.load_cached("CalculateMetaclass", "ObjectHandling.c")
)
call = "__Pyx_CalculateMetaclass(NULL, %s)" % (self.bases.result())
code.putln(
"%s = %s; %s"
% (self.result(), call, code.error_goto_if_null(self.result(), self.pos))
)
code.put_gotref(self.py_result())
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def analyse_types(self, env):
if self.doc:
self.doc = self.doc.analyse_types(env).coerce_to_pyobject(env)
self.type = py_object_type
self.is_temp = 1
return self
|
def analyse_types(self, env):
if self.doc:
self.doc = self.doc.analyse_types(env)
self.doc = self.doc.coerce_to_pyobject(env)
self.type = py_object_type
self.is_temp = 1
return self
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def generate_result_code(self, code):
cname = code.intern_identifier(self.name)
py_mod_name = self.get_py_mod_name(code)
qualname = self.get_py_qualified_name(code)
class_def_node = self.class_def_node
null = "(PyObject *) NULL"
doc_code = self.doc.result() if self.doc else null
mkw = class_def_node.mkw.py_result() if class_def_node.mkw else null
metaclass = (
class_def_node.metaclass.py_result() if class_def_node.metaclass else null
)
code.putln(
"%s = __Pyx_Py3MetaclassPrepare(%s, %s, %s, %s, %s, %s, %s); %s"
% (
self.result(),
metaclass,
class_def_node.bases.result(),
cname,
qualname,
mkw,
py_mod_name,
doc_code,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
|
def generate_result_code(self, code):
cname = code.intern_identifier(self.name)
py_mod_name = self.get_py_mod_name(code)
qualname = self.get_py_qualified_name(code)
if self.doc:
doc_code = self.doc.result()
else:
doc_code = "(PyObject *) NULL"
if self.mkw:
mkw = self.mkw.py_result()
else:
mkw = "(PyObject *) NULL"
if self.metaclass:
metaclass = self.metaclass.py_result()
else:
metaclass = "(PyObject *) NULL"
code.putln(
"%s = __Pyx_Py3MetaclassPrepare(%s, %s, %s, %s, %s, %s, %s); %s"
% (
self.result(),
metaclass,
self.bases.result(),
cname,
qualname,
mkw,
py_mod_name,
doc_code,
code.error_goto_if_null(self.result(), self.pos),
)
)
code.put_gotref(self.py_result())
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def __init__(
self,
pos,
name,
bases,
doc,
body,
decorators=None,
keyword_args=None,
force_py3_semantics=False,
):
StatNode.__init__(self, pos)
self.name = name
self.doc = doc
self.body = body
self.decorators = decorators
self.bases = bases
from . import ExprNodes
if self.doc and Options.docstrings:
doc = embed_position(self.pos, self.doc)
doc_node = ExprNodes.StringNode(pos, value=doc)
else:
doc_node = None
allow_py2_metaclass = not force_py3_semantics
if keyword_args:
allow_py2_metaclass = False
self.is_py3_style_class = True
if keyword_args.is_dict_literal:
if keyword_args.key_value_pairs:
for i, item in list(enumerate(keyword_args.key_value_pairs))[::-1]:
if item.key.value == "metaclass":
if self.metaclass is not None:
error(
item.pos,
"keyword argument 'metaclass' passed multiple times",
)
# special case: we already know the metaclass,
# so we don't need to do the "build kwargs,
# find metaclass" dance at runtime
self.metaclass = item.value
del keyword_args.key_value_pairs[i]
self.mkw = keyword_args
else:
assert self.metaclass is not None
else:
# MergedDictNode
self.mkw = ExprNodes.ProxyNode(keyword_args)
if force_py3_semantics or self.bases or self.mkw or self.metaclass:
if self.metaclass is None:
if keyword_args and not keyword_args.is_dict_literal:
# **kwargs may contain 'metaclass' arg
mkdict = self.mkw
else:
mkdict = None
if (
not mkdict
and self.bases.is_sequence_constructor
and not self.bases.args
):
pass # no base classes => no inherited metaclass
else:
self.metaclass = ExprNodes.PyClassMetaclassNode(
pos, class_def_node=self
)
needs_metaclass_calculation = False
else:
needs_metaclass_calculation = True
self.dict = ExprNodes.PyClassNamespaceNode(
pos, name=name, doc=doc_node, class_def_node=self
)
self.classobj = ExprNodes.Py3ClassNode(
pos,
name=name,
class_def_node=self,
doc=doc_node,
calculate_metaclass=needs_metaclass_calculation,
allow_py2_metaclass=allow_py2_metaclass,
)
else:
# no bases, no metaclass => old style class creation
self.dict = ExprNodes.DictNode(pos, key_value_pairs=[])
self.classobj = ExprNodes.ClassNode(
pos, name=name, class_def_node=self, doc=doc_node
)
self.target = ExprNodes.NameNode(pos, name=name)
self.class_cell = ExprNodes.ClassCellInjectorNode(self.pos)
|
def __init__(
self,
pos,
name,
bases,
doc,
body,
decorators=None,
keyword_args=None,
force_py3_semantics=False,
):
StatNode.__init__(self, pos)
self.name = name
self.doc = doc
self.body = body
self.decorators = decorators
self.bases = bases
from . import ExprNodes
if self.doc and Options.docstrings:
doc = embed_position(self.pos, self.doc)
doc_node = ExprNodes.StringNode(pos, value=doc)
else:
doc_node = None
allow_py2_metaclass = not force_py3_semantics
if keyword_args:
allow_py2_metaclass = False
self.is_py3_style_class = True
if keyword_args.is_dict_literal:
if keyword_args.key_value_pairs:
for i, item in list(enumerate(keyword_args.key_value_pairs))[::-1]:
if item.key.value == "metaclass":
if self.metaclass is not None:
error(
item.pos,
"keyword argument 'metaclass' passed multiple times",
)
# special case: we already know the metaclass,
# so we don't need to do the "build kwargs,
# find metaclass" dance at runtime
self.metaclass = item.value
del keyword_args.key_value_pairs[i]
self.mkw = keyword_args
else:
assert self.metaclass is not None
else:
# MergedDictNode
self.mkw = ExprNodes.ProxyNode(keyword_args)
if force_py3_semantics or self.bases or self.mkw or self.metaclass:
if self.metaclass is None:
if keyword_args and not keyword_args.is_dict_literal:
# **kwargs may contain 'metaclass' arg
mkdict = self.mkw
else:
mkdict = None
if (
not mkdict
and self.bases.is_sequence_constructor
and not self.bases.args
):
pass # no base classes => no inherited metaclass
else:
self.metaclass = ExprNodes.PyClassMetaclassNode(
pos, mkw=mkdict, bases=self.bases
)
needs_metaclass_calculation = False
else:
needs_metaclass_calculation = True
self.dict = ExprNodes.PyClassNamespaceNode(
pos,
name=name,
doc=doc_node,
metaclass=self.metaclass,
bases=self.bases,
mkw=self.mkw,
)
self.classobj = ExprNodes.Py3ClassNode(
pos,
name=name,
bases=self.bases,
dict=self.dict,
doc=doc_node,
metaclass=self.metaclass,
mkw=self.mkw,
calculate_metaclass=needs_metaclass_calculation,
allow_py2_metaclass=allow_py2_metaclass,
)
else:
# no bases, no metaclass => old style class creation
self.dict = ExprNodes.DictNode(pos, key_value_pairs=[])
self.classobj = ExprNodes.ClassNode(
pos, name=name, bases=bases, dict=self.dict, doc=doc_node
)
self.target = ExprNodes.NameNode(pos, name=name)
self.class_cell = ExprNodes.ClassCellInjectorNode(self.pos)
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def as_cclass(self):
"""
Return this node as if it were declared as an extension class
"""
if self.is_py3_style_class:
error(
self.classobj.pos, "Python3 style class could not be represented as C class"
)
return
from . import ExprNodes
return CClassDefNode(
self.pos,
visibility="private",
module_name=None,
class_name=self.name,
bases=self.bases or ExprNodes.TupleNode(self.pos, args=[]),
decorators=self.decorators,
body=self.body,
in_pxd=False,
doc=self.doc,
)
|
def as_cclass(self):
"""
Return this node as if it were declared as an extension class
"""
if self.is_py3_style_class:
error(
self.classobj.pos, "Python3 style class could not be represented as C class"
)
return
from . import ExprNodes
return CClassDefNode(
self.pos,
visibility="private",
module_name=None,
class_name=self.name,
bases=self.classobj.bases or ExprNodes.TupleNode(self.pos, args=[]),
decorators=self.decorators,
body=self.body,
in_pxd=False,
doc=self.doc,
)
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def analyse_declarations(self, env):
class_result = self.classobj
if self.decorators:
from .ExprNodes import SimpleCallNode
for decorator in self.decorators[::-1]:
class_result = SimpleCallNode(
decorator.pos, function=decorator.decorator, args=[class_result]
)
self.decorators = None
self.class_result = class_result
if self.bases:
self.bases.analyse_declarations(env)
if self.mkw:
self.mkw.analyse_declarations(env)
self.class_result.analyse_declarations(env)
self.target.analyse_target_declaration(env)
cenv = self.create_scope(env)
cenv.directives = env.directives
cenv.class_obj_cname = self.target.entry.cname
self.body.analyse_declarations(cenv)
|
def analyse_declarations(self, env):
class_result = self.classobj
if self.decorators:
from .ExprNodes import SimpleCallNode
for decorator in self.decorators[::-1]:
class_result = SimpleCallNode(
decorator.pos, function=decorator.decorator, args=[class_result]
)
self.decorators = None
self.class_result = class_result
self.class_result.analyse_declarations(env)
self.target.analyse_target_declaration(env)
cenv = self.create_scope(env)
cenv.directives = env.directives
cenv.class_obj_cname = self.target.entry.cname
self.body.analyse_declarations(cenv)
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def analyse_expressions(self, env):
if self.bases:
self.bases = self.bases.analyse_expressions(env)
if self.mkw:
self.mkw = self.mkw.analyse_expressions(env)
if self.metaclass:
self.metaclass = self.metaclass.analyse_expressions(env)
self.dict = self.dict.analyse_expressions(env)
self.class_result = self.class_result.analyse_expressions(env)
cenv = self.scope
self.body = self.body.analyse_expressions(cenv)
self.target.analyse_target_expression(env, self.classobj)
self.class_cell = self.class_cell.analyse_expressions(cenv)
return self
|
def analyse_expressions(self, env):
if self.bases:
self.bases = self.bases.analyse_expressions(env)
if self.metaclass:
self.metaclass = self.metaclass.analyse_expressions(env)
if self.mkw:
self.mkw = self.mkw.analyse_expressions(env)
self.dict = self.dict.analyse_expressions(env)
self.class_result = self.class_result.analyse_expressions(env)
cenv = self.scope
self.body = self.body.analyse_expressions(cenv)
self.target.analyse_target_expression(env, self.classobj)
self.class_cell = self.class_cell.analyse_expressions(cenv)
return self
|
https://github.com/cython/cython/issues/3338
|
Compiling testfile.py because it changed.
[1/1] Cythonizing testfile.py
Traceback (most recent call last):
File "setup.py", line 12, in <module>
distutils.core.setup(ext_modules=Cython.Build.cythonize(packpaths, nthreads=buildthreads, compiler_directives={'language_level': 3, 'profile': True}, annotate=True))
File ".../Cython/Build/Dependencies.py", line 1096, in cythonize
cythonize_one(*args)
File ".../Cython/Build/Dependencies.py", line 1202, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File ".../Cython/Compiler/Main.py", line 727, in compile_single
return run_pipeline(source, options, full_module_name)
File ".../Cython/Compiler/Main.py", line 515, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File ".../Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File ".../Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File ".../Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File ".../Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File ".../Cython/Compiler/ModuleNode.py", line 385, in generate_c_code
self.body.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File ".../Cython/Compiler/Nodes.py", line 3176, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File ".../Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File ".../Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File ".../Cython/Compiler/Nodes.py", line 4601, in generate_execution_code
self.class_result.generate_evaluation_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 773, in generate_evaluation_code
self.generate_result_code(code)
File ".../Cython/Compiler/ExprNodes.py", line 9034, in generate_result_code
self.bases.py_result(),
File ".../Cython/Compiler/ExprNodes.py", line 525, in py_result
return self.result_as(py_object_type)
File ".../Cython/Compiler/ExprNodes.py", line 516, in result_as
if (self.is_temp and self.type.is_pyobject and
AttributeError: 'NoneType' object has no attribute 'is_pyobject'
|
AttributeError
|
def file_reporter(self, filename):
# TODO: let coverage.py handle .py files itself
# ext = os.path.splitext(filename)[1].lower()
# if ext == '.py':
# from coverage.python import PythonFileReporter
# return PythonFileReporter(filename)
filename = canonical_filename(os.path.abspath(filename))
if self._c_files_map and filename in self._c_files_map:
c_file, rel_file_path, code = self._c_files_map[filename]
else:
c_file, _ = self._find_source_files(filename)
if not c_file:
return None # unknown file
rel_file_path, code = self._parse_lines(c_file, filename)
if code is None:
return None # no source found
return CythonModuleReporter(c_file, filename, rel_file_path, code)
|
def file_reporter(self, filename):
# TODO: let coverage.py handle .py files itself
# ext = os.path.splitext(filename)[1].lower()
# if ext == '.py':
# from coverage.python import PythonFileReporter
# return PythonFileReporter(filename)
filename = canonical_filename(os.path.abspath(filename))
if self._c_files_map and filename in self._c_files_map:
c_file, rel_file_path, code = self._c_files_map[filename]
else:
c_file, _ = self._find_source_files(filename)
if not c_file:
return None # unknown file
rel_file_path, code = self._parse_lines(c_file, filename)
return CythonModuleReporter(c_file, filename, rel_file_path, code)
|
https://github.com/cython/cython/issues/1985
|
$ coveralls
...
Traceback (most recent call last):
File "/home/travis/miniconda/envs/dplenv/bin/coveralls", line 6, in <module>
sys.exit(coveralls.wear())
File "/home/travis/miniconda/envs/dplenv/lib/python3.5/site-packages/coveralls/__init__.py", line 94, in wear
source_files=coverage.coveralls(args.base_dir, ignore_errors=args.ignore_errors, merge_file=args.merge_file),
File "/home/travis/miniconda/envs/dplenv/lib/python3.5/site-packages/coveralls/control.py", line 9, in coveralls
return reporter.report(base_dir, ignore_errors=ignore_errors, merge_file=merge_file)
File "/home/travis/miniconda/envs/dplenv/lib/python3.5/site-packages/coveralls/report.py", line 20, in report
analysis = self.coverage._analyze(fr)
File "/home/travis/miniconda/envs/dplenv/lib/python3.5/site-packages/coverage/control.py", line 891, in _analyze
return Analysis(self.data, it)
File "/home/travis/miniconda/envs/dplenv/lib/python3.5/site-packages/coverage/results.py", line 19, in __init__
self.statements = self.file_reporter.lines()
File "/home/travis/miniconda/envs/dplenv/lib/python3.5/site-packages/Cython/Coverage.py", line 276, in lines
return set(self._code)
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def generate_module_preamble(self, env, options, cimported_modules, metadata, code):
code.put_generated_by()
if metadata:
code.putln("/* BEGIN: Cython Metadata")
code.putln(json.dumps(metadata, indent=4, sort_keys=True))
code.putln("END: Cython Metadata */")
code.putln("")
code.putln("#define PY_SSIZE_T_CLEAN")
for inc in sorted(env.c_includes.values(), key=IncludeCode.sortkey):
if inc.location == inc.INITIAL:
inc.write(code)
code.putln("#ifndef Py_PYTHON_H")
code.putln(
" #error Python headers needed to compile C extensions, "
"please install development version of Python."
)
code.putln(
"#elif PY_VERSION_HEX < 0x02070000 || "
"(0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)"
)
code.putln(" #error Cython requires Python 2.7+ or Python 3.3+.")
code.putln("#else")
code.globalstate["end"].putln("#endif /* Py_PYTHON_H */")
from .. import __version__
code.putln('#define CYTHON_ABI "%s"' % __version__.replace(".", "_"))
code.putln("#define CYTHON_HEX_VERSION %s" % build_hex_version(__version__))
code.putln(
"#define CYTHON_FUTURE_DIVISION %d"
% (Future.division in env.context.future_directives)
)
self._put_setup_code(code, "CModulePreamble")
if env.context.options.cplus:
self._put_setup_code(code, "CppInitCode")
else:
self._put_setup_code(code, "CInitCode")
self._put_setup_code(code, "PythonCompatibility")
self._put_setup_code(code, "MathInitCode")
if options.c_line_in_traceback:
cinfo = "%s = %s; " % (Naming.clineno_cname, Naming.line_c_macro)
else:
cinfo = ""
code.put(
"""
#define __PYX_ERR(f_index, lineno, Ln_error) \\
{ \\
%s = %s[f_index]; %s = lineno; %sgoto Ln_error; \\
}
"""
% (Naming.filename_cname, Naming.filetable_cname, Naming.lineno_cname, cinfo)
)
code.putln("")
self.generate_extern_c_macro_definition(code)
code.putln("")
code.putln("#define %s" % Naming.h_guard_prefix + self.api_name(env))
code.putln("#define %s" % Naming.api_guard_prefix + self.api_name(env))
code.putln("/* Early includes */")
self.generate_includes(env, cimported_modules, code, late=False)
code.putln("")
code.putln(
"#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)"
)
code.putln("#define CYTHON_WITHOUT_ASSERTIONS")
code.putln("#endif")
code.putln("")
if env.directives["ccomplex"]:
code.putln("")
code.putln("#if !defined(CYTHON_CCOMPLEX)")
code.putln("#define CYTHON_CCOMPLEX 1")
code.putln("#endif")
code.putln("")
code.put(
UtilityCode.load_as_string(
"UtilityFunctionPredeclarations", "ModuleSetupCode.c"
)[0]
)
c_string_type = env.directives["c_string_type"]
c_string_encoding = env.directives["c_string_encoding"]
if c_string_type not in ("bytes", "bytearray") and not c_string_encoding:
error(
self.pos,
"a default encoding must be provided if c_string_type is not a byte type",
)
code.putln(
"#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII %s"
% int(c_string_encoding == "ascii")
)
code.putln(
"#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 %s"
% int(c_string_encoding.replace("-", "").lower() == "utf8")
)
if c_string_encoding == "default":
code.putln("#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 1")
else:
code.putln(
"#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT "
"(PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)"
)
code.putln('#define __PYX_DEFAULT_STRING_ENCODING "%s"' % c_string_encoding)
if c_string_type == "bytearray":
c_string_func_name = "ByteArray"
else:
c_string_func_name = c_string_type.title()
code.putln(
"#define __Pyx_PyObject_FromString __Pyx_Py%s_FromString" % c_string_func_name
)
code.putln(
"#define __Pyx_PyObject_FromStringAndSize __Pyx_Py%s_FromStringAndSize"
% c_string_func_name
)
code.put(UtilityCode.load_as_string("TypeConversions", "TypeConversion.c")[0])
# These utility functions are assumed to exist and used elsewhere.
PyrexTypes.c_long_type.create_to_py_utility_code(env)
PyrexTypes.c_long_type.create_from_py_utility_code(env)
PyrexTypes.c_int_type.create_from_py_utility_code(env)
code.put(Nodes.branch_prediction_macros)
code.putln(
"static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }"
)
code.putln("")
code.putln("static PyObject *%s = NULL;" % env.module_cname)
code.putln("static PyObject *%s;" % env.module_dict_cname)
code.putln("static PyObject *%s;" % Naming.builtins_cname)
code.putln("static PyObject *%s = NULL;" % Naming.cython_runtime_cname)
code.putln("static PyObject *%s;" % Naming.empty_tuple)
code.putln("static PyObject *%s;" % Naming.empty_bytes)
code.putln("static PyObject *%s;" % Naming.empty_unicode)
if Options.pre_import is not None:
code.putln("static PyObject *%s;" % Naming.preimport_cname)
code.putln("static int %s;" % Naming.lineno_cname)
code.putln("static int %s = 0;" % Naming.clineno_cname)
code.putln(
"static const char * %s= %s;" % (Naming.cfilenm_cname, Naming.file_c_macro)
)
code.putln("static const char *%s;" % Naming.filename_cname)
env.use_utility_code(UtilityCode.load_cached("FastTypeChecks", "ModuleSetupCode.c"))
if has_np_pythran(env):
env.use_utility_code(
UtilityCode.load_cached("PythranConversion", "CppSupport.cpp")
)
|
def generate_module_preamble(self, env, options, cimported_modules, metadata, code):
code.put_generated_by()
if metadata:
code.putln("/* BEGIN: Cython Metadata")
code.putln(json.dumps(metadata, indent=4, sort_keys=True))
code.putln("END: Cython Metadata */")
code.putln("")
code.putln("#define PY_SSIZE_T_CLEAN")
for inc in sorted(env.c_includes.values(), key=IncludeCode.sortkey):
if inc.location == inc.INITIAL:
inc.write(code)
code.putln("#ifndef Py_PYTHON_H")
code.putln(
" #error Python headers needed to compile C extensions, "
"please install development version of Python."
)
code.putln(
"#elif PY_VERSION_HEX < 0x02070000 || "
"(0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)"
)
code.putln(" #error Cython requires Python 2.7+ or Python 3.3+.")
code.putln("#else")
code.globalstate["end"].putln("#endif /* Py_PYTHON_H */")
from .. import __version__
code.putln('#define CYTHON_ABI "%s"' % __version__.replace(".", "_"))
code.putln("#define CYTHON_HEX_VERSION %s" % build_hex_version(__version__))
code.putln(
"#define CYTHON_FUTURE_DIVISION %d"
% (Future.division in env.context.future_directives)
)
self._put_setup_code(code, "CModulePreamble")
if env.context.options.cplus:
self._put_setup_code(code, "CppInitCode")
else:
self._put_setup_code(code, "CInitCode")
self._put_setup_code(code, "PythonCompatibility")
self._put_setup_code(code, "MathInitCode")
if options.c_line_in_traceback:
cinfo = "%s = %s; " % (Naming.clineno_cname, Naming.line_c_macro)
else:
cinfo = ""
code.put(
"""
#define __PYX_ERR(f_index, lineno, Ln_error) \\
{ \\
%s = %s[f_index]; %s = lineno; %sgoto Ln_error; \\
}
"""
% (Naming.filename_cname, Naming.filetable_cname, Naming.lineno_cname, cinfo)
)
code.putln("")
self.generate_extern_c_macro_definition(code)
code.putln("")
code.putln("#define %s" % Naming.h_guard_prefix + self.api_name(env))
code.putln("#define %s" % Naming.api_guard_prefix + self.api_name(env))
code.putln("/* Early includes */")
self.generate_includes(env, cimported_modules, code, late=False)
code.putln("")
code.putln(
"#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)"
)
code.putln("#define CYTHON_WITHOUT_ASSERTIONS")
code.putln("#endif")
code.putln("")
if env.directives["ccomplex"]:
code.putln("")
code.putln("#if !defined(CYTHON_CCOMPLEX)")
code.putln("#define CYTHON_CCOMPLEX 1")
code.putln("#endif")
code.putln("")
code.put(
UtilityCode.load_as_string(
"UtilityFunctionPredeclarations", "ModuleSetupCode.c"
)[0]
)
c_string_type = env.directives["c_string_type"]
c_string_encoding = env.directives["c_string_encoding"]
if c_string_type not in ("bytes", "bytearray") and not c_string_encoding:
error(
self.pos,
"a default encoding must be provided if c_string_type is not a byte type",
)
code.putln(
"#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII %s"
% int(c_string_encoding == "ascii")
)
if c_string_encoding == "default":
code.putln("#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 1")
else:
code.putln("#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0")
code.putln('#define __PYX_DEFAULT_STRING_ENCODING "%s"' % c_string_encoding)
if c_string_type == "bytearray":
c_string_func_name = "ByteArray"
else:
c_string_func_name = c_string_type.title()
code.putln(
"#define __Pyx_PyObject_FromString __Pyx_Py%s_FromString" % c_string_func_name
)
code.putln(
"#define __Pyx_PyObject_FromStringAndSize __Pyx_Py%s_FromStringAndSize"
% c_string_func_name
)
code.put(UtilityCode.load_as_string("TypeConversions", "TypeConversion.c")[0])
# These utility functions are assumed to exist and used elsewhere.
PyrexTypes.c_long_type.create_to_py_utility_code(env)
PyrexTypes.c_long_type.create_from_py_utility_code(env)
PyrexTypes.c_int_type.create_from_py_utility_code(env)
code.put(Nodes.branch_prediction_macros)
code.putln(
"static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }"
)
code.putln("")
code.putln("static PyObject *%s = NULL;" % env.module_cname)
code.putln("static PyObject *%s;" % env.module_dict_cname)
code.putln("static PyObject *%s;" % Naming.builtins_cname)
code.putln("static PyObject *%s = NULL;" % Naming.cython_runtime_cname)
code.putln("static PyObject *%s;" % Naming.empty_tuple)
code.putln("static PyObject *%s;" % Naming.empty_bytes)
code.putln("static PyObject *%s;" % Naming.empty_unicode)
if Options.pre_import is not None:
code.putln("static PyObject *%s;" % Naming.preimport_cname)
code.putln("static int %s;" % Naming.lineno_cname)
code.putln("static int %s = 0;" % Naming.clineno_cname)
code.putln(
"static const char * %s= %s;" % (Naming.cfilenm_cname, Naming.file_c_macro)
)
code.putln("static const char *%s;" % Naming.filename_cname)
env.use_utility_code(UtilityCode.load_cached("FastTypeChecks", "ModuleSetupCode.c"))
if has_np_pythran(env):
env.use_utility_code(
UtilityCode.load_cached("PythranConversion", "CppSupport.cpp")
)
|
https://github.com/cython/cython/issues/2819
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "test.pyx", line 6, in test.test
File "stringsource", line 15, in string.from_py.__pyx_convert_string_from_py_std__in_string
TypeError: expected bytes, str found
|
TypeError
|
def generate_module_preamble(self, env, options, cimported_modules, metadata, code):
code.put_generated_by()
if metadata:
code.putln("/* BEGIN: Cython Metadata")
code.putln(json.dumps(metadata, indent=4, sort_keys=True))
code.putln("END: Cython Metadata */")
code.putln("")
code.putln("#define PY_SSIZE_T_CLEAN")
for inc in sorted(env.c_includes.values(), key=IncludeCode.sortkey):
if inc.location == inc.INITIAL:
inc.write(code)
code.putln("#ifndef Py_PYTHON_H")
code.putln(
" #error Python headers needed to compile C extensions, "
"please install development version of Python."
)
code.putln(
"#elif PY_VERSION_HEX < 0x02060000 || "
"(0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)"
)
code.putln(" #error Cython requires Python 2.6+ or Python 3.3+.")
code.putln("#else")
code.globalstate["end"].putln("#endif /* Py_PYTHON_H */")
from .. import __version__
code.putln('#define CYTHON_ABI "%s"' % __version__.replace(".", "_"))
code.putln("#define CYTHON_HEX_VERSION %s" % build_hex_version(__version__))
code.putln(
"#define CYTHON_FUTURE_DIVISION %d"
% (Future.division in env.context.future_directives)
)
self._put_setup_code(code, "CModulePreamble")
if env.context.options.cplus:
self._put_setup_code(code, "CppInitCode")
else:
self._put_setup_code(code, "CInitCode")
self._put_setup_code(code, "PythonCompatibility")
self._put_setup_code(code, "MathInitCode")
if options.c_line_in_traceback:
cinfo = "%s = %s; " % (Naming.clineno_cname, Naming.line_c_macro)
else:
cinfo = ""
code.put(
"""
#define __PYX_ERR(f_index, lineno, Ln_error) \\
{ \\
%s = %s[f_index]; %s = lineno; %sgoto Ln_error; \\
}
"""
% (Naming.filename_cname, Naming.filetable_cname, Naming.lineno_cname, cinfo)
)
code.putln("")
self.generate_extern_c_macro_definition(code)
code.putln("")
code.putln("#define %s" % Naming.h_guard_prefix + self.api_name(env))
code.putln("#define %s" % Naming.api_guard_prefix + self.api_name(env))
code.putln("/* Early includes */")
self.generate_includes(env, cimported_modules, code, late=False)
code.putln("")
code.putln(
"#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)"
)
code.putln("#define CYTHON_WITHOUT_ASSERTIONS")
code.putln("#endif")
code.putln("")
if env.directives["ccomplex"]:
code.putln("")
code.putln("#if !defined(CYTHON_CCOMPLEX)")
code.putln("#define CYTHON_CCOMPLEX 1")
code.putln("#endif")
code.putln("")
code.put(
UtilityCode.load_as_string(
"UtilityFunctionPredeclarations", "ModuleSetupCode.c"
)[0]
)
c_string_type = env.directives["c_string_type"]
c_string_encoding = env.directives["c_string_encoding"]
if c_string_type not in ("bytes", "bytearray") and not c_string_encoding:
error(
self.pos,
"a default encoding must be provided if c_string_type is not a byte type",
)
code.putln(
"#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII %s"
% int(c_string_encoding == "ascii")
)
code.putln(
"#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 %s"
% int(c_string_encoding.replace("-", "").lower() == "utf8")
)
if c_string_encoding == "default":
code.putln("#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 1")
else:
code.putln(
"#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT "
"(PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)"
)
code.putln('#define __PYX_DEFAULT_STRING_ENCODING "%s"' % c_string_encoding)
if c_string_type == "bytearray":
c_string_func_name = "ByteArray"
else:
c_string_func_name = c_string_type.title()
code.putln(
"#define __Pyx_PyObject_FromString __Pyx_Py%s_FromString" % c_string_func_name
)
code.putln(
"#define __Pyx_PyObject_FromStringAndSize __Pyx_Py%s_FromStringAndSize"
% c_string_func_name
)
code.put(UtilityCode.load_as_string("TypeConversions", "TypeConversion.c")[0])
# These utility functions are assumed to exist and used elsewhere.
PyrexTypes.c_long_type.create_to_py_utility_code(env)
PyrexTypes.c_long_type.create_from_py_utility_code(env)
PyrexTypes.c_int_type.create_from_py_utility_code(env)
code.put(Nodes.branch_prediction_macros)
code.putln(
"static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }"
)
code.putln("")
code.putln("static PyObject *%s = NULL;" % env.module_cname)
code.putln("static PyObject *%s;" % env.module_dict_cname)
code.putln("static PyObject *%s;" % Naming.builtins_cname)
code.putln("static PyObject *%s = NULL;" % Naming.cython_runtime_cname)
code.putln("static PyObject *%s;" % Naming.empty_tuple)
code.putln("static PyObject *%s;" % Naming.empty_bytes)
code.putln("static PyObject *%s;" % Naming.empty_unicode)
if Options.pre_import is not None:
code.putln("static PyObject *%s;" % Naming.preimport_cname)
code.putln("static int %s;" % Naming.lineno_cname)
code.putln("static int %s = 0;" % Naming.clineno_cname)
code.putln(
"static const char * %s= %s;" % (Naming.cfilenm_cname, Naming.file_c_macro)
)
code.putln("static const char *%s;" % Naming.filename_cname)
env.use_utility_code(UtilityCode.load_cached("FastTypeChecks", "ModuleSetupCode.c"))
if has_np_pythran(env):
env.use_utility_code(
UtilityCode.load_cached("PythranConversion", "CppSupport.cpp")
)
|
def generate_module_preamble(self, env, options, cimported_modules, metadata, code):
code.put_generated_by()
if metadata:
code.putln("/* BEGIN: Cython Metadata")
code.putln(json.dumps(metadata, indent=4, sort_keys=True))
code.putln("END: Cython Metadata */")
code.putln("")
code.putln("#define PY_SSIZE_T_CLEAN")
for inc in sorted(env.c_includes.values(), key=IncludeCode.sortkey):
if inc.location == inc.INITIAL:
inc.write(code)
code.putln("#ifndef Py_PYTHON_H")
code.putln(
" #error Python headers needed to compile C extensions, "
"please install development version of Python."
)
code.putln(
"#elif PY_VERSION_HEX < 0x02060000 || "
"(0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)"
)
code.putln(" #error Cython requires Python 2.6+ or Python 3.3+.")
code.putln("#else")
code.globalstate["end"].putln("#endif /* Py_PYTHON_H */")
from .. import __version__
code.putln('#define CYTHON_ABI "%s"' % __version__.replace(".", "_"))
code.putln("#define CYTHON_HEX_VERSION %s" % build_hex_version(__version__))
code.putln(
"#define CYTHON_FUTURE_DIVISION %d"
% (Future.division in env.context.future_directives)
)
self._put_setup_code(code, "CModulePreamble")
if env.context.options.cplus:
self._put_setup_code(code, "CppInitCode")
else:
self._put_setup_code(code, "CInitCode")
self._put_setup_code(code, "PythonCompatibility")
self._put_setup_code(code, "MathInitCode")
if options.c_line_in_traceback:
cinfo = "%s = %s; " % (Naming.clineno_cname, Naming.line_c_macro)
else:
cinfo = ""
code.put(
"""
#define __PYX_ERR(f_index, lineno, Ln_error) \\
{ \\
%s = %s[f_index]; %s = lineno; %sgoto Ln_error; \\
}
"""
% (Naming.filename_cname, Naming.filetable_cname, Naming.lineno_cname, cinfo)
)
code.putln("")
self.generate_extern_c_macro_definition(code)
code.putln("")
code.putln("#define %s" % Naming.h_guard_prefix + self.api_name(env))
code.putln("#define %s" % Naming.api_guard_prefix + self.api_name(env))
code.putln("/* Early includes */")
self.generate_includes(env, cimported_modules, code, late=False)
code.putln("")
code.putln(
"#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)"
)
code.putln("#define CYTHON_WITHOUT_ASSERTIONS")
code.putln("#endif")
code.putln("")
if env.directives["ccomplex"]:
code.putln("")
code.putln("#if !defined(CYTHON_CCOMPLEX)")
code.putln("#define CYTHON_CCOMPLEX 1")
code.putln("#endif")
code.putln("")
code.put(
UtilityCode.load_as_string(
"UtilityFunctionPredeclarations", "ModuleSetupCode.c"
)[0]
)
c_string_type = env.directives["c_string_type"]
c_string_encoding = env.directives["c_string_encoding"]
if c_string_type not in ("bytes", "bytearray") and not c_string_encoding:
error(
self.pos,
"a default encoding must be provided if c_string_type is not a byte type",
)
code.putln(
"#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII %s"
% int(c_string_encoding == "ascii")
)
if c_string_encoding == "default":
code.putln("#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 1")
else:
code.putln("#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0")
code.putln('#define __PYX_DEFAULT_STRING_ENCODING "%s"' % c_string_encoding)
if c_string_type == "bytearray":
c_string_func_name = "ByteArray"
else:
c_string_func_name = c_string_type.title()
code.putln(
"#define __Pyx_PyObject_FromString __Pyx_Py%s_FromString" % c_string_func_name
)
code.putln(
"#define __Pyx_PyObject_FromStringAndSize __Pyx_Py%s_FromStringAndSize"
% c_string_func_name
)
code.put(UtilityCode.load_as_string("TypeConversions", "TypeConversion.c")[0])
# These utility functions are assumed to exist and used elsewhere.
PyrexTypes.c_long_type.create_to_py_utility_code(env)
PyrexTypes.c_long_type.create_from_py_utility_code(env)
PyrexTypes.c_int_type.create_from_py_utility_code(env)
code.put(Nodes.branch_prediction_macros)
code.putln(
"static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }"
)
code.putln("")
code.putln("static PyObject *%s = NULL;" % env.module_cname)
code.putln("static PyObject *%s;" % env.module_dict_cname)
code.putln("static PyObject *%s;" % Naming.builtins_cname)
code.putln("static PyObject *%s = NULL;" % Naming.cython_runtime_cname)
code.putln("static PyObject *%s;" % Naming.empty_tuple)
code.putln("static PyObject *%s;" % Naming.empty_bytes)
code.putln("static PyObject *%s;" % Naming.empty_unicode)
if Options.pre_import is not None:
code.putln("static PyObject *%s;" % Naming.preimport_cname)
code.putln("static int %s;" % Naming.lineno_cname)
code.putln("static int %s = 0;" % Naming.clineno_cname)
code.putln(
"static const char * %s= %s;" % (Naming.cfilenm_cname, Naming.file_c_macro)
)
code.putln("static const char *%s;" % Naming.filename_cname)
env.use_utility_code(UtilityCode.load_cached("FastTypeChecks", "ModuleSetupCode.c"))
if has_np_pythran(env):
env.use_utility_code(
UtilityCode.load_cached("PythranConversion", "CppSupport.cpp")
)
|
https://github.com/cython/cython/issues/2819
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "test.pyx", line 6, in test.test
File "stringsource", line 15, in string.from_py.__pyx_convert_string_from_py_std__in_string
TypeError: expected bytes, str found
|
TypeError
|
def privatize_temps(self, code, exclude_temps=()):
"""
Make any used temporaries private. Before the relevant code block
code.start_collecting_temps() should have been called.
"""
c = self.privatization_insertion_point
self.privatization_insertion_point = None
if self.is_parallel:
self.temps = temps = code.funcstate.stop_collecting_temps()
privates, firstprivates = [], []
for temp, type in sorted(temps):
if type.is_pyobject or type.is_memoryviewslice:
firstprivates.append(temp)
else:
privates.append(temp)
if privates:
c.put(" private(%s)" % ", ".join(privates))
if firstprivates:
c.put(" firstprivate(%s)" % ", ".join(firstprivates))
if self.breaking_label_used:
shared_vars = [Naming.parallel_why]
if self.error_label_used:
shared_vars.extend(self.parallel_exc)
c.put(" private(%s, %s, %s)" % self.pos_info)
c.put(" shared(%s)" % ", ".join(shared_vars))
|
def privatize_temps(self, code, exclude_temps=()):
"""
Make any used temporaries private. Before the relevant code block
code.start_collecting_temps() should have been called.
"""
if self.is_parallel:
c = self.privatization_insertion_point
self.temps = temps = code.funcstate.stop_collecting_temps()
privates, firstprivates = [], []
for temp, type in sorted(temps):
if type.is_pyobject or type.is_memoryviewslice:
firstprivates.append(temp)
else:
privates.append(temp)
if privates:
c.put(" private(%s)" % ", ".join(privates))
if firstprivates:
c.put(" firstprivate(%s)" % ", ".join(firstprivates))
if self.breaking_label_used:
shared_vars = [Naming.parallel_why]
if self.error_label_used:
shared_vars.extend(self.parallel_exc)
c.put(" private(%s, %s, %s)" % self.pos_info)
c.put(" shared(%s)" % ", ".join(shared_vars))
|
https://github.com/cython/cython/issues/2780
|
%%cython
from contextlib import contextmanager
@contextmanager
def tag(name):
print("<%s>" % name)
yield
print("</%s>" % name)
from cython.parallel cimport prange
from libc.stdio cimport printf
def func():
cdef int i
with tag('aaa'):
for i in prange(5, nogil=True): # using "xrange" or "with nogil" works well
printf("%d", i)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.py:367: FutureWarning: Cython directive 'language_level' not set, using 2 for now (Py2). This will change in a later release! File: /Users/tema/.ipython/cython/_cython_magic_52369e6689303208367674016d90c298.pyx
tree = Parsing.p_module(s, pxd, full_module_name)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-11-72854e963c38> in <module>()
----> 1 get_ipython().run_cell_magic(u'cython', u'', u'from contextlib import contextmanager\n\n@contextmanager\ndef tag(name):\n print("<%s>" % name)\n yield\n print("</%s>" % name)\n\n\nfrom cython.parallel cimport prange\nfrom libc.stdio cimport printf\n\n\ndef func():\n cdef int i\n\n with tag(\'aaa\'):\n for i in prange(5, nogil=True):\n printf("%d", i)')
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.pyc in run_cell_magic(self, magic_name, line, cell)
2115 magic_arg_s = self.var_expand(line, stack_depth)
2116 with self.builtin_trap:
-> 2117 result = fn(magic_arg_s, cell)
2118 return result
2119
<decorator-gen-118> in cython(self, line, cell)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/IPython/core/magic.pyc in <lambda>(f, *a, **k)
186 # but it's overkill for just that one bit of state.
187 def magic_deco(arg):
--> 188 call = lambda f, *a, **k: f(*a, **k)
189
190 if callable(arg):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/IpythonMagic.pyc in cython(self, line, cell)
322 extension = None
323 if need_cythonize:
--> 324 extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
325 assert len(extensions) == 1
326 extension = extensions[0]
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/IpythonMagic.pyc in _cythonize(self, module_name, code, lib_dir, args, quiet)
430 elif sys.version_info[0] >= 3:
431 opts['language_level'] = 3
--> 432 return cythonize([extension], **opts)
433 except CompileError:
434 return None
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/Dependencies.pyc in cythonize(module_list, exclude, nthreads, aliases, quiet, force, language, exclude_failures, **options)
1084 if not nthreads:
1085 for args in to_compile:
-> 1086 cythonize_one(*args)
1087
1088 if exclude_failures:
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/Dependencies.pyc in cythonize_one(pyx_file, c_file, fingerprint, quiet, options, raise_on_failure, embedded_metadata, full_module_name, progress)
1190 any_failures = 0
1191 try:
-> 1192 result = compile_single(pyx_file, options, full_module_name=full_module_name)
1193 if result.num_errors > 0:
1194 any_failures = 1
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.pyc in compile_single(source, options, full_module_name)
723 recursion.
724 """
--> 725 return run_pipeline(source, options, full_module_name)
726
727
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.pyc in run_pipeline(source, options, full_module_name, context)
511
512 context.setup_errors(options, result)
--> 513 err, enddata = Pipeline.run_pipeline(pipeline, source)
514 context.teardown_errors(err, options, result)
515 return result
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in run_pipeline(pipeline, source, printtree)
353 exec("def %s(phase, data): return phase(data)" % phase_name, exec_ns)
354 run = _pipeline_entry_points[phase_name] = exec_ns[phase_name]
--> 355 data = run(phase, data)
356 if DebugFlags.debug_verbose_pipeline:
357 print(" %.3f seconds" % (time() - t))
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in run(phase, data)
333
334 def run(phase, data):
--> 335 return phase(data)
336
337 error = None
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in generate_pyx_code_stage(module_node)
50 def generate_pyx_code_stage_factory(options, result):
51 def generate_pyx_code_stage(module_node):
---> 52 module_node.process_implementation(options, result)
53 result.compilation_source = module_node.compilation_source
54 return result
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.pyc in process_implementation(self, options, result)
141 self.find_referenced_modules(env, self.referenced_modules, {})
142 self.sort_cdef_classes(env)
--> 143 self.generate_c_code(env, options, result)
144 self.generate_h_code(env, options, result)
145 self.generate_api_code(env, options, result)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.pyc in generate_c_code(self, env, options, result)
377 self.generate_variable_definitions(env, code)
378
--> 379 self.body.generate_function_definitions(env, code)
380
381 code.mark_pos(None)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
440 #print "StatListNode.generate_function_definitions" ###
441 for stat in self.stats:
--> 442 stat.generate_function_definitions(env, code)
443
444 def generate_execution_code(self, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
3171 self.py_wrapper.func_cname = self.entry.func_cname
3172 self.py_wrapper.generate_function_definitions(env, code)
-> 3173 FuncDefNode.generate_function_definitions(self, env, code)
3174
3175 def generate_function_header(self, code, with_pymethdef, proto_only=0):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
1981 # ----- Function body -----
1982 # -------------------------
-> 1983 self.generate_function_body(env, code)
1984
1985 code.mark_pos(self.pos, trace=False)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_body(self, env, code)
1743
1744 def generate_function_body(self, env, code):
-> 1745 self.body.generate_execution_code(code)
1746
1747 def generate_function_definitions(self, env, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
446 for stat in self.stats:
447 code.mark_pos(stat.pos)
--> 448 stat.generate_execution_code(code)
449
450 def annotate(self, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
7090
7091 code.error_label = old_error_label
-> 7092 self.body.generate_execution_code(code)
7093
7094 if code.label_used(intermediate_error_label):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
7559 if not self.body.is_terminator:
7560 code.putln('/*normal exit:*/{')
-> 7561 fresh_finally_clause().generate_execution_code(code)
7562 if not self.finally_clause.is_terminator:
7563 code.put_goto(catch_label)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in fresh_finally_clause(_next)
7547 # generate the original subtree once and always keep a fresh copy
7548 node = _next[0]
-> 7549 node_copy = copy.deepcopy(node)
7550 if node is self.finally_clause:
7551 _next[0] = node_copy
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_list(x, memo)
228 memo[id(x)] = y
229 for a in x:
--> 230 y.append(deepcopy(a, memo))
231 return y
232 d[list] = _deepcopy_list
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_list(x, memo)
228 memo[id(x)] = y
229 for a in x:
--> 230 y.append(deepcopy(a, memo))
231 return y
232 d[list] = _deepcopy_list
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
180 reductor = getattr(x, "__reduce_ex__", None)
181 if reductor:
--> 182 rv = reductor(2)
183 else:
184 reductor = getattr(x, "__reduce__", None)
TypeError: can't pickle cStringIO.StringO objects
|
TypeError
|
def end_parallel_block(self, code):
"""
To ensure all OpenMP threads have thread states, we ensure the GIL
in each thread (which creates a thread state if it doesn't exist),
after which we release the GIL.
On exit, reacquire the GIL and release the thread state.
If compiled without OpenMP support (at the C level), then we still have
to acquire the GIL to decref any object temporaries.
"""
begin_code = self.begin_of_parallel_block
self.begin_of_parallel_block = None
if self.error_label_used:
end_code = code
begin_code.putln("#ifdef _OPENMP")
begin_code.put_ensure_gil(declare_gilstate=True)
begin_code.putln("Py_BEGIN_ALLOW_THREADS")
begin_code.putln("#endif /* _OPENMP */")
end_code.putln("#ifdef _OPENMP")
end_code.putln("Py_END_ALLOW_THREADS")
end_code.putln("#else")
end_code.put_safe("{\n")
end_code.put_ensure_gil()
end_code.putln("#endif /* _OPENMP */")
self.cleanup_temps(end_code)
end_code.put_release_ensured_gil()
end_code.putln("#ifndef _OPENMP")
end_code.put_safe("}\n")
end_code.putln("#endif /* _OPENMP */")
|
def end_parallel_block(self, code):
"""
To ensure all OpenMP threads have thread states, we ensure the GIL
in each thread (which creates a thread state if it doesn't exist),
after which we release the GIL.
On exit, reacquire the GIL and release the thread state.
If compiled without OpenMP support (at the C level), then we still have
to acquire the GIL to decref any object temporaries.
"""
if self.error_label_used:
begin_code = self.begin_of_parallel_block
end_code = code
begin_code.putln("#ifdef _OPENMP")
begin_code.put_ensure_gil(declare_gilstate=True)
begin_code.putln("Py_BEGIN_ALLOW_THREADS")
begin_code.putln("#endif /* _OPENMP */")
end_code.putln("#ifdef _OPENMP")
end_code.putln("Py_END_ALLOW_THREADS")
end_code.putln("#else")
end_code.put_safe("{\n")
end_code.put_ensure_gil()
end_code.putln("#endif /* _OPENMP */")
self.cleanup_temps(end_code)
end_code.put_release_ensured_gil()
end_code.putln("#ifndef _OPENMP")
end_code.put_safe("}\n")
end_code.putln("#endif /* _OPENMP */")
|
https://github.com/cython/cython/issues/2780
|
%%cython
from contextlib import contextmanager
@contextmanager
def tag(name):
print("<%s>" % name)
yield
print("</%s>" % name)
from cython.parallel cimport prange
from libc.stdio cimport printf
def func():
cdef int i
with tag('aaa'):
for i in prange(5, nogil=True): # using "xrange" or "with nogil" works well
printf("%d", i)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.py:367: FutureWarning: Cython directive 'language_level' not set, using 2 for now (Py2). This will change in a later release! File: /Users/tema/.ipython/cython/_cython_magic_52369e6689303208367674016d90c298.pyx
tree = Parsing.p_module(s, pxd, full_module_name)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-11-72854e963c38> in <module>()
----> 1 get_ipython().run_cell_magic(u'cython', u'', u'from contextlib import contextmanager\n\n@contextmanager\ndef tag(name):\n print("<%s>" % name)\n yield\n print("</%s>" % name)\n\n\nfrom cython.parallel cimport prange\nfrom libc.stdio cimport printf\n\n\ndef func():\n cdef int i\n\n with tag(\'aaa\'):\n for i in prange(5, nogil=True):\n printf("%d", i)')
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.pyc in run_cell_magic(self, magic_name, line, cell)
2115 magic_arg_s = self.var_expand(line, stack_depth)
2116 with self.builtin_trap:
-> 2117 result = fn(magic_arg_s, cell)
2118 return result
2119
<decorator-gen-118> in cython(self, line, cell)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/IPython/core/magic.pyc in <lambda>(f, *a, **k)
186 # but it's overkill for just that one bit of state.
187 def magic_deco(arg):
--> 188 call = lambda f, *a, **k: f(*a, **k)
189
190 if callable(arg):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/IpythonMagic.pyc in cython(self, line, cell)
322 extension = None
323 if need_cythonize:
--> 324 extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
325 assert len(extensions) == 1
326 extension = extensions[0]
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/IpythonMagic.pyc in _cythonize(self, module_name, code, lib_dir, args, quiet)
430 elif sys.version_info[0] >= 3:
431 opts['language_level'] = 3
--> 432 return cythonize([extension], **opts)
433 except CompileError:
434 return None
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/Dependencies.pyc in cythonize(module_list, exclude, nthreads, aliases, quiet, force, language, exclude_failures, **options)
1084 if not nthreads:
1085 for args in to_compile:
-> 1086 cythonize_one(*args)
1087
1088 if exclude_failures:
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/Dependencies.pyc in cythonize_one(pyx_file, c_file, fingerprint, quiet, options, raise_on_failure, embedded_metadata, full_module_name, progress)
1190 any_failures = 0
1191 try:
-> 1192 result = compile_single(pyx_file, options, full_module_name=full_module_name)
1193 if result.num_errors > 0:
1194 any_failures = 1
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.pyc in compile_single(source, options, full_module_name)
723 recursion.
724 """
--> 725 return run_pipeline(source, options, full_module_name)
726
727
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.pyc in run_pipeline(source, options, full_module_name, context)
511
512 context.setup_errors(options, result)
--> 513 err, enddata = Pipeline.run_pipeline(pipeline, source)
514 context.teardown_errors(err, options, result)
515 return result
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in run_pipeline(pipeline, source, printtree)
353 exec("def %s(phase, data): return phase(data)" % phase_name, exec_ns)
354 run = _pipeline_entry_points[phase_name] = exec_ns[phase_name]
--> 355 data = run(phase, data)
356 if DebugFlags.debug_verbose_pipeline:
357 print(" %.3f seconds" % (time() - t))
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in run(phase, data)
333
334 def run(phase, data):
--> 335 return phase(data)
336
337 error = None
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in generate_pyx_code_stage(module_node)
50 def generate_pyx_code_stage_factory(options, result):
51 def generate_pyx_code_stage(module_node):
---> 52 module_node.process_implementation(options, result)
53 result.compilation_source = module_node.compilation_source
54 return result
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.pyc in process_implementation(self, options, result)
141 self.find_referenced_modules(env, self.referenced_modules, {})
142 self.sort_cdef_classes(env)
--> 143 self.generate_c_code(env, options, result)
144 self.generate_h_code(env, options, result)
145 self.generate_api_code(env, options, result)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.pyc in generate_c_code(self, env, options, result)
377 self.generate_variable_definitions(env, code)
378
--> 379 self.body.generate_function_definitions(env, code)
380
381 code.mark_pos(None)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
440 #print "StatListNode.generate_function_definitions" ###
441 for stat in self.stats:
--> 442 stat.generate_function_definitions(env, code)
443
444 def generate_execution_code(self, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
3171 self.py_wrapper.func_cname = self.entry.func_cname
3172 self.py_wrapper.generate_function_definitions(env, code)
-> 3173 FuncDefNode.generate_function_definitions(self, env, code)
3174
3175 def generate_function_header(self, code, with_pymethdef, proto_only=0):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
1981 # ----- Function body -----
1982 # -------------------------
-> 1983 self.generate_function_body(env, code)
1984
1985 code.mark_pos(self.pos, trace=False)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_body(self, env, code)
1743
1744 def generate_function_body(self, env, code):
-> 1745 self.body.generate_execution_code(code)
1746
1747 def generate_function_definitions(self, env, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
446 for stat in self.stats:
447 code.mark_pos(stat.pos)
--> 448 stat.generate_execution_code(code)
449
450 def annotate(self, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
7090
7091 code.error_label = old_error_label
-> 7092 self.body.generate_execution_code(code)
7093
7094 if code.label_used(intermediate_error_label):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
7559 if not self.body.is_terminator:
7560 code.putln('/*normal exit:*/{')
-> 7561 fresh_finally_clause().generate_execution_code(code)
7562 if not self.finally_clause.is_terminator:
7563 code.put_goto(catch_label)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in fresh_finally_clause(_next)
7547 # generate the original subtree once and always keep a fresh copy
7548 node = _next[0]
-> 7549 node_copy = copy.deepcopy(node)
7550 if node is self.finally_clause:
7551 _next[0] = node_copy
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_list(x, memo)
228 memo[id(x)] = y
229 for a in x:
--> 230 y.append(deepcopy(a, memo))
231 return y
232 d[list] = _deepcopy_list
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_list(x, memo)
228 memo[id(x)] = y
229 for a in x:
--> 230 y.append(deepcopy(a, memo))
231 return y
232 d[list] = _deepcopy_list
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
180 reductor = getattr(x, "__reduce_ex__", None)
181 if reductor:
--> 182 rv = reductor(2)
183 else:
184 reductor = getattr(x, "__reduce__", None)
TypeError: can't pickle cStringIO.StringO objects
|
TypeError
|
def end_parallel_control_flow_block(
self, code, break_=False, continue_=False, return_=False
):
"""
This ends the parallel control flow block and based on how the parallel
section was exited, takes the corresponding action. The break_ and
continue_ parameters indicate whether these should be propagated
outwards:
for i in prange(...):
with cython.parallel.parallel():
continue
Here break should be trapped in the parallel block, and propagated to
the for loop.
"""
c = self.begin_of_parallel_control_block_point
self.begin_of_parallel_control_block_point = None
self.begin_of_parallel_control_block_point_after_decls = None
# Firstly, always prefer errors over returning, continue or break
if self.error_label_used:
c.putln("const char *%s = NULL; int %s = 0, %s = 0;" % self.parallel_pos_info)
c.putln("PyObject *%s = NULL, *%s = NULL, *%s = NULL;" % self.parallel_exc)
code.putln("if (%s) {" % Naming.parallel_exc_type)
code.putln(
"/* This may have been overridden by a continue, "
"break or return in another thread. Prefer the error. */"
)
code.putln("%s = 4;" % Naming.parallel_why)
code.putln("}")
if continue_:
any_label_used = self.any_label_used
else:
any_label_used = self.breaking_label_used
if any_label_used:
# __pyx_parallel_why is used, declare and initialize
c.putln("int %s;" % Naming.parallel_why)
c.putln("%s = 0;" % Naming.parallel_why)
code.putln("if (%s) {" % Naming.parallel_why)
for temp_cname, private_cname in self.parallel_private_temps:
code.putln("%s = %s;" % (private_cname, temp_cname))
code.putln("switch (%s) {" % Naming.parallel_why)
if continue_:
code.put(" case 1: ")
code.put_goto(code.continue_label)
if break_:
code.put(" case 2: ")
code.put_goto(code.break_label)
if return_:
code.put(" case 3: ")
code.put_goto(code.return_label)
if self.error_label_used:
code.globalstate.use_utility_code(restore_exception_utility_code)
code.putln(" case 4:")
self.restore_parallel_exception(code)
code.put_goto(code.error_label)
code.putln("}") # end switch
code.putln("}") # end if
code.end_block() # end parallel control flow block
self.redef_builtin_expect_apple_gcc_bug(code)
|
def end_parallel_control_flow_block(
self, code, break_=False, continue_=False, return_=False
):
"""
This ends the parallel control flow block and based on how the parallel
section was exited, takes the corresponding action. The break_ and
continue_ parameters indicate whether these should be propagated
outwards:
for i in prange(...):
with cython.parallel.parallel():
continue
Here break should be trapped in the parallel block, and propagated to
the for loop.
"""
c = self.begin_of_parallel_control_block_point
# Firstly, always prefer errors over returning, continue or break
if self.error_label_used:
c.putln("const char *%s = NULL; int %s = 0, %s = 0;" % self.parallel_pos_info)
c.putln("PyObject *%s = NULL, *%s = NULL, *%s = NULL;" % self.parallel_exc)
code.putln("if (%s) {" % Naming.parallel_exc_type)
code.putln(
"/* This may have been overridden by a continue, "
"break or return in another thread. Prefer the error. */"
)
code.putln("%s = 4;" % Naming.parallel_why)
code.putln("}")
if continue_:
any_label_used = self.any_label_used
else:
any_label_used = self.breaking_label_used
if any_label_used:
# __pyx_parallel_why is used, declare and initialize
c.putln("int %s;" % Naming.parallel_why)
c.putln("%s = 0;" % Naming.parallel_why)
code.putln("if (%s) {" % Naming.parallel_why)
for temp_cname, private_cname in self.parallel_private_temps:
code.putln("%s = %s;" % (private_cname, temp_cname))
code.putln("switch (%s) {" % Naming.parallel_why)
if continue_:
code.put(" case 1: ")
code.put_goto(code.continue_label)
if break_:
code.put(" case 2: ")
code.put_goto(code.break_label)
if return_:
code.put(" case 3: ")
code.put_goto(code.return_label)
if self.error_label_used:
code.globalstate.use_utility_code(restore_exception_utility_code)
code.putln(" case 4:")
self.restore_parallel_exception(code)
code.put_goto(code.error_label)
code.putln("}") # end switch
code.putln("}") # end if
code.end_block() # end parallel control flow block
self.redef_builtin_expect_apple_gcc_bug(code)
|
https://github.com/cython/cython/issues/2780
|
%%cython
from contextlib import contextmanager
@contextmanager
def tag(name):
print("<%s>" % name)
yield
print("</%s>" % name)
from cython.parallel cimport prange
from libc.stdio cimport printf
def func():
cdef int i
with tag('aaa'):
for i in prange(5, nogil=True): # using "xrange" or "with nogil" works well
printf("%d", i)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.py:367: FutureWarning: Cython directive 'language_level' not set, using 2 for now (Py2). This will change in a later release! File: /Users/tema/.ipython/cython/_cython_magic_52369e6689303208367674016d90c298.pyx
tree = Parsing.p_module(s, pxd, full_module_name)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-11-72854e963c38> in <module>()
----> 1 get_ipython().run_cell_magic(u'cython', u'', u'from contextlib import contextmanager\n\n@contextmanager\ndef tag(name):\n print("<%s>" % name)\n yield\n print("</%s>" % name)\n\n\nfrom cython.parallel cimport prange\nfrom libc.stdio cimport printf\n\n\ndef func():\n cdef int i\n\n with tag(\'aaa\'):\n for i in prange(5, nogil=True):\n printf("%d", i)')
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.pyc in run_cell_magic(self, magic_name, line, cell)
2115 magic_arg_s = self.var_expand(line, stack_depth)
2116 with self.builtin_trap:
-> 2117 result = fn(magic_arg_s, cell)
2118 return result
2119
<decorator-gen-118> in cython(self, line, cell)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/IPython/core/magic.pyc in <lambda>(f, *a, **k)
186 # but it's overkill for just that one bit of state.
187 def magic_deco(arg):
--> 188 call = lambda f, *a, **k: f(*a, **k)
189
190 if callable(arg):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/IpythonMagic.pyc in cython(self, line, cell)
322 extension = None
323 if need_cythonize:
--> 324 extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
325 assert len(extensions) == 1
326 extension = extensions[0]
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/IpythonMagic.pyc in _cythonize(self, module_name, code, lib_dir, args, quiet)
430 elif sys.version_info[0] >= 3:
431 opts['language_level'] = 3
--> 432 return cythonize([extension], **opts)
433 except CompileError:
434 return None
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/Dependencies.pyc in cythonize(module_list, exclude, nthreads, aliases, quiet, force, language, exclude_failures, **options)
1084 if not nthreads:
1085 for args in to_compile:
-> 1086 cythonize_one(*args)
1087
1088 if exclude_failures:
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/Dependencies.pyc in cythonize_one(pyx_file, c_file, fingerprint, quiet, options, raise_on_failure, embedded_metadata, full_module_name, progress)
1190 any_failures = 0
1191 try:
-> 1192 result = compile_single(pyx_file, options, full_module_name=full_module_name)
1193 if result.num_errors > 0:
1194 any_failures = 1
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.pyc in compile_single(source, options, full_module_name)
723 recursion.
724 """
--> 725 return run_pipeline(source, options, full_module_name)
726
727
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.pyc in run_pipeline(source, options, full_module_name, context)
511
512 context.setup_errors(options, result)
--> 513 err, enddata = Pipeline.run_pipeline(pipeline, source)
514 context.teardown_errors(err, options, result)
515 return result
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in run_pipeline(pipeline, source, printtree)
353 exec("def %s(phase, data): return phase(data)" % phase_name, exec_ns)
354 run = _pipeline_entry_points[phase_name] = exec_ns[phase_name]
--> 355 data = run(phase, data)
356 if DebugFlags.debug_verbose_pipeline:
357 print(" %.3f seconds" % (time() - t))
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in run(phase, data)
333
334 def run(phase, data):
--> 335 return phase(data)
336
337 error = None
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in generate_pyx_code_stage(module_node)
50 def generate_pyx_code_stage_factory(options, result):
51 def generate_pyx_code_stage(module_node):
---> 52 module_node.process_implementation(options, result)
53 result.compilation_source = module_node.compilation_source
54 return result
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.pyc in process_implementation(self, options, result)
141 self.find_referenced_modules(env, self.referenced_modules, {})
142 self.sort_cdef_classes(env)
--> 143 self.generate_c_code(env, options, result)
144 self.generate_h_code(env, options, result)
145 self.generate_api_code(env, options, result)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.pyc in generate_c_code(self, env, options, result)
377 self.generate_variable_definitions(env, code)
378
--> 379 self.body.generate_function_definitions(env, code)
380
381 code.mark_pos(None)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
440 #print "StatListNode.generate_function_definitions" ###
441 for stat in self.stats:
--> 442 stat.generate_function_definitions(env, code)
443
444 def generate_execution_code(self, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
3171 self.py_wrapper.func_cname = self.entry.func_cname
3172 self.py_wrapper.generate_function_definitions(env, code)
-> 3173 FuncDefNode.generate_function_definitions(self, env, code)
3174
3175 def generate_function_header(self, code, with_pymethdef, proto_only=0):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
1981 # ----- Function body -----
1982 # -------------------------
-> 1983 self.generate_function_body(env, code)
1984
1985 code.mark_pos(self.pos, trace=False)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_body(self, env, code)
1743
1744 def generate_function_body(self, env, code):
-> 1745 self.body.generate_execution_code(code)
1746
1747 def generate_function_definitions(self, env, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
446 for stat in self.stats:
447 code.mark_pos(stat.pos)
--> 448 stat.generate_execution_code(code)
449
450 def annotate(self, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
7090
7091 code.error_label = old_error_label
-> 7092 self.body.generate_execution_code(code)
7093
7094 if code.label_used(intermediate_error_label):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
7559 if not self.body.is_terminator:
7560 code.putln('/*normal exit:*/{')
-> 7561 fresh_finally_clause().generate_execution_code(code)
7562 if not self.finally_clause.is_terminator:
7563 code.put_goto(catch_label)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in fresh_finally_clause(_next)
7547 # generate the original subtree once and always keep a fresh copy
7548 node = _next[0]
-> 7549 node_copy = copy.deepcopy(node)
7550 if node is self.finally_clause:
7551 _next[0] = node_copy
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_list(x, memo)
228 memo[id(x)] = y
229 for a in x:
--> 230 y.append(deepcopy(a, memo))
231 return y
232 d[list] = _deepcopy_list
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_list(x, memo)
228 memo[id(x)] = y
229 for a in x:
--> 230 y.append(deepcopy(a, memo))
231 return y
232 d[list] = _deepcopy_list
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
180 reductor = getattr(x, "__reduce_ex__", None)
181 if reductor:
--> 182 rv = reductor(2)
183 else:
184 reductor = getattr(x, "__reduce__", None)
TypeError: can't pickle cStringIO.StringO objects
|
TypeError
|
def generate_execution_code(self, code):
"""
Generate code in the following steps
1) copy any closure variables determined thread-private
into temporaries
2) allocate temps for start, stop and step
3) generate a loop that calculates the total number of steps,
which then computes the target iteration variable for every step:
for i in prange(start, stop, step):
...
becomes
nsteps = (stop - start) / step;
i = start;
#pragma omp parallel for lastprivate(i)
for (temp = 0; temp < nsteps; temp++) {
i = start + step * temp;
...
}
Note that accumulation of 'i' would have a data dependency
between iterations.
Also, you can't do this
for (i = start; i < stop; i += step)
...
as the '<' operator should become '>' for descending loops.
'for i from x < i < y:' does not suffer from this problem
as the relational operator is known at compile time!
4) release our temps and write back any private closure variables
"""
self.declare_closure_privates(code)
# This can only be a NameNode
target_index_cname = self.target.entry.cname
# This will be used as the dict to format our code strings, holding
# the start, stop , step, temps and target cnames
fmt_dict = {
"target": target_index_cname,
"target_type": self.target.type.empty_declaration_code(),
}
# Setup start, stop and step, allocating temps if needed
start_stop_step = self.start, self.stop, self.step
defaults = "0", "0", "1"
for node, name, default in zip(start_stop_step, self.names, defaults):
if node is None:
result = default
elif node.is_literal:
result = node.get_constant_c_result_code()
else:
node.generate_evaluation_code(code)
result = node.result()
fmt_dict[name] = result
fmt_dict["i"] = code.funcstate.allocate_temp(self.index_type, False)
fmt_dict["nsteps"] = code.funcstate.allocate_temp(self.index_type, False)
# TODO: check if the step is 0 and if so, raise an exception in a
# 'with gil' block. For now, just abort
code.putln("if (%(step)s == 0) abort();" % fmt_dict)
self.setup_parallel_control_flow_block(code) # parallel control flow block
# Note: nsteps is private in an outer scope if present
code.putln(
"%(nsteps)s = (%(stop)s - %(start)s + %(step)s - %(step)s/abs(%(step)s)) / %(step)s;"
% fmt_dict
)
# The target iteration variable might not be initialized, do it only if
# we are executing at least 1 iteration, otherwise we should leave the
# target unaffected. The target iteration variable is firstprivate to
# shut up compiler warnings caused by lastprivate, as the compiler
# erroneously believes that nsteps may be <= 0, leaving the private
# target index uninitialized
code.putln("if (%(nsteps)s > 0)" % fmt_dict)
code.begin_block() # if block
self.generate_loop(code, fmt_dict)
code.end_block() # end if block
self.restore_labels(code)
if self.else_clause:
if self.breaking_label_used:
code.put("if (%s < 2)" % Naming.parallel_why)
code.begin_block() # else block
code.putln("/* else */")
self.else_clause.generate_execution_code(code)
code.end_block() # end else block
# ------ cleanup ------
self.end_parallel_control_flow_block(code) # end parallel control flow block
# And finally, release our privates and write back any closure
# variables
for temp in start_stop_step + (self.chunksize, self.num_threads):
if temp is not None:
temp.generate_disposal_code(code)
temp.free_temps(code)
code.funcstate.release_temp(fmt_dict["i"])
code.funcstate.release_temp(fmt_dict["nsteps"])
self.release_closure_privates(code)
|
def generate_execution_code(self, code):
"""
Generate code in the following steps
1) copy any closure variables determined thread-private
into temporaries
2) allocate temps for start, stop and step
3) generate a loop that calculates the total number of steps,
which then computes the target iteration variable for every step:
for i in prange(start, stop, step):
...
becomes
nsteps = (stop - start) / step;
i = start;
#pragma omp parallel for lastprivate(i)
for (temp = 0; temp < nsteps; temp++) {
i = start + step * temp;
...
}
Note that accumulation of 'i' would have a data dependency
between iterations.
Also, you can't do this
for (i = start; i < stop; i += step)
...
as the '<' operator should become '>' for descending loops.
'for i from x < i < y:' does not suffer from this problem
as the relational operator is known at compile time!
4) release our temps and write back any private closure variables
"""
self.declare_closure_privates(code)
# This can only be a NameNode
target_index_cname = self.target.entry.cname
# This will be used as the dict to format our code strings, holding
# the start, stop , step, temps and target cnames
fmt_dict = {
"target": target_index_cname,
"target_type": self.target.type.empty_declaration_code(),
}
# Setup start, stop and step, allocating temps if needed
start_stop_step = self.start, self.stop, self.step
defaults = "0", "0", "1"
for node, name, default in zip(start_stop_step, self.names, defaults):
if node is None:
result = default
elif node.is_literal:
result = node.get_constant_c_result_code()
else:
node.generate_evaluation_code(code)
result = node.result()
fmt_dict[name] = result
fmt_dict["i"] = code.funcstate.allocate_temp(self.index_type, False)
fmt_dict["nsteps"] = code.funcstate.allocate_temp(self.index_type, False)
# TODO: check if the step is 0 and if so, raise an exception in a
# 'with gil' block. For now, just abort
code.putln("if (%(step)s == 0) abort();" % fmt_dict)
self.setup_parallel_control_flow_block(code) # parallel control flow block
self.control_flow_var_code_point = code.insertion_point()
# Note: nsteps is private in an outer scope if present
code.putln(
"%(nsteps)s = (%(stop)s - %(start)s + %(step)s - %(step)s/abs(%(step)s)) / %(step)s;"
% fmt_dict
)
# The target iteration variable might not be initialized, do it only if
# we are executing at least 1 iteration, otherwise we should leave the
# target unaffected. The target iteration variable is firstprivate to
# shut up compiler warnings caused by lastprivate, as the compiler
# erroneously believes that nsteps may be <= 0, leaving the private
# target index uninitialized
code.putln("if (%(nsteps)s > 0)" % fmt_dict)
code.begin_block() # if block
self.generate_loop(code, fmt_dict)
code.end_block() # end if block
self.restore_labels(code)
if self.else_clause:
if self.breaking_label_used:
code.put("if (%s < 2)" % Naming.parallel_why)
code.begin_block() # else block
code.putln("/* else */")
self.else_clause.generate_execution_code(code)
code.end_block() # end else block
# ------ cleanup ------
self.end_parallel_control_flow_block(code) # end parallel control flow block
# And finally, release our privates and write back any closure
# variables
for temp in start_stop_step + (self.chunksize, self.num_threads):
if temp is not None:
temp.generate_disposal_code(code)
temp.free_temps(code)
code.funcstate.release_temp(fmt_dict["i"])
code.funcstate.release_temp(fmt_dict["nsteps"])
self.release_closure_privates(code)
|
https://github.com/cython/cython/issues/2780
|
%%cython
from contextlib import contextmanager
@contextmanager
def tag(name):
print("<%s>" % name)
yield
print("</%s>" % name)
from cython.parallel cimport prange
from libc.stdio cimport printf
def func():
cdef int i
with tag('aaa'):
for i in prange(5, nogil=True): # using "xrange" or "with nogil" works well
printf("%d", i)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.py:367: FutureWarning: Cython directive 'language_level' not set, using 2 for now (Py2). This will change in a later release! File: /Users/tema/.ipython/cython/_cython_magic_52369e6689303208367674016d90c298.pyx
tree = Parsing.p_module(s, pxd, full_module_name)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-11-72854e963c38> in <module>()
----> 1 get_ipython().run_cell_magic(u'cython', u'', u'from contextlib import contextmanager\n\n@contextmanager\ndef tag(name):\n print("<%s>" % name)\n yield\n print("</%s>" % name)\n\n\nfrom cython.parallel cimport prange\nfrom libc.stdio cimport printf\n\n\ndef func():\n cdef int i\n\n with tag(\'aaa\'):\n for i in prange(5, nogil=True):\n printf("%d", i)')
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.pyc in run_cell_magic(self, magic_name, line, cell)
2115 magic_arg_s = self.var_expand(line, stack_depth)
2116 with self.builtin_trap:
-> 2117 result = fn(magic_arg_s, cell)
2118 return result
2119
<decorator-gen-118> in cython(self, line, cell)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/IPython/core/magic.pyc in <lambda>(f, *a, **k)
186 # but it's overkill for just that one bit of state.
187 def magic_deco(arg):
--> 188 call = lambda f, *a, **k: f(*a, **k)
189
190 if callable(arg):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/IpythonMagic.pyc in cython(self, line, cell)
322 extension = None
323 if need_cythonize:
--> 324 extensions = self._cythonize(module_name, code, lib_dir, args, quiet=args.quiet)
325 assert len(extensions) == 1
326 extension = extensions[0]
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/IpythonMagic.pyc in _cythonize(self, module_name, code, lib_dir, args, quiet)
430 elif sys.version_info[0] >= 3:
431 opts['language_level'] = 3
--> 432 return cythonize([extension], **opts)
433 except CompileError:
434 return None
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/Dependencies.pyc in cythonize(module_list, exclude, nthreads, aliases, quiet, force, language, exclude_failures, **options)
1084 if not nthreads:
1085 for args in to_compile:
-> 1086 cythonize_one(*args)
1087
1088 if exclude_failures:
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Build/Dependencies.pyc in cythonize_one(pyx_file, c_file, fingerprint, quiet, options, raise_on_failure, embedded_metadata, full_module_name, progress)
1190 any_failures = 0
1191 try:
-> 1192 result = compile_single(pyx_file, options, full_module_name=full_module_name)
1193 if result.num_errors > 0:
1194 any_failures = 1
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.pyc in compile_single(source, options, full_module_name)
723 recursion.
724 """
--> 725 return run_pipeline(source, options, full_module_name)
726
727
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Main.pyc in run_pipeline(source, options, full_module_name, context)
511
512 context.setup_errors(options, result)
--> 513 err, enddata = Pipeline.run_pipeline(pipeline, source)
514 context.teardown_errors(err, options, result)
515 return result
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in run_pipeline(pipeline, source, printtree)
353 exec("def %s(phase, data): return phase(data)" % phase_name, exec_ns)
354 run = _pipeline_entry_points[phase_name] = exec_ns[phase_name]
--> 355 data = run(phase, data)
356 if DebugFlags.debug_verbose_pipeline:
357 print(" %.3f seconds" % (time() - t))
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in run(phase, data)
333
334 def run(phase, data):
--> 335 return phase(data)
336
337 error = None
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Pipeline.pyc in generate_pyx_code_stage(module_node)
50 def generate_pyx_code_stage_factory(options, result):
51 def generate_pyx_code_stage(module_node):
---> 52 module_node.process_implementation(options, result)
53 result.compilation_source = module_node.compilation_source
54 return result
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.pyc in process_implementation(self, options, result)
141 self.find_referenced_modules(env, self.referenced_modules, {})
142 self.sort_cdef_classes(env)
--> 143 self.generate_c_code(env, options, result)
144 self.generate_h_code(env, options, result)
145 self.generate_api_code(env, options, result)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.pyc in generate_c_code(self, env, options, result)
377 self.generate_variable_definitions(env, code)
378
--> 379 self.body.generate_function_definitions(env, code)
380
381 code.mark_pos(None)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
440 #print "StatListNode.generate_function_definitions" ###
441 for stat in self.stats:
--> 442 stat.generate_function_definitions(env, code)
443
444 def generate_execution_code(self, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
3171 self.py_wrapper.func_cname = self.entry.func_cname
3172 self.py_wrapper.generate_function_definitions(env, code)
-> 3173 FuncDefNode.generate_function_definitions(self, env, code)
3174
3175 def generate_function_header(self, code, with_pymethdef, proto_only=0):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_definitions(self, env, code)
1981 # ----- Function body -----
1982 # -------------------------
-> 1983 self.generate_function_body(env, code)
1984
1985 code.mark_pos(self.pos, trace=False)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_function_body(self, env, code)
1743
1744 def generate_function_body(self, env, code):
-> 1745 self.body.generate_execution_code(code)
1746
1747 def generate_function_definitions(self, env, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
446 for stat in self.stats:
447 code.mark_pos(stat.pos)
--> 448 stat.generate_execution_code(code)
449
450 def annotate(self, code):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
7090
7091 code.error_label = old_error_label
-> 7092 self.body.generate_execution_code(code)
7093
7094 if code.label_used(intermediate_error_label):
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in generate_execution_code(self, code)
7559 if not self.body.is_terminator:
7560 code.putln('/*normal exit:*/{')
-> 7561 fresh_finally_clause().generate_execution_code(code)
7562 if not self.finally_clause.is_terminator:
7563 code.put_goto(catch_label)
/Users/tema/Projects/miniconda2/lib/python2.7/site-packages/Cython/Compiler/Nodes.pyc in fresh_finally_clause(_next)
7547 # generate the original subtree once and always keep a fresh copy
7548 node = _next[0]
-> 7549 node_copy = copy.deepcopy(node)
7550 if node is self.finally_clause:
7551 _next[0] = node_copy
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_list(x, memo)
228 memo[id(x)] = y
229 for a in x:
--> 230 y.append(deepcopy(a, memo))
231 return y
232 d[list] = _deepcopy_list
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_list(x, memo)
228 memo[id(x)] = y
229 for a in x:
--> 230 y.append(deepcopy(a, memo))
231 return y
232 d[list] = _deepcopy_list
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
188 raise Error(
189 "un(deep)copyable object of type %s" % cls)
--> 190 y = _reconstruct(x, rv, 1, memo)
191
192 memo[d] = y
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _reconstruct(x, info, deep, memo)
332 if state is not None:
333 if deep:
--> 334 state = deepcopy(state, memo)
335 if hasattr(y, '__setstate__'):
336 y.__setstate__(state)
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
161 copier = _deepcopy_dispatch.get(cls)
162 if copier:
--> 163 y = copier(x, memo)
164 else:
165 try:
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in _deepcopy_dict(x, memo)
255 memo[id(x)] = y
256 for key, value in x.iteritems():
--> 257 y[deepcopy(key, memo)] = deepcopy(value, memo)
258 return y
259 d[dict] = _deepcopy_dict
/Users/tema/Projects/miniconda2/lib/python2.7/copy.pyc in deepcopy(x, memo, _nil)
180 reductor = getattr(x, "__reduce_ex__", None)
181 if reductor:
--> 182 rv = reductor(2)
183 else:
184 reductor = getattr(x, "__reduce__", None)
TypeError: can't pickle cStringIO.StringO objects
|
TypeError
|
def generate_assignment_code(
self,
rhs,
code,
overloaded_assignment=False,
exception_check=None,
exception_value=None,
):
self.generate_subexpr_evaluation_code(code)
if self.type.is_pyobject:
self.generate_setitem_code(rhs.py_result(), code)
elif self.base.type is bytearray_type:
value_code = self._check_byte_value(code, rhs)
self.generate_setitem_code(value_code, code)
elif (
self.base.type.is_cpp_class
and self.exception_check
and self.exception_check == "+"
):
if (
overloaded_assignment
and exception_check
and self.exception_value != exception_value
):
# Handle the case that both the index operator and the assignment
# operator have a c++ exception handler and they are not the same.
translate_double_cpp_exception(
code,
self.pos,
self.type,
self.result(),
rhs.result(),
self.exception_value,
exception_value,
self.in_nogil_context,
)
else:
# Handle the case that only the index operator has a
# c++ exception handler, or that
# both exception handlers are the same.
translate_cpp_exception(
code,
self.pos,
"%s = %s;" % (self.result(), rhs.result()),
self.result() if self.type.is_pyobject else None,
self.exception_value,
self.in_nogil_context,
)
else:
code.putln("%s = %s;" % (self.result(), rhs.result()))
self.generate_subexpr_disposal_code(code)
self.free_subexpr_temps(code)
rhs.generate_disposal_code(code)
rhs.free_temps(code)
|
def generate_assignment_code(
self,
rhs,
code,
overloaded_assignment=False,
exception_check=None,
exception_value=None,
):
self.generate_subexpr_evaluation_code(code)
if self.type.is_pyobject:
self.generate_setitem_code(rhs.py_result(), code)
elif self.base.type is bytearray_type:
value_code = self._check_byte_value(code, rhs)
self.generate_setitem_code(value_code, code)
elif (
self.base.type.is_cpp_class
and self.exception_check
and self.exception_check == "+"
):
if (
overloaded_assignment
and exception_check
and self.exception_value != exception_value
):
# Handle the case that both the index operator and the assignment
# operator have a c++ exception handler and they are not the same.
translate_double_cpp_exception(
code,
self.pos,
self.type,
self.result(),
rhs.result(),
self.exception_value,
exception_value,
self.in_nogil_context,
)
else:
# Handle the case that only the index operator has a
# c++ exception handler, or that
# both exception handlers are the same.
translate_cpp_exception(
code,
self.pos,
"%s = %s;" % (self.result(), rhs.result()),
self.result() if self.lhs.is_pyobject else None,
self.exception_value,
self.in_nogil_context,
)
else:
code.putln("%s = %s;" % (self.result(), rhs.result()))
self.generate_subexpr_disposal_code(code)
self.free_subexpr_temps(code)
rhs.generate_disposal_code(code)
rhs.free_temps(code)
|
https://github.com/cython/cython/issues/2671
|
python setup.py build_ext
Compiling cy_vec_wrapper.pyx because it changed.
[1/1] Cythonizing cy_vec_wrapper.pyx
/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Main.py:367: FutureWarning: Cython directive 'language_level' not set, using 2 for now (Py2). This will change in a later release! File: /home/harding/tmp/vec_wrapper/cy_vec_wrapper.pyx
tree = Parsing.p_module(s, pxd, full_module_name)
Traceback (most recent call last):
File "setup.py", line 12, in <module>
language='c++', # generate C++ code
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Build/Dependencies.py", line 1086, in cythonize
cythonize_one(*args)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Build/Dependencies.py", line 1192, in cythonize_one
result = compile_single(pyx_file, options, full_module_name=full_module_name)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Main.py", line 725, in compile_single
return run_pipeline(source, options, full_module_name)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Main.py", line 513, in run_pipeline
err, enddata = Pipeline.run_pipeline(pipeline, source)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Pipeline.py", line 355, in run_pipeline
data = run(phase, data)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Pipeline.py", line 335, in run
return phase(data)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
module_node.process_implementation(options, result)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.py", line 143, in process_implementation
self.generate_c_code(env, options, result)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.py", line 379, in generate_c_code
self.body.generate_function_definitions(env, code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Nodes.py", line 4839, in generate_function_definitions
self.body.generate_function_definitions(self.scope, code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Nodes.py", line 442, in generate_function_definitions
stat.generate_function_definitions(env, code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Nodes.py", line 3173, in generate_function_definitions
FuncDefNode.generate_function_definitions(self, env, code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Nodes.py", line 1983, in generate_function_definitions
self.generate_function_body(env, code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Nodes.py", line 1745, in generate_function_body
self.body.generate_execution_code(code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Nodes.py", line 448, in generate_execution_code
stat.generate_execution_code(code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Nodes.py", line 5169, in generate_execution_code
self.generate_assignment_code(code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/Nodes.py", line 5466, in generate_assignment_code
self.lhs.generate_assignment_code(self.rhs, code)
File "/home/harding/.local/lib/python2.7/site-packages/Cython/Compiler/ExprNodes.py", line 4105, in generate_assignment_code
self.result() if self.lhs.is_pyobject else None,
AttributeError: 'IndexNode' object has no attribute 'lhs'
|
AttributeError
|
def _handle_simple_method_dict_pop(self, node, function, args, is_unbound_method):
"""Replace dict.pop() by a call to _PyDict_Pop()."""
if len(args) == 2:
args.append(ExprNodes.NullNode(node.pos))
elif len(args) != 3:
self._error_wrong_arg_count("dict.pop", node, args, "2 or 3")
return node
return self._substitute_method_call(
node,
function,
"__Pyx_PyDict_Pop",
self.PyDict_Pop_func_type,
"pop",
is_unbound_method,
args,
may_return_none=True,
utility_code=load_c_utility("py_dict_pop"),
)
|
def _handle_simple_method_dict_pop(self, node, function, args, is_unbound_method):
"""Replace dict.pop() by a call to _PyDict_Pop()."""
if len(args) == 2:
args.append(ExprNodes.NullNode(node.pos))
elif len(args) != 3:
self._error_wrong_arg_count("dict.pop", node, args, "2 or 3")
return node
return self._substitute_method_call(
node,
function,
"__Pyx_PyDict_Pop",
self.PyDict_Pop_func_type,
"pop",
is_unbound_method,
args,
utility_code=load_c_utility("py_dict_pop"),
)
|
https://github.com/cython/cython/issues/2152
|
Traceback (most recent call last):
File "run_cython.py", line 2, in <module>
main()
File "cython_ext_type_none.pyx", line 12, in cython_ext_type_none.main
A = MyClass() # This doesn't work
File "cython_ext_type_none.pyx", line 7, in cython_ext_type_none.MyClass.__init__
self.attr = kwargs.pop('attr', None)
TypeError: Cannot convert NoneType to cython_ext_type_none.MyClass
|
TypeError
|
def generate_type_ready_code(self, env, entry, code):
# Generate a call to PyType_Ready for an extension
# type defined in this module.
type = entry.type
typeobj_cname = type.typeobj_cname
scope = type.scope
if scope: # could be None if there was an error
if entry.visibility != "extern":
for slot in TypeSlots.slot_table:
slot.generate_dynamic_init_code(scope, code)
code.putln(
"if (PyType_Ready(&%s) < 0) %s"
% (typeobj_cname, code.error_goto(entry.pos))
)
# Don't inherit tp_print from builtin types, restoring the
# behavior of using tp_repr or tp_str instead.
code.putln("%s.tp_print = 0;" % typeobj_cname)
# Fix special method docstrings. This is a bit of a hack, but
# unless we let PyType_Ready create the slot wrappers we have
# a significant performance hit. (See trac #561.)
for func in entry.type.scope.pyfunc_entries:
is_buffer = func.name in ("__getbuffer__", "__releasebuffer__")
if (
func.is_special
and Options.docstrings
and func.wrapperbase_cname
and not is_buffer
):
slot = TypeSlots.method_name_to_slot.get(func.name)
preprocessor_guard = (
slot.preprocessor_guard_code() if slot else None
)
if preprocessor_guard:
code.putln(preprocessor_guard)
code.putln("#if CYTHON_COMPILING_IN_CPYTHON")
code.putln("{")
code.putln(
'PyObject *wrapper = PyObject_GetAttrString((PyObject *)&%s, "%s"); %s'
% (
typeobj_cname,
func.name,
code.error_goto_if_null("wrapper", entry.pos),
)
)
code.putln("if (Py_TYPE(wrapper) == &PyWrapperDescr_Type) {")
code.putln(
"%s = *((PyWrapperDescrObject *)wrapper)->d_base;"
% (func.wrapperbase_cname)
)
code.putln(
"%s.doc = %s;" % (func.wrapperbase_cname, func.doc_cname)
)
code.putln(
"((PyWrapperDescrObject *)wrapper)->d_base = &%s;"
% (func.wrapperbase_cname)
)
code.putln("}")
code.putln("}")
code.putln("#endif")
if preprocessor_guard:
code.putln("#endif")
if type.vtable_cname:
code.putln(
"if (__Pyx_SetVtable(%s.tp_dict, %s) < 0) %s"
% (typeobj_cname, type.vtabptr_cname, code.error_goto(entry.pos))
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("SetVTable", "ImportExport.c")
)
if not type.scope.is_internal and not type.scope.directives["internal"]:
# scope.is_internal is set for types defined by
# Cython (such as closures), the 'internal'
# directive is set by users
code.putln(
'if (PyObject_SetAttrString(%s, "%s", (PyObject *)&%s) < 0) %s'
% (
Naming.module_cname,
scope.class_name,
typeobj_cname,
code.error_goto(entry.pos),
)
)
weakref_entry = (
scope.lookup_here("__weakref__")
if not scope.is_closure_class_scope
else None
)
if weakref_entry:
if weakref_entry.type is py_object_type:
tp_weaklistoffset = "%s.tp_weaklistoffset" % typeobj_cname
if type.typedef_flag:
objstruct = type.objstruct_cname
else:
objstruct = "struct %s" % type.objstruct_cname
code.putln(
"if (%s == 0) %s = offsetof(%s, %s);"
% (
tp_weaklistoffset,
tp_weaklistoffset,
objstruct,
weakref_entry.cname,
)
)
else:
error(
weakref_entry.pos, "__weakref__ slot must be of type 'object'"
)
if (
scope.lookup_here("__reduce_cython__")
if not scope.is_closure_class_scope
else None
):
# Unfortunately, we cannot reliably detect whether a
# superclass defined __reduce__ at compile time, so we must
# do so at runtime.
code.globalstate.use_utility_code(
UtilityCode.load_cached("SetupReduce", "ExtensionTypes.c")
)
code.putln(
"if (__Pyx_setup_reduce((PyObject*)&%s) < 0) %s"
% (typeobj_cname, code.error_goto(entry.pos))
)
|
def generate_type_ready_code(self, env, entry, code):
# Generate a call to PyType_Ready for an extension
# type defined in this module.
type = entry.type
typeobj_cname = type.typeobj_cname
scope = type.scope
if scope: # could be None if there was an error
if entry.visibility != "extern":
for slot in TypeSlots.slot_table:
slot.generate_dynamic_init_code(scope, code)
code.putln(
"if (PyType_Ready(&%s) < 0) %s"
% (typeobj_cname, code.error_goto(entry.pos))
)
# Don't inherit tp_print from builtin types, restoring the
# behavior of using tp_repr or tp_str instead.
code.putln("%s.tp_print = 0;" % typeobj_cname)
# Fix special method docstrings. This is a bit of a hack, but
# unless we let PyType_Ready create the slot wrappers we have
# a significant performance hit. (See trac #561.)
for func in entry.type.scope.pyfunc_entries:
is_buffer = func.name in ("__getbuffer__", "__releasebuffer__")
if (
func.is_special
and Options.docstrings
and func.wrapperbase_cname
and not is_buffer
):
slot = TypeSlots.method_name_to_slot[func.name]
preprocessor_guard = slot.preprocessor_guard_code()
if preprocessor_guard:
code.putln(preprocessor_guard)
code.putln("#if CYTHON_COMPILING_IN_CPYTHON")
code.putln("{")
code.putln(
'PyObject *wrapper = PyObject_GetAttrString((PyObject *)&%s, "%s"); %s'
% (
typeobj_cname,
func.name,
code.error_goto_if_null("wrapper", entry.pos),
)
)
code.putln("if (Py_TYPE(wrapper) == &PyWrapperDescr_Type) {")
code.putln(
"%s = *((PyWrapperDescrObject *)wrapper)->d_base;"
% (func.wrapperbase_cname)
)
code.putln(
"%s.doc = %s;" % (func.wrapperbase_cname, func.doc_cname)
)
code.putln(
"((PyWrapperDescrObject *)wrapper)->d_base = &%s;"
% (func.wrapperbase_cname)
)
code.putln("}")
code.putln("}")
code.putln("#endif")
if preprocessor_guard:
code.putln("#endif")
if type.vtable_cname:
code.putln(
"if (__Pyx_SetVtable(%s.tp_dict, %s) < 0) %s"
% (typeobj_cname, type.vtabptr_cname, code.error_goto(entry.pos))
)
code.globalstate.use_utility_code(
UtilityCode.load_cached("SetVTable", "ImportExport.c")
)
if not type.scope.is_internal and not type.scope.directives["internal"]:
# scope.is_internal is set for types defined by
# Cython (such as closures), the 'internal'
# directive is set by users
code.putln(
'if (PyObject_SetAttrString(%s, "%s", (PyObject *)&%s) < 0) %s'
% (
Naming.module_cname,
scope.class_name,
typeobj_cname,
code.error_goto(entry.pos),
)
)
weakref_entry = (
scope.lookup_here("__weakref__")
if not scope.is_closure_class_scope
else None
)
if weakref_entry:
if weakref_entry.type is py_object_type:
tp_weaklistoffset = "%s.tp_weaklistoffset" % typeobj_cname
if type.typedef_flag:
objstruct = type.objstruct_cname
else:
objstruct = "struct %s" % type.objstruct_cname
code.putln(
"if (%s == 0) %s = offsetof(%s, %s);"
% (
tp_weaklistoffset,
tp_weaklistoffset,
objstruct,
weakref_entry.cname,
)
)
else:
error(
weakref_entry.pos, "__weakref__ slot must be of type 'object'"
)
if (
scope.lookup_here("__reduce_cython__")
if not scope.is_closure_class_scope
else None
):
# Unfortunately, we cannot reliably detect whether a
# superclass defined __reduce__ at compile time, so we must
# do so at runtime.
code.globalstate.use_utility_code(
UtilityCode.load_cached("SetupReduce", "ExtensionTypes.c")
)
code.putln(
"if (__Pyx_setup_reduce((PyObject*)&%s) < 0) %s"
% (typeobj_cname, code.error_goto(entry.pos))
)
|
https://github.com/cython/cython/issues/2019
|
[sagelib-8.1.rc2] [1/2] Cythonizing sage/libs/pynac/constant.pyx
[sagelib-8.1.rc2] [2/2] Cythonizing sage/libs/pynac/pynac.pyx
[sagelib-8.1.rc2] Traceback (most recent call last):
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Build/Dependencies.py", line 1179, in cythonize_one_helper
[sagelib-8.1.rc2] return cythonize_one(*m)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Build/Dependencies.py", line 1144, in cythonize_one
[sagelib-8.1.rc2] result = compile([pyx_file], options)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/Main.py", line 695, in compile
[sagelib-8.1.rc2] return compile_multiple(source, options)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/Main.py", line 673, in compile_multiple
[sagelib-8.1.rc2] result = run_pipeline(source, options, context=context)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/Main.py", line 494, in run_pipeline
[sagelib-8.1.rc2] err, enddata = Pipeline.run_pipeline(pipeline, source)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/Pipeline.py", line 354, in run_pipeline
[sagelib-8.1.rc2] data = run(phase, data)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/Pipeline.py", line 334, in run
[sagelib-8.1.rc2] return phase(data)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/Pipeline.py", line 52, in generate_pyx_code_stage
[sagelib-8.1.rc2] module_node.process_implementation(options, result)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.py", line 141, in process_implementation
[sagelib-8.1.rc2] self.generate_c_code(env, options, result)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.py", line 380, in generate_c_code
[sagelib-8.1.rc2] self.generate_module_init_func(modules[:-1], env, globalstate['init_module'])
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.py", line 2377, in generate_module_init_func
[sagelib-8.1.rc2] self.generate_type_init_code(env, code)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.py", line 2864, in generate_type_init_code
[sagelib-8.1.rc2] self.generate_type_ready_code(env, entry, code)
[sagelib-8.1.rc2] File "/home/ralf/sage/local/lib/python2.7/site-packages/Cython/Compiler/ModuleNode.py", line 2963, in generate_type_ready_code
[sagelib-8.1.rc2] slot = TypeSlots.method_name_to_slot[func.name]
[sagelib-8.1.rc2] KeyError: u'__eq__'
|
KeyError
|
def create_extension_list(
patterns,
exclude=None,
ctx=None,
aliases=None,
quiet=False,
language=None,
exclude_failures=False,
):
if language is not None:
print(
'Please put "# distutils: language=%s" in your .pyx or .pxd file(s)'
% language
)
if exclude is None:
exclude = []
if patterns is None:
return [], {}
elif isinstance(patterns, basestring) or not isinstance(
patterns, collections.Iterable
):
patterns = [patterns]
explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
seen = set()
deps = create_dependency_tree(ctx, quiet=quiet)
to_exclude = set()
if not isinstance(exclude, list):
exclude = [exclude]
for pattern in exclude:
to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))
module_list = []
module_metadata = {}
# workaround for setuptools
if "setuptools" in sys.modules:
Extension_distutils = sys.modules["setuptools.extension"]._Extension
Extension_setuptools = sys.modules["setuptools"].Extension
else:
# dummy class, in case we do not have setuptools
Extension_distutils = Extension
class Extension_setuptools(Extension):
pass
# if no create_extension() function is defined, use a simple
# default function.
create_extension = ctx.options.create_extension or default_create_extension
for pattern in patterns:
if isinstance(pattern, str):
filepattern = pattern
template = Extension(pattern, []) # Fake Extension without sources
name = "*"
base = None
ext_language = language
elif isinstance(pattern, (Extension_distutils, Extension_setuptools)):
cython_sources = [
s for s in pattern.sources if os.path.splitext(s)[1] in (".py", ".pyx")
]
if cython_sources:
filepattern = cython_sources[0]
if len(cython_sources) > 1:
print(
"Warning: Multiple cython sources found for extension '%s': %s\n"
"See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
"for sharing declarations among Cython files."
% (pattern.name, cython_sources)
)
else:
# ignore non-cython modules
module_list.append(pattern)
continue
template = pattern
name = template.name
base = DistutilsInfo(exn=template)
ext_language = None # do not override whatever the Extension says
else:
msg = str(
"pattern is not of type str nor subclass of Extension (%s)"
" but of type %s and class %s"
% (repr(Extension), type(pattern), pattern.__class__)
)
raise TypeError(msg)
for file in nonempty(
sorted(extended_iglob(filepattern)),
"'%s' doesn't match any files" % filepattern,
):
if os.path.abspath(file) in to_exclude:
continue
pkg = deps.package(file)
module_name = deps.fully_qualified_name(file)
if "*" in name:
if module_name in explicit_modules:
continue
elif name != module_name:
print(
"Warning: Extension name '%s' does not match fully qualified name '%s' of '%s'"
% (name, module_name, file)
)
module_name = name
if module_name not in seen:
try:
kwds = deps.distutils_info(file, aliases, base).values
except Exception:
if exclude_failures:
continue
raise
if base is not None:
for key, value in base.values.items():
if key not in kwds:
kwds[key] = value
kwds["name"] = module_name
sources = [file] + [m for m in template.sources if m != filepattern]
if "sources" in kwds:
# allow users to add .c files etc.
for source in kwds["sources"]:
source = encode_filename_in_py2(source)
if source not in sources:
sources.append(source)
kwds["sources"] = sources
if ext_language and "language" not in kwds:
kwds["language"] = ext_language
np_pythran = kwds.pop("np_pythran", False)
# Create the new extension
m, metadata = create_extension(template, kwds)
m.np_pythran = np_pythran or getattr(m, "np_pythran", False)
if m.np_pythran:
update_pythran_extension(m)
module_list.append(m)
# Store metadata (this will be written as JSON in the
# generated C file but otherwise has no purpose)
module_metadata[module_name] = metadata
if file not in m.sources:
# Old setuptools unconditionally replaces .pyx with .c/.cpp
target_file = os.path.splitext(file)[0] + (
".cpp" if m.language == "c++" else ".c"
)
try:
m.sources.remove(target_file)
except ValueError:
# never seen this in the wild, but probably better to warn about this unexpected case
print(
"Warning: Cython source file not found in sources list, adding %s"
% file
)
m.sources.insert(0, file)
seen.add(name)
return module_list, module_metadata
|
def create_extension_list(
patterns,
exclude=None,
ctx=None,
aliases=None,
quiet=False,
language=None,
exclude_failures=False,
):
if language is not None:
print(
'Please put "# distutils: language=%s" in your .pyx or .pxd file(s)'
% language
)
if exclude is None:
exclude = []
if patterns is None:
return [], {}
elif isinstance(patterns, basestring) or not isinstance(
patterns, collections.Iterable
):
patterns = [patterns]
explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
seen = set()
deps = create_dependency_tree(ctx, quiet=quiet)
to_exclude = set()
if not isinstance(exclude, list):
exclude = [exclude]
for pattern in exclude:
to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))
module_list = []
module_metadata = {}
# workaround for setuptools
if "setuptools" in sys.modules:
Extension_distutils = sys.modules["setuptools.extension"]._Extension
Extension_setuptools = sys.modules["setuptools"].Extension
else:
# dummy class, in case we do not have setuptools
Extension_distutils = Extension
class Extension_setuptools(Extension):
pass
# if no create_extension() function is defined, use a simple
# default function.
create_extension = ctx.options.create_extension or default_create_extension
for pattern in patterns:
if isinstance(pattern, str):
filepattern = pattern
template = Extension(pattern, []) # Fake Extension without sources
name = "*"
base = None
ext_language = language
elif isinstance(pattern, (Extension_distutils, Extension_setuptools)):
cython_sources = [
s for s in pattern.sources if os.path.splitext(s)[1] in (".py", ".pyx")
]
if cython_sources:
filepattern = cython_sources[0]
if len(cython_sources) > 1:
print(
"Warning: Multiple cython sources found for extension '%s': %s\n"
"See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
"for sharing declarations among Cython files."
% (pattern.name, cython_sources)
)
else:
# ignore non-cython modules
module_list.append(pattern)
continue
template = pattern
name = template.name
base = DistutilsInfo(exn=template)
ext_language = None # do not override whatever the Extension says
else:
msg = str(
"pattern is not of type str nor subclass of Extension (%s)"
" but of type %s and class %s"
% (repr(Extension), type(pattern), pattern.__class__)
)
raise TypeError(msg)
for file in nonempty(
sorted(extended_iglob(filepattern)),
"'%s' doesn't match any files" % filepattern,
):
if os.path.abspath(file) in to_exclude:
continue
pkg = deps.package(file)
module_name = deps.fully_qualified_name(file)
if "*" in name:
if module_name in explicit_modules:
continue
elif name != module_name:
print(
"Warning: Extension name '%s' does not match fully qualified name '%s' of '%s'"
% (name, module_name, file)
)
module_name = name
if module_name not in seen:
try:
kwds = deps.distutils_info(file, aliases, base).values
except Exception:
if exclude_failures:
continue
raise
if base is not None:
for key, value in base.values.items():
if key not in kwds:
kwds[key] = value
kwds["name"] = module_name
sources = [file] + [m for m in template.sources if m != filepattern]
if "sources" in kwds:
# allow users to add .c files etc.
for source in kwds["sources"]:
source = encode_filename_in_py2(source)
if source not in sources:
sources.append(source)
kwds["sources"] = sources
if ext_language and "language" not in kwds:
kwds["language"] = ext_language
np_pythran = kwds.pop("np_pythran", False)
# Create the new extension
m, metadata = create_extension(template, kwds)
m.np_pythran = np_pythran or getattr(m, "np_pythran", False)
if m.np_pythran:
update_pythran_extension(m)
module_list.append(m)
# Store metadata (this will be written as JSON in the
# generated C file but otherwise has no purpose)
module_metadata[module_name] = metadata
if file not in m.sources:
# Old setuptools unconditionally replaces .pyx with .c/.cpp
target_file = file.rsplit(".")[0] + (
".cpp" if m.language == "c++" else ".c"
)
try:
m.sources.remove(target_file)
except ValueError:
# never seen this in the wild, but probably better to warn about this unexpected case
print(
"Warning: Cython source file not found in sources list, adding %s"
% file
)
m.sources.insert(0, file)
seen.add(name)
return module_list, module_metadata
|
https://github.com/cython/cython/issues/1879
|
running build_ext
Traceback (most recent call last):
File "setup.py", line 278, in <module>
**setup_args
File "/usr/lib64/python2.6/distutils/core.py", line 152, in setup
dist.run_commands()
File "/usr/lib64/python2.6/distutils/dist.py", line 975, in run_commands
self.run_command(cmd)
File "/usr/lib64/python2.6/distutils/dist.py", line 995, in run_command
cmd_obj.run()
File "/usr/lib64/python2.6/distutils/command/build.py", line 134, in run
self.run_command(cmd_name)
File "/usr/lib64/python2.6/distutils/cmd.py", line 333, in run_command
self.distribution.run_command(command)
File "/usr/lib64/python2.6/distutils/dist.py", line 994, in run_command
cmd_obj.ensure_finalized()
File "/usr/lib64/python2.6/distutils/cmd.py", line 117, in ensure_finalized
self.finalize_options()
File "/usr/src/tmp/BUILD/cython-0.27/Cython/Distutils/build_ext.py", line 19, in finalize_options
self.distribution.ext_modules)
File "/usr/src/tmp/BUILD/cython-0.27/Cython/Build/Dependencies.py", line 915, in cythonize
aliases=aliases)
File "/usr/src/tmp/BUILD/cython-0.27/Cython/Build/Dependencies.py", line 854, in create_extension_list
m.sources.remove(file.rsplit('.')[0] + '.c')
ValueError: list.remove(x): x not in list
error: Bad exit status from /var/tmp/rpm-tmp.6StfGq (%build)
Bad exit status from /var/tmp/rpm-tmp.6StfGq (%build)
|
ValueError
|
def create_extension_list(
patterns,
exclude=None,
ctx=None,
aliases=None,
quiet=False,
language=None,
exclude_failures=False,
):
if language is not None:
print(
'Please put "# distutils: language=%s" in your .pyx or .pxd file(s)'
% language
)
if exclude is None:
exclude = []
if patterns is None:
return [], {}
elif isinstance(patterns, basestring) or not isinstance(
patterns, collections.Iterable
):
patterns = [patterns]
explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
seen = set()
deps = create_dependency_tree(ctx, quiet=quiet)
to_exclude = set()
if not isinstance(exclude, list):
exclude = [exclude]
for pattern in exclude:
to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))
module_list = []
module_metadata = {}
# workaround for setuptools
if "setuptools" in sys.modules:
Extension_distutils = sys.modules["setuptools.extension"]._Extension
Extension_setuptools = sys.modules["setuptools"].Extension
else:
# dummy class, in case we do not have setuptools
Extension_distutils = Extension
class Extension_setuptools(Extension):
pass
# if no create_extension() function is defined, use a simple
# default function.
create_extension = ctx.options.create_extension or default_create_extension
for pattern in patterns:
if isinstance(pattern, str):
filepattern = pattern
template = Extension(pattern, []) # Fake Extension without sources
name = "*"
base = None
ext_language = language
elif isinstance(pattern, (Extension_distutils, Extension_setuptools)):
cython_sources = [
s for s in pattern.sources if os.path.splitext(s)[1] in (".py", ".pyx")
]
if cython_sources:
filepattern = cython_sources[0]
if len(cython_sources) > 1:
print(
"Warning: Multiple cython sources found for extension '%s': %s\n"
"See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
"for sharing declarations among Cython files."
% (pattern.name, cython_sources)
)
else:
# ignore non-cython modules
module_list.append(pattern)
continue
template = pattern
name = template.name
base = DistutilsInfo(exn=template)
ext_language = None # do not override whatever the Extension says
else:
msg = str(
"pattern is not of type str nor subclass of Extension (%s)"
" but of type %s and class %s"
% (repr(Extension), type(pattern), pattern.__class__)
)
raise TypeError(msg)
for file in nonempty(
sorted(extended_iglob(filepattern)),
"'%s' doesn't match any files" % filepattern,
):
if os.path.abspath(file) in to_exclude:
continue
pkg = deps.package(file)
module_name = deps.fully_qualified_name(file)
if "*" in name:
if module_name in explicit_modules:
continue
elif name != module_name:
print(
"Warning: Extension name '%s' does not match fully qualified name '%s' of '%s'"
% (name, module_name, file)
)
module_name = name
if module_name not in seen:
try:
kwds = deps.distutils_info(file, aliases, base).values
except Exception:
if exclude_failures:
continue
raise
if base is not None:
for key, value in base.values.items():
if key not in kwds:
kwds[key] = value
kwds["name"] = module_name
sources = [file] + [m for m in template.sources if m != filepattern]
if "sources" in kwds:
# allow users to add .c files etc.
for source in kwds["sources"]:
source = encode_filename_in_py2(source)
if source not in sources:
sources.append(source)
kwds["sources"] = sources
if ext_language and "language" not in kwds:
kwds["language"] = ext_language
np_pythran = kwds.pop("np_pythran", False)
# Create the new extension
m, metadata = create_extension(template, kwds)
m.np_pythran = np_pythran or getattr(m, "np_pythran", False)
if m.np_pythran:
update_pythran_extension(m)
module_list.append(m)
# Store metadata (this will be written as JSON in the
# generated C file but otherwise has no purpose)
module_metadata[module_name] = metadata
if file not in m.sources:
# Old setuptools unconditionally replaces .pyx with .c/.cpp
target_file = file.rsplit(".")[0] + (
".cpp" if m.language == "c++" else ".c"
)
try:
m.sources.remove(target_file)
except ValueError:
# never seen this in the wild, but probably better to warn about this unexpected case
print(
"Warning: Cython source file not found in sources list, adding %s"
% file
)
m.sources.insert(0, file)
seen.add(name)
return module_list, module_metadata
|
def create_extension_list(
patterns,
exclude=None,
ctx=None,
aliases=None,
quiet=False,
language=None,
exclude_failures=False,
):
if language is not None:
print(
'Please put "# distutils: language=%s" in your .pyx or .pxd file(s)'
% language
)
if exclude is None:
exclude = []
if patterns is None:
return [], {}
elif isinstance(patterns, basestring) or not isinstance(
patterns, collections.Iterable
):
patterns = [patterns]
explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
seen = set()
deps = create_dependency_tree(ctx, quiet=quiet)
to_exclude = set()
if not isinstance(exclude, list):
exclude = [exclude]
for pattern in exclude:
to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))
module_list = []
module_metadata = {}
# workaround for setuptools
if "setuptools" in sys.modules:
Extension_distutils = sys.modules["setuptools.extension"]._Extension
Extension_setuptools = sys.modules["setuptools"].Extension
else:
# dummy class, in case we do not have setuptools
Extension_distutils = Extension
class Extension_setuptools(Extension):
pass
# if no create_extension() function is defined, use a simple
# default function.
create_extension = ctx.options.create_extension or default_create_extension
for pattern in patterns:
if isinstance(pattern, str):
filepattern = pattern
template = Extension(pattern, []) # Fake Extension without sources
name = "*"
base = None
ext_language = language
elif isinstance(pattern, (Extension_distutils, Extension_setuptools)):
cython_sources = [
s for s in pattern.sources if os.path.splitext(s)[1] in (".py", ".pyx")
]
if cython_sources:
filepattern = cython_sources[0]
if len(cython_sources) > 1:
print(
"Warning: Multiple cython sources found for extension '%s': %s\n"
"See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
"for sharing declarations among Cython files."
% (pattern.name, cython_sources)
)
else:
# ignore non-cython modules
module_list.append(pattern)
continue
template = pattern
name = template.name
base = DistutilsInfo(exn=template)
ext_language = None # do not override whatever the Extension says
else:
msg = str(
"pattern is not of type str nor subclass of Extension (%s)"
" but of type %s and class %s"
% (repr(Extension), type(pattern), pattern.__class__)
)
raise TypeError(msg)
for file in nonempty(
sorted(extended_iglob(filepattern)),
"'%s' doesn't match any files" % filepattern,
):
if os.path.abspath(file) in to_exclude:
continue
pkg = deps.package(file)
module_name = deps.fully_qualified_name(file)
if "*" in name:
if module_name in explicit_modules:
continue
elif name != module_name:
print(
"Warning: Extension name '%s' does not match fully qualified name '%s' of '%s'"
% (name, module_name, file)
)
module_name = name
if module_name not in seen:
try:
kwds = deps.distutils_info(file, aliases, base).values
except Exception:
if exclude_failures:
continue
raise
if base is not None:
for key, value in base.values.items():
if key not in kwds:
kwds[key] = value
kwds["name"] = module_name
sources = [file] + [m for m in template.sources if m != filepattern]
if "sources" in kwds:
# allow users to add .c files etc.
for source in kwds["sources"]:
source = encode_filename_in_py2(source)
if source not in sources:
sources.append(source)
kwds["sources"] = sources
if ext_language and "language" not in kwds:
kwds["language"] = ext_language
np_pythran = kwds.pop("np_pythran", False)
# Create the new extension
m, metadata = create_extension(template, kwds)
m.np_pythran = np_pythran or getattr(m, "np_pythran", False)
if m.np_pythran:
update_pythran_extension(m)
module_list.append(m)
# Store metadata (this will be written as JSON in the
# generated C file but otherwise has no purpose)
module_metadata[module_name] = metadata
if file not in m.sources:
# Old setuptools unconditionally replaces .pyx with .c/.cpp
m.sources.remove(
file.rsplit(".")[0] + (".cpp" if m.language == "c++" else ".c")
)
m.sources.insert(0, file)
seen.add(name)
return module_list, module_metadata
|
https://github.com/cython/cython/issues/1879
|
running build_ext
Traceback (most recent call last):
File "setup.py", line 278, in <module>
**setup_args
File "/usr/lib64/python2.6/distutils/core.py", line 152, in setup
dist.run_commands()
File "/usr/lib64/python2.6/distutils/dist.py", line 975, in run_commands
self.run_command(cmd)
File "/usr/lib64/python2.6/distutils/dist.py", line 995, in run_command
cmd_obj.run()
File "/usr/lib64/python2.6/distutils/command/build.py", line 134, in run
self.run_command(cmd_name)
File "/usr/lib64/python2.6/distutils/cmd.py", line 333, in run_command
self.distribution.run_command(command)
File "/usr/lib64/python2.6/distutils/dist.py", line 994, in run_command
cmd_obj.ensure_finalized()
File "/usr/lib64/python2.6/distutils/cmd.py", line 117, in ensure_finalized
self.finalize_options()
File "/usr/src/tmp/BUILD/cython-0.27/Cython/Distutils/build_ext.py", line 19, in finalize_options
self.distribution.ext_modules)
File "/usr/src/tmp/BUILD/cython-0.27/Cython/Build/Dependencies.py", line 915, in cythonize
aliases=aliases)
File "/usr/src/tmp/BUILD/cython-0.27/Cython/Build/Dependencies.py", line 854, in create_extension_list
m.sources.remove(file.rsplit('.')[0] + '.c')
ValueError: list.remove(x): x not in list
error: Bad exit status from /var/tmp/rpm-tmp.6StfGq (%build)
Bad exit status from /var/tmp/rpm-tmp.6StfGq (%build)
|
ValueError
|
def get_if_raw_addr(ifname):
"""Returns the IPv4 address configured on 'ifname', packed with inet_pton.""" # noqa: E501
ifname = network_name(ifname)
# Get ifconfig output
subproc = subprocess.Popen(
[conf.prog.ifconfig, ifname],
close_fds=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = subproc.communicate()
if subproc.returncode:
warning("Failed to execute ifconfig: (%s)", plain_str(stderr).strip())
return b"\0\0\0\0"
# Get IPv4 addresses
addresses = [
line.strip() for line in plain_str(stdout).splitlines() if "inet " in line
]
if not addresses:
warning("No IPv4 address found on %s !", ifname)
return b"\0\0\0\0"
# Pack the first address
address = addresses[0].split(" ")[1]
if "/" in address: # NetBSD 8.0
address = address.split("/")[0]
return socket.inet_pton(socket.AF_INET, address)
|
def get_if_raw_addr(ifname):
"""Returns the IPv4 address configured on 'ifname', packed with inet_pton.""" # noqa: E501
ifname = network_name(ifname)
# Get ifconfig output
subproc = subprocess.Popen(
[conf.prog.ifconfig, ifname],
close_fds=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = subproc.communicate()
if subproc.returncode:
warning("Failed to execute ifconfig: (%s)", plain_str(stderr))
return b"\0\0\0\0"
# Get IPv4 addresses
addresses = [
line.strip() for line in plain_str(stdout).splitlines() if "inet " in line
]
if not addresses:
warning("No IPv4 address found on %s !", ifname)
return b"\0\0\0\0"
# Pack the first address
address = addresses[0].split(" ")[1]
if "/" in address: # NetBSD 8.0
address = address.split("/")[0]
return socket.inet_pton(socket.AF_INET, address)
|
https://github.com/secdev/scapy/issues/3051
|
% scapy
scapy.__version__
'2.4.4.dev265'
conf.use_pcap
False
pkt = sr1(IP(dst="192.168.43.5")/UDP(sport=137, dport=137), timeout=2, verbose=0)
Traceback (most recent call last):
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/arch/bpf/supersocket.py", line 73, in __init__
fcntl.ioctl(self.ins, BIOCSETIF, struct.pack("16s16x", self.iface.encode())) # noqa: E501
OSError: [Errno 6] Device not configured
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/sendrecv.py", line 541, in sr1
s = conf.L3socket(promisc=promisc, filter=filter,
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/arch/bpf/supersocket.py", line 241, in __init__
super(L2bpfListenSocket, self).__init__(*args, **kwargs)
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/arch/bpf/supersocket.py", line 75, in __init__
raise Scapy_Exception("BIOCSETIF failed on %s" % self.iface)
scapy.error.Scapy_Exception: BIOCSETIF failed on bridge1
conf.use_pcap = True
WARNING: No libpcap provider available ! pcap won't be used
pkt = sr1(IP(dst="192.168.43.5")/UDP(sport=137, dport=137), timeout=2, verbose=0)
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/sendrecv.py", line 541, in sr1
s = conf.L3socket(promisc=promisc, filter=filter,
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/supersocket.py", line 273, in __init__
self.outs = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_RAW) # noqa: E501
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/socket.py", line 231, in __init__
_socket.socket.__init__(self, family, type, proto, fileno)
PermissionError: [Errno 1] Operation not permitted
|
OSError
|
def get_if_raw_hwaddr(ifname):
"""Returns the packed MAC address configured on 'ifname'."""
NULL_MAC_ADDRESS = b"\x00" * 6
ifname = network_name(ifname)
# Handle the loopback interface separately
if ifname == conf.loopback_name:
return (ARPHDR_LOOPBACK, NULL_MAC_ADDRESS)
# Get ifconfig output
subproc = subprocess.Popen(
[conf.prog.ifconfig, ifname],
close_fds=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = subproc.communicate()
if subproc.returncode:
raise Scapy_Exception(
"Failed to execute ifconfig: (%s)" % plain_str(stderr).strip()
)
# Get MAC addresses
addresses = [
line.strip()
for line in plain_str(stdout).splitlines()
if ("ether" in line or "lladdr" in line or "address" in line)
]
if not addresses:
raise Scapy_Exception("No MAC address found on %s !" % ifname)
# Pack and return the MAC address
mac = addresses[0].split(" ")[1]
mac = [chr(int(b, 16)) for b in mac.split(":")]
return (ARPHDR_ETHER, "".join(mac))
|
def get_if_raw_hwaddr(ifname):
"""Returns the packed MAC address configured on 'ifname'."""
NULL_MAC_ADDRESS = b"\x00" * 6
ifname = network_name(ifname)
# Handle the loopback interface separately
if ifname == conf.loopback_name:
return (ARPHDR_LOOPBACK, NULL_MAC_ADDRESS)
# Get ifconfig output
subproc = subprocess.Popen(
[conf.prog.ifconfig, ifname],
close_fds=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = subproc.communicate()
if subproc.returncode:
raise Scapy_Exception("Failed to execute ifconfig: (%s)" % (plain_str(stderr)))
# Get MAC addresses
addresses = [
line.strip()
for line in plain_str(stdout).splitlines()
if ("ether" in line or "lladdr" in line or "address" in line)
]
if not addresses:
raise Scapy_Exception("No MAC address found on %s !" % ifname)
# Pack and return the MAC address
mac = addresses[0].split(" ")[1]
mac = [chr(int(b, 16)) for b in mac.split(":")]
return (ARPHDR_ETHER, "".join(mac))
|
https://github.com/secdev/scapy/issues/3051
|
% scapy
scapy.__version__
'2.4.4.dev265'
conf.use_pcap
False
pkt = sr1(IP(dst="192.168.43.5")/UDP(sport=137, dport=137), timeout=2, verbose=0)
Traceback (most recent call last):
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/arch/bpf/supersocket.py", line 73, in __init__
fcntl.ioctl(self.ins, BIOCSETIF, struct.pack("16s16x", self.iface.encode())) # noqa: E501
OSError: [Errno 6] Device not configured
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/sendrecv.py", line 541, in sr1
s = conf.L3socket(promisc=promisc, filter=filter,
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/arch/bpf/supersocket.py", line 241, in __init__
super(L2bpfListenSocket, self).__init__(*args, **kwargs)
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/arch/bpf/supersocket.py", line 75, in __init__
raise Scapy_Exception("BIOCSETIF failed on %s" % self.iface)
scapy.error.Scapy_Exception: BIOCSETIF failed on bridge1
conf.use_pcap = True
WARNING: No libpcap provider available ! pcap won't be used
pkt = sr1(IP(dst="192.168.43.5")/UDP(sport=137, dport=137), timeout=2, verbose=0)
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/sendrecv.py", line 541, in sr1
s = conf.L3socket(promisc=promisc, filter=filter,
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/supersocket.py", line 273, in __init__
self.outs = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_RAW) # noqa: E501
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/socket.py", line 231, in __init__
_socket.socket.__init__(self, family, type, proto, fileno)
PermissionError: [Errno 1] Operation not permitted
|
OSError
|
def read_routes():
# type: () -> List[Tuple[int, int, str, str, str, int]]
"""Return a list of IPv4 routes than can be used by Scapy.
This function parses netstat.
"""
if SOLARIS:
f = os.popen("netstat -rvn -f inet")
elif FREEBSD:
f = os.popen("netstat -rnW -f inet") # -W to show long interface names
else:
f = os.popen("netstat -rn -f inet")
ok = 0
mtu_present = False
prio_present = False
refs_present = False
use_present = False
routes = [] # type: List[Tuple[int, int, str, str, str, int]]
pending_if = [] # type: List[Tuple[int, int, str]]
for line in f.readlines():
if not line:
break
line = line.strip().lower()
if line.find("----") >= 0: # a separation line
continue
if not ok:
if line.find("destination") >= 0:
ok = 1
mtu_present = "mtu" in line
prio_present = "prio" in line
refs_present = "ref" in line # There is no s on Solaris
use_present = "use" in line or "nhop" in line
continue
if not line:
break
rt = line.split()
if SOLARIS:
dest_, netmask_, gw, netif = rt[:4]
flg = rt[4 + mtu_present + refs_present]
else:
dest_, gw, flg = rt[:3]
locked = OPENBSD and rt[6] == "l"
offset = mtu_present + prio_present + refs_present + locked
offset += use_present
netif = rt[3 + offset]
if flg.find("lc") >= 0:
continue
elif dest_ == "default":
dest = 0
netmask = 0
elif SOLARIS:
dest = scapy.utils.atol(dest_)
netmask = scapy.utils.atol(netmask_)
else:
if "/" in dest_:
dest_, netmask_ = dest_.split("/")
netmask = scapy.utils.itom(int(netmask_))
else:
netmask = scapy.utils.itom((dest_.count(".") + 1) * 8)
dest_ += ".0" * (3 - dest_.count("."))
dest = scapy.utils.atol(dest_)
# XXX: TODO: add metrics for unix.py (use -e option on netstat)
metric = 1
if "g" not in flg:
gw = "0.0.0.0"
if netif is not None:
from scapy.arch import get_if_addr
try:
ifaddr = get_if_addr(netif)
if ifaddr == "0.0.0.0":
# This means the interface name is probably truncated by
# netstat -nr. We attempt to guess it's name and if not we
# ignore it.
guessed_netif = _guess_iface_name(netif)
if guessed_netif is not None:
ifaddr = get_if_addr(guessed_netif)
netif = guessed_netif
else:
log_runtime.info(
"Could not guess partial interface name: %s", netif
)
routes.append((dest, netmask, gw, netif, ifaddr, metric))
except OSError:
raise
else:
pending_if.append((dest, netmask, gw))
f.close()
# On Solaris, netstat does not provide output interfaces for some routes
# We need to parse completely the routing table to route their gw and
# know their output interface
for dest, netmask, gw in pending_if:
gw_l = scapy.utils.atol(gw)
max_rtmask, gw_if, gw_if_addr = 0, None, None
for rtdst, rtmask, _, rtif, rtaddr, _ in routes[:]:
if gw_l & rtmask == rtdst:
if rtmask >= max_rtmask:
max_rtmask = rtmask
gw_if = rtif
gw_if_addr = rtaddr
# XXX: TODO add metrics
metric = 1
if gw_if and gw_if_addr:
routes.append((dest, netmask, gw, gw_if, gw_if_addr, metric))
else:
warning("Did not find output interface to reach gateway %s", gw)
return routes
|
def read_routes():
# type: () -> List[Tuple[int, int, str, str, str, int]]
"""Return a list of IPv4 routes than can be used by Scapy.
This function parses netstat.
"""
if SOLARIS:
f = os.popen("netstat -rvn -f inet")
elif FREEBSD:
f = os.popen("netstat -rnW -f inet") # -W to show long interface names
else:
f = os.popen("netstat -rn -f inet")
ok = 0
mtu_present = False
prio_present = False
refs_present = False
use_present = False
routes = [] # type: List[Tuple[int, int, str, str, str, int]]
pending_if = [] # type: List[Tuple[int, int, str]]
for line in f.readlines():
if not line:
break
line = line.strip().lower()
if line.find("----") >= 0: # a separation line
continue
if not ok:
if line.find("destination") >= 0:
ok = 1
mtu_present = "mtu" in line
prio_present = "prio" in line
refs_present = "ref" in line # There is no s on Solaris
use_present = "use" in line or "nhop" in line
continue
if not line:
break
rt = line.split()
if SOLARIS:
dest_, netmask_, gw, netif = rt[:4]
flg = rt[4 + mtu_present + refs_present]
else:
dest_, gw, flg = rt[:3]
locked = OPENBSD and rt[6] == "l"
offset = mtu_present + prio_present + refs_present + locked
offset += use_present
netif = rt[3 + offset]
if flg.find("lc") >= 0:
continue
elif dest_ == "default":
dest = 0
netmask = 0
elif SOLARIS:
dest = scapy.utils.atol(dest_)
netmask = scapy.utils.atol(netmask_)
else:
if "/" in dest_:
dest_, netmask_ = dest_.split("/")
netmask = scapy.utils.itom(int(netmask_))
else:
netmask = scapy.utils.itom((dest_.count(".") + 1) * 8)
dest_ += ".0" * (3 - dest_.count("."))
dest = scapy.utils.atol(dest_)
# XXX: TODO: add metrics for unix.py (use -e option on netstat)
metric = 1
if "g" not in flg:
gw = "0.0.0.0"
if netif is not None:
from scapy.arch import get_if_addr
try:
ifaddr = get_if_addr(netif)
routes.append((dest, netmask, gw, netif, ifaddr, metric))
except OSError as exc:
if "Device not configured" in str(exc):
# This means the interface name is probably truncated by
# netstat -nr. We attempt to guess it's name and if not we
# ignore it.
guessed_netif = _guess_iface_name(netif)
if guessed_netif is not None:
ifaddr = get_if_addr(guessed_netif)
routes.append(
(dest, netmask, gw, guessed_netif, ifaddr, metric)
) # noqa: E501
else:
log_runtime.info(
"Could not guess partial interface name: %s", netif
)
else:
raise
else:
pending_if.append((dest, netmask, gw))
f.close()
# On Solaris, netstat does not provide output interfaces for some routes
# We need to parse completely the routing table to route their gw and
# know their output interface
for dest, netmask, gw in pending_if:
gw_l = scapy.utils.atol(gw)
max_rtmask, gw_if, gw_if_addr = 0, None, None
for rtdst, rtmask, _, rtif, rtaddr, _ in routes[:]:
if gw_l & rtmask == rtdst:
if rtmask >= max_rtmask:
max_rtmask = rtmask
gw_if = rtif
gw_if_addr = rtaddr
# XXX: TODO add metrics
metric = 1
if gw_if and gw_if_addr:
routes.append((dest, netmask, gw, gw_if, gw_if_addr, metric))
else:
warning("Did not find output interface to reach gateway %s", gw)
return routes
|
https://github.com/secdev/scapy/issues/3051
|
% scapy
scapy.__version__
'2.4.4.dev265'
conf.use_pcap
False
pkt = sr1(IP(dst="192.168.43.5")/UDP(sport=137, dport=137), timeout=2, verbose=0)
Traceback (most recent call last):
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/arch/bpf/supersocket.py", line 73, in __init__
fcntl.ioctl(self.ins, BIOCSETIF, struct.pack("16s16x", self.iface.encode())) # noqa: E501
OSError: [Errno 6] Device not configured
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/sendrecv.py", line 541, in sr1
s = conf.L3socket(promisc=promisc, filter=filter,
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/arch/bpf/supersocket.py", line 241, in __init__
super(L2bpfListenSocket, self).__init__(*args, **kwargs)
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/arch/bpf/supersocket.py", line 75, in __init__
raise Scapy_Exception("BIOCSETIF failed on %s" % self.iface)
scapy.error.Scapy_Exception: BIOCSETIF failed on bridge1
conf.use_pcap = True
WARNING: No libpcap provider available ! pcap won't be used
pkt = sr1(IP(dst="192.168.43.5")/UDP(sport=137, dport=137), timeout=2, verbose=0)
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/sendrecv.py", line 541, in sr1
s = conf.L3socket(promisc=promisc, filter=filter,
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/site-packages/scapy/supersocket.py", line 273, in __init__
self.outs = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_RAW) # noqa: E501
File "/Users/user/.pyenv/versions/3.8.1/lib/python3.8/socket.py", line 231, in __init__
_socket.socket.__init__(self, family, type, proto, fileno)
PermissionError: [Errno 1] Operation not permitted
|
OSError
|
def __init__(
self,
iface=None,
type=ETH_P_ALL,
promisc=None,
filter=None,
nofilter=0,
monitor=False,
):
self.fd_flags = None
self.assigned_interface = None
# SuperSocket mandatory variables
if promisc is None:
self.promisc = conf.sniff_promisc
else:
self.promisc = promisc
self.iface = network_name(iface or conf.iface)
# Get the BPF handle
self.ins = None
(self.ins, self.dev_bpf) = get_dev_bpf()
self.outs = self.ins
# Set the BPF buffer length
try:
fcntl.ioctl(self.ins, BIOCSBLEN, struct.pack("I", BPF_BUFFER_LENGTH)) # noqa: E501
except IOError:
raise Scapy_Exception("BIOCSBLEN failed on /dev/bpf%i" % self.dev_bpf)
# Assign the network interface to the BPF handle
try:
fcntl.ioctl(self.ins, BIOCSETIF, struct.pack("16s16x", self.iface.encode())) # noqa: E501
except IOError:
raise Scapy_Exception("BIOCSETIF failed on %s" % self.iface)
self.assigned_interface = self.iface
# Set the interface into promiscuous
if self.promisc:
self.set_promisc(1)
# Set the interface to monitor mode
# Note: - trick from libpcap/pcap-bpf.c - monitor_mode()
# - it only works on OS X 10.5 and later
if DARWIN and monitor:
dlt_radiotap = struct.pack("I", DLT_IEEE802_11_RADIO)
try:
fcntl.ioctl(self.ins, BIOCSDLT, dlt_radiotap)
except IOError:
raise Scapy_Exception("Can't set %s into monitor mode!" % self.iface)
# Don't block on read
try:
fcntl.ioctl(self.ins, BIOCIMMEDIATE, struct.pack("I", 1))
except IOError:
raise Scapy_Exception("BIOCIMMEDIATE failed on /dev/bpf%i" % self.dev_bpf)
# Scapy will provide the link layer source address
# Otherwise, it is written by the kernel
try:
fcntl.ioctl(self.ins, BIOCSHDRCMPLT, struct.pack("i", 1))
except IOError:
raise Scapy_Exception("BIOCSHDRCMPLT failed on /dev/bpf%i" % self.dev_bpf)
# Configure the BPF filter
if not nofilter:
if conf.except_filter:
if filter:
filter = "(%s) and not (%s)" % (filter, conf.except_filter)
else:
filter = "not (%s)" % conf.except_filter
if filter is not None:
try:
attach_filter(self.ins, filter, self.iface)
except ImportError as ex:
warning("Cannot set filter: %s" % ex)
# Set the guessed packet class
self.guessed_cls = self.guess_cls()
|
def __init__(
self,
iface=None,
type=ETH_P_ALL,
promisc=None,
filter=None,
nofilter=0,
monitor=False,
):
self.fd_flags = None
self.assigned_interface = None
# SuperSocket mandatory variables
if promisc is None:
self.promisc = conf.sniff_promisc
else:
self.promisc = promisc
self.iface = network_name(iface or conf.iface)
# Get the BPF handle
(self.ins, self.dev_bpf) = get_dev_bpf()
self.outs = self.ins
# Set the BPF buffer length
try:
fcntl.ioctl(self.ins, BIOCSBLEN, struct.pack("I", BPF_BUFFER_LENGTH)) # noqa: E501
except IOError:
raise Scapy_Exception("BIOCSBLEN failed on /dev/bpf%i" % self.dev_bpf)
# Assign the network interface to the BPF handle
try:
fcntl.ioctl(self.ins, BIOCSETIF, struct.pack("16s16x", self.iface.encode())) # noqa: E501
except IOError:
raise Scapy_Exception("BIOCSETIF failed on %s" % self.iface)
self.assigned_interface = self.iface
# Set the interface into promiscuous
if self.promisc:
self.set_promisc(1)
# Set the interface to monitor mode
# Note: - trick from libpcap/pcap-bpf.c - monitor_mode()
# - it only works on OS X 10.5 and later
if DARWIN and monitor:
dlt_radiotap = struct.pack("I", DLT_IEEE802_11_RADIO)
try:
fcntl.ioctl(self.ins, BIOCSDLT, dlt_radiotap)
except IOError:
raise Scapy_Exception("Can't set %s into monitor mode!" % self.iface)
# Don't block on read
try:
fcntl.ioctl(self.ins, BIOCIMMEDIATE, struct.pack("I", 1))
except IOError:
raise Scapy_Exception("BIOCIMMEDIATE failed on /dev/bpf%i" % self.dev_bpf)
# Scapy will provide the link layer source address
# Otherwise, it is written by the kernel
try:
fcntl.ioctl(self.ins, BIOCSHDRCMPLT, struct.pack("i", 1))
except IOError:
raise Scapy_Exception("BIOCSHDRCMPLT failed on /dev/bpf%i" % self.dev_bpf)
# Configure the BPF filter
if not nofilter:
if conf.except_filter:
if filter:
filter = "(%s) and not (%s)" % (filter, conf.except_filter)
else:
filter = "not (%s)" % conf.except_filter
if filter is not None:
try:
attach_filter(self.ins, filter, self.iface)
except ImportError as ex:
warning("Cannot set filter: %s" % ex)
# Set the guessed packet class
self.guessed_cls = self.guess_cls()
|
https://github.com/secdev/scapy/issues/3065
|
from scapy.config import conf
from scapy.sendrecv import AsyncSniffer
conf.L2socket()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/bdraco/Library/Python/3.8/lib/python/site-packages/scapy/arch/bpf/supersocket.py", line 242, in __init__
super(L2bpfListenSocket, self).__init__(*args, **kwargs)
File "/Users/bdraco/Library/Python/3.8/lib/python/site-packages/scapy/arch/bpf/supersocket.py", line 62, in __init__
(self.ins, self.dev_bpf) = get_dev_bpf()
File "/Users/bdraco/Library/Python/3.8/lib/python/site-packages/scapy/arch/bpf/core.py", line 114, in get_dev_bpf
raise Scapy_Exception("No /dev/bpf handle is available !")
scapy.error.Scapy_Exception: No /dev/bpf handle is available !
^D
Exception ignored in: <function _L2bpfSocket.__del__ at 0x110de31f0>
Traceback (most recent call last):
File "/Users/bdraco/Library/Python/3.8/lib/python/site-packages/scapy/arch/bpf/supersocket.py", line 139, in __del__
self.close()
File "/Users/bdraco/Library/Python/3.8/lib/python/site-packages/scapy/arch/bpf/supersocket.py", line 211, in close
if not self.closed and self.ins is not None:
AttributeError: 'L2bpfSocket' object has no attribute 'ins'
|
AttributeError
|
def tls_session_update(self, msg_str):
"""
Either for parsing or building, we store the client_random
along with the raw string representing this handshake message.
"""
super(TLSClientHello, self).tls_session_update(msg_str)
s = self.tls_session
s.advertised_tls_version = self.version
# This ClientHello could be a 1.3 one. Let's store the sid
# in all cases
if self.sidlen and self.sidlen > 0:
s.sid = self.sid
self.random_bytes = msg_str[10:38]
s.client_random = struct.pack("!I", self.gmt_unix_time) + self.random_bytes
# No distinction between a TLS 1.2 ClientHello and a TLS
# 1.3 ClientHello when dissecting : TLS 1.3 CH will be
# parsed as TLSClientHello
if self.ext:
for e in self.ext:
if isinstance(e, TLS_Ext_SupportedVersion_CH):
for ver in sorted(e.versions, reverse=True):
# RFC 8701: GREASE of TLS will send unknown versions
# here. We have to ignore them
if ver in _tls_version:
s.advertised_tls_version = ver
break
if s.sid:
s.middlebox_compatibility = True
if isinstance(e, TLS_Ext_SignatureAlgorithms):
s.advertised_sig_algs = e.sig_algs
|
def tls_session_update(self, msg_str):
"""
Either for parsing or building, we store the client_random
along with the raw string representing this handshake message.
"""
super(TLSClientHello, self).tls_session_update(msg_str)
s = self.tls_session
s.advertised_tls_version = self.version
# This ClientHello could be a 1.3 one. Let's store the sid
# in all cases
if self.sidlen and self.sidlen > 0:
s.sid = self.sid
self.random_bytes = msg_str[10:38]
s.client_random = struct.pack("!I", self.gmt_unix_time) + self.random_bytes
# No distinction between a TLS 1.2 ClientHello and a TLS
# 1.3 ClientHello when dissecting : TLS 1.3 CH will be
# parsed as TLSClientHello
if self.ext:
for e in self.ext:
if isinstance(e, TLS_Ext_SupportedVersion_CH):
for ver in e.versions:
# RFC 8701: GREASE of TLS will send unknown versions
# here. We have to ignore them
if ver in _tls_version:
s.advertised_tls_version = ver
break
if s.sid:
s.middlebox_compatibility = True
if isinstance(e, TLS_Ext_SignatureAlgorithms):
s.advertised_sig_algs = e.sig_algs
|
https://github.com/secdev/scapy/issues/2778
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-4c9f13dcdcd8> in <module>
----> 1 r2 = TLS(pcap[1].load, tls_session=r1.tls_session.mirror())
~/scapy_venv/lib/python3.7/site-packages/scapy/base_classes.py in __call__(cls, *args, **kargs)
264 cls = config.conf.raw_layer
265 i = cls.__new__(cls, cls.__name__, cls.__bases__, cls.__dict__)
--> 266 i.__init__(*args, **kargs)
267 return i
268
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in __init__(self, *args, **kargs)
302 def __init__(self, *args, **kargs):
303 self.deciphered_len = kargs.get("deciphered_len", None)
--> 304 super(TLS, self).__init__(*args, **kargs)
305
306 @classmethod
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, _pkt, post_transform, _internal, _underlayer, tls_session, **fields)
902 Packet.__init__(self, _pkt=_pkt, post_transform=post_transform,
903 _internal=_internal, _underlayer=_underlayer,
--> 904 **fields)
905
906 def __getattr__(self, attr):
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in __init__(self, _pkt, post_transform, _internal, _underlayer, **fields)
156 self.sniffed_on = None
157 if _pkt:
--> 158 self.dissect(_pkt)
159 if not _internal:
160 self.dissection_done(self)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in dissect(self, s)
873 s = self.pre_dissect(s)
874
--> 875 s = self.do_dissect(s)
876
877 s = self.post_dissect(s)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in do_dissect(self, s)
837 if not s:
838 break
--> 839 s, fval = f.getfield(self, s)
840 # We need to track fields with mutable values to discard
841 # .raw_packet_cache when needed.
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in getfield(self, pkt, s)
166 if isinstance(p, _GenericTLSSessionInheritance):
167 if not p.tls_session.frozen:
--> 168 p.post_dissection_tls_session_update(raw_msg)
169
170 lst.append(p)
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in post_dissection_tls_session_update(self, msg_str)
924
925 def post_dissection_tls_session_update(self, msg_str):
--> 926 self.tls_session_update(msg_str)
927
928 def copy(self):
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/handshake.py in tls_session_update(self, msg_str)
648 s.prcs = readConnState(ciphersuite=cs_cls,
649 connection_end=connection_end,
--> 650 tls_version=s.tls_version)
651 if not s.middlebox_compatibility:
652 s.triggered_prcs_commit = True
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, **kargs)
279 class readConnState(connState):
280 def __init__(self, **kargs):
--> 281 connState.__init__(self, read_or_write="read", **kargs)
282
283
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, connection_end, read_or_write, seq_num, compression_alg, ciphersuite, tls_version)
89 from scapy.layers.tls.crypto.suites import TLS_NULL_WITH_NULL_NULL
90 ciphersuite = TLS_NULL_WITH_NULL_NULL
---> 91 self.ciphersuite = ciphersuite(tls_version=tls_version)
92
93 if not self.ciphersuite.usable:
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/crypto/suites.py in __init__(self, tls_version)
140 """
141 super(_GenericCipherSuite, self).__init__()
--> 142 if tls_version <= 0x301:
143 self.key_block_len = self._key_block_len_v1_0
144
TypeError: '<=' not supported between instances of 'NoneType' and 'int'
|
TypeError
|
def tls_session_update(self, msg_str):
"""
Either for parsing or building, we store the client_random
along with the raw string representing this handshake message.
"""
super(TLS13ClientHello, self).tls_session_update(msg_str)
s = self.tls_session
if self.sidlen and self.sidlen > 0:
s.sid = self.sid
s.middlebox_compatibility = True
self.random_bytes = msg_str[10:38]
s.client_random = self.random_bytes
if self.ext:
for e in self.ext:
if isinstance(e, TLS_Ext_SupportedVersion_CH):
for ver in sorted(e.versions, reverse=True):
# RFC 8701: GREASE of TLS will send unknown versions
# here. We have to ignore them
if ver in _tls_version:
self.tls_session.advertised_tls_version = ver
break
if isinstance(e, TLS_Ext_SignatureAlgorithms):
s.advertised_sig_algs = e.sig_algs
|
def tls_session_update(self, msg_str):
"""
Either for parsing or building, we store the client_random
along with the raw string representing this handshake message.
"""
super(TLS13ClientHello, self).tls_session_update(msg_str)
s = self.tls_session
if self.sidlen and self.sidlen > 0:
s.sid = self.sid
s.middlebox_compatibility = True
self.random_bytes = msg_str[10:38]
s.client_random = self.random_bytes
if self.ext:
for e in self.ext:
if isinstance(e, TLS_Ext_SupportedVersion_CH):
for ver in e.versions:
# RFC 8701: GREASE of TLS will send unknown versions
# here. We have to ignore them
if ver in _tls_version:
self.tls_session.advertised_tls_version = ver
break
if isinstance(e, TLS_Ext_SignatureAlgorithms):
s.advertised_sig_algs = e.sig_algs
|
https://github.com/secdev/scapy/issues/2778
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-4c9f13dcdcd8> in <module>
----> 1 r2 = TLS(pcap[1].load, tls_session=r1.tls_session.mirror())
~/scapy_venv/lib/python3.7/site-packages/scapy/base_classes.py in __call__(cls, *args, **kargs)
264 cls = config.conf.raw_layer
265 i = cls.__new__(cls, cls.__name__, cls.__bases__, cls.__dict__)
--> 266 i.__init__(*args, **kargs)
267 return i
268
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in __init__(self, *args, **kargs)
302 def __init__(self, *args, **kargs):
303 self.deciphered_len = kargs.get("deciphered_len", None)
--> 304 super(TLS, self).__init__(*args, **kargs)
305
306 @classmethod
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, _pkt, post_transform, _internal, _underlayer, tls_session, **fields)
902 Packet.__init__(self, _pkt=_pkt, post_transform=post_transform,
903 _internal=_internal, _underlayer=_underlayer,
--> 904 **fields)
905
906 def __getattr__(self, attr):
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in __init__(self, _pkt, post_transform, _internal, _underlayer, **fields)
156 self.sniffed_on = None
157 if _pkt:
--> 158 self.dissect(_pkt)
159 if not _internal:
160 self.dissection_done(self)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in dissect(self, s)
873 s = self.pre_dissect(s)
874
--> 875 s = self.do_dissect(s)
876
877 s = self.post_dissect(s)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in do_dissect(self, s)
837 if not s:
838 break
--> 839 s, fval = f.getfield(self, s)
840 # We need to track fields with mutable values to discard
841 # .raw_packet_cache when needed.
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in getfield(self, pkt, s)
166 if isinstance(p, _GenericTLSSessionInheritance):
167 if not p.tls_session.frozen:
--> 168 p.post_dissection_tls_session_update(raw_msg)
169
170 lst.append(p)
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in post_dissection_tls_session_update(self, msg_str)
924
925 def post_dissection_tls_session_update(self, msg_str):
--> 926 self.tls_session_update(msg_str)
927
928 def copy(self):
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/handshake.py in tls_session_update(self, msg_str)
648 s.prcs = readConnState(ciphersuite=cs_cls,
649 connection_end=connection_end,
--> 650 tls_version=s.tls_version)
651 if not s.middlebox_compatibility:
652 s.triggered_prcs_commit = True
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, **kargs)
279 class readConnState(connState):
280 def __init__(self, **kargs):
--> 281 connState.__init__(self, read_or_write="read", **kargs)
282
283
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, connection_end, read_or_write, seq_num, compression_alg, ciphersuite, tls_version)
89 from scapy.layers.tls.crypto.suites import TLS_NULL_WITH_NULL_NULL
90 ciphersuite = TLS_NULL_WITH_NULL_NULL
---> 91 self.ciphersuite = ciphersuite(tls_version=tls_version)
92
93 if not self.ciphersuite.usable:
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/crypto/suites.py in __init__(self, tls_version)
140 """
141 super(_GenericCipherSuite, self).__init__()
--> 142 if tls_version <= 0x301:
143 self.key_block_len = self._key_block_len_v1_0
144
TypeError: '<=' not supported between instances of 'NoneType' and 'int'
|
TypeError
|
def tls_session_update(self, msg_str):
"""
Either for parsing or building, we store the server_random
along with the raw string representing this handshake message.
We also store the session_id, the cipher suite (if recognized),
the compression method, and finally we instantiate the pending write
and read connection states. Usually they get updated later on in the
negotiation when we learn the session keys, and eventually they
are committed once a ChangeCipherSpec has been sent/received.
"""
super(TLSServerHello, self).tls_session_update(msg_str)
s = self.tls_session
s.tls_version = self.version
if hasattr(self, "gmt_unix_time"):
self.random_bytes = msg_str[10:38]
s.server_random = struct.pack("!I", self.gmt_unix_time) + self.random_bytes
else:
s.server_random = self.random_bytes
s.sid = self.sid
cs_cls = None
if self.cipher:
cs_val = self.cipher
if cs_val not in _tls_cipher_suites_cls:
warning("Unknown cipher suite %d from ServerHello" % cs_val)
# we do not try to set a default nor stop the execution
else:
cs_cls = _tls_cipher_suites_cls[cs_val]
comp_cls = Comp_NULL
if self.comp:
comp_val = self.comp[0]
if comp_val not in _tls_compression_algs_cls:
err = "Unknown compression alg %d from ServerHello" % comp_val
warning(err)
comp_val = 0
comp_cls = _tls_compression_algs_cls[comp_val]
connection_end = s.connection_end
s.pwcs = writeConnState(
ciphersuite=cs_cls,
compression_alg=comp_cls,
connection_end=connection_end,
tls_version=self.version,
)
s.prcs = readConnState(
ciphersuite=cs_cls,
compression_alg=comp_cls,
connection_end=connection_end,
tls_version=self.version,
)
|
def tls_session_update(self, msg_str):
"""
Either for parsing or building, we store the server_random
along with the raw string representing this handshake message.
We also store the session_id, the cipher suite (if recognized),
the compression method, and finally we instantiate the pending write
and read connection states. Usually they get updated later on in the
negotiation when we learn the session keys, and eventually they
are committed once a ChangeCipherSpec has been sent/received.
"""
super(TLSServerHello, self).tls_session_update(msg_str)
self.tls_session.tls_version = self.version
self.random_bytes = msg_str[10:38]
self.tls_session.server_random = (
struct.pack("!I", self.gmt_unix_time) + self.random_bytes
)
self.tls_session.sid = self.sid
cs_cls = None
if self.cipher:
cs_val = self.cipher
if cs_val not in _tls_cipher_suites_cls:
warning("Unknown cipher suite %d from ServerHello" % cs_val)
# we do not try to set a default nor stop the execution
else:
cs_cls = _tls_cipher_suites_cls[cs_val]
comp_cls = Comp_NULL
if self.comp:
comp_val = self.comp[0]
if comp_val not in _tls_compression_algs_cls:
err = "Unknown compression alg %d from ServerHello" % comp_val
warning(err)
comp_val = 0
comp_cls = _tls_compression_algs_cls[comp_val]
connection_end = self.tls_session.connection_end
self.tls_session.pwcs = writeConnState(
ciphersuite=cs_cls,
compression_alg=comp_cls,
connection_end=connection_end,
tls_version=self.version,
)
self.tls_session.prcs = readConnState(
ciphersuite=cs_cls,
compression_alg=comp_cls,
connection_end=connection_end,
tls_version=self.version,
)
|
https://github.com/secdev/scapy/issues/2778
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-4c9f13dcdcd8> in <module>
----> 1 r2 = TLS(pcap[1].load, tls_session=r1.tls_session.mirror())
~/scapy_venv/lib/python3.7/site-packages/scapy/base_classes.py in __call__(cls, *args, **kargs)
264 cls = config.conf.raw_layer
265 i = cls.__new__(cls, cls.__name__, cls.__bases__, cls.__dict__)
--> 266 i.__init__(*args, **kargs)
267 return i
268
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in __init__(self, *args, **kargs)
302 def __init__(self, *args, **kargs):
303 self.deciphered_len = kargs.get("deciphered_len", None)
--> 304 super(TLS, self).__init__(*args, **kargs)
305
306 @classmethod
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, _pkt, post_transform, _internal, _underlayer, tls_session, **fields)
902 Packet.__init__(self, _pkt=_pkt, post_transform=post_transform,
903 _internal=_internal, _underlayer=_underlayer,
--> 904 **fields)
905
906 def __getattr__(self, attr):
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in __init__(self, _pkt, post_transform, _internal, _underlayer, **fields)
156 self.sniffed_on = None
157 if _pkt:
--> 158 self.dissect(_pkt)
159 if not _internal:
160 self.dissection_done(self)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in dissect(self, s)
873 s = self.pre_dissect(s)
874
--> 875 s = self.do_dissect(s)
876
877 s = self.post_dissect(s)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in do_dissect(self, s)
837 if not s:
838 break
--> 839 s, fval = f.getfield(self, s)
840 # We need to track fields with mutable values to discard
841 # .raw_packet_cache when needed.
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in getfield(self, pkt, s)
166 if isinstance(p, _GenericTLSSessionInheritance):
167 if not p.tls_session.frozen:
--> 168 p.post_dissection_tls_session_update(raw_msg)
169
170 lst.append(p)
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in post_dissection_tls_session_update(self, msg_str)
924
925 def post_dissection_tls_session_update(self, msg_str):
--> 926 self.tls_session_update(msg_str)
927
928 def copy(self):
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/handshake.py in tls_session_update(self, msg_str)
648 s.prcs = readConnState(ciphersuite=cs_cls,
649 connection_end=connection_end,
--> 650 tls_version=s.tls_version)
651 if not s.middlebox_compatibility:
652 s.triggered_prcs_commit = True
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, **kargs)
279 class readConnState(connState):
280 def __init__(self, **kargs):
--> 281 connState.__init__(self, read_or_write="read", **kargs)
282
283
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, connection_end, read_or_write, seq_num, compression_alg, ciphersuite, tls_version)
89 from scapy.layers.tls.crypto.suites import TLS_NULL_WITH_NULL_NULL
90 ciphersuite = TLS_NULL_WITH_NULL_NULL
---> 91 self.ciphersuite = ciphersuite(tls_version=tls_version)
92
93 if not self.ciphersuite.usable:
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/crypto/suites.py in __init__(self, tls_version)
140 """
141 super(_GenericCipherSuite, self).__init__()
--> 142 if tls_version <= 0x301:
143 self.key_block_len = self._key_block_len_v1_0
144
TypeError: '<=' not supported between instances of 'NoneType' and 'int'
|
TypeError
|
def tls_session_update(self, msg_str):
"""
Either for parsing or building, we store the server_random along with
the raw string representing this handshake message. We also store the
cipher suite (if recognized), and finally we instantiate the write and
read connection states.
"""
s = self.tls_session
s.server_random = self.random_bytes
s.ciphersuite = self.cipher
s.tls_version = self.version
# Check extensions
if self.ext:
for e in self.ext:
if isinstance(e, TLS_Ext_SupportedVersion_SH):
s.tls_version = e.version
break
if s.tls_version < 0x304:
# This means that the server does not support TLS 1.3 and ignored
# the initial TLS 1.3 ClientHello. tls_version has been updated
return TLSServerHello.tls_session_update(self, msg_str)
else:
_TLSHandshake.tls_session_update(self, msg_str)
cs_cls = None
if self.cipher:
cs_val = self.cipher
if cs_val not in _tls_cipher_suites_cls:
warning("Unknown cipher suite %d from ServerHello" % cs_val)
# we do not try to set a default nor stop the execution
else:
cs_cls = _tls_cipher_suites_cls[cs_val]
connection_end = s.connection_end
if connection_end == "server":
s.pwcs = writeConnState(
ciphersuite=cs_cls, connection_end=connection_end, tls_version=s.tls_version
)
if not s.middlebox_compatibility:
s.triggered_pwcs_commit = True
elif connection_end == "client":
s.prcs = readConnState(
ciphersuite=cs_cls, connection_end=connection_end, tls_version=s.tls_version
)
if not s.middlebox_compatibility:
s.triggered_prcs_commit = True
if s.tls13_early_secret is None:
# In case the connState was not pre-initialized, we could not
# compute the early secrets at the ClientHello, so we do it here.
s.compute_tls13_early_secrets()
s.compute_tls13_handshake_secrets()
if connection_end == "server":
shts = s.tls13_derived_secrets["server_handshake_traffic_secret"]
s.pwcs.tls13_derive_keys(shts)
elif connection_end == "client":
shts = s.tls13_derived_secrets["server_handshake_traffic_secret"]
s.prcs.tls13_derive_keys(shts)
|
def tls_session_update(self, msg_str):
"""
Either for parsing or building, we store the server_random along with
the raw string representing this handshake message. We also store the
cipher suite (if recognized), and finally we instantiate the write and
read connection states.
"""
super(TLS13ServerHello, self).tls_session_update(msg_str)
s = self.tls_session
if self.ext:
for e in self.ext:
if isinstance(e, TLS_Ext_SupportedVersion_SH):
s.tls_version = e.version
break
s.server_random = self.random_bytes
s.ciphersuite = self.cipher
cs_cls = None
if self.cipher:
cs_val = self.cipher
if cs_val not in _tls_cipher_suites_cls:
warning("Unknown cipher suite %d from ServerHello" % cs_val)
# we do not try to set a default nor stop the execution
else:
cs_cls = _tls_cipher_suites_cls[cs_val]
connection_end = s.connection_end
if connection_end == "server":
s.pwcs = writeConnState(
ciphersuite=cs_cls, connection_end=connection_end, tls_version=s.tls_version
)
if not s.middlebox_compatibility:
s.triggered_pwcs_commit = True
elif connection_end == "client":
s.prcs = readConnState(
ciphersuite=cs_cls, connection_end=connection_end, tls_version=s.tls_version
)
if not s.middlebox_compatibility:
s.triggered_prcs_commit = True
if s.tls13_early_secret is None:
# In case the connState was not pre-initialized, we could not
# compute the early secrets at the ClientHello, so we do it here.
s.compute_tls13_early_secrets()
s.compute_tls13_handshake_secrets()
if connection_end == "server":
shts = s.tls13_derived_secrets["server_handshake_traffic_secret"]
s.pwcs.tls13_derive_keys(shts)
elif connection_end == "client":
shts = s.tls13_derived_secrets["server_handshake_traffic_secret"]
s.prcs.tls13_derive_keys(shts)
|
https://github.com/secdev/scapy/issues/2778
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-4c9f13dcdcd8> in <module>
----> 1 r2 = TLS(pcap[1].load, tls_session=r1.tls_session.mirror())
~/scapy_venv/lib/python3.7/site-packages/scapy/base_classes.py in __call__(cls, *args, **kargs)
264 cls = config.conf.raw_layer
265 i = cls.__new__(cls, cls.__name__, cls.__bases__, cls.__dict__)
--> 266 i.__init__(*args, **kargs)
267 return i
268
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in __init__(self, *args, **kargs)
302 def __init__(self, *args, **kargs):
303 self.deciphered_len = kargs.get("deciphered_len", None)
--> 304 super(TLS, self).__init__(*args, **kargs)
305
306 @classmethod
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, _pkt, post_transform, _internal, _underlayer, tls_session, **fields)
902 Packet.__init__(self, _pkt=_pkt, post_transform=post_transform,
903 _internal=_internal, _underlayer=_underlayer,
--> 904 **fields)
905
906 def __getattr__(self, attr):
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in __init__(self, _pkt, post_transform, _internal, _underlayer, **fields)
156 self.sniffed_on = None
157 if _pkt:
--> 158 self.dissect(_pkt)
159 if not _internal:
160 self.dissection_done(self)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in dissect(self, s)
873 s = self.pre_dissect(s)
874
--> 875 s = self.do_dissect(s)
876
877 s = self.post_dissect(s)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in do_dissect(self, s)
837 if not s:
838 break
--> 839 s, fval = f.getfield(self, s)
840 # We need to track fields with mutable values to discard
841 # .raw_packet_cache when needed.
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in getfield(self, pkt, s)
166 if isinstance(p, _GenericTLSSessionInheritance):
167 if not p.tls_session.frozen:
--> 168 p.post_dissection_tls_session_update(raw_msg)
169
170 lst.append(p)
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in post_dissection_tls_session_update(self, msg_str)
924
925 def post_dissection_tls_session_update(self, msg_str):
--> 926 self.tls_session_update(msg_str)
927
928 def copy(self):
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/handshake.py in tls_session_update(self, msg_str)
648 s.prcs = readConnState(ciphersuite=cs_cls,
649 connection_end=connection_end,
--> 650 tls_version=s.tls_version)
651 if not s.middlebox_compatibility:
652 s.triggered_prcs_commit = True
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, **kargs)
279 class readConnState(connState):
280 def __init__(self, **kargs):
--> 281 connState.__init__(self, read_or_write="read", **kargs)
282
283
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, connection_end, read_or_write, seq_num, compression_alg, ciphersuite, tls_version)
89 from scapy.layers.tls.crypto.suites import TLS_NULL_WITH_NULL_NULL
90 ciphersuite = TLS_NULL_WITH_NULL_NULL
---> 91 self.ciphersuite = ciphersuite(tls_version=tls_version)
92
93 if not self.ciphersuite.usable:
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/crypto/suites.py in __init__(self, tls_version)
140 """
141 super(_GenericCipherSuite, self).__init__()
--> 142 if tls_version <= 0x301:
143 self.key_block_len = self._key_block_len_v1_0
144
TypeError: '<=' not supported between instances of 'NoneType' and 'int'
|
TypeError
|
def m2i(self, pkt, m):
"""
Try to parse one of the TLS subprotocols (ccs, alert, handshake or
application_data). This is used inside a loop managed by .getfield().
"""
cls = Raw
if pkt.type == 22:
if len(m) >= 1:
msgtype = orb(m[0])
# If a version was agreed on by both client and server,
# we use it (tls_session.tls_version)
# Otherwise, if the client advertised for TLS 1.3, we try to
# dissect the following packets (most likely, server hello)
# using TLS 1.3. The serverhello is able to fallback on
# TLS 1.2 if necessary. In any case, this will set the agreed
# version so that all future packets are correct.
if (
pkt.tls_session.advertised_tls_version == 0x0304
and pkt.tls_session.tls_version is None
) or pkt.tls_session.tls_version == 0x0304:
cls = _tls13_handshake_cls.get(msgtype, Raw)
else:
cls = _tls_handshake_cls.get(msgtype, Raw)
elif pkt.type == 20:
cls = TLSChangeCipherSpec
elif pkt.type == 21:
cls = TLSAlert
elif pkt.type == 23:
cls = TLSApplicationData
if cls is Raw:
return Raw(m)
else:
try:
return cls(m, tls_session=pkt.tls_session)
except Exception:
if conf.debug_dissector:
raise
return Raw(m)
|
def m2i(self, pkt, m):
"""
Try to parse one of the TLS subprotocols (ccs, alert, handshake or
application_data). This is used inside a loop managed by .getfield().
"""
cls = Raw
if pkt.type == 22:
if len(m) >= 1:
msgtype = orb(m[0])
if (pkt.tls_session.advertised_tls_version == 0x0304) or (
pkt.tls_session.tls_version and pkt.tls_session.tls_version == 0x0304
):
cls = _tls13_handshake_cls.get(msgtype, Raw)
else:
cls = _tls_handshake_cls.get(msgtype, Raw)
elif pkt.type == 20:
cls = TLSChangeCipherSpec
elif pkt.type == 21:
cls = TLSAlert
elif pkt.type == 23:
cls = TLSApplicationData
if cls is Raw:
return Raw(m)
else:
try:
return cls(m, tls_session=pkt.tls_session)
except Exception:
if conf.debug_dissector:
raise
return Raw(m)
|
https://github.com/secdev/scapy/issues/2778
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-4c9f13dcdcd8> in <module>
----> 1 r2 = TLS(pcap[1].load, tls_session=r1.tls_session.mirror())
~/scapy_venv/lib/python3.7/site-packages/scapy/base_classes.py in __call__(cls, *args, **kargs)
264 cls = config.conf.raw_layer
265 i = cls.__new__(cls, cls.__name__, cls.__bases__, cls.__dict__)
--> 266 i.__init__(*args, **kargs)
267 return i
268
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in __init__(self, *args, **kargs)
302 def __init__(self, *args, **kargs):
303 self.deciphered_len = kargs.get("deciphered_len", None)
--> 304 super(TLS, self).__init__(*args, **kargs)
305
306 @classmethod
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, _pkt, post_transform, _internal, _underlayer, tls_session, **fields)
902 Packet.__init__(self, _pkt=_pkt, post_transform=post_transform,
903 _internal=_internal, _underlayer=_underlayer,
--> 904 **fields)
905
906 def __getattr__(self, attr):
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in __init__(self, _pkt, post_transform, _internal, _underlayer, **fields)
156 self.sniffed_on = None
157 if _pkt:
--> 158 self.dissect(_pkt)
159 if not _internal:
160 self.dissection_done(self)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in dissect(self, s)
873 s = self.pre_dissect(s)
874
--> 875 s = self.do_dissect(s)
876
877 s = self.post_dissect(s)
~/scapy_venv/lib/python3.7/site-packages/scapy/packet.py in do_dissect(self, s)
837 if not s:
838 break
--> 839 s, fval = f.getfield(self, s)
840 # We need to track fields with mutable values to discard
841 # .raw_packet_cache when needed.
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/record.py in getfield(self, pkt, s)
166 if isinstance(p, _GenericTLSSessionInheritance):
167 if not p.tls_session.frozen:
--> 168 p.post_dissection_tls_session_update(raw_msg)
169
170 lst.append(p)
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in post_dissection_tls_session_update(self, msg_str)
924
925 def post_dissection_tls_session_update(self, msg_str):
--> 926 self.tls_session_update(msg_str)
927
928 def copy(self):
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/handshake.py in tls_session_update(self, msg_str)
648 s.prcs = readConnState(ciphersuite=cs_cls,
649 connection_end=connection_end,
--> 650 tls_version=s.tls_version)
651 if not s.middlebox_compatibility:
652 s.triggered_prcs_commit = True
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, **kargs)
279 class readConnState(connState):
280 def __init__(self, **kargs):
--> 281 connState.__init__(self, read_or_write="read", **kargs)
282
283
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/session.py in __init__(self, connection_end, read_or_write, seq_num, compression_alg, ciphersuite, tls_version)
89 from scapy.layers.tls.crypto.suites import TLS_NULL_WITH_NULL_NULL
90 ciphersuite = TLS_NULL_WITH_NULL_NULL
---> 91 self.ciphersuite = ciphersuite(tls_version=tls_version)
92
93 if not self.ciphersuite.usable:
~/scapy_venv/lib/python3.7/site-packages/scapy/layers/tls/crypto/suites.py in __init__(self, tls_version)
140 """
141 super(_GenericCipherSuite, self).__init__()
--> 142 if tls_version <= 0x301:
143 self.key_block_len = self._key_block_len_v1_0
144
TypeError: '<=' not supported between instances of 'NoneType' and 'int'
|
TypeError
|
def addfield(self, pkt, s, i):
"""
There is a hack with the _ExtensionsField.i2len. It works only because
we expect _ExtensionsField.i2m to return a string of the same size (if
not of the same value) upon successive calls (e.g. through i2len here,
then i2m when directly building the _ExtensionsField).
XXX A proper way to do this would be to keep the extensions built from
the i2len call here, instead of rebuilding them later on.
"""
if i is None:
if self.length_of is not None:
fld, fval = pkt.getfield_and_val(self.length_of)
tmp = pkt.tls_session.frozen
pkt.tls_session.frozen = True
f = fld.i2len(pkt, fval)
pkt.tls_session.frozen = tmp
i = self.adjust(pkt, f)
if i == 0: # for correct build if no ext and not explicitly 0
v = pkt.tls_session.tls_version
# With TLS 1.3, zero lengths are always explicit.
if v is None or v < 0x0304:
return s
else:
return s + struct.pack(self.fmt, i)
return s + struct.pack(self.fmt, i)
|
def addfield(self, pkt, s, i):
"""
There is a hack with the _ExtensionsField.i2len. It works only because
we expect _ExtensionsField.i2m to return a string of the same size (if
not of the same value) upon successive calls (e.g. through i2len here,
then i2m when directly building the _ExtensionsField).
XXX A proper way to do this would be to keep the extensions built from
the i2len call here, instead of rebuilding them later on.
"""
if i is None:
if self.length_of is not None:
fld, fval = pkt.getfield_and_val(self.length_of)
tmp = pkt.tls_session.frozen
pkt.tls_session.frozen = True
f = fld.i2len(pkt, fval)
pkt.tls_session.frozen = tmp
i = self.adjust(pkt, f)
if i == 0: # for correct build if no ext and not explicitly 0
v = pkt.tls_session.tls_version
# Xith TLS 1.3, zero lengths are always explicit.
if v is None or v < 0x0304:
return s
else:
return s + struct.pack(self.fmt, i)
return s + struct.pack(self.fmt, i)
|
https://github.com/secdev/scapy/issues/2763
|
Traceback (most recent call last):
File "utils/poc.py", line 51, in <module>
test_failed_fn()
File "utils/poc.py", line 47, in test_failed_fn
extract_tls_payloads(os.path.join(data_root, fn), 900)
File "utils/poc.py", line 19, in extract_tls_payloads
payload = bytes(p[TLS])
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 612, in build
p = self.do_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 594, in do_build
pkt = self.self_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 575, in self_build
p = f.addfield(self, p, val)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 207, in addfield
res += self.i2m(pkt, p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 194, in i2m
cur = raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/handshake.py", line 1028, in build
m = s.client_random + s.server_random + raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
TypeError: 'NoneType' object cannot be interpreted as an integer
|
TypeError
|
def getfield(self, pkt, s):
tmp_len = self.length_from(pkt) or 0
if tmp_len <= 0:
return s, []
return s[tmp_len:], self.m2i(pkt, s[:tmp_len])
|
def getfield(self, pkt, s):
tmp_len = self.length_from(pkt)
if tmp_len is None:
return s, []
return s[tmp_len:], self.m2i(pkt, s[:tmp_len])
|
https://github.com/secdev/scapy/issues/2763
|
Traceback (most recent call last):
File "utils/poc.py", line 51, in <module>
test_failed_fn()
File "utils/poc.py", line 47, in test_failed_fn
extract_tls_payloads(os.path.join(data_root, fn), 900)
File "utils/poc.py", line 19, in extract_tls_payloads
payload = bytes(p[TLS])
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 612, in build
p = self.do_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 594, in do_build
pkt = self.self_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 575, in self_build
p = f.addfield(self, p, val)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 207, in addfield
res += self.i2m(pkt, p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 194, in i2m
cur = raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/handshake.py", line 1028, in build
m = s.client_random + s.server_random + raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
TypeError: 'NoneType' object cannot be interpreted as an integer
|
TypeError
|
def build(self, *args, **kargs):
r"""
We overload build() method in order to provide a valid default value
for params based on TLS session if not provided. This cannot be done by
overriding i2m() because the method is called on a copy of the packet.
The 'params' field is built according to key_exchange.server_kx_msg_cls
which should have been set after receiving a cipher suite in a
previous ServerHello. Usual cases are:
- None: for RSA encryption or fixed FF/ECDH. This should never happen,
as no ServerKeyExchange should be generated in the first place.
- ServerDHParams: for ephemeral FFDH. In that case, the parameter to
server_kx_msg_cls does not matter.
- ServerECDH\*Params: for ephemeral ECDH. There are actually three
classes, which are dispatched by _tls_server_ecdh_cls_guess on
the first byte retrieved. The default here is b"\03", which
corresponds to ServerECDHNamedCurveParams (implicit curves).
When the Server\*DHParams are built via .fill_missing(), the session
server_kx_privkey will be updated accordingly.
"""
fval = self.getfieldval("params")
if fval is None:
s = self.tls_session
if s.pwcs:
if s.pwcs.key_exchange.export:
cls = ServerRSAParams(tls_session=s)
else:
cls = s.pwcs.key_exchange.server_kx_msg_cls(b"\x03")
cls = cls(tls_session=s)
try:
cls.fill_missing()
except Exception:
if conf.debug_dissector:
raise
else:
cls = Raw()
self.params = cls
fval = self.getfieldval("sig")
if fval is None:
s = self.tls_session
if s.pwcs and s.client_random:
if not s.pwcs.key_exchange.anonymous:
p = self.params
if p is None:
p = b""
m = s.client_random + s.server_random + raw(p)
cls = _TLSSignature(tls_session=s)
cls._update_sig(m, s.server_key)
else:
cls = Raw()
else:
cls = Raw()
self.sig = cls
return _TLSHandshake.build(self, *args, **kargs)
|
def build(self, *args, **kargs):
r"""
We overload build() method in order to provide a valid default value
for params based on TLS session if not provided. This cannot be done by
overriding i2m() because the method is called on a copy of the packet.
The 'params' field is built according to key_exchange.server_kx_msg_cls
which should have been set after receiving a cipher suite in a
previous ServerHello. Usual cases are:
- None: for RSA encryption or fixed FF/ECDH. This should never happen,
as no ServerKeyExchange should be generated in the first place.
- ServerDHParams: for ephemeral FFDH. In that case, the parameter to
server_kx_msg_cls does not matter.
- ServerECDH\*Params: for ephemeral ECDH. There are actually three
classes, which are dispatched by _tls_server_ecdh_cls_guess on
the first byte retrieved. The default here is b"\03", which
corresponds to ServerECDHNamedCurveParams (implicit curves).
When the Server\*DHParams are built via .fill_missing(), the session
server_kx_privkey will be updated accordingly.
"""
fval = self.getfieldval("params")
if fval is None:
s = self.tls_session
if s.pwcs:
if s.pwcs.key_exchange.export:
cls = ServerRSAParams(tls_session=s)
else:
cls = s.pwcs.key_exchange.server_kx_msg_cls(b"\x03")
cls = cls(tls_session=s)
try:
cls.fill_missing()
except Exception:
if conf.debug_dissector:
raise
else:
cls = Raw()
self.params = cls
fval = self.getfieldval("sig")
if fval is None:
s = self.tls_session
if s.pwcs:
if not s.pwcs.key_exchange.anonymous:
p = self.params
if p is None:
p = b""
m = s.client_random + s.server_random + raw(p)
cls = _TLSSignature(tls_session=s)
cls._update_sig(m, s.server_key)
else:
cls = Raw()
else:
cls = Raw()
self.sig = cls
return _TLSHandshake.build(self, *args, **kargs)
|
https://github.com/secdev/scapy/issues/2763
|
Traceback (most recent call last):
File "utils/poc.py", line 51, in <module>
test_failed_fn()
File "utils/poc.py", line 47, in test_failed_fn
extract_tls_payloads(os.path.join(data_root, fn), 900)
File "utils/poc.py", line 19, in extract_tls_payloads
payload = bytes(p[TLS])
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 612, in build
p = self.do_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 594, in do_build
pkt = self.self_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 575, in self_build
p = f.addfield(self, p, val)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 207, in addfield
res += self.i2m(pkt, p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 194, in i2m
cur = raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/handshake.py", line 1028, in build
m = s.client_random + s.server_random + raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
TypeError: 'NoneType' object cannot be interpreted as an integer
|
TypeError
|
def m2i(self, pkt, m):
s = pkt.tls_session
tmp_len = self.length_from(pkt)
if s.prcs:
cls = s.prcs.key_exchange.server_kx_msg_cls(m)
if cls is None:
return Raw(m[:tmp_len]) / Padding(m[tmp_len:])
return cls(m, tls_session=s)
else:
try:
p = ServerDHParams(m, tls_session=s)
if pkcs_os2ip(p.load[:2]) not in _tls_hash_sig:
raise Exception
return p
except Exception:
cls = _tls_server_ecdh_cls_guess(m)
p = cls(m, tls_session=s)
if pkcs_os2ip(p.load[:2]) not in _tls_hash_sig:
return Raw(m[:tmp_len]) / Padding(m[tmp_len:])
return p
|
def m2i(self, pkt, m):
s = pkt.tls_session
tmp_len = self.length_from(pkt)
if s.prcs:
cls = s.prcs.key_exchange.server_kx_msg_cls(m)
if cls is None:
return None, Raw(m[:tmp_len]) / Padding(m[tmp_len:])
return cls(m, tls_session=s)
else:
try:
p = ServerDHParams(m, tls_session=s)
if pkcs_os2ip(p.load[:2]) not in _tls_hash_sig:
raise Exception
return p
except Exception:
cls = _tls_server_ecdh_cls_guess(m)
p = cls(m, tls_session=s)
if pkcs_os2ip(p.load[:2]) not in _tls_hash_sig:
return None, Raw(m[:tmp_len]) / Padding(m[tmp_len:])
return p
|
https://github.com/secdev/scapy/issues/2763
|
Traceback (most recent call last):
File "utils/poc.py", line 51, in <module>
test_failed_fn()
File "utils/poc.py", line 47, in test_failed_fn
extract_tls_payloads(os.path.join(data_root, fn), 900)
File "utils/poc.py", line 19, in extract_tls_payloads
payload = bytes(p[TLS])
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 612, in build
p = self.do_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 594, in do_build
pkt = self.self_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 575, in self_build
p = f.addfield(self, p, val)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 207, in addfield
res += self.i2m(pkt, p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 194, in i2m
cur = raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/handshake.py", line 1028, in build
m = s.client_random + s.server_random + raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
TypeError: 'NoneType' object cannot be interpreted as an integer
|
TypeError
|
def _process_packet(self, pkt):
"""Process each packet: matches the TCP seq/ack numbers
to follow the TCP streams, and orders the fragments.
"""
if self.app:
# Special mode: Application layer. Use on top of TCP
pay_class = pkt.__class__
if not hasattr(pay_class, "tcp_reassemble"):
# Being on top of TCP, we have no way of knowing
# when a packet ends.
return pkt
self.data += bytes(pkt)
pkt = pay_class.tcp_reassemble(self.data, self.metadata)
if pkt:
self.data = b""
self.metadata = {}
return pkt
return
from scapy.layers.inet import IP, TCP
if not pkt or TCP not in pkt:
return pkt
pay = pkt[TCP].payload
if isinstance(pay, (NoPayload, conf.padding_layer)):
return pkt
new_data = pay.original
# Match packets by a uniqute TCP identifier
seq = pkt[TCP].seq
ident = pkt.sprintf(self.fmt)
data, metadata = self.tcp_frags[ident]
# Let's guess which class is going to be used
if "pay_class" not in metadata:
pay_class = pay.__class__
if hasattr(pay_class, "tcp_reassemble"):
tcp_reassemble = pay_class.tcp_reassemble
else:
# We can't know for sure when a packet ends.
# Ignore.
return pkt
metadata["pay_class"] = pay_class
metadata["tcp_reassemble"] = tcp_reassemble
else:
tcp_reassemble = metadata["tcp_reassemble"]
# Get a relative sequence number for a storage purpose
relative_seq = metadata.get("relative_seq", None)
if relative_seq is None:
relative_seq = metadata["relative_seq"] = seq - 1
seq = seq - relative_seq
# Add the data to the buffer
# Note that this take care of retransmission packets.
data.append(new_data, seq)
# Check TCP FIN or TCP RESET
if pkt[TCP].flags.F or pkt[TCP].flags.R:
metadata["tcp_end"] = True
# In case any app layer protocol requires it,
# allow the parser to inspect TCP PSH flag
if pkt[TCP].flags.P:
metadata["tcp_psh"] = True
# XXX TODO: check that no empty space is missing in the buffer.
# XXX Currently, if a TCP fragment was missing, we won't notice it.
packet = None
if data.full():
# Reassemble using all previous packets
packet = tcp_reassemble(bytes(data), metadata)
# Stack the result on top of the previous frames
if packet:
data.clear()
metadata.clear()
del self.tcp_frags[ident]
pay.underlayer.remove_payload()
if IP in pkt:
pkt[IP].len = None
pkt[IP].chksum = None
return pkt / packet
|
def _process_packet(self, pkt):
"""Process each packet: matches the TCP seq/ack numbers
to follow the TCP streams, and orders the fragments.
"""
if self.app:
# Special mode: Application layer. Use on top of TCP
pay_class = pkt.__class__
if not hasattr(pay_class, "tcp_reassemble"):
# Cannot tcp-reassemble
return pkt
self.data += bytes(pkt)
pkt = pay_class.tcp_reassemble(self.data, self.metadata)
if pkt:
self.data = b""
self.metadata = {}
return pkt
return
from scapy.layers.inet import IP, TCP
if not pkt or TCP not in pkt:
return pkt
pay = pkt[TCP].payload
if isinstance(pay, (NoPayload, conf.padding_layer)):
return pkt
new_data = pay.original
# Match packets by a uniqute TCP identifier
seq = pkt[TCP].seq
ident = pkt.sprintf(self.fmt)
data, metadata = self.tcp_frags[ident]
# Let's guess which class is going to be used
if "pay_class" not in metadata:
pay_class = pay.__class__
if not hasattr(pay_class, "tcp_reassemble"):
# Cannot tcp-reassemble
return pkt
metadata["pay_class"] = pay_class
else:
pay_class = metadata["pay_class"]
# Get a relative sequence number for a storage purpose
relative_seq = metadata.get("relative_seq", None)
if relative_seq is None:
relative_seq = metadata["relative_seq"] = seq - 1
seq = seq - relative_seq
# Add the data to the buffer
# Note that this take care of retransmission packets.
data.append(new_data, seq)
# Check TCP FIN or TCP RESET
if pkt[TCP].flags.F or pkt[TCP].flags.R:
metadata["tcp_end"] = True
# In case any app layer protocol requires it,
# allow the parser to inspect TCP PSH flag
if pkt[TCP].flags.P:
metadata["tcp_psh"] = True
# XXX TODO: check that no empty space is missing in the buffer.
# XXX Currently, if a TCP fragment was missing, we won't notice it.
packet = None
if data.full():
# Reassemble using all previous packets
packet = pay_class.tcp_reassemble(bytes(data), metadata)
# Stack the result on top of the previous frames
if packet:
data.clear()
del self.tcp_frags[ident]
pay.underlayer.remove_payload()
if IP in pkt:
pkt[IP].len = None
pkt[IP].chksum = None
return pkt / packet
|
https://github.com/secdev/scapy/issues/2763
|
Traceback (most recent call last):
File "utils/poc.py", line 51, in <module>
test_failed_fn()
File "utils/poc.py", line 47, in test_failed_fn
extract_tls_payloads(os.path.join(data_root, fn), 900)
File "utils/poc.py", line 19, in extract_tls_payloads
payload = bytes(p[TLS])
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 612, in build
p = self.do_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 594, in do_build
pkt = self.self_build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 575, in self_build
p = f.addfield(self, p, val)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 207, in addfield
res += self.i2m(pkt, p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/record.py", line 194, in i2m
cur = raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/session.py", line 966, in __bytes__
built_packet = super(_GenericTLSSessionInheritance, self).__bytes__()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/layers/tls/handshake.py", line 1028, in build
m = s.client_random + s.server_random + raw(p)
File "/data1/wangqian/venv_py3/lib/python3.7/site-packages/scapy/compat.py", line 53, in raw
return bytes(x)
TypeError: 'NoneType' object cannot be interpreted as an integer
|
TypeError
|
def __init__(self, filename, fdesc, magic):
RawPcapReader.__init__(self, filename, fdesc, magic)
try:
self.LLcls = conf.l2types[self.linktype]
except KeyError:
warning(
"PcapReader: unknown LL type [%i]/[%#x]. Using Raw packets"
% (self.linktype, self.linktype)
) # noqa: E501
from scapy.packet import Raw
self.LLcls = conf.raw_layer or Raw
|
def __init__(self, filename, fdesc, magic):
RawPcapReader.__init__(self, filename, fdesc, magic)
try:
self.LLcls = conf.l2types[self.linktype]
except KeyError:
warning(
"PcapReader: unknown LL type [%i]/[%#x]. Using Raw packets"
% (self.linktype, self.linktype)
) # noqa: E501
self.LLcls = conf.raw_layer
|
https://github.com/secdev/scapy/issues/2742
|
$ cat test.py
import sys
import scapy
import scapy.utils
scapy.utils.rdpcap(sys.argv[1])
$ python3 test.py sample.pcap
Traceback (most recent call last):
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1139, in read_packet
p = self.LLcls(s)
TypeError: 'NoneType' object is not callable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "test.py", line 6, in <module>
scapy.utils.rdpcap(sys.argv[1])
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 951, in rdpcap
return fdesc.read_all(count=count)
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1154, in read_all
res = RawPcapReader.read_all(self, count)
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1094, in read_all
p = self.read_packet()
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1147, in read_packet
p = conf.raw_layer(s)
TypeError: 'NoneType' object is not callable
$
|
TypeError
|
def read_packet(self, size=MTU):
rp = super(PcapReader, self).read_packet(size=size)
if rp is None:
raise EOFError
s, pkt_info = rp
try:
p = self.LLcls(s)
except KeyboardInterrupt:
raise
except Exception:
if conf.debug_dissector:
from scapy.sendrecv import debug
debug.crashed_on = (self.LLcls, s)
raise
from scapy.packet import Raw
p = (conf.raw_layer or Raw)(s)
power = Decimal(10) ** Decimal(-9 if self.nano else -6)
p.time = EDecimal(pkt_info.sec + power * pkt_info.usec)
p.wirelen = pkt_info.wirelen
return p
|
def read_packet(self, size=MTU):
rp = super(PcapReader, self).read_packet(size=size)
if rp is None:
raise EOFError
s, pkt_info = rp
try:
p = self.LLcls(s)
except KeyboardInterrupt:
raise
except Exception:
if conf.debug_dissector:
from scapy.sendrecv import debug
debug.crashed_on = (self.LLcls, s)
raise
p = conf.raw_layer(s)
power = Decimal(10) ** Decimal(-9 if self.nano else -6)
p.time = EDecimal(pkt_info.sec + power * pkt_info.usec)
p.wirelen = pkt_info.wirelen
return p
|
https://github.com/secdev/scapy/issues/2742
|
$ cat test.py
import sys
import scapy
import scapy.utils
scapy.utils.rdpcap(sys.argv[1])
$ python3 test.py sample.pcap
Traceback (most recent call last):
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1139, in read_packet
p = self.LLcls(s)
TypeError: 'NoneType' object is not callable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "test.py", line 6, in <module>
scapy.utils.rdpcap(sys.argv[1])
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 951, in rdpcap
return fdesc.read_all(count=count)
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1154, in read_all
res = RawPcapReader.read_all(self, count)
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1094, in read_all
p = self.read_packet()
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1147, in read_packet
p = conf.raw_layer(s)
TypeError: 'NoneType' object is not callable
$
|
TypeError
|
def read_packet(self, size=MTU):
rp = super(PcapNgReader, self).read_packet(size=size)
if rp is None:
raise EOFError
s, (linktype, tsresol, tshigh, tslow, wirelen) = rp
try:
p = conf.l2types[linktype](s)
except KeyboardInterrupt:
raise
except Exception:
if conf.debug_dissector:
raise
from scapy.packet import Raw
p = (conf.raw_layer or Raw)(s)
if tshigh is not None:
p.time = EDecimal((tshigh << 32) + tslow) / tsresol
p.wirelen = wirelen
return p
|
def read_packet(self, size=MTU):
rp = super(PcapNgReader, self).read_packet(size=size)
if rp is None:
raise EOFError
s, (linktype, tsresol, tshigh, tslow, wirelen) = rp
try:
p = conf.l2types[linktype](s)
except KeyboardInterrupt:
raise
except Exception:
if conf.debug_dissector:
raise
p = conf.raw_layer(s)
if tshigh is not None:
p.time = EDecimal((tshigh << 32) + tslow) / tsresol
p.wirelen = wirelen
return p
|
https://github.com/secdev/scapy/issues/2742
|
$ cat test.py
import sys
import scapy
import scapy.utils
scapy.utils.rdpcap(sys.argv[1])
$ python3 test.py sample.pcap
Traceback (most recent call last):
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1139, in read_packet
p = self.LLcls(s)
TypeError: 'NoneType' object is not callable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "test.py", line 6, in <module>
scapy.utils.rdpcap(sys.argv[1])
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 951, in rdpcap
return fdesc.read_all(count=count)
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1154, in read_all
res = RawPcapReader.read_all(self, count)
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1094, in read_all
p = self.read_packet()
File "/home/user/ven/lib/python3.8/site-packages/scapy/utils.py", line 1147, in read_packet
p = conf.raw_layer(s)
TypeError: 'NoneType' object is not callable
$
|
TypeError
|
def m2i(self, pkt, val):
ret = []
for v in val:
byte = orb(v)
left = byte >> 4
right = byte & 0xF
if left == 0xF:
ret.append(TBCD_TO_ASCII[right : right + 1])
else:
ret += [TBCD_TO_ASCII[right : right + 1], TBCD_TO_ASCII[left : left + 1]]
return b"".join(ret)
|
def m2i(self, pkt, val):
ret = []
for v in val:
byte = orb(v)
left = byte >> 4
right = byte & 0xF
if left == 0xF:
ret.append(TBCD_TO_ASCII[right : right + 1])
else:
ret += [TBCD_TO_ASCII[right : right + 1], TBCD_TO_ASCII[left : left + 1]] # noqa: E501
return b"".join(ret)
|
https://github.com/secdev/scapy/issues/2485
|
from scapy.contrib.gtp_v2 import IE_IMSI
ie = IE_IMSI(ietype='IMSI', length=8, IMSI='2080112345670000')
ie
<IE_IMSI ietype=IMSI length=8 IMSI='2080112345670000' |>
len(ie)
Traceback (most recent call last):
File "/usr/lib/python3.5/code.py", line 91, in runcode
exec(code, self.locals)
File "<console>", line 1, in <module>
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 527, in __len__
return len(self.__bytes__())
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 612, in build
p = self.do_build()
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 594, in do_build
pkt = self.self_build()
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 575, in self_build
p = f.addfield(self, p, val)
File "/root/Umakant/gtpv2/scapy-master/scapy/fields.py", line 1267, in addfield
return s + struct.pack("%is" % len_pkt, self.i2m(pkt, val))
File "/root/Umakant/gtpv2/scapy-master/scapy/contrib/gtp.py", line 179, in i2m
ret_string += chr(int(tmp[1] + tmp[0], 16))
ValueError: invalid literal for int() with base 16: "'b"
|
ValueError
|
def i2m(self, pkt, val):
if not isinstance(val, bytes):
val = bytes_encode(val)
ret_string = b""
for i in range(0, len(val), 2):
tmp = val[i : i + 2]
if len(tmp) == 2:
ret_string += chb(int(tmp[::-1], 16))
else:
ret_string += chb(int(b"F" + tmp[:1], 16))
return ret_string
|
def i2m(self, pkt, val):
val = str(val)
ret_string = ""
for i in range(0, len(val), 2):
tmp = val[i : i + 2]
if len(tmp) == 2:
ret_string += chr(int(tmp[1] + tmp[0], 16))
else:
ret_string += chr(int("F" + tmp[0], 16))
return ret_string
|
https://github.com/secdev/scapy/issues/2485
|
from scapy.contrib.gtp_v2 import IE_IMSI
ie = IE_IMSI(ietype='IMSI', length=8, IMSI='2080112345670000')
ie
<IE_IMSI ietype=IMSI length=8 IMSI='2080112345670000' |>
len(ie)
Traceback (most recent call last):
File "/usr/lib/python3.5/code.py", line 91, in runcode
exec(code, self.locals)
File "<console>", line 1, in <module>
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 527, in __len__
return len(self.__bytes__())
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 612, in build
p = self.do_build()
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 594, in do_build
pkt = self.self_build()
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 575, in self_build
p = f.addfield(self, p, val)
File "/root/Umakant/gtpv2/scapy-master/scapy/fields.py", line 1267, in addfield
return s + struct.pack("%is" % len_pkt, self.i2m(pkt, val))
File "/root/Umakant/gtpv2/scapy-master/scapy/contrib/gtp.py", line 179, in i2m
ret_string += chr(int(tmp[1] + tmp[0], 16))
ValueError: invalid literal for int() with base 16: "'b"
|
ValueError
|
def i2m(self, pkt, s):
if not isinstance(s, bytes):
s = bytes_encode(s)
s = b"".join(chb(len(x)) + x for x in s.split(b"."))
return s
|
def i2m(self, pkt, s):
s = b"".join(chb(len(x)) + x for x in s.split("."))
return s
|
https://github.com/secdev/scapy/issues/2485
|
from scapy.contrib.gtp_v2 import IE_IMSI
ie = IE_IMSI(ietype='IMSI', length=8, IMSI='2080112345670000')
ie
<IE_IMSI ietype=IMSI length=8 IMSI='2080112345670000' |>
len(ie)
Traceback (most recent call last):
File "/usr/lib/python3.5/code.py", line 91, in runcode
exec(code, self.locals)
File "<console>", line 1, in <module>
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 527, in __len__
return len(self.__bytes__())
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 492, in __bytes__
return self.build()
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 612, in build
p = self.do_build()
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 594, in do_build
pkt = self.self_build()
File "/root/Umakant/gtpv2/scapy-master/scapy/packet.py", line 575, in self_build
p = f.addfield(self, p, val)
File "/root/Umakant/gtpv2/scapy-master/scapy/fields.py", line 1267, in addfield
return s + struct.pack("%is" % len_pkt, self.i2m(pkt, val))
File "/root/Umakant/gtpv2/scapy-master/scapy/contrib/gtp.py", line 179, in i2m
ret_string += chr(int(tmp[1] + tmp[0], 16))
ValueError: invalid literal for int() with base 16: "'b"
|
ValueError
|
def __add__(self, other, **kwargs):
return EDecimal(Decimal.__add__(self, Decimal(other), **kwargs))
|
def __add__(self, other, **kwargs):
return EDecimal(Decimal.__add__(self, other, **kwargs))
|
https://github.com/secdev/scapy/issues/2433
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy-2.4.3.dev227-py2.7.egg/scapy/utils.py", line 67, in __mul__
return EDecimal(Decimal.__mul__(self, other, **kwargs))
File "/usr/lib/python2.7/decimal.py", line 657, in __new__
raise TypeError("Cannot convert %r to Decimal" % value)
TypeError: Cannot convert NotImplemented to Decimal
|
TypeError
|
def __sub__(self, other, **kwargs):
return EDecimal(Decimal.__sub__(self, Decimal(other), **kwargs))
|
def __sub__(self, other, **kwargs):
return EDecimal(Decimal.__sub__(self, other, **kwargs))
|
https://github.com/secdev/scapy/issues/2433
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy-2.4.3.dev227-py2.7.egg/scapy/utils.py", line 67, in __mul__
return EDecimal(Decimal.__mul__(self, other, **kwargs))
File "/usr/lib/python2.7/decimal.py", line 657, in __new__
raise TypeError("Cannot convert %r to Decimal" % value)
TypeError: Cannot convert NotImplemented to Decimal
|
TypeError
|
def __mul__(self, other, **kwargs):
return EDecimal(Decimal.__mul__(self, Decimal(other), **kwargs))
|
def __mul__(self, other, **kwargs):
return EDecimal(Decimal.__mul__(self, other, **kwargs))
|
https://github.com/secdev/scapy/issues/2433
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy-2.4.3.dev227-py2.7.egg/scapy/utils.py", line 67, in __mul__
return EDecimal(Decimal.__mul__(self, other, **kwargs))
File "/usr/lib/python2.7/decimal.py", line 657, in __new__
raise TypeError("Cannot convert %r to Decimal" % value)
TypeError: Cannot convert NotImplemented to Decimal
|
TypeError
|
def __truediv__(self, other, **kwargs):
return EDecimal(Decimal.__truediv__(self, Decimal(other), **kwargs))
|
def __truediv__(self, other, **kwargs):
return EDecimal(Decimal.__truediv__(self, other, **kwargs))
|
https://github.com/secdev/scapy/issues/2433
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy-2.4.3.dev227-py2.7.egg/scapy/utils.py", line 67, in __mul__
return EDecimal(Decimal.__mul__(self, other, **kwargs))
File "/usr/lib/python2.7/decimal.py", line 657, in __new__
raise TypeError("Cannot convert %r to Decimal" % value)
TypeError: Cannot convert NotImplemented to Decimal
|
TypeError
|
def __floordiv__(self, other, **kwargs):
return EDecimal(Decimal.__floordiv__(self, Decimal(other), **kwargs))
|
def __floordiv__(self, other, **kwargs):
return EDecimal(Decimal.__floordiv__(self, other, **kwargs))
|
https://github.com/secdev/scapy/issues/2433
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy-2.4.3.dev227-py2.7.egg/scapy/utils.py", line 67, in __mul__
return EDecimal(Decimal.__mul__(self, other, **kwargs))
File "/usr/lib/python2.7/decimal.py", line 657, in __new__
raise TypeError("Cannot convert %r to Decimal" % value)
TypeError: Cannot convert NotImplemented to Decimal
|
TypeError
|
def __div__(self, other, **kwargs):
return EDecimal(Decimal.__div__(self, Decimal(other), **kwargs))
|
def __div__(self, other, **kwargs):
return EDecimal(Decimal.__div__(self, other, **kwargs))
|
https://github.com/secdev/scapy/issues/2433
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy-2.4.3.dev227-py2.7.egg/scapy/utils.py", line 67, in __mul__
return EDecimal(Decimal.__mul__(self, other, **kwargs))
File "/usr/lib/python2.7/decimal.py", line 657, in __new__
raise TypeError("Cannot convert %r to Decimal" % value)
TypeError: Cannot convert NotImplemented to Decimal
|
TypeError
|
def __mod__(self, other, **kwargs):
return EDecimal(Decimal.__mod__(self, Decimal(other), **kwargs))
|
def __mod__(self, other, **kwargs):
return EDecimal(Decimal.__mod__(self, other, **kwargs))
|
https://github.com/secdev/scapy/issues/2433
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy-2.4.3.dev227-py2.7.egg/scapy/utils.py", line 67, in __mul__
return EDecimal(Decimal.__mul__(self, other, **kwargs))
File "/usr/lib/python2.7/decimal.py", line 657, in __new__
raise TypeError("Cannot convert %r to Decimal" % value)
TypeError: Cannot convert NotImplemented to Decimal
|
TypeError
|
def __divmod__(self, other, **kwargs):
return EDecimal(Decimal.__divmod__(self, Decimal(other), **kwargs))
|
def __divmod__(self, other, **kwargs):
return EDecimal(Decimal.__divmod__(self, other, **kwargs))
|
https://github.com/secdev/scapy/issues/2433
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy-2.4.3.dev227-py2.7.egg/scapy/utils.py", line 67, in __mul__
return EDecimal(Decimal.__mul__(self, other, **kwargs))
File "/usr/lib/python2.7/decimal.py", line 657, in __new__
raise TypeError("Cannot convert %r to Decimal" % value)
TypeError: Cannot convert NotImplemented to Decimal
|
TypeError
|
def __pow__(self, other, **kwargs):
return EDecimal(Decimal.__pow__(self, Decimal(other), **kwargs))
|
def __pow__(self, other, **kwargs):
return EDecimal(Decimal.__pow__(self, other, **kwargs))
|
https://github.com/secdev/scapy/issues/2433
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy-2.4.3.dev227-py2.7.egg/scapy/utils.py", line 67, in __mul__
return EDecimal(Decimal.__mul__(self, other, **kwargs))
File "/usr/lib/python2.7/decimal.py", line 657, in __new__
raise TypeError("Cannot convert %r to Decimal" % value)
TypeError: Cannot convert NotImplemented to Decimal
|
TypeError
|
def _check_len(self, pkt):
"""Check for odd packet length and pad according to Cisco spec.
This padding is only used for checksum computation. The original
packet should not be altered."""
if len(pkt) % 2:
last_chr = orb(pkt[-1])
if last_chr <= 0x80:
return pkt[:-1] + b"\x00" + chb(last_chr)
else:
return pkt[:-1] + b"\xff" + chb(orb(last_chr) - 1)
else:
return pkt
|
def _check_len(self, pkt):
"""Check for odd packet length and pad according to Cisco spec.
This padding is only used for checksum computation. The original
packet should not be altered."""
if len(pkt) % 2:
last_chr = pkt[-1]
if last_chr <= b"\x80":
return pkt[:-1] + b"\x00" + last_chr
else:
return pkt[:-1] + b"\xff" + chb(orb(last_chr) - 1)
else:
return pkt
|
https://github.com/secdev/scapy/issues/2413
|
root@ubuntu:/workspace/_prototype/rtg-avatar# python
Python 3.7.5 (default, Nov 7 2019, 10:50:52)
[GCC 8.3.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
from scapy.all import *
from scapy.contrib import *
pkt=CDPv2_HDR(vers=2, ttl=180, msg='123')
len(pkt)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 522, in __len__
return len(self.__bytes__())
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 487, in __bytes__
return self.build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 607, in build
p = self.do_build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 594, in do_build
return self.post_build(pkt, pay)
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 371, in post_build
cksum = checksum(self._check_len(p))
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 361, in _check_len
if last_chr <= b'\x80':
TypeError: '<=' not supported between instances of 'int' and 'bytes'
Error in sys.excepthook:
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/apport_python_hook.py", line 63, in apport_excepthook
from apport.fileutils import likely_packaged, get_recent_crashes
File "/usr/lib/python3/dist-packages/apport/__init__.py", line 5, in <module>
from apport.report import Report
File "/usr/lib/python3/dist-packages/apport/report.py", line 30, in <module>
import apport.fileutils
File "/usr/lib/python3/dist-packages/apport/fileutils.py", line 23, in <module>
from apport.packaging_impl import impl as packaging
File "/usr/lib/python3/dist-packages/apport/packaging_impl.py", line 24, in <module>
import apt
File "/usr/lib/python3/dist-packages/apt/__init__.py", line 23, in <module>
import apt_pkg
ModuleNotFoundError: No module named 'apt_pkg'
Original exception was:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 522, in __len__
return len(self.__bytes__())
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 487, in __bytes__
return self.build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 607, in build
p = self.do_build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 594, in do_build
return self.post_build(pkt, pay)
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 371, in post_build
cksum = checksum(self._check_len(p))
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 361, in _check_len
if last_chr <= b'\x80':
TypeError: '<=' not supported between instances of 'int' and 'bytes'
|
TypeError
|
def post_build(self, p, pay):
vlannamelen = 4 * ((len(self.vlanname) + 3) // 4)
if self.len is None:
tmp_len = vlannamelen + 12
p = chb(tmp_len & 0xFF) + p[1:]
# Pad vlan name with zeros if vlannamelen > len(vlanname)
tmp_len = vlannamelen - len(self.vlanname)
if tmp_len != 0:
p += b"\x00" * tmp_len
p += pay
return p
|
def post_build(self, p, pay):
vlannamelen = 4 * ((len(self.vlanname) + 3) // 4)
if self.len is None:
tmp_len = vlannamelen + 12
p = chr(tmp_len & 0xFF) + p[1:]
# Pad vlan name with zeros if vlannamelen > len(vlanname)
tmp_len = vlannamelen - len(self.vlanname)
if tmp_len != 0:
p += b"\x00" * tmp_len
p += pay
return p
|
https://github.com/secdev/scapy/issues/2413
|
root@ubuntu:/workspace/_prototype/rtg-avatar# python
Python 3.7.5 (default, Nov 7 2019, 10:50:52)
[GCC 8.3.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
from scapy.all import *
from scapy.contrib import *
pkt=CDPv2_HDR(vers=2, ttl=180, msg='123')
len(pkt)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 522, in __len__
return len(self.__bytes__())
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 487, in __bytes__
return self.build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 607, in build
p = self.do_build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 594, in do_build
return self.post_build(pkt, pay)
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 371, in post_build
cksum = checksum(self._check_len(p))
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 361, in _check_len
if last_chr <= b'\x80':
TypeError: '<=' not supported between instances of 'int' and 'bytes'
Error in sys.excepthook:
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/apport_python_hook.py", line 63, in apport_excepthook
from apport.fileutils import likely_packaged, get_recent_crashes
File "/usr/lib/python3/dist-packages/apport/__init__.py", line 5, in <module>
from apport.report import Report
File "/usr/lib/python3/dist-packages/apport/report.py", line 30, in <module>
import apport.fileutils
File "/usr/lib/python3/dist-packages/apport/fileutils.py", line 23, in <module>
from apport.packaging_impl import impl as packaging
File "/usr/lib/python3/dist-packages/apport/packaging_impl.py", line 24, in <module>
import apt
File "/usr/lib/python3/dist-packages/apt/__init__.py", line 23, in <module>
import apt_pkg
ModuleNotFoundError: No module named 'apt_pkg'
Original exception was:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 522, in __len__
return len(self.__bytes__())
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 487, in __bytes__
return self.build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 607, in build
p = self.do_build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 594, in do_build
return self.post_build(pkt, pay)
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 371, in post_build
cksum = checksum(self._check_len(p))
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 361, in _check_len
if last_chr <= b'\x80':
TypeError: '<=' not supported between instances of 'int' and 'bytes'
|
TypeError
|
def post_build(self, p, pay):
if self.domnamelen is None:
domnamelen = len(self.domname.strip(b"\x00"))
p = p[:3] + chb(domnamelen & 0xFF) + p[4:]
p += pay
return p
|
def post_build(self, p, pay):
if self.domnamelen is None:
domnamelen = len(self.domname.strip(b"\x00"))
p = p[:3] + chr(domnamelen & 0xFF) + p[4:]
p += pay
return p
|
https://github.com/secdev/scapy/issues/2413
|
root@ubuntu:/workspace/_prototype/rtg-avatar# python
Python 3.7.5 (default, Nov 7 2019, 10:50:52)
[GCC 8.3.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
from scapy.all import *
from scapy.contrib import *
pkt=CDPv2_HDR(vers=2, ttl=180, msg='123')
len(pkt)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 522, in __len__
return len(self.__bytes__())
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 487, in __bytes__
return self.build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 607, in build
p = self.do_build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 594, in do_build
return self.post_build(pkt, pay)
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 371, in post_build
cksum = checksum(self._check_len(p))
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 361, in _check_len
if last_chr <= b'\x80':
TypeError: '<=' not supported between instances of 'int' and 'bytes'
Error in sys.excepthook:
Traceback (most recent call last):
File "/usr/lib/python3/dist-packages/apport_python_hook.py", line 63, in apport_excepthook
from apport.fileutils import likely_packaged, get_recent_crashes
File "/usr/lib/python3/dist-packages/apport/__init__.py", line 5, in <module>
from apport.report import Report
File "/usr/lib/python3/dist-packages/apport/report.py", line 30, in <module>
import apport.fileutils
File "/usr/lib/python3/dist-packages/apport/fileutils.py", line 23, in <module>
from apport.packaging_impl import impl as packaging
File "/usr/lib/python3/dist-packages/apport/packaging_impl.py", line 24, in <module>
import apt
File "/usr/lib/python3/dist-packages/apt/__init__.py", line 23, in <module>
import apt_pkg
ModuleNotFoundError: No module named 'apt_pkg'
Original exception was:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 522, in __len__
return len(self.__bytes__())
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 487, in __bytes__
return self.build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 607, in build
p = self.do_build()
File "/usr/local/lib/python3.7/dist-packages/scapy/packet.py", line 594, in do_build
return self.post_build(pkt, pay)
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 371, in post_build
cksum = checksum(self._check_len(p))
File "/usr/local/lib/python3.7/dist-packages/scapy/contrib/cdp.py", line 361, in _check_len
if last_chr <= b'\x80':
TypeError: '<=' not supported between instances of 'int' and 'bytes'
|
TypeError
|
def compile_filter(filter_exp, iface=None, linktype=None, promisc=False):
"""Asks libpcap to parse the filter, then build the matching
BPF bytecode.
:param iface: if provided, use the interface to compile
:param linktype: if provided, use the linktype to compile
"""
try:
from scapy.libs.winpcapy import (
PCAP_ERRBUF_SIZE,
pcap_open_live,
pcap_compile,
pcap_compile_nopcap,
pcap_close,
)
from scapy.libs.structures import bpf_program
except ImportError:
raise Scapy_Exception("libpcap is not available. Cannot compile filter !")
root = WINDOWS or (os.geteuid() == 0)
from ctypes import create_string_buffer
bpf = bpf_program()
bpf_filter = create_string_buffer(filter_exp.encode("utf8"))
if not linktype:
# Try to guess linktype to avoid root
if not iface:
if not conf.iface:
raise Scapy_Exception("Please provide an interface or linktype!")
if WINDOWS:
iface = conf.iface.pcap_name
else:
iface = conf.iface
# Try to guess linktype to avoid requiring root
try:
arphd = get_if_raw_hwaddr(iface)[0]
linktype = ARPHRD_TO_DLT.get(arphd)
except Exception:
# Failed to use linktype: use the interface
if not root:
raise Scapy_Exception("Please provide a valid interface or linktype!")
if linktype is not None:
ret = pcap_compile_nopcap(MTU, linktype, ctypes.byref(bpf), bpf_filter, 0, -1)
elif iface:
if not root:
raise OSError("Compiling using an interface requires root.")
err = create_string_buffer(PCAP_ERRBUF_SIZE)
iface = create_string_buffer(iface.encode("utf8"))
pcap = pcap_open_live(iface, MTU, promisc, 0, err)
ret = pcap_compile(pcap, ctypes.byref(bpf), bpf_filter, 0, -1)
pcap_close(pcap)
if ret == -1:
raise Scapy_Exception(
"Failed to compile filter expression %s (%s)" % (filter_exp, ret)
)
if conf.use_pypy and sys.pypy_version_info <= (7, 3, 0):
# PyPy < 7.3.0 has a broken behavior
# https://bitbucket.org/pypy/pypy/issues/3114
return struct.pack("HL", bpf.bf_len, ctypes.addressof(bpf.bf_insns.contents))
return bpf
|
def compile_filter(filter_exp, iface=None, linktype=None, promisc=False):
"""Asks libpcap to parse the filter, then build the matching
BPF bytecode.
:param iface: if provided, use the interface to compile
:param linktype: if provided, use the linktype to compile
"""
try:
from scapy.libs.winpcapy import (
PCAP_ERRBUF_SIZE,
pcap_open_live,
pcap_compile,
pcap_compile_nopcap,
pcap_close,
)
from scapy.libs.structures import bpf_program
except ImportError:
raise Scapy_Exception("libpcap is not available. Cannot compile filter !")
root = WINDOWS or (os.geteuid() == 0)
from ctypes import create_string_buffer
bpf = bpf_program()
bpf_filter = create_string_buffer(filter_exp.encode("utf8"))
if not linktype:
# Try to guess linktype to avoid root
if not iface:
if not conf.iface:
raise Scapy_Exception("Please provide an interface or linktype!")
if WINDOWS:
iface = conf.iface.pcap_name
else:
iface = conf.iface
# Try to guess linktype to avoid requiring root
try:
arphd = get_if_raw_hwaddr(iface)[0]
linktype = ARPHRD_TO_DLT.get(arphd)
except Exception:
# Failed to use linktype: use the interface
if not root:
raise Scapy_Exception("Please provide a valid interface or linktype!")
if linktype is not None:
ret = pcap_compile_nopcap(MTU, linktype, ctypes.byref(bpf), bpf_filter, 0, -1)
elif iface:
if not root:
raise OSError("Compiling using an interface requires root.")
err = create_string_buffer(PCAP_ERRBUF_SIZE)
iface = create_string_buffer(iface.encode("utf8"))
pcap = pcap_open_live(iface, MTU, promisc, 0, err)
ret = pcap_compile(pcap, ctypes.byref(bpf), bpf_filter, 0, -1)
pcap_close(pcap)
if ret == -1:
raise Scapy_Exception(
"Failed to compile filter expression %s (%s)" % (filter_exp, ret)
)
if conf.use_pypy:
# XXX PyPy has a broken behavior.
# https://bitbucket.org/pypy/pypy/issues/3114
return struct.pack("HL", bpf.bf_len, ctypes.addressof(bpf.bf_insns.contents))
return bpf
|
https://github.com/secdev/scapy/issues/2393
|
a = Ether(dst='64:A2:F9:A9:2E:A9')/ARP(pdst='192.168.178.0/24')
a.show()
AttributeError
Traceback (most recent call last)<ipython-input-18-97d3136b2c17> in <module>
----> 1 a.show()
/netdisk/home/gpotter/github/scapy/scapy/packet.py in show(self, dump, indent, lvl, label_lvl)
1251 :return: return a hierarchical view if dump, else print it
1252 """
-> 1253 return self._show_or_dump(dump, indent, lvl, label_lvl)
1254
1255 def show2(self, dump=False, indent=3, lvl="", label_lvl=""):
/netdisk/home/gpotter/github/scapy/scapy/packet.py in _show_or_dump(self, dump, indent, lvl, label_lvl, first_call)
1225 ncol(f.name),
1226 ct.punct("="),)
-> 1227 reprval = f.i2repr(self, fvalue)
1228 if isinstance(reprval, str):
1229 reprval = reprval.replace("\n", "\n" + " " * (len(label_lvl) + # noqa: E501
/netdisk/home/gpotter/github/scapy/scapy/fields.py in i2repr(self, pkt, x)
513
514 def i2repr(self, pkt, x):
--> 515 x = self.i2h(pkt, x)
516 if self in conf.resolve:
517 x = conf.manufdb._resolve_MAC(x)
/netdisk/home/gpotter/github/scapy/scapy/layers/l2.py in i2h(self, pkt, x)
140 def i2h(self, pkt, x):
141 if x is None:
--> 142 iff = self.getif(pkt)
143 if iff is None:
144 iff = conf.iface
/netdisk/home/gpotter/github/scapy/scapy/layers/l2.py in <lambda>(pkt)
136 def __init__(self, name, getif=None):
137 MACField.__init__(self, name, None)
--> 138 self.getif = (lambda pkt: pkt.route()[0]) if getif is None else getif
139
140 def i2h(self, pkt, x):
/netdisk/home/gpotter/github/scapy/scapy/packet.py in route(self)
1175
1176 def route(self):
-> 1177 return self.payload.route()
1178
1179 def fragment(self, *args, **kargs):
/netdisk/home/gpotter/github/scapy/scapy/layers/l2.py in route(self)
411 def route(self):
412 fld, dst = self.getfield_and_val("pdst")--> 413 fld, dst = fld._find_fld_pkt_val(self, dst)
414 if isinstance(dst, Gen):
415 dst = next(iter(dst))
/netdisk/home/gpotter/github/scapy/scapy/fields.py in _find_fld_pkt_val(self, pkt, val)
324 # Default ? (in this case, let's make sure it's up-do-date)
325 dflts_pkt = pkt.default_fields
--> 326 if val == dflts_pkt[self.name] and self.name not in pkt.fields:
327 dflts_pkt[self.name] = fld.default
328 val = fld.default
/netdisk/home/gpotter/github/scapy/scapy/base_classes.py in __eq__(self, other)
131 p2 = other.parsed
132 else:
--> 133 p2, nm2 = self._parse_net(other)
134 return self.parsed == p2
135
/netdisk/home/gpotter/github/scapy/scapy/base_classes.py in _parse_net(cls, net)
97 @classmethod
98 def _parse_net(cls, net):
---> 99 tmp = net.split('/') + ["32"]
100 if not cls.ip_regex.match(net):
101 tmp[0] = socket.gethostbyname(tmp[0])
AttributeError: 'NoneType' object has no attribute 'split'
|
AttributeError
|
def __eq__(self, other):
if not other:
return False
if hasattr(other, "parsed"):
p2 = other.parsed
else:
p2, nm2 = self._parse_net(other)
return self.parsed == p2
|
def __eq__(self, other):
if hasattr(other, "parsed"):
p2 = other.parsed
else:
p2, nm2 = self._parse_net(other)
return self.parsed == p2
|
https://github.com/secdev/scapy/issues/2393
|
a = Ether(dst='64:A2:F9:A9:2E:A9')/ARP(pdst='192.168.178.0/24')
a.show()
AttributeError
Traceback (most recent call last)<ipython-input-18-97d3136b2c17> in <module>
----> 1 a.show()
/netdisk/home/gpotter/github/scapy/scapy/packet.py in show(self, dump, indent, lvl, label_lvl)
1251 :return: return a hierarchical view if dump, else print it
1252 """
-> 1253 return self._show_or_dump(dump, indent, lvl, label_lvl)
1254
1255 def show2(self, dump=False, indent=3, lvl="", label_lvl=""):
/netdisk/home/gpotter/github/scapy/scapy/packet.py in _show_or_dump(self, dump, indent, lvl, label_lvl, first_call)
1225 ncol(f.name),
1226 ct.punct("="),)
-> 1227 reprval = f.i2repr(self, fvalue)
1228 if isinstance(reprval, str):
1229 reprval = reprval.replace("\n", "\n" + " " * (len(label_lvl) + # noqa: E501
/netdisk/home/gpotter/github/scapy/scapy/fields.py in i2repr(self, pkt, x)
513
514 def i2repr(self, pkt, x):
--> 515 x = self.i2h(pkt, x)
516 if self in conf.resolve:
517 x = conf.manufdb._resolve_MAC(x)
/netdisk/home/gpotter/github/scapy/scapy/layers/l2.py in i2h(self, pkt, x)
140 def i2h(self, pkt, x):
141 if x is None:
--> 142 iff = self.getif(pkt)
143 if iff is None:
144 iff = conf.iface
/netdisk/home/gpotter/github/scapy/scapy/layers/l2.py in <lambda>(pkt)
136 def __init__(self, name, getif=None):
137 MACField.__init__(self, name, None)
--> 138 self.getif = (lambda pkt: pkt.route()[0]) if getif is None else getif
139
140 def i2h(self, pkt, x):
/netdisk/home/gpotter/github/scapy/scapy/packet.py in route(self)
1175
1176 def route(self):
-> 1177 return self.payload.route()
1178
1179 def fragment(self, *args, **kargs):
/netdisk/home/gpotter/github/scapy/scapy/layers/l2.py in route(self)
411 def route(self):
412 fld, dst = self.getfield_and_val("pdst")--> 413 fld, dst = fld._find_fld_pkt_val(self, dst)
414 if isinstance(dst, Gen):
415 dst = next(iter(dst))
/netdisk/home/gpotter/github/scapy/scapy/fields.py in _find_fld_pkt_val(self, pkt, val)
324 # Default ? (in this case, let's make sure it's up-do-date)
325 dflts_pkt = pkt.default_fields
--> 326 if val == dflts_pkt[self.name] and self.name not in pkt.fields:
327 dflts_pkt[self.name] = fld.default
328 val = fld.default
/netdisk/home/gpotter/github/scapy/scapy/base_classes.py in __eq__(self, other)
131 p2 = other.parsed
132 else:
--> 133 p2, nm2 = self._parse_net(other)
134 return self.parsed == p2
135
/netdisk/home/gpotter/github/scapy/scapy/base_classes.py in _parse_net(cls, net)
97 @classmethod
98 def _parse_net(cls, net):
---> 99 tmp = net.split('/') + ["32"]
100 if not cls.ip_regex.match(net):
101 tmp[0] = socket.gethostbyname(tmp[0])
AttributeError: 'NoneType' object has no attribute 'split'
|
AttributeError
|
def explore(layer=None):
"""Function used to discover the Scapy layers and protocols.
It helps to see which packets exists in contrib or layer files.
params:
- layer: If specified, the function will explore the layer. If not,
the GUI mode will be activated, to browse the available layers
examples:
>>> explore() # Launches the GUI
>>> explore("dns") # Explore scapy.layers.dns
>>> explore("http2") # Explore scapy.contrib.http2
>>> explore(scapy.layers.bluetooth4LE)
Note: to search a packet by name, use ls("name") rather than explore.
"""
if layer is None: # GUI MODE
if not conf.interactive:
raise Scapy_Exception(
"explore() GUI-mode cannot be run in "
"interactive mode. Please provide a "
"'layer' parameter !"
)
# 0 - Imports
try:
import prompt_toolkit
except ImportError:
raise ImportError(
"prompt_toolkit is not installed ! "
"You may install IPython, which contains it, via"
" `pip install ipython`"
)
if not _version_checker(prompt_toolkit, (2, 0)):
raise ImportError("prompt_toolkit >= 2.0.0 is required !")
# Only available with prompt_toolkit > 2.0, not released on PyPi yet
from prompt_toolkit.shortcuts.dialogs import radiolist_dialog, button_dialog
from prompt_toolkit.formatted_text import HTML
# Check for prompt_toolkit >= 3.0.0
if _version_checker(prompt_toolkit, (3, 0)):
call_ptk = lambda x: x.run()
else:
call_ptk = lambda x: x
# 1 - Ask for layer or contrib
btn_diag = button_dialog(
title=six.text_type("Scapy v%s" % conf.version),
text=HTML(
six.text_type(
'<style bg="white" fg="red">Chose the type of packets'
" you want to explore:</style>"
)
),
buttons=[
(six.text_type("Layers"), "layers"),
(six.text_type("Contribs"), "contribs"),
(six.text_type("Cancel"), "cancel"),
],
)
action = call_ptk(btn_diag)
# 2 - Retrieve list of Packets
if action == "layers":
# Get all loaded layers
_radio_values = conf.layers.layers()
# Restrict to layers-only (not contribs) + packet.py and asn1*.py
_radio_values = [
x
for x in _radio_values
if ("layers" in x[0] or "packet" in x[0] or "asn1" in x[0])
]
elif action == "contribs":
# Get all existing contribs
from scapy.main import list_contrib
_radio_values = list_contrib(ret=True)
_radio_values = [(x["name"], x["description"]) for x in _radio_values]
# Remove very specific modules
_radio_values = [x for x in _radio_values if not ("can" in x[0])]
else:
# Escape/Cancel was pressed
return
# Python 2 compat
if six.PY2:
_radio_values = [
(six.text_type(x), six.text_type(y)) for x, y in _radio_values
]
# 3 - Ask for the layer/contrib module to explore
rd_diag = radiolist_dialog(
values=_radio_values,
title=six.text_type("Scapy v%s" % conf.version),
text=HTML(
six.text_type(
'<style bg="white" fg="red">Please select a layer among'
" the following, to see all packets contained in"
" it:</style>"
)
),
)
result = call_ptk(rd_diag)
if result is None:
return # User pressed "Cancel"
# 4 - (Contrib only): load contrib
if action == "contribs":
from scapy.main import load_contrib
load_contrib(result)
result = "scapy.contrib." + result
else: # NON-GUI MODE
# We handle layer as a short layer name, full layer name
# or the module itself
if isinstance(layer, types.ModuleType):
layer = layer.__name__
if isinstance(layer, str):
if layer.startswith("scapy.layers."):
result = layer
else:
if layer.startswith("scapy.contrib."):
layer = layer.replace("scapy.contrib.", "")
from scapy.main import load_contrib
load_contrib(layer)
result_layer, result_contrib = (
("scapy.layers.%s" % layer),
("scapy.contrib.%s" % layer),
)
if result_layer in conf.layers.ldict:
result = result_layer
elif result_contrib in conf.layers.ldict:
result = result_contrib
else:
raise Scapy_Exception("Unknown scapy module '%s'" % layer)
else:
warning("Wrong usage ! Check out help(explore)")
return
# COMMON PART
# Get the list of all Packets contained in that module
try:
all_layers = conf.layers.ldict[result]
except KeyError:
raise Scapy_Exception("Unknown scapy module '%s'" % layer)
# Print
print(conf.color_theme.layer_name("Packets contained in %s:" % result))
rtlst = [(lay.__name__ or "", lay._name or "") for lay in all_layers]
print(pretty_list(rtlst, [("Class", "Name")], borders=True))
|
def explore(layer=None):
"""Function used to discover the Scapy layers and protocols.
It helps to see which packets exists in contrib or layer files.
params:
- layer: If specified, the function will explore the layer. If not,
the GUI mode will be activated, to browse the available layers
examples:
>>> explore() # Launches the GUI
>>> explore("dns") # Explore scapy.layers.dns
>>> explore("http2") # Explore scapy.contrib.http2
>>> explore(scapy.layers.bluetooth4LE)
Note: to search a packet by name, use ls("name") rather than explore.
"""
if layer is None: # GUI MODE
if not conf.interactive:
raise Scapy_Exception(
"explore() GUI-mode cannot be run in "
"interactive mode. Please provide a "
"'layer' parameter !"
)
# 0 - Imports
try:
import prompt_toolkit
except ImportError:
raise ImportError(
"prompt_toolkit is not installed ! "
"You may install IPython, which contains it, via"
" `pip install ipython`"
)
if not _version_checker(prompt_toolkit, (2, 0)):
raise ImportError("prompt_toolkit >= 2.0.0 is required !")
# Only available with prompt_toolkit > 2.0, not released on PyPi yet
from prompt_toolkit.shortcuts.dialogs import radiolist_dialog, button_dialog
from prompt_toolkit.formatted_text import HTML
# Check for prompt_toolkit >= 3.0.0
if _version_checker(prompt_toolkit, (3, 0)):
call_ptk = lambda x: x.run()
else:
call_ptk = lambda x: x
# 1 - Ask for layer or contrib
btn_diag = button_dialog(
title="Scapy v%s" % conf.version,
text=HTML(
six.text_type(
'<style bg="white" fg="red">Chose the type of packets'
" you want to explore:</style>"
)
),
buttons=[
(six.text_type("Layers"), "layers"),
(six.text_type("Contribs"), "contribs"),
(six.text_type("Cancel"), "cancel"),
],
)
action = call_ptk(btn_diag)
# 2 - Retrieve list of Packets
if action == "layers":
# Get all loaded layers
_radio_values = conf.layers.layers()
# Restrict to layers-only (not contribs) + packet.py and asn1*.py
_radio_values = [
x
for x in _radio_values
if ("layers" in x[0] or "packet" in x[0] or "asn1" in x[0])
]
elif action == "contribs":
# Get all existing contribs
from scapy.main import list_contrib
_radio_values = list_contrib(ret=True)
_radio_values = [(x["name"], x["description"]) for x in _radio_values]
# Remove very specific modules
_radio_values = [x for x in _radio_values if not ("can" in x[0])]
else:
# Escape/Cancel was pressed
return
# Python 2 compat
if six.PY2:
_radio_values = [
(six.text_type(x), six.text_type(y)) for x, y in _radio_values
]
# 3 - Ask for the layer/contrib module to explore
rd_diag = radiolist_dialog(
values=_radio_values,
title="Scapy v%s" % conf.version,
text=HTML(
six.text_type(
'<style bg="white" fg="red">Please select a layer among'
" the following, to see all packets contained in"
" it:</style>"
)
),
)
result = call_ptk(rd_diag)
if result is None:
return # User pressed "Cancel"
# 4 - (Contrib only): load contrib
if action == "contribs":
from scapy.main import load_contrib
load_contrib(result)
result = "scapy.contrib." + result
else: # NON-GUI MODE
# We handle layer as a short layer name, full layer name
# or the module itself
if isinstance(layer, types.ModuleType):
layer = layer.__name__
if isinstance(layer, str):
if layer.startswith("scapy.layers."):
result = layer
else:
if layer.startswith("scapy.contrib."):
layer = layer.replace("scapy.contrib.", "")
from scapy.main import load_contrib
load_contrib(layer)
result_layer, result_contrib = (
("scapy.layers.%s" % layer),
("scapy.contrib.%s" % layer),
)
if result_layer in conf.layers.ldict:
result = result_layer
elif result_contrib in conf.layers.ldict:
result = result_contrib
else:
raise Scapy_Exception("Unknown scapy module '%s'" % layer)
else:
warning("Wrong usage ! Check out help(explore)")
return
# COMMON PART
# Get the list of all Packets contained in that module
try:
all_layers = conf.layers.ldict[result]
except KeyError:
raise Scapy_Exception("Unknown scapy module '%s'" % layer)
# Print
print(conf.color_theme.layer_name("Packets contained in %s:" % result))
rtlst = [(lay.__name__ or "", lay._name or "") for lay in all_layers]
print(pretty_list(rtlst, [("Class", "Name")], borders=True))
|
https://github.com/secdev/scapy/issues/2241
|
explore()
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy/packet.py", line 1845, in explore
(six.text_type("Cancel"), "cancel")
File "/usr/local/lib/python2.7/dist-packages/prompt_toolkit/shortcuts/dialogs.py", line 60, in button_dialog
with_background=True)
File "/usr/local/lib/python2.7/dist-packages/prompt_toolkit/widgets/dialogs.py", line 32, in __init__
assert is_formatted_text(title)
AssertionError
|
AssertionError
|
def post_build(self, p, pay):
p += pay
dataofs = self.dataofs
if dataofs is None:
opt_len = len(self.get_field("options").i2m(self, self.options))
dataofs = 5 + ((opt_len + 3) // 4)
dataofs = (dataofs << 4) | orb(p[12]) & 0x0F
p = p[:12] + chb(dataofs & 0xFF) + p[13:]
if self.chksum is None:
if isinstance(self.underlayer, IP):
ck = in4_chksum(socket.IPPROTO_TCP, self.underlayer, p)
p = p[:16] + struct.pack("!H", ck) + p[18:]
elif (
conf.ipv6_enabled
and isinstance(self.underlayer, scapy.layers.inet6.IPv6)
or isinstance(self.underlayer, scapy.layers.inet6._IPv6ExtHdr)
): # noqa: E501
ck = scapy.layers.inet6.in6_chksum(socket.IPPROTO_TCP, self.underlayer, p) # noqa: E501
p = p[:16] + struct.pack("!H", ck) + p[18:]
else:
warning("No IP underlayer to compute checksum. Leaving null.")
return p
|
def post_build(self, p, pay):
p += pay
dataofs = self.dataofs
if dataofs is None:
dataofs = 5 + (
(len(self.get_field("options").i2m(self, self.options)) + 3) // 4
) # noqa: E501
p = p[:12] + chb((dataofs << 4) | orb(p[12]) & 0x0F) + p[13:]
if self.chksum is None:
if isinstance(self.underlayer, IP):
ck = in4_chksum(socket.IPPROTO_TCP, self.underlayer, p)
p = p[:16] + struct.pack("!H", ck) + p[18:]
elif (
conf.ipv6_enabled
and isinstance(self.underlayer, scapy.layers.inet6.IPv6)
or isinstance(self.underlayer, scapy.layers.inet6._IPv6ExtHdr)
): # noqa: E501
ck = scapy.layers.inet6.in6_chksum(socket.IPPROTO_TCP, self.underlayer, p) # noqa: E501
p = p[:16] + struct.pack("!H", ck) + p[18:]
else:
warning("No IP underlayer to compute checksum. Leaving null.")
return p
|
https://github.com/secdev/scapy/issues/2209
|
Traceback (most recent call last):
File "../../test.py", line 6, in <module>
bytes(test)
File "/usr/local/lib/python3.6/site-packages/scapy/packet.py", line 487, in __bytes__
return self.build()
File "/usr/local/lib/python3.6/site-packages/scapy/packet.py", line 607, in build
p = self.do_build()
File "/usr/local/lib/python3.6/site-packages/scapy/packet.py", line 592, in do_build
pay = self.do_build_payload()
File "/usr/local/lib/python3.6/site-packages/scapy/packet.py", line 579, in do_build_payload
return self.payload.do_build()
File "/usr/local/lib/python3.6/site-packages/scapy/packet.py", line 592, in do_build
pay = self.do_build_payload()
File "/usr/local/lib/python3.6/site-packages/scapy/packet.py", line 579, in do_build_payload
return self.payload.do_build()
File "/usr/local/lib/python3.6/site-packages/scapy/packet.py", line 594, in do_build
return self.post_build(pkt, pay)
File "/usr/local/lib/python3.6/site-packages/scapy/layers/inet.py", line 629, in post_build
p = p[:12] + chb((dataofs << 4) | orb(p[12]) & 0x0f) + p[13:]
File "/usr/local/lib/python3.6/site-packages/scapy/compat.py", line 70, in chb
return struct.pack("!B", x)
struct.error: ubyte format requires 0 <= number <= 255
|
struct.error
|
def send(self, x):
iff = x.route()[0]
if iff is None:
iff = conf.iface
sdto = (iff, self.type)
self.outs.bind(sdto)
sn = self.outs.getsockname()
ll = lambda x: x
if type(x) in conf.l3types:
sdto = (iff, conf.l3types[type(x)])
if sn[3] in conf.l2types:
ll = lambda x: conf.l2types[sn[3]]() / x
sx = raw(ll(x))
try:
self.outs.sendto(sx, sdto)
except socket.error as msg:
if msg.errno == 22 and len(sx) < conf.min_pkt_size:
self.outs.send(sx + b"\x00" * (conf.min_pkt_size - len(sx)))
elif conf.auto_fragment and msg.errno == 90:
for p in x.fragment():
self.outs.sendto(raw(ll(p)), sdto)
else:
raise
x.sent_time = time.time()
|
def send(self, x):
iff, a, gw = x.route()
if iff is None:
iff = conf.iface
sdto = (iff, self.type)
self.outs.bind(sdto)
sn = self.outs.getsockname()
ll = lambda x: x
if type(x) in conf.l3types:
sdto = (iff, conf.l3types[type(x)])
if sn[3] in conf.l2types:
ll = lambda x: conf.l2types[sn[3]]() / x
sx = raw(ll(x))
try:
self.outs.sendto(sx, sdto)
except socket.error as msg:
if msg.errno == 22 and len(sx) < conf.min_pkt_size:
self.outs.send(sx + b"\x00" * (conf.min_pkt_size - len(sx)))
elif conf.auto_fragment and msg.errno == 90:
for p in x.fragment():
self.outs.sendto(raw(ll(p)), sdto)
else:
raise
x.sent_time = time.time()
|
https://github.com/secdev/scapy/issues/2166
|
faucet@faucet:~$ sudo bash
root@faucet:~# python3
Python 3.6.8 (default, Jan 14 2019, 11:02:34)
[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]] on linux
Type "help", "copyright", "credits" or "license" for more information.
from scapy.all import *
scapy.all.send(fuzz(ARP(pdst='127.0.0.1')))
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 117, in raw
return bytes(x)
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 441, in __bytes__
return self.build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 556, in build
p = self.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 541, in do_build
pay = self.do_build_payload()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 528, in do_build_payload
return self.payload.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 538, in do_build
pkt = self.self_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 519, in self_build
p = f.addfield(self, p, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 134, in addfield
return s + struct.pack(self.fmt, self.i2m(pkt, val))
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 1314, in i2m
f = fld.i2len(pkt, fval)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 343, in i2len
return self._find_fld_pkt_val(pkt, val).i2len(pkt, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 884, in i2len
return len(i)
TypeError: object of type 'NoneType' has no len()
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 314, in send
realtime=realtime, return_packets=return_packets)
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 287, in __gen_send
s.send(p)
File "/usr/local/lib/python3.6/dist-packages/scapy/arch/linux.py", line 533, in send
sx = raw(ll(x))
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 119, in raw
return bytes(x, encoding="utf8")
TypeError: encoding without a string argument
|
TypeError
|
def _find_fld(self):
"""Returns the Field subclass to be used, depending on the Packet
instance, or the default subclass.
DEV: since the Packet instance is not provided, we have to use a hack
to guess it. It should only be used if you cannot provide the current
Packet instance (for example, because of the current Scapy API).
If you have the current Packet instance, use ._find_fld_pkt_val() (if
the value to set is also known) of ._find_fld_pkt() instead.
"""
# Hack to preserve current Scapy API
# See https://stackoverflow.com/a/7272464/3223422
frame = inspect.currentframe().f_back.f_back
while frame is not None:
try:
pkt = frame.f_locals["self"]
except KeyError:
pass
else:
if isinstance(pkt, tuple(self.dflt.owners)):
if not pkt.default_fields:
# Packet not initialized
return self.dflt
return self._find_fld_pkt(pkt)
frame = frame.f_back
return self.dflt
|
def _find_fld(self):
"""Returns the Field subclass to be used, depending on the Packet
instance, or the default subclass.
DEV: since the Packet instance is not provided, we have to use a hack
to guess it. It should only be used if you cannot provide the current
Packet instance (for example, because of the current Scapy API).
If you have the current Packet instance, use ._find_fld_pkt_val() (if
the value to set is also known) of ._find_fld_pkt() instead.
"""
# Hack to preserve current Scapy API
# See https://stackoverflow.com/a/7272464/3223422
frame = inspect.currentframe().f_back.f_back
while frame is not None:
try:
pkt = frame.f_locals["self"]
except KeyError:
pass
else:
if not pkt.default_fields:
# Packet not initialized
return self.dflt
if isinstance(pkt, tuple(self.dflt.owners)):
return self._find_fld_pkt(pkt)
frame = frame.f_back
return self.dflt
|
https://github.com/secdev/scapy/issues/2166
|
faucet@faucet:~$ sudo bash
root@faucet:~# python3
Python 3.6.8 (default, Jan 14 2019, 11:02:34)
[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]] on linux
Type "help", "copyright", "credits" or "license" for more information.
from scapy.all import *
scapy.all.send(fuzz(ARP(pdst='127.0.0.1')))
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 117, in raw
return bytes(x)
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 441, in __bytes__
return self.build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 556, in build
p = self.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 541, in do_build
pay = self.do_build_payload()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 528, in do_build_payload
return self.payload.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 538, in do_build
pkt = self.self_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 519, in self_build
p = f.addfield(self, p, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 134, in addfield
return s + struct.pack(self.fmt, self.i2m(pkt, val))
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 1314, in i2m
f = fld.i2len(pkt, fval)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 343, in i2len
return self._find_fld_pkt_val(pkt, val).i2len(pkt, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 884, in i2len
return len(i)
TypeError: object of type 'NoneType' has no len()
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 314, in send
realtime=realtime, return_packets=return_packets)
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 287, in __gen_send
s.send(p)
File "/usr/local/lib/python3.6/dist-packages/scapy/arch/linux.py", line 533, in send
sx = raw(ll(x))
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 119, in raw
return bytes(x, encoding="utf8")
TypeError: encoding without a string argument
|
TypeError
|
def addfield(self, pkt, s, val):
len_pkt = self.length_from(pkt)
if len_pkt is None:
return s + self.i2m(pkt, val)
return s + struct.pack("%is" % len_pkt, self.i2m(pkt, val))
|
def addfield(self, pkt, s, val):
len_pkt = self.length_from(pkt)
return s + struct.pack("%is" % len_pkt, self.i2m(pkt, val))
|
https://github.com/secdev/scapy/issues/2166
|
faucet@faucet:~$ sudo bash
root@faucet:~# python3
Python 3.6.8 (default, Jan 14 2019, 11:02:34)
[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]] on linux
Type "help", "copyright", "credits" or "license" for more information.
from scapy.all import *
scapy.all.send(fuzz(ARP(pdst='127.0.0.1')))
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 117, in raw
return bytes(x)
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 441, in __bytes__
return self.build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 556, in build
p = self.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 541, in do_build
pay = self.do_build_payload()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 528, in do_build_payload
return self.payload.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 538, in do_build
pkt = self.self_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 519, in self_build
p = f.addfield(self, p, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 134, in addfield
return s + struct.pack(self.fmt, self.i2m(pkt, val))
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 1314, in i2m
f = fld.i2len(pkt, fval)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 343, in i2len
return self._find_fld_pkt_val(pkt, val).i2len(pkt, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 884, in i2len
return len(i)
TypeError: object of type 'NoneType' has no len()
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 314, in send
realtime=realtime, return_packets=return_packets)
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 287, in __gen_send
s.send(p)
File "/usr/local/lib/python3.6/dist-packages/scapy/arch/linux.py", line 533, in send
sx = raw(ll(x))
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 119, in raw
return bytes(x, encoding="utf8")
TypeError: encoding without a string argument
|
TypeError
|
def route(self):
fld, dst = self.getfield_and_val("pdst")
fld, dst = fld._find_fld_pkt_val(self, dst)
if isinstance(dst, Gen):
dst = next(iter(dst))
if isinstance(fld, IP6Field):
return conf.route6.route(dst)
elif isinstance(fld, IPField):
return conf.route.route(dst)
else:
return None, None, None
|
def route(self):
fld, dst = self.getfield_and_val("pdst")
fld, dst = fld._find_fld_pkt_val(self, dst)
if isinstance(dst, Gen):
dst = next(iter(dst))
if isinstance(fld, IP6Field):
return conf.route6.route(dst)
elif isinstance(fld, IPField):
return conf.route.route(dst)
else:
return None
|
https://github.com/secdev/scapy/issues/2166
|
faucet@faucet:~$ sudo bash
root@faucet:~# python3
Python 3.6.8 (default, Jan 14 2019, 11:02:34)
[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]] on linux
Type "help", "copyright", "credits" or "license" for more information.
from scapy.all import *
scapy.all.send(fuzz(ARP(pdst='127.0.0.1')))
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 117, in raw
return bytes(x)
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 441, in __bytes__
return self.build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 556, in build
p = self.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 541, in do_build
pay = self.do_build_payload()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 528, in do_build_payload
return self.payload.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 538, in do_build
pkt = self.self_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 519, in self_build
p = f.addfield(self, p, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 134, in addfield
return s + struct.pack(self.fmt, self.i2m(pkt, val))
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 1314, in i2m
f = fld.i2len(pkt, fval)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 343, in i2len
return self._find_fld_pkt_val(pkt, val).i2len(pkt, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 884, in i2len
return len(i)
TypeError: object of type 'NoneType' has no len()
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 314, in send
realtime=realtime, return_packets=return_packets)
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 287, in __gen_send
s.send(p)
File "/usr/local/lib/python3.6/dist-packages/scapy/arch/linux.py", line 533, in send
sx = raw(ll(x))
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 119, in raw
return bytes(x, encoding="utf8")
TypeError: encoding without a string argument
|
TypeError
|
def fuzz(p, _inplace=0):
"""
Transform a layer into a fuzzy layer by replacing some default values
by random objects.
:param p: the Packet instance to fuzz
:returns: the fuzzed packet.
"""
if not _inplace:
p = p.copy()
q = p
while not isinstance(q, NoPayload):
new_default_fields = {}
multiple_type_fields = []
for f in q.fields_desc:
if isinstance(f, PacketListField):
for r in getattr(q, f.name):
print("fuzzing", repr(r))
fuzz(r, _inplace=1)
elif isinstance(f, MultipleTypeField):
# the type of the field will depend on others
multiple_type_fields.append(f.name)
elif f.default is not None:
if not isinstance(f, ConditionalField) or f._evalcond(q):
rnd = f.randval()
if rnd is not None:
new_default_fields[f.name] = rnd
# Process packets with MultipleTypeFields
if multiple_type_fields:
# freeze the other random values
new_default_fields = {
key: (val._fix() if isinstance(val, VolatileValue) else val)
for key, val in six.iteritems(new_default_fields)
}
q.default_fields.update(new_default_fields)
# add the random values of the MultipleTypeFields
for name in multiple_type_fields:
rnd = q.get_field(name)._find_fld_pkt(q).randval()
if rnd is not None:
new_default_fields[name] = rnd
q.default_fields.update(new_default_fields)
q = q.payload
return p
|
def fuzz(p, _inplace=0):
"""Transform a layer into a fuzzy layer by replacing some default values by random objects""" # noqa: E501
if not _inplace:
p = p.copy()
q = p
while not isinstance(q, NoPayload):
for f in q.fields_desc:
if isinstance(f, PacketListField):
for r in getattr(q, f.name):
print("fuzzing", repr(r))
fuzz(r, _inplace=1)
elif f.default is not None:
if not isinstance(f, ConditionalField) or f._evalcond(q):
rnd = f.randval()
if rnd is not None:
q.default_fields[f.name] = rnd
q = q.payload
return p
|
https://github.com/secdev/scapy/issues/2166
|
faucet@faucet:~$ sudo bash
root@faucet:~# python3
Python 3.6.8 (default, Jan 14 2019, 11:02:34)
[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]] on linux
Type "help", "copyright", "credits" or "license" for more information.
from scapy.all import *
scapy.all.send(fuzz(ARP(pdst='127.0.0.1')))
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 117, in raw
return bytes(x)
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 441, in __bytes__
return self.build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 556, in build
p = self.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 541, in do_build
pay = self.do_build_payload()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 528, in do_build_payload
return self.payload.do_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 538, in do_build
pkt = self.self_build()
File "/usr/local/lib/python3.6/dist-packages/scapy/packet.py", line 519, in self_build
p = f.addfield(self, p, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 134, in addfield
return s + struct.pack(self.fmt, self.i2m(pkt, val))
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 1314, in i2m
f = fld.i2len(pkt, fval)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 343, in i2len
return self._find_fld_pkt_val(pkt, val).i2len(pkt, val)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 884, in i2len
return len(i)
TypeError: object of type 'NoneType' has no len()
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 314, in send
realtime=realtime, return_packets=return_packets)
File "/usr/local/lib/python3.6/dist-packages/scapy/sendrecv.py", line 287, in __gen_send
s.send(p)
File "/usr/local/lib/python3.6/dist-packages/scapy/arch/linux.py", line 533, in send
sx = raw(ll(x))
File "/usr/local/lib/python3.6/dist-packages/scapy/compat.py", line 119, in raw
return bytes(x, encoding="utf8")
TypeError: encoding without a string argument
|
TypeError
|
def read_routes():
if SOLARIS:
f = os.popen("netstat -rvn") # -f inet
elif FREEBSD:
f = os.popen("netstat -rnW") # -W to handle long interface names
else:
f = os.popen("netstat -rn") # -f inet
ok = 0
mtu_present = False
prio_present = False
refs_present = False
use_present = False
routes = []
pending_if = []
for line in f.readlines():
if not line:
break
line = line.strip()
if line.find("----") >= 0: # a separation line
continue
if not ok:
if line.find("Destination") >= 0:
ok = 1
mtu_present = "Mtu" in line
prio_present = "Prio" in line
refs_present = "Refs" in line
use_present = "Use" in line
continue
if not line:
break
if SOLARIS:
lspl = line.split()
if len(lspl) == 10:
dest, mask, gw, netif, mxfrg, rtt, ref, flg = lspl[:8]
else: # missing interface
dest, mask, gw, mxfrg, rtt, ref, flg = lspl[:7]
netif = None
else:
rt = line.split()
dest, gw, flg = rt[:3]
locked = OPENBSD and rt[6] == "L"
offset = mtu_present + prio_present + refs_present + locked
offset += use_present
netif = rt[3 + offset]
if flg.find("Lc") >= 0:
continue
if dest == "default":
dest = 0
netmask = 0
else:
if SOLARIS:
netmask = scapy.utils.atol(mask)
elif "/" in dest:
dest, netmask = dest.split("/")
netmask = scapy.utils.itom(int(netmask))
else:
netmask = scapy.utils.itom((dest.count(".") + 1) * 8)
dest += ".0" * (3 - dest.count("."))
dest = scapy.utils.atol(dest)
# XXX: TODO: add metrics for unix.py (use -e option on netstat)
metric = 1
if "G" not in flg:
gw = "0.0.0.0"
if netif is not None:
try:
ifaddr = get_if_addr(netif)
routes.append((dest, netmask, gw, netif, ifaddr, metric))
except OSError as exc:
if exc.message == "Device not configured":
# This means the interface name is probably truncated by
# netstat -nr. We attempt to guess it's name and if not we
# ignore it.
guessed_netif = _guess_iface_name(netif)
if guessed_netif is not None:
ifaddr = get_if_addr(guessed_netif)
routes.append(
(dest, netmask, gw, guessed_netif, ifaddr, metric)
) # noqa: E501
else:
warning("Could not guess partial interface name: %s", netif) # noqa: E501
else:
raise
else:
pending_if.append((dest, netmask, gw))
f.close()
# On Solaris, netstat does not provide output interfaces for some routes
# We need to parse completely the routing table to route their gw and
# know their output interface
for dest, netmask, gw in pending_if:
gw_l = scapy.utils.atol(gw)
(
max_rtmask,
gw_if,
gw_if_addr,
) = 0, None, None
for rtdst, rtmask, _, rtif, rtaddr in routes[:]:
if gw_l & rtmask == rtdst:
if rtmask >= max_rtmask:
max_rtmask = rtmask
gw_if = rtif
gw_if_addr = rtaddr
# XXX: TODO add metrics
metric = 1
if gw_if:
routes.append((dest, netmask, gw, gw_if, gw_if_addr, metric))
else:
warning("Did not find output interface to reach gateway %s", gw)
return routes
|
def read_routes():
if SOLARIS:
f = os.popen("netstat -rvn") # -f inet
elif FREEBSD:
f = os.popen("netstat -rnW") # -W to handle long interface names
else:
f = os.popen("netstat -rn") # -f inet
ok = 0
mtu_present = False
prio_present = False
routes = []
pending_if = []
for line in f.readlines():
if not line:
break
line = line.strip()
if line.find("----") >= 0: # a separation line
continue
if not ok:
if line.find("Destination") >= 0:
ok = 1
mtu_present = "Mtu" in line
prio_present = "Prio" in line
refs_present = "Refs" in line
continue
if not line:
break
if SOLARIS:
lspl = line.split()
if len(lspl) == 10:
dest, mask, gw, netif, mxfrg, rtt, ref, flg = lspl[:8]
else: # missing interface
dest, mask, gw, mxfrg, rtt, ref, flg = lspl[:7]
netif = None
else:
rt = line.split()
dest, gw, flg = rt[:3]
locked = OPENBSD and rt[6] == "L"
netif = rt[4 + mtu_present + prio_present + refs_present + locked]
if flg.find("Lc") >= 0:
continue
if dest == "default":
dest = 0
netmask = 0
else:
if SOLARIS:
netmask = scapy.utils.atol(mask)
elif "/" in dest:
dest, netmask = dest.split("/")
netmask = scapy.utils.itom(int(netmask))
else:
netmask = scapy.utils.itom((dest.count(".") + 1) * 8)
dest += ".0" * (3 - dest.count("."))
dest = scapy.utils.atol(dest)
# XXX: TODO: add metrics for unix.py (use -e option on netstat)
metric = 1
if "G" not in flg:
gw = "0.0.0.0"
if netif is not None:
try:
ifaddr = get_if_addr(netif)
routes.append((dest, netmask, gw, netif, ifaddr, metric))
except OSError as exc:
if exc.message == "Device not configured":
# This means the interface name is probably truncated by
# netstat -nr. We attempt to guess it's name and if not we
# ignore it.
guessed_netif = _guess_iface_name(netif)
if guessed_netif is not None:
ifaddr = get_if_addr(guessed_netif)
routes.append(
(dest, netmask, gw, guessed_netif, ifaddr, metric)
) # noqa: E501
else:
warning("Could not guess partial interface name: %s", netif) # noqa: E501
else:
raise
else:
pending_if.append((dest, netmask, gw))
f.close()
# On Solaris, netstat does not provide output interfaces for some routes
# We need to parse completely the routing table to route their gw and
# know their output interface
for dest, netmask, gw in pending_if:
gw_l = scapy.utils.atol(gw)
(
max_rtmask,
gw_if,
gw_if_addr,
) = 0, None, None
for rtdst, rtmask, _, rtif, rtaddr in routes[:]:
if gw_l & rtmask == rtdst:
if rtmask >= max_rtmask:
max_rtmask = rtmask
gw_if = rtif
gw_if_addr = rtaddr
# XXX: TODO add metrics
metric = 1
if gw_if:
routes.append((dest, netmask, gw, gw_if, gw_if_addr, metric))
else:
warning("Did not find output interface to reach gateway %s", gw)
return routes
|
https://github.com/secdev/scapy/issues/2134
|
import scapy.all as scapy
p = scapy.sniff()
p.summary()
Result:
Traceback (most recent call last):
File "/Users/tom/PycharmProjects/sniffer/main.py", line 2, in <module>
import scapy.all as scapy
File "/Users/tom/PycharmProjects/sniffer/venv/lib/python3.7/site-packages/scapy/sendrecv.py", line 33, in <module>
import scapy.route # noqa: F401
File "/Users/tom/PycharmProjects/sniffer/venv/lib/python3.7/site-packages/scapy/route.py", line 194, in <module>
conf.route = Route()
File "/Users/tom/PycharmProjects/sniffer/venv/lib/python3.7/site-packages/scapy/route.py", line 27, in __init__
self.resync()
File "/Users/tom/PycharmProjects/sniffer/venv/lib/python3.7/site-packages/scapy/route.py", line 35, in resync
self.routes = read_routes()
File "/Users/tom/PycharmProjects/sniffer/venv/lib/python3.7/site-packages/scapy/arch/unix.py", line 82, in read_routes
netif = rt[4 + mtu_present + prio_present + refs_present + locked]
IndexError: list index out of range
|
IndexError
|
def sniff(
count=0,
store=True,
offline=None,
prn=None,
lfilter=None,
L2socket=None,
timeout=None,
opened_socket=None,
stop_filter=None,
iface=None,
started_callback=None,
session=None,
*arg,
**karg,
):
"""Sniff packets and return a list of packets.
Args:
count: number of packets to capture. 0 means infinity.
store: whether to store sniffed packets or discard them
prn: function to apply to each packet. If something is returned, it
is displayed.
--Ex: prn = lambda x: x.summary()
session: a session = a flow decoder used to handle stream of packets.
e.g: IPSession (to defragment on-the-flow) or NetflowSession
filter: BPF filter to apply.
lfilter: Python function applied to each packet to determine if
further action may be done.
--Ex: lfilter = lambda x: x.haslayer(Padding)
offline: PCAP file (or list of PCAP files) to read packets from,
instead of sniffing them
timeout: stop sniffing after a given time (default: None).
L2socket: use the provided L2socket (default: use conf.L2listen).
opened_socket: provide an object (or a list of objects) ready to use
.recv() on.
stop_filter: Python function applied to each packet to determine if
we have to stop the capture after this packet.
--Ex: stop_filter = lambda x: x.haslayer(TCP)
iface: interface or list of interfaces (default: None for sniffing
on all interfaces).
monitor: use monitor mode. May not be available on all OS
started_callback: called as soon as the sniffer starts sniffing
(default: None).
The iface, offline and opened_socket parameters can be either an
element, a list of elements, or a dict object mapping an element to a
label (see examples below).
Examples:
>>> sniff(filter="arp")
>>> sniff(filter="tcp",
... session=IPSession, # defragment on-the-flow
... prn=lambda x: x.summary())
>>> sniff(lfilter=lambda pkt: ARP in pkt)
>>> sniff(iface="eth0", prn=Packet.summary)
>>> sniff(iface=["eth0", "mon0"],
... prn=lambda pkt: "%s: %s" % (pkt.sniffed_on,
... pkt.summary()))
>>> sniff(iface={"eth0": "Ethernet", "mon0": "Wifi"},
... prn=lambda pkt: "%s: %s" % (pkt.sniffed_on,
... pkt.summary()))
"""
c = 0
session = session or DefaultSession
session = session(prn, store) # instantiate session
sniff_sockets = {} # socket: label dict
if opened_socket is not None:
if isinstance(opened_socket, list):
sniff_sockets.update(
(s, "socket%d" % i) for i, s in enumerate(opened_socket)
)
elif isinstance(opened_socket, dict):
sniff_sockets.update(
(s, label) for s, label in six.iteritems(opened_socket)
)
else:
sniff_sockets[opened_socket] = "socket0"
if offline is not None:
flt = karg.get("filter")
from scapy.arch.common import TCPDUMP
if not TCPDUMP and flt is not None:
message = "tcpdump is not available. Cannot use filter!"
raise Scapy_Exception(message)
if isinstance(offline, list):
sniff_sockets.update(
(
PcapReader(
fname
if flt is None
else tcpdump(fname, args=["-w", "-", flt], getfd=True)
),
fname,
)
for fname in offline
)
elif isinstance(offline, dict):
sniff_sockets.update(
(
PcapReader(
fname
if flt is None
else tcpdump(fname, args=["-w", "-", flt], getfd=True)
),
label,
)
for fname, label in six.iteritems(offline)
)
else:
sniff_sockets[
PcapReader(
offline
if flt is None
else tcpdump(offline, args=["-w", "-", flt], getfd=True)
)
] = offline
if not sniff_sockets or iface is not None:
if L2socket is None:
L2socket = conf.L2listen
if isinstance(iface, list):
sniff_sockets.update(
(L2socket(type=ETH_P_ALL, iface=ifname, *arg, **karg), ifname)
for ifname in iface
)
elif isinstance(iface, dict):
sniff_sockets.update(
(L2socket(type=ETH_P_ALL, iface=ifname, *arg, **karg), iflabel)
for ifname, iflabel in six.iteritems(iface)
)
else:
sniff_sockets[L2socket(type=ETH_P_ALL, iface=iface, *arg, **karg)] = iface
if timeout is not None:
stoptime = time.time() + timeout
remain = None
# Get select information from the sockets
_main_socket = next(iter(sniff_sockets))
read_allowed_exceptions = _main_socket.read_allowed_exceptions
select_func = _main_socket.select
# We check that all sockets use the same select(), or raise a warning
if not all(select_func == sock.select for sock in sniff_sockets):
warning(
"Warning: inconsistent socket types ! The used select function"
"will be the one of the first socket"
)
# Now let's build the select function, used later on
_select = lambda sockets, remain: select_func(sockets, remain)[0]
try:
if started_callback:
started_callback()
continue_sniff = True
while sniff_sockets and continue_sniff:
if timeout is not None:
remain = stoptime - time.time()
if remain <= 0:
break
for s in _select(sniff_sockets, remain):
try:
p = s.recv()
except socket.error as ex:
warning(
"Socket %s failed with '%s' and thus will be ignored" % (s, ex)
)
del sniff_sockets[s]
continue
except read_allowed_exceptions:
continue
if p is None:
try:
if s.promisc:
continue
except AttributeError:
pass
del sniff_sockets[s]
break
if lfilter and not lfilter(p):
continue
p.sniffed_on = sniff_sockets[s]
c += 1
# on_packet_received handles the prn/storage
session.on_packet_received(p)
if stop_filter and stop_filter(p):
continue_sniff = False
break
if 0 < count <= c:
continue_sniff = False
break
except KeyboardInterrupt:
pass
if opened_socket is None:
for s in sniff_sockets:
s.close()
return session.toPacketList()
|
def sniff(
count=0,
store=True,
offline=None,
prn=None,
lfilter=None,
L2socket=None,
timeout=None,
opened_socket=None,
stop_filter=None,
iface=None,
started_callback=None,
session=None,
*arg,
**karg,
):
"""Sniff packets and return a list of packets.
Args:
count: number of packets to capture. 0 means infinity.
store: whether to store sniffed packets or discard them
prn: function to apply to each packet. If something is returned, it
is displayed.
--Ex: prn = lambda x: x.summary()
session: a session = a flow decoder used to handle stream of packets.
e.g: IPSession (to defragment on-the-flow) or NetflowSession
filter: BPF filter to apply.
lfilter: Python function applied to each packet to determine if
further action may be done.
--Ex: lfilter = lambda x: x.haslayer(Padding)
offline: PCAP file (or list of PCAP files) to read packets from,
instead of sniffing them
timeout: stop sniffing after a given time (default: None).
L2socket: use the provided L2socket (default: use conf.L2listen).
opened_socket: provide an object (or a list of objects) ready to use
.recv() on.
stop_filter: Python function applied to each packet to determine if
we have to stop the capture after this packet.
--Ex: stop_filter = lambda x: x.haslayer(TCP)
iface: interface or list of interfaces (default: None for sniffing
on all interfaces).
monitor: use monitor mode. May not be available on all OS
started_callback: called as soon as the sniffer starts sniffing
(default: None).
The iface, offline and opened_socket parameters can be either an
element, a list of elements, or a dict object mapping an element to a
label (see examples below).
Examples:
>>> sniff(filter="arp")
>>> sniff(filter="tcp",
... session=IPSession, # defragment on-the-flow
... prn=lambda x: x.summary())
>>> sniff(lfilter=lambda pkt: ARP in pkt)
>>> sniff(iface="eth0", prn=Packet.summary)
>>> sniff(iface=["eth0", "mon0"],
... prn=lambda pkt: "%s: %s" % (pkt.sniffed_on,
... pkt.summary()))
>>> sniff(iface={"eth0": "Ethernet", "mon0": "Wifi"},
... prn=lambda pkt: "%s: %s" % (pkt.sniffed_on,
... pkt.summary()))
"""
c = 0
session = session or DefaultSession
session = session(prn, store) # instantiate session
sniff_sockets = {} # socket: label dict
if opened_socket is not None:
if isinstance(opened_socket, list):
sniff_sockets.update(
(s, "socket%d" % i) for i, s in enumerate(opened_socket)
)
elif isinstance(opened_socket, dict):
sniff_sockets.update(
(s, label) for s, label in six.iteritems(opened_socket)
)
else:
sniff_sockets[opened_socket] = "socket0"
if offline is not None:
flt = karg.get("filter")
if not TCPDUMP and flt is not None:
message = "tcpdump is not available. Cannot use filter!"
raise Scapy_Exception(message)
if isinstance(offline, list):
sniff_sockets.update(
(
PcapReader(
fname
if flt is None
else tcpdump(fname, args=["-w", "-", flt], getfd=True)
),
fname,
)
for fname in offline
)
elif isinstance(offline, dict):
sniff_sockets.update(
(
PcapReader(
fname
if flt is None
else tcpdump(fname, args=["-w", "-", flt], getfd=True)
),
label,
)
for fname, label in six.iteritems(offline)
)
else:
sniff_sockets[
PcapReader(
offline
if flt is None
else tcpdump(offline, args=["-w", "-", flt], getfd=True)
)
] = offline
if not sniff_sockets or iface is not None:
if L2socket is None:
L2socket = conf.L2listen
if isinstance(iface, list):
sniff_sockets.update(
(L2socket(type=ETH_P_ALL, iface=ifname, *arg, **karg), ifname)
for ifname in iface
)
elif isinstance(iface, dict):
sniff_sockets.update(
(L2socket(type=ETH_P_ALL, iface=ifname, *arg, **karg), iflabel)
for ifname, iflabel in six.iteritems(iface)
)
else:
sniff_sockets[L2socket(type=ETH_P_ALL, iface=iface, *arg, **karg)] = iface
if timeout is not None:
stoptime = time.time() + timeout
remain = None
# Get select information from the sockets
_main_socket = next(iter(sniff_sockets))
read_allowed_exceptions = _main_socket.read_allowed_exceptions
select_func = _main_socket.select
# We check that all sockets use the same select(), or raise a warning
if not all(select_func == sock.select for sock in sniff_sockets):
warning(
"Warning: inconsistent socket types ! The used select function"
"will be the one of the first socket"
)
# Now let's build the select function, used later on
_select = lambda sockets, remain: select_func(sockets, remain)[0]
try:
if started_callback:
started_callback()
continue_sniff = True
while sniff_sockets and continue_sniff:
if timeout is not None:
remain = stoptime - time.time()
if remain <= 0:
break
for s in _select(sniff_sockets, remain):
try:
p = s.recv()
except socket.error as ex:
warning(
"Socket %s failed with '%s' and thus will be ignored" % (s, ex)
)
del sniff_sockets[s]
continue
except read_allowed_exceptions:
continue
if p is None:
try:
if s.promisc:
continue
except AttributeError:
pass
del sniff_sockets[s]
break
if lfilter and not lfilter(p):
continue
p.sniffed_on = sniff_sockets[s]
c += 1
# on_packet_received handles the prn/storage
session.on_packet_received(p)
if stop_filter and stop_filter(p):
continue_sniff = False
break
if 0 < count <= c:
continue_sniff = False
break
except KeyboardInterrupt:
pass
if opened_socket is None:
for s in sniff_sockets:
s.close()
return session.toPacketList()
|
https://github.com/secdev/scapy/issues/2052
|
$ ipython3
In [1]: from scapy.utils import strxor
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
<ipython-input-1-90b66f0baa84> in <module>
----> 1 from scapy.utils import strxor
~/src/scapy/scapy/utils.py in <module>
33 from scapy.error import log_runtime, Scapy_Exception, warning
34 from scapy.pton_ntop import inet_pton
---> 35 from scapy.arch.common import TCPDUMP
36
37 ###########
~/src/scapy/scapy/arch/__init__.py in <module>
54
55 if LINUX:
---> 56 from scapy.arch.linux import * # noqa F403
57 elif BSD:
58 from scapy.arch.unix import read_routes, read_routes6, in6_getifaddr # noqa: F401, E501
~/src/scapy/scapy/arch/linux.py in <module>
25 from scapy.consts import LOOPBACK_NAME, LINUX
26 import scapy.utils
---> 27 import scapy.utils6
28 from scapy.packet import Packet, Padding
29 from scapy.config import conf
~/src/scapy/scapy/utils6.py in <module>
24 IPV6_ADDR_SITELOCAL, IPV6_ADDR_LOOPBACK, IPV6_ADDR_UNICAST,\
25 IPV6_ADDR_MULTICAST, IPV6_ADDR_6TO4, IPV6_ADDR_UNSPECIFIED
---> 26 from scapy.utils import strxor
27 from scapy.compat import orb, chb
28 from scapy.pton_ntop import inet_pton, inet_ntop
ImportError: cannot import name 'strxor' from 'scapy.utils' (/home/user/src/scapy/scapy/utils.py)
|
ImportError
|
def tcpdump(
pktlist,
dump=False,
getfd=False,
args=None,
prog=None,
getproc=False,
quiet=False,
use_tempfile=None,
read_stdin_opts=None,
linktype=None,
wait=True,
):
"""Run tcpdump or tshark on a list of packets.
When using ``tcpdump`` on OSX (``prog == conf.prog.tcpdump``), this uses a
temporary file to store the packets. This works around a bug in Apple's
version of ``tcpdump``: http://apple.stackexchange.com/questions/152682/
Otherwise, the packets are passed in stdin.
This function can be explicitly enabled or disabled with the
``use_tempfile`` parameter.
When using ``wireshark``, it will be called with ``-ki -`` to start
immediately capturing packets from stdin.
Otherwise, the command will be run with ``-r -`` (which is correct for
``tcpdump`` and ``tshark``).
This can be overridden with ``read_stdin_opts``. This has no effect when
``use_tempfile=True``, or otherwise reading packets from a regular file.
pktlist: a Packet instance, a PacketList instance or a list of Packet
instances. Can also be a filename (as a string), an open
file-like object that must be a file format readable by
tshark (Pcap, PcapNg, etc.) or None (to sniff)
dump: when set to True, returns a string instead of displaying it.
getfd: when set to True, returns a file-like object to read data
from tcpdump or tshark from.
getproc: when set to True, the subprocess.Popen object is returned
args: arguments (as a list) to pass to tshark (example for tshark:
args=["-T", "json"]).
prog: program to use (defaults to tcpdump, will work with tshark)
quiet: when set to True, the process stderr is discarded
use_tempfile: When set to True, always use a temporary file to store packets.
When set to False, pipe packets through stdin.
When set to None (default), only use a temporary file with
``tcpdump`` on OSX.
read_stdin_opts: When set, a list of arguments needed to capture from stdin.
Otherwise, attempts to guess.
linktype: A custom DLT value or name, to overwrite the default values.
wait: If True (default), waits for the process to terminate before returning
to Scapy. If False, the process will be detached to the background. If
dump, getproc or getfd is True, these have the same effect as
``wait=False``.
Examples:
>>> tcpdump([IP()/TCP(), IP()/UDP()])
reading from file -, link-type RAW (Raw IP)
16:46:00.474515 IP 127.0.0.1.20 > 127.0.0.1.80: Flags [S], seq 0, win 8192, length 0 # noqa: E501
16:46:00.475019 IP 127.0.0.1.53 > 127.0.0.1.53: [|domain]
>>> tcpdump([IP()/TCP(), IP()/UDP()], prog=conf.prog.tshark)
1 0.000000 127.0.0.1 -> 127.0.0.1 TCP 40 20->80 [SYN] Seq=0 Win=8192 Len=0 # noqa: E501
2 0.000459 127.0.0.1 -> 127.0.0.1 UDP 28 53->53 Len=0
To get a JSON representation of a tshark-parsed PacketList(), one can:
>>> import json, pprint
>>> json_data = json.load(tcpdump(IP(src="217.25.178.5", dst="45.33.32.156"),
... prog=conf.prog.tshark, args=["-T", "json"],
... getfd=True))
>>> pprint.pprint(json_data)
[{u'_index': u'packets-2016-12-23',
u'_score': None,
u'_source': {u'layers': {u'frame': {u'frame.cap_len': u'20',
u'frame.encap_type': u'7',
[...]
u'frame.time_relative': u'0.000000000'},
u'ip': {u'ip.addr': u'45.33.32.156',
u'ip.checksum': u'0x0000a20d',
[...]
u'ip.ttl': u'64',
u'ip.version': u'4'},
u'raw': u'Raw packet data'}},
u'_type': u'pcap_file'}]
>>> json_data[0]['_source']['layers']['ip']['ip.ttl']
u'64'
"""
getfd = getfd or getproc
if prog is None:
prog = [conf.prog.tcpdump]
_prog_name = "windump()" if WINDOWS else "tcpdump()"
elif isinstance(prog, six.string_types):
_prog_name = "{}()".format(prog)
prog = [prog]
else:
raise ValueError("prog must be a string")
from scapy.arch.common import TCPDUMP
if prog[0] == conf.prog.tcpdump and not TCPDUMP:
message = "tcpdump is not available. Cannot use tcpdump() !"
raise Scapy_Exception(message)
if linktype is not None:
# Tcpdump does not support integers in -y (yet)
# https://github.com/the-tcpdump-group/tcpdump/issues/758
if isinstance(linktype, int):
# Guess name from value
try:
linktype_name = _guess_linktype_name(linktype)
except StopIteration:
linktype = -1
else:
# Guess value from name
if linktype.startswith("DLT_"):
linktype = linktype[4:]
linktype_name = linktype
try:
linktype = _guess_linktype_value(linktype)
except KeyError:
linktype = -1
if linktype == -1:
raise ValueError("Unknown linktype. Try passing its datalink name instead")
prog += ["-y", linktype_name]
# Build Popen arguments
if args is None:
args = []
else:
# Make a copy of args
args = list(args)
stdout = subprocess.PIPE if dump or getfd else None
stderr = open(os.devnull) if quiet else None
if use_tempfile is None:
# Apple's tcpdump cannot read from stdin, see:
# http://apple.stackexchange.com/questions/152682/
use_tempfile = DARWIN and prog[0] == conf.prog.tcpdump
if read_stdin_opts is None:
if prog[0] == conf.prog.wireshark:
# Start capturing immediately (-k) from stdin (-i -)
read_stdin_opts = ["-ki", "-"]
else:
read_stdin_opts = ["-r", "-"]
else:
# Make a copy of read_stdin_opts
read_stdin_opts = list(read_stdin_opts)
if pktlist is None:
# sniff
with ContextManagerSubprocess(_prog_name, prog[0]):
proc = subprocess.Popen(
prog + args,
stdout=stdout,
stderr=stderr,
)
elif isinstance(pktlist, six.string_types):
# file
with ContextManagerSubprocess(_prog_name, prog[0]):
proc = subprocess.Popen(
prog + ["-r", pktlist] + args,
stdout=stdout,
stderr=stderr,
)
elif use_tempfile:
tmpfile = get_temp_file(autoext=".pcap", fd=True)
try:
tmpfile.writelines(iter(lambda: pktlist.read(1048576), b""))
except AttributeError:
wrpcap(tmpfile, pktlist, linktype=linktype)
else:
tmpfile.close()
with ContextManagerSubprocess(_prog_name, prog[0]):
proc = subprocess.Popen(
prog + ["-r", tmpfile.name] + args,
stdout=stdout,
stderr=stderr,
)
else:
# pass the packet stream
with ContextManagerSubprocess(_prog_name, prog[0]):
proc = subprocess.Popen(
prog + read_stdin_opts + args,
stdin=subprocess.PIPE,
stdout=stdout,
stderr=stderr,
)
try:
proc.stdin.writelines(iter(lambda: pktlist.read(1048576), b""))
except AttributeError:
wrpcap(proc.stdin, pktlist, linktype=linktype)
except UnboundLocalError:
raise IOError("%s died unexpectedly !" % prog)
else:
proc.stdin.close()
if dump:
return b"".join(iter(lambda: proc.stdout.read(1048576), b""))
if getproc:
return proc
if getfd:
return proc.stdout
if wait:
proc.wait()
|
def tcpdump(
pktlist,
dump=False,
getfd=False,
args=None,
prog=None,
getproc=False,
quiet=False,
use_tempfile=None,
read_stdin_opts=None,
linktype=None,
wait=True,
):
"""Run tcpdump or tshark on a list of packets.
When using ``tcpdump`` on OSX (``prog == conf.prog.tcpdump``), this uses a
temporary file to store the packets. This works around a bug in Apple's
version of ``tcpdump``: http://apple.stackexchange.com/questions/152682/
Otherwise, the packets are passed in stdin.
This function can be explicitly enabled or disabled with the
``use_tempfile`` parameter.
When using ``wireshark``, it will be called with ``-ki -`` to start
immediately capturing packets from stdin.
Otherwise, the command will be run with ``-r -`` (which is correct for
``tcpdump`` and ``tshark``).
This can be overridden with ``read_stdin_opts``. This has no effect when
``use_tempfile=True``, or otherwise reading packets from a regular file.
pktlist: a Packet instance, a PacketList instance or a list of Packet
instances. Can also be a filename (as a string), an open
file-like object that must be a file format readable by
tshark (Pcap, PcapNg, etc.) or None (to sniff)
dump: when set to True, returns a string instead of displaying it.
getfd: when set to True, returns a file-like object to read data
from tcpdump or tshark from.
getproc: when set to True, the subprocess.Popen object is returned
args: arguments (as a list) to pass to tshark (example for tshark:
args=["-T", "json"]).
prog: program to use (defaults to tcpdump, will work with tshark)
quiet: when set to True, the process stderr is discarded
use_tempfile: When set to True, always use a temporary file to store packets.
When set to False, pipe packets through stdin.
When set to None (default), only use a temporary file with
``tcpdump`` on OSX.
read_stdin_opts: When set, a list of arguments needed to capture from stdin.
Otherwise, attempts to guess.
linktype: A custom DLT value or name, to overwrite the default values.
wait: If True (default), waits for the process to terminate before returning
to Scapy. If False, the process will be detached to the background. If
dump, getproc or getfd is True, these have the same effect as
``wait=False``.
Examples:
>>> tcpdump([IP()/TCP(), IP()/UDP()])
reading from file -, link-type RAW (Raw IP)
16:46:00.474515 IP 127.0.0.1.20 > 127.0.0.1.80: Flags [S], seq 0, win 8192, length 0 # noqa: E501
16:46:00.475019 IP 127.0.0.1.53 > 127.0.0.1.53: [|domain]
>>> tcpdump([IP()/TCP(), IP()/UDP()], prog=conf.prog.tshark)
1 0.000000 127.0.0.1 -> 127.0.0.1 TCP 40 20->80 [SYN] Seq=0 Win=8192 Len=0 # noqa: E501
2 0.000459 127.0.0.1 -> 127.0.0.1 UDP 28 53->53 Len=0
To get a JSON representation of a tshark-parsed PacketList(), one can:
>>> import json, pprint
>>> json_data = json.load(tcpdump(IP(src="217.25.178.5", dst="45.33.32.156"),
... prog=conf.prog.tshark, args=["-T", "json"],
... getfd=True))
>>> pprint.pprint(json_data)
[{u'_index': u'packets-2016-12-23',
u'_score': None,
u'_source': {u'layers': {u'frame': {u'frame.cap_len': u'20',
u'frame.encap_type': u'7',
[...]
u'frame.time_relative': u'0.000000000'},
u'ip': {u'ip.addr': u'45.33.32.156',
u'ip.checksum': u'0x0000a20d',
[...]
u'ip.ttl': u'64',
u'ip.version': u'4'},
u'raw': u'Raw packet data'}},
u'_type': u'pcap_file'}]
>>> json_data[0]['_source']['layers']['ip']['ip.ttl']
u'64'
"""
getfd = getfd or getproc
if prog is None:
prog = [conf.prog.tcpdump]
_prog_name = "windump()" if WINDOWS else "tcpdump()"
elif isinstance(prog, six.string_types):
_prog_name = "{}()".format(prog)
prog = [prog]
else:
raise ValueError("prog must be a string")
if prog[0] == conf.prog.tcpdump and not TCPDUMP:
message = "tcpdump is not available. Cannot use tcpdump() !"
raise Scapy_Exception(message)
if linktype is not None:
# Tcpdump does not support integers in -y (yet)
# https://github.com/the-tcpdump-group/tcpdump/issues/758
if isinstance(linktype, int):
# Guess name from value
try:
linktype_name = _guess_linktype_name(linktype)
except StopIteration:
linktype = -1
else:
# Guess value from name
if linktype.startswith("DLT_"):
linktype = linktype[4:]
linktype_name = linktype
try:
linktype = _guess_linktype_value(linktype)
except KeyError:
linktype = -1
if linktype == -1:
raise ValueError("Unknown linktype. Try passing its datalink name instead")
prog += ["-y", linktype_name]
# Build Popen arguments
if args is None:
args = []
else:
# Make a copy of args
args = list(args)
stdout = subprocess.PIPE if dump or getfd else None
stderr = open(os.devnull) if quiet else None
if use_tempfile is None:
# Apple's tcpdump cannot read from stdin, see:
# http://apple.stackexchange.com/questions/152682/
use_tempfile = DARWIN and prog[0] == conf.prog.tcpdump
if read_stdin_opts is None:
if prog[0] == conf.prog.wireshark:
# Start capturing immediately (-k) from stdin (-i -)
read_stdin_opts = ["-ki", "-"]
else:
read_stdin_opts = ["-r", "-"]
else:
# Make a copy of read_stdin_opts
read_stdin_opts = list(read_stdin_opts)
if pktlist is None:
# sniff
with ContextManagerSubprocess(_prog_name, prog[0]):
proc = subprocess.Popen(
prog + args,
stdout=stdout,
stderr=stderr,
)
elif isinstance(pktlist, six.string_types):
# file
with ContextManagerSubprocess(_prog_name, prog[0]):
proc = subprocess.Popen(
prog + ["-r", pktlist] + args,
stdout=stdout,
stderr=stderr,
)
elif use_tempfile:
tmpfile = get_temp_file(autoext=".pcap", fd=True)
try:
tmpfile.writelines(iter(lambda: pktlist.read(1048576), b""))
except AttributeError:
wrpcap(tmpfile, pktlist, linktype=linktype)
else:
tmpfile.close()
with ContextManagerSubprocess(_prog_name, prog[0]):
proc = subprocess.Popen(
prog + ["-r", tmpfile.name] + args,
stdout=stdout,
stderr=stderr,
)
else:
# pass the packet stream
with ContextManagerSubprocess(_prog_name, prog[0]):
proc = subprocess.Popen(
prog + read_stdin_opts + args,
stdin=subprocess.PIPE,
stdout=stdout,
stderr=stderr,
)
try:
proc.stdin.writelines(iter(lambda: pktlist.read(1048576), b""))
except AttributeError:
wrpcap(proc.stdin, pktlist, linktype=linktype)
except UnboundLocalError:
raise IOError("%s died unexpectedly !" % prog)
else:
proc.stdin.close()
if dump:
return b"".join(iter(lambda: proc.stdout.read(1048576), b""))
if getproc:
return proc
if getfd:
return proc.stdout
if wait:
proc.wait()
|
https://github.com/secdev/scapy/issues/2052
|
$ ipython3
In [1]: from scapy.utils import strxor
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
<ipython-input-1-90b66f0baa84> in <module>
----> 1 from scapy.utils import strxor
~/src/scapy/scapy/utils.py in <module>
33 from scapy.error import log_runtime, Scapy_Exception, warning
34 from scapy.pton_ntop import inet_pton
---> 35 from scapy.arch.common import TCPDUMP
36
37 ###########
~/src/scapy/scapy/arch/__init__.py in <module>
54
55 if LINUX:
---> 56 from scapy.arch.linux import * # noqa F403
57 elif BSD:
58 from scapy.arch.unix import read_routes, read_routes6, in6_getifaddr # noqa: F401, E501
~/src/scapy/scapy/arch/linux.py in <module>
25 from scapy.consts import LOOPBACK_NAME, LINUX
26 import scapy.utils
---> 27 import scapy.utils6
28 from scapy.packet import Packet, Padding
29 from scapy.config import conf
~/src/scapy/scapy/utils6.py in <module>
24 IPV6_ADDR_SITELOCAL, IPV6_ADDR_LOOPBACK, IPV6_ADDR_UNICAST,\
25 IPV6_ADDR_MULTICAST, IPV6_ADDR_6TO4, IPV6_ADDR_UNSPECIFIED
---> 26 from scapy.utils import strxor
27 from scapy.compat import orb, chb
28 from scapy.pton_ntop import inet_pton, inet_ntop
ImportError: cannot import name 'strxor' from 'scapy.utils' (/home/user/src/scapy/scapy/utils.py)
|
ImportError
|
def network_stats(self):
"""Return a dictionary containing a summary of the Dot11
elements fields
"""
summary = {}
crypto = set()
p = self.payload
while isinstance(p, Dot11Elt):
if p.ID == 0:
summary["ssid"] = plain_str(p.info)
elif p.ID == 3:
summary["channel"] = ord(p.info)
elif isinstance(p, Dot11EltRates):
summary["rates"] = p.rates
elif isinstance(p, Dot11EltRSN):
crypto.add("WPA2")
elif p.ID == 221:
if isinstance(p, Dot11EltMicrosoftWPA) or p.info.startswith(
b"\x00P\xf2\x01\x01\x00"
):
crypto.add("WPA")
p = p.payload
if not crypto:
if self.cap.privacy:
crypto.add("WEP")
else:
crypto.add("OPN")
summary["crypto"] = crypto
return summary
|
def network_stats(self):
"""Return a dictionary containing a summary of the Dot11
elements fields
"""
summary = {}
crypto = set()
p = self.payload
while isinstance(p, Dot11Elt):
if p.ID == 0:
summary["ssid"] = plain_str(p.info)
elif p.ID == 3:
summary["channel"] = ord(p.info)
elif isinstance(p, Dot11EltRates):
summary["rates"] = p.rates
elif isinstance(p, Dot11EltRSN):
crypto.add("WPA2")
elif p.ID == 221:
if isinstance(p, Dot11EltMicrosoftWPA) or p.info.startswith(
"\x00P\xf2\x01\x01\x00"
):
crypto.add("WPA")
p = p.payload
if not crypto:
if self.cap.privacy:
crypto.add("WEP")
else:
crypto.add("OPN")
summary["crypto"] = crypto
return summary
|
https://github.com/secdev/scapy/issues/1872
|
Traceback (most recent call last):
File "min_script.py", line 25, in <module>
sniff(iface="mon0", lfilter=filter_beacon, prn=print_ap)
File "/usr/lib/python3.7/site-packages/scapy/sendrecv.py", line 886, in sniff
r = prn(p)
File "min_script.py", line 12, in print_ap
netstats = p[Dot11Beacon].network_stats()
File "/usr/lib/python3.7/site-packages/scapy/layers/dot11.py", line 430, in network_stats
p.info.startswith('\x00P\xf2\x01\x01\x00'):
TypeError: startswith first arg must be bytes or a tuple of bytes, not str
|
TypeError
|
def _set_conf_sockets():
"""Populate the conf.L2Socket and conf.L3Socket
according to the various use_* parameters
"""
if conf.use_bpf and not BSD:
Interceptor.set_from_hook(conf, "use_bpf", False)
raise ScapyInvalidPlatformException("BSD-like (OSX, *BSD...) only !")
if conf.use_winpcapy and not WINDOWS:
Interceptor.set_from_hook(conf, "use_winpcapy", False)
raise ScapyInvalidPlatformException("Windows only !")
# we are already in an Interceptor hook, use Interceptor.set_from_hook
if conf.use_pcap or conf.use_dnet or conf.use_winpcapy:
try:
from scapy.arch.pcapdnet import (
L2pcapListenSocket,
L2pcapSocket,
L3pcapSocket,
)
except ImportError:
warning("No pcap provider available ! pcap won't be used")
Interceptor.set_from_hook(conf, "use_winpcapy", False)
Interceptor.set_from_hook(conf, "use_pcap", False)
else:
conf.L2listen = L2pcapListenSocket
conf.L2socket = L2pcapSocket
conf.L3socket = L3pcapSocket
return
if conf.use_bpf:
from scapy.arch.bpf.supersocket import (
L2bpfListenSocket,
L2bpfSocket,
L3bpfSocket,
)
conf.L2listen = L2bpfListenSocket
conf.L2socket = L2bpfSocket
conf.L3socket = L3bpfSocket
return
if LINUX:
from scapy.arch.linux import L3PacketSocket, L2Socket, L2ListenSocket
conf.L3socket = L3PacketSocket
conf.L2socket = L2Socket
conf.L2listen = L2ListenSocket
return
if WINDOWS: # Should have been conf.use_winpcapy
from scapy.arch.windows import _NotAvailableSocket
conf.L2socket = _NotAvailableSocket
conf.L2listen = _NotAvailableSocket
conf.L3socket = _NotAvailableSocket
return
from scapy.supersocket import L3RawSocket
conf.L3socket = L3RawSocket
|
def _set_conf_sockets():
"""Populate the conf.L2Socket and conf.L3Socket
according to the various use_* parameters
"""
if conf.use_bpf and not DARWIN:
Interceptor.set_from_hook(conf, "use_bpf", False)
raise ScapyInvalidPlatformException("Darwin (OSX) only !")
if conf.use_winpcapy and not WINDOWS:
Interceptor.set_from_hook(conf, "use_winpcapy", False)
raise ScapyInvalidPlatformException("Windows only !")
# we are already in an Interceptor hook, use Interceptor.set_from_hook
if conf.use_pcap or conf.use_dnet or conf.use_winpcapy:
try:
from scapy.arch.pcapdnet import (
L2pcapListenSocket,
L2pcapSocket,
L3pcapSocket,
)
except ImportError:
warning("No pcap provider available ! pcap won't be used")
Interceptor.set_from_hook(conf, "use_winpcapy", False)
Interceptor.set_from_hook(conf, "use_pcap", False)
else:
conf.L2listen = L2pcapListenSocket
conf.L2socket = L2pcapSocket
conf.L3socket = L3pcapSocket
return
if conf.use_bpf:
from scapy.arch.bpf.supersocket import (
L2bpfListenSocket,
L2bpfSocket,
L3bpfSocket,
)
conf.L2listen = L2bpfListenSocket
conf.L2socket = L2bpfSocket
conf.L3socket = L3bpfSocket
return
if LINUX:
from scapy.arch.linux import L3PacketSocket, L2Socket, L2ListenSocket
conf.L3socket = L3PacketSocket
conf.L2socket = L2Socket
conf.L2listen = L2ListenSocket
return
if WINDOWS: # Should have been conf.use_winpcapy
from scapy.arch.windows import _NotAvailableSocket
conf.L2socket = _NotAvailableSocket
conf.L2listen = _NotAvailableSocket
conf.L3socket = _NotAvailableSocket
return
from scapy.supersocket import L3RawSocket
conf.L3socket = L3RawSocket
|
https://github.com/secdev/scapy/issues/1793
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/user/venv/lib/python3.6/site-packages/scapy/all.py", line 18, in <module>
from scapy.arch import *
File "/home/user/venv/lib/python3.6/site-packages/scapy/arch/__init__.py", line 63, in <module>
conf.use_bpf = True
File "/home/user/venv/lib/python3.6/site-packages/scapy/config.py", line 75, in __set__
self.hook(self.name, val, *self.args, **self.kargs)
File "/home/user/venv/lib/python3.6/site-packages/scapy/config.py", line 491, in _socket_changer
_set_conf_sockets()
File "/home/user/venv/lib/python3.6/site-packages/scapy/config.py", line 436, in _set_conf_sockets
raise ScapyInvalidPlatformException("Darwin (OSX) only !")
scapy.error.ScapyInvalidPlatformException: Darwin (OSX) only !
|
scapy.error.ScapyInvalidPlatformException
|
def get_ip_from_name(ifname, v6=False):
"""Backward compatibility: indirectly calls get_ips
Deprecated."""
return get_ips(v6=v6).get(ifname, [""])[0]
|
def get_ip_from_name(ifname, v6=False):
"""Backward compatibility: indirectly calls get_ips
Deprecated."""
return get_ips(v6=v6).get(ifname, "")[0]
|
https://github.com/secdev/scapy/issues/1727
|
from scapy.all import *
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "scapy\all.py", line 18, in <module>
from scapy.arch import *
File "scapy\arch\__init__.py", line 73, in <module>
from scapy.arch.windows import * # noqa F403
File "scapy\arch\windows\__init__.py", line 953, in <module>
IFACES.load_from_powershell()
File "scapy\arch\windows\__init__.py", line 843, in load_from_powershell
interface.ip = ifaces_ips.get(interface.name, "")[0]
IndexError: string index out of range
|
IndexError
|
def load_from_powershell(self):
if not conf.prog.os_access:
return
ifaces_ips = None
for i in get_windows_if_list():
try:
interface = NetworkInterface(i)
self.data[interface.guid] = interface
# If no IP address was detected using winpcap and if
# the interface is not the loopback one, look for
# internal windows interfaces
if not interface.ip:
if not ifaces_ips: # ifaces_ips is used as a cache
ifaces_ips = get_ips()
# If it exists, retrieve the interface's IP from the cache
interface.ip = ifaces_ips.get(interface.name, [""])[0]
except (KeyError, PcapNameNotFoundError):
pass
if not self.data and conf.use_winpcapy:
_detect = pcap_service_status()
def _ask_user():
if not conf.interactive:
return False
while True:
_confir = (
input("Do you want to start it ? (yes/no) [y]: ").lower().strip()
) # noqa: E501
if _confir in ["yes", "y", ""]:
return True
elif _confir in ["no", "n"]:
return False
return False
_error_msg = "No match between your pcap and windows network interfaces found. " # noqa: E501
if (
_detect[0]
and not _detect[2]
and not (hasattr(self, "restarted_adapter") and self.restarted_adapter)
): # noqa: E501
warning("Scapy has detected that your pcap service is not running !") # noqa: E501
if not conf.interactive or _ask_user():
succeed = pcap_service_start(askadmin=conf.interactive)
self.restarted_adapter = True
if succeed:
log_loading.info("Pcap service started !")
self.load_from_powershell()
return
_error_msg = "Could not start the pcap service ! "
warning(
_error_msg + "You probably won't be able to send packets. "
"Deactivating unneeded interfaces and restarting Scapy might help. " # noqa: E501
"Check your winpcap and powershell installation, and access rights."
) # noqa: E501
else:
# Loading state: remove invalid interfaces
self.remove_invalid_ifaces()
# Replace LOOPBACK_INTERFACE
try:
scapy.consts.LOOPBACK_INTERFACE = self.dev_from_name(
scapy.consts.LOOPBACK_NAME,
)
except ValueError:
pass
|
def load_from_powershell(self):
if not conf.prog.os_access:
return
ifaces_ips = None
for i in get_windows_if_list():
try:
interface = NetworkInterface(i)
self.data[interface.guid] = interface
# If no IP address was detected using winpcap and if
# the interface is not the loopback one, look for
# internal windows interfaces
if not interface.ip:
if not ifaces_ips: # ifaces_ips is used as a cache
ifaces_ips = get_ips()
# If it exists, retrieve the interface's IP from the cache
interface.ip = ifaces_ips.get(interface.name, "")[0]
except (KeyError, PcapNameNotFoundError):
pass
if not self.data and conf.use_winpcapy:
_detect = pcap_service_status()
def _ask_user():
if not conf.interactive:
return False
while True:
_confir = (
input("Do you want to start it ? (yes/no) [y]: ").lower().strip()
) # noqa: E501
if _confir in ["yes", "y", ""]:
return True
elif _confir in ["no", "n"]:
return False
return False
_error_msg = "No match between your pcap and windows network interfaces found. " # noqa: E501
if (
_detect[0]
and not _detect[2]
and not (hasattr(self, "restarted_adapter") and self.restarted_adapter)
): # noqa: E501
warning("Scapy has detected that your pcap service is not running !") # noqa: E501
if not conf.interactive or _ask_user():
succeed = pcap_service_start(askadmin=conf.interactive)
self.restarted_adapter = True
if succeed:
log_loading.info("Pcap service started !")
self.load_from_powershell()
return
_error_msg = "Could not start the pcap service ! "
warning(
_error_msg + "You probably won't be able to send packets. "
"Deactivating unneeded interfaces and restarting Scapy might help. " # noqa: E501
"Check your winpcap and powershell installation, and access rights."
) # noqa: E501
else:
# Loading state: remove invalid interfaces
self.remove_invalid_ifaces()
# Replace LOOPBACK_INTERFACE
try:
scapy.consts.LOOPBACK_INTERFACE = self.dev_from_name(
scapy.consts.LOOPBACK_NAME,
)
except ValueError:
pass
|
https://github.com/secdev/scapy/issues/1727
|
from scapy.all import *
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "scapy\all.py", line 18, in <module>
from scapy.arch import *
File "scapy\arch\__init__.py", line 73, in <module>
from scapy.arch.windows import * # noqa F403
File "scapy\arch\windows\__init__.py", line 953, in <module>
IFACES.load_from_powershell()
File "scapy\arch\windows\__init__.py", line 843, in load_from_powershell
interface.ip = ifaces_ips.get(interface.name, "")[0]
IndexError: string index out of range
|
IndexError
|
def _exec_query_ps(cmd, fields):
"""Execute a PowerShell query, using the cmd command,
and select and parse the provided fields.
"""
if not conf.prog.powershell:
raise OSError("Scapy could not detect powershell !")
# Build query
query_cmd = cmd + [
"|",
"select %s" % ", ".join(fields), # select fields
"|",
"fl", # print as a list
"|",
"out-string",
"-Width",
"4096",
] # do not crop
lines = []
# Ask the powershell manager to process the query
stdout = POWERSHELL_PROCESS.query(query_cmd)
# Process stdout
for line in stdout:
if not line.strip(): # skip empty lines
continue
sl = line.split(":", 1)
if sl[0].strip() not in fields:
# The previous line was cropped. Let's add the missing part
lines[-1] += line.strip()
continue
else:
# We put it here to ensure we never return too early,
# missing some cropped lines
if len(lines) == len(fields):
yield lines
lines = []
lines.append(sl[1].strip())
yield lines # Last buffer won't be returned in the if
|
def _exec_query_ps(cmd, fields):
"""Execute a PowerShell query, using the cmd command,
and select and parse the provided fields.
"""
if not conf.prog.powershell:
raise OSError("Scapy could not detect powershell !")
# Build query
query_cmd = cmd + [
"|",
"select %s" % ", ".join(fields), # select fields
"|",
"fl", # print as a list
"|",
"out-string",
"-Width",
"4096",
] # do not crop
lines = []
# Ask the powershell manager to process the query
stdout = POWERSHELL_PROCESS.query(query_cmd)
# Process stdout
for line in stdout:
if not line.strip(): # skip empty lines
continue
sl = line.split(":", 1)
if len(sl) == 1:
lines[-1] += sl[0].strip()
continue
else:
lines.append(sl[1].strip())
if len(lines) == len(fields):
yield lines
lines = []
|
https://github.com/secdev/scapy/issues/1718
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "scapy\all.py", line 18, in <module>
from scapy.arch import *
File "scapy\arch\__init__.py", line 73, in <module>
from scapy.arch.windows import * # noqa F403
File "scapy\arch\windows\__init__.py", line 946, in <module>
IFACES.load_from_powershell()
File "scapy\arch\windows\__init__.py", line 834, in load_from_powershell
ifaces_ips = get_ips()
File "scapy\arch\windows\__init__.py", line 544, in get_ips
['Description', 'IPAddress']):
File "scapy\arch\windows\__init__.py", line 243, in _exec_query_ps
lines[-1] += sl[0].strip()
IndexError: list index out of range
|
IndexError
|
def get_ips(v6=False):
"""Returns all available IPs matching to interfaces, using the windows system.
Should only be used as a WinPcapy fallback."""
res = {}
for descr, ipaddr in exec_query(
["Get-WmiObject", "Win32_NetworkAdapterConfiguration"],
["Description", "IPAddress"],
):
if ipaddr.strip():
# This requires lots of stripping
ip_string = ipaddr.split(",", 1)[v6].strip("{}").strip()
res[descr] = [ip.strip() for ip in ip_string.split(",")]
return res
|
def get_ips(v6=False):
"""Returns all available IPs matching to interfaces, using the windows system.
Should only be used as a WinPcapy fallback."""
res = {}
for descr, ipaddr in exec_query(
["Get-WmiObject", "Win32_NetworkAdapterConfiguration"],
["Description", "IPAddress"],
):
if ipaddr.strip():
res[descr] = ipaddr.split(",", 1)[v6].strip("{}").strip()
return res
|
https://github.com/secdev/scapy/issues/1718
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "scapy\all.py", line 18, in <module>
from scapy.arch import *
File "scapy\arch\__init__.py", line 73, in <module>
from scapy.arch.windows import * # noqa F403
File "scapy\arch\windows\__init__.py", line 946, in <module>
IFACES.load_from_powershell()
File "scapy\arch\windows\__init__.py", line 834, in load_from_powershell
ifaces_ips = get_ips()
File "scapy\arch\windows\__init__.py", line 544, in get_ips
['Description', 'IPAddress']):
File "scapy\arch\windows\__init__.py", line 243, in _exec_query_ps
lines[-1] += sl[0].strip()
IndexError: list index out of range
|
IndexError
|
def get_ip_from_name(ifname, v6=False):
"""Backward compatibility: indirectly calls get_ips
Deprecated."""
return get_ips(v6=v6).get(ifname, "")[0]
|
def get_ip_from_name(ifname, v6=False):
"""Backward compatibility: indirectly calls get_ips
Deprecated."""
return get_ips(v6=v6).get(ifname, "")
|
https://github.com/secdev/scapy/issues/1718
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "scapy\all.py", line 18, in <module>
from scapy.arch import *
File "scapy\arch\__init__.py", line 73, in <module>
from scapy.arch.windows import * # noqa F403
File "scapy\arch\windows\__init__.py", line 946, in <module>
IFACES.load_from_powershell()
File "scapy\arch\windows\__init__.py", line 834, in load_from_powershell
ifaces_ips = get_ips()
File "scapy\arch\windows\__init__.py", line 544, in get_ips
['Description', 'IPAddress']):
File "scapy\arch\windows\__init__.py", line 243, in _exec_query_ps
lines[-1] += sl[0].strip()
IndexError: list index out of range
|
IndexError
|
def load_from_powershell(self):
if not conf.prog.os_access:
return
ifaces_ips = None
for i in get_windows_if_list():
try:
interface = NetworkInterface(i)
self.data[interface.guid] = interface
# If no IP address was detected using winpcap and if
# the interface is not the loopback one, look for
# internal windows interfaces
if not interface.ip:
if not ifaces_ips: # ifaces_ips is used as a cache
ifaces_ips = get_ips()
# If it exists, retrieve the interface's IP from the cache
interface.ip = ifaces_ips.get(interface.name, "")[0]
except (KeyError, PcapNameNotFoundError):
pass
if not self.data and conf.use_winpcapy:
_detect = pcap_service_status()
def _ask_user():
if not conf.interactive:
return False
while True:
_confir = (
input("Do you want to start it ? (yes/no) [y]: ").lower().strip()
) # noqa: E501
if _confir in ["yes", "y", ""]:
return True
elif _confir in ["no", "n"]:
return False
return False
_error_msg = "No match between your pcap and windows network interfaces found. " # noqa: E501
if (
_detect[0]
and not _detect[2]
and not (hasattr(self, "restarted_adapter") and self.restarted_adapter)
): # noqa: E501
warning("Scapy has detected that your pcap service is not running !") # noqa: E501
if not conf.interactive or _ask_user():
succeed = pcap_service_start(askadmin=conf.interactive)
self.restarted_adapter = True
if succeed:
log_loading.info("Pcap service started !")
self.load_from_powershell()
return
_error_msg = "Could not start the pcap service ! "
warning(
_error_msg + "You probably won't be able to send packets. "
"Deactivating unneeded interfaces and restarting Scapy might help. " # noqa: E501
"Check your winpcap and powershell installation, and access rights."
) # noqa: E501
else:
# Loading state: remove invalid interfaces
self.remove_invalid_ifaces()
# Replace LOOPBACK_INTERFACE
try:
scapy.consts.LOOPBACK_INTERFACE = self.dev_from_name(
scapy.consts.LOOPBACK_NAME,
)
except ValueError:
pass
|
def load_from_powershell(self):
if not conf.prog.os_access:
return
ifaces_ips = None
for i in get_windows_if_list():
try:
interface = NetworkInterface(i)
self.data[interface.guid] = interface
# If no IP address was detected using winpcap and if
# the interface is not the loopback one, look for
# internal windows interfaces
if not interface.ip:
if not ifaces_ips: # ifaces_ips is used as a cache
ifaces_ips = get_ips()
# If it exists, retrieve the interface's IP from the cache
interface.ip = ifaces_ips.get(interface.name, "")
except (KeyError, PcapNameNotFoundError):
pass
if not self.data and conf.use_winpcapy:
_detect = pcap_service_status()
def _ask_user():
if not conf.interactive:
return False
while True:
_confir = (
input("Do you want to start it ? (yes/no) [y]: ").lower().strip()
) # noqa: E501
if _confir in ["yes", "y", ""]:
return True
elif _confir in ["no", "n"]:
return False
return False
_error_msg = "No match between your pcap and windows network interfaces found. " # noqa: E501
if (
_detect[0]
and not _detect[2]
and not (hasattr(self, "restarted_adapter") and self.restarted_adapter)
): # noqa: E501
warning("Scapy has detected that your pcap service is not running !") # noqa: E501
if not conf.interactive or _ask_user():
succeed = pcap_service_start(askadmin=conf.interactive)
self.restarted_adapter = True
if succeed:
log_loading.info("Pcap service started !")
self.load_from_powershell()
return
_error_msg = "Could not start the pcap service ! "
warning(
_error_msg + "You probably won't be able to send packets. "
"Deactivating unneeded interfaces and restarting Scapy might help. " # noqa: E501
"Check your winpcap and powershell installation, and access rights."
) # noqa: E501
else:
# Loading state: remove invalid interfaces
self.remove_invalid_ifaces()
# Replace LOOPBACK_INTERFACE
try:
scapy.consts.LOOPBACK_INTERFACE = self.dev_from_name(
scapy.consts.LOOPBACK_NAME,
)
except ValueError:
pass
|
https://github.com/secdev/scapy/issues/1718
|
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "scapy\all.py", line 18, in <module>
from scapy.arch import *
File "scapy\arch\__init__.py", line 73, in <module>
from scapy.arch.windows import * # noqa F403
File "scapy\arch\windows\__init__.py", line 946, in <module>
IFACES.load_from_powershell()
File "scapy\arch\windows\__init__.py", line 834, in load_from_powershell
ifaces_ips = get_ips()
File "scapy\arch\windows\__init__.py", line 544, in get_ips
['Description', 'IPAddress']):
File "scapy\arch\windows\__init__.py", line 243, in _exec_query_ps
lines[-1] += sl[0].strip()
IndexError: list index out of range
|
IndexError
|
def i2h(self, pkt, x):
if isinstance(x, VolatileValue):
return super(FlagsField, self).i2h(pkt, x)
return self._fixup_val(super(FlagsField, self).i2h(pkt, x))
|
def i2h(self, pkt, x):
return self._fixup_val(super(FlagsField, self).i2h(pkt, x))
|
https://github.com/secdev/scapy/issues/1576
|
ubuntu:/$ scapy
... (welcome info omitted)
p = IP()/TCP()
p = fuzz(p)
p["IP"].show()
###[ IP ]###
version= <RandNum>
ihl= None
tos= 98
len= None
id= <RandShort>
flags= DF+evil
frag= 0
ttl= <RandByte>
proto= tcp
chksum= None
src= 127.0.0.1
dst= 127.0.0.1
\options\
###[ TCP ]###
sport= <RandShort>
dport= <RandShort>
seq= <RandInt>
ack= <RandInt>
dataofs= None
reserved= <RandNum>
flags= SAUC
window= <RandShort>
chksum= None
urgptr= <RandShort>
options= []
# You can query flags repeatedly to see that it's indeed a random value
p["TCP"].flags
<Flag 163 (FSUC)>
p["TCP"].flags
<Flag 414 (SRPACN)>
# But if you try to _fix() it, like you can with other parts of the packet:
p["TCP"].flags._fix()
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 1497, in __getattr__
for flag in attr)
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 1497, in <genexpr>
for flag in attr)
ValueError: substring not found
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/usr/local/lib/python3.6/dist-packages/scapy/fields.py", line 1499, in __getattr__
return super(FlagValue, self).__getattr__(attr)
AttributeError: 'super' object has no attribute '__getattr__'
|
ValueError
|
def cpu_freq():
"""Alternate implementation using /proc/cpuinfo.
min and max frequencies are not available and are set to None.
"""
ret = []
with open_binary("%s/cpuinfo" % get_procfs_path()) as f:
for line in f:
if line.lower().startswith(b"cpu mhz"):
key, value = line.split(b"\t:", 1)
ret.append(_common.scpufreq(float(value), 0.0, 0.0))
return ret
|
def cpu_freq():
"""Alternate implementation using /proc/cpuinfo.
min and max frequencies are not available and are set to None.
"""
ret = []
with open_binary("%s/cpuinfo" % get_procfs_path()) as f:
for line in f:
if line.lower().startswith(b"cpu mhz"):
key, value = line.split(b"\t:", 1)
ret.append(_common.scpufreq(float(value), None, None))
return ret
|
https://github.com/giampaolo/psutil/issues/1456
|
$ s-tui
Traceback (most recent call last):
File "/home/user/venv/bin/s-tui", line 11, in <module>
sys.exit(main())
File "/home/user/venv/lib/python3.6/site-packages/s_tui/s_tui.py", line 927, in main
graph_controller = GraphController(args)
File "/home/user/venv/lib/python3.6/site-packages/s_tui/s_tui.py", line 728, in __init__
self.view = GraphView(self)
File "/home/user/venv/lib/python3.6/site-packages/s_tui/s_tui.py", line 233, in __init__
urwid.WidgetPlaceholder.__init__(self, self.main_window())
File "/home/user/venv/lib/python3.6/site-packages/s_tui/s_tui.py", line 531, in main_window
freq_source = FreqSource(is_admin)
File "/home/user/venv/lib/python3.6/site-packages/s_tui/Sources/FreqSource.py", line 118, in __init__
if self.last_freq >= 0 and self.top_freq <= 0:
TypeError: '<=' not supported between instances of 'NoneType' and 'int'
|
TypeError
|
def _proc_cred(self):
return cext.proc_cred(self.pid, self._procfs_path)
|
def _proc_cred(self):
@wrap_exceptions
def proc_cred(self):
return cext.proc_cred(self.pid, self._procfs_path)
return proc_cred(self)
|
https://github.com/giampaolo/psutil/issues/1447
|
Traceback (most recent call last):
File "/data/jon/h2oai/h2oaicore/systemutils.py", line 3100, in find_procs_by_name
for p in psutil.process_iter(attrs=['name']):
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/__init__.py", line 1440, in process_iter
if proc.is_running():
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/__init__.py", line 569, in is_running
return self == Process(self.pid)
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/__init__.py", line 340, in __init__
self._init(pid)
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/__init__.py", line 367, in _init
self.create_time()
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/__init__.py", line 698, in create_time
self._create_time = self._proc.create_time()
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/_pslinux.py", line 1402, in wrapper
return fun(self, *args, **kwargs)
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/_pslinux.py", line 1584, in create_time
values = self._parse_stat_file()
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/_common.py", line 337, in wrapper
return fun(self)
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/_pslinux.py", line 1441, in _parse_stat_file
with open_binary("%s/%s/stat" % (self._procfs_path, self.pid)) as f:
File "/home/jon/.pyenv/versions/3.6.4/lib/python3.6/site-packages/psutil/_pslinux.py", line 187, in open_binary
return open(fname, "rb", **kwargs)
FileNotFoundError: [Errno 2] No such file or directory: '/proc/12825/stat'
|
FileNotFoundError
|
def main():
ad_pids = []
procs = []
for p in psutil.process_iter():
with p.oneshot():
try:
mem = p.memory_full_info()
info = p.as_dict(["cmdline", "username"])
except psutil.AccessDenied:
ad_pids.append(p.pid)
except psutil.NoSuchProcess:
pass
else:
p._uss = mem.uss
p._rss = mem.rss
if not p._uss:
continue
p._pss = getattr(mem, "pss", "")
p._swap = getattr(mem, "swap", "")
p._info = info
procs.append(p)
procs.sort(key=lambda p: p._uss)
templ = "%-7s %-7s %7s %7s %7s %7s %7s"
print(templ % ("PID", "User", "USS", "PSS", "Swap", "RSS", "Cmdline"))
print("=" * 78)
for p in procs[:86]:
cmd = " ".join(p._info["cmdline"])[:50] if p._info["cmdline"] else ""
line = templ % (
p.pid,
p._info["username"][:7] if p._info["username"] else "",
convert_bytes(p._uss),
convert_bytes(p._pss) if p._pss != "" else "",
convert_bytes(p._swap) if p._swap != "" else "",
convert_bytes(p._rss),
cmd,
)
print(line)
if ad_pids:
print("warning: access denied for %s pids" % (len(ad_pids)), file=sys.stderr)
|
def main():
ad_pids = []
procs = []
for p in psutil.process_iter():
with p.oneshot():
try:
mem = p.memory_full_info()
info = p.as_dict(["cmdline", "username"])
except psutil.AccessDenied:
ad_pids.append(p.pid)
except psutil.NoSuchProcess:
pass
else:
p._uss = mem.uss
p._rss = mem.rss
if not p._uss:
continue
p._pss = getattr(mem, "pss", "")
p._swap = getattr(mem, "swap", "")
p._info = info
procs.append(p)
procs.sort(key=lambda p: p._uss)
templ = "%-7s %-7s %-30s %7s %7s %7s %7s"
print(templ % ("PID", "User", "Cmdline", "USS", "PSS", "Swap", "RSS"))
print("=" * 78)
for p in procs[:86]:
line = templ % (
p.pid,
p._info["username"][:7] if p._info["username"] else "",
" ".join(p._info["cmdline"])[:30],
convert_bytes(p._uss),
convert_bytes(p._pss) if p._pss != "" else "",
convert_bytes(p._swap) if p._swap != "" else "",
convert_bytes(p._rss),
)
print(line)
if ad_pids:
print("warning: access denied for %s pids" % (len(ad_pids)), file=sys.stderr)
|
https://github.com/giampaolo/psutil/issues/1877
|
Traceback (most recent call last):
File ".\teste.py", line 6, in <module>
for proc in psutil.process_iter():
File "C:\Users\user10\AppData\Roaming\Python\Python38\site-packages\psutil\__init__.py", line 1457, in process_iter
yield add(pid)
File "C:\Users\user10\AppData\Roaming\Python\Python38\site-packages\psutil\__init__.py", line 1432, in add
proc = Process(pid)
File "C:\Users\user10\AppData\Roaming\Python\Python38\site-packages\psutil\__init__.py", line 346, in __init__
self._init(pid)
File "C:\Users\user10\AppData\Roaming\Python\Python38\site-packages\psutil\__init__.py", line 373, in _init
self.create_time()
File "C:\Users\user10\AppData\Roaming\Python\Python38\site-packages\psutil\__init__.py", line 723, in create_time
self._create_time = self._proc.create_time()
File "C:\Users\user10\AppData\Roaming\Python\Python38\site-packages\psutil\_pswindows.py", line 681, in wrapper
raise convert_oserror(err, pid=self.pid, name=self._name)
File "C:\Users\user10\AppData\Roaming\Python\Python38\site-packages\psutil\_pswindows.py", line 671, in convert_oserr
or
raise exc
File "C:\Users\user10\AppData\Roaming\Python\Python38\site-packages\psutil\_pswindows.py", line 679, in wrapper
return fun(self, *args, **kwargs)
File "C:\Users\user10\AppData\Roaming\Python\Python38\site-packages\psutil\_pswindows.py", line 933, in create_time
user, system, created = cext.proc_times(self.pid)
OSError: [WinError 0] The operation completed successfully: '(originated from OpenProcess)'
}
|
OSError
|
def wrapper(self, *args, **kwargs):
delay = 0.0001
times = 33
for x in range(times): # retries for roughly 1 second
try:
return fun(self, *args, **kwargs)
except WindowsError as _:
err = _
if err.winerror == ERROR_PARTIAL_COPY:
time.sleep(delay)
delay = min(delay * 2, 0.04)
continue
else:
raise
else:
msg = (
"%s retried %s times, converted to AccessDenied as it's "
"still returning %r" % (fun, times, err)
)
raise AccessDenied(pid=self.pid, name=self._name, msg=msg)
|
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except OSError as err:
raise convert_oserror(err, pid=self.pid, name=self._name)
|
https://github.com/giampaolo/psutil/issues/875
|
======================================================================
ERROR: test_proc_environ (test_process.TestNonUnicode)
----------------------------------------------------------------------
Traceback (most recent call last):
File "C:\projects\psutil\psutil\tests\test_process.py", line 2054, in test_proc_environ
encode_path(p.environ()['FUNNY_ARG']), funny_path)
File "c:\projects\psutil\psutil\__init__.py", line 772, in environ
return self._proc.environ()
File "c:\projects\psutil\psutil\_pswindows.py", line 536, in wrapper
return fun(self, *args, **kwargs)
File "c:\projects\psutil\psutil\_pswindows.py", line 597, in environ
return parse_environ_block(cext.proc_environ(self.pid))
WindowsError: [Error 299] Only part of a ReadProcessMemory or WriteProcessMemory request was comp
|
WindowsError
|
def wrap_exceptions(fun):
"""Call callable into a try/except clause and translate ENOENT,
EACCES and EPERM in NoSuchProcess or AccessDenied exceptions.
"""
@functools.wraps(fun)
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except EnvironmentError as err:
# support for private module import
if NoSuchProcess is None or AccessDenied is None or ZombieProcess is None:
raise
# ENOENT (no such file or directory) gets raised on open().
# ESRCH (no such process) can get raised on read() if
# process is gone in meantime.
if err.errno in (errno.ENOENT, errno.ESRCH):
if not pid_exists(self.pid):
raise NoSuchProcess(self.pid, self._name)
else:
raise ZombieProcess(self.pid, self._name, self._ppid)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._name)
raise
return wrapper
|
def wrap_exceptions(fun):
"""Call callable into a try/except clause and translate ENOENT,
EACCES and EPERM in NoSuchProcess or AccessDenied exceptions.
"""
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except EnvironmentError as err:
# support for private module import
if NoSuchProcess is None or AccessDenied is None or ZombieProcess is None:
raise
# ENOENT (no such file or directory) gets raised on open().
# ESRCH (no such process) can get raised on read() if
# process is gone in meantime.
if err.errno in (errno.ENOENT, errno.ESRCH):
if not pid_exists(self.pid):
raise NoSuchProcess(self.pid, self._name)
else:
raise ZombieProcess(self.pid, self._name, self._ppid)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._name)
raise
return wrapper
|
https://github.com/giampaolo/psutil/issues/1486
|
Traceback (most recent call last):
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 593, in oneshot
self._proc.oneshot_enter()
File "/opt/freeware/lib64/python3.7/site-packages/psutil/_psaix.py", line 369, in oneshot_enter
self._proc_name_and_args.cache_activate(self)
AttributeError: 'function' object has no attribute 'cache_activate'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 1562, in process_iter
yield add(pid)
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 1539, in add
proc.info = proc.as_dict(attrs=attrs, ad_value=ad_value)
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 627, in as_dict
with self.oneshot():
File "/opt/freeware/lib64/python3.7/contextlib.py", line 112, in __enter__
return next(self.gen)
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 601, in oneshot
self._proc.oneshot_exit()
File "/opt/freeware/lib64/python3.7/site-packages/psutil/_psaix.py", line 374, in oneshot_exit
self._proc_name_and_args.cache_deactivate(self)
AttributeError: 'function' object has no attribute 'cache_deactivate'
|
AttributeError
|
def wrap_exceptions(fun):
"""Call callable into a try/except clause and translate ENOENT,
EACCES and EPERM in NoSuchProcess or AccessDenied exceptions.
"""
@functools.wraps(fun)
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except EnvironmentError as err:
if self.pid == 0:
if 0 in pids():
raise AccessDenied(self.pid, self._name)
else:
raise
# ENOENT (no such file or directory) gets raised on open().
# ESRCH (no such process) can get raised on read() if
# process is gone in meantime.
if err.errno in (errno.ENOENT, errno.ESRCH):
if not pid_exists(self.pid):
raise NoSuchProcess(self.pid, self._name)
else:
raise ZombieProcess(self.pid, self._name, self._ppid)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._name)
raise
return wrapper
|
def wrap_exceptions(fun):
"""Call callable into a try/except clause and translate ENOENT,
EACCES and EPERM in NoSuchProcess or AccessDenied exceptions.
"""
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except EnvironmentError as err:
if self.pid == 0:
if 0 in pids():
raise AccessDenied(self.pid, self._name)
else:
raise
# ENOENT (no such file or directory) gets raised on open().
# ESRCH (no such process) can get raised on read() if
# process is gone in meantime.
if err.errno in (errno.ENOENT, errno.ESRCH):
if not pid_exists(self.pid):
raise NoSuchProcess(self.pid, self._name)
else:
raise ZombieProcess(self.pid, self._name, self._ppid)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._name)
raise
return wrapper
|
https://github.com/giampaolo/psutil/issues/1486
|
Traceback (most recent call last):
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 593, in oneshot
self._proc.oneshot_enter()
File "/opt/freeware/lib64/python3.7/site-packages/psutil/_psaix.py", line 369, in oneshot_enter
self._proc_name_and_args.cache_activate(self)
AttributeError: 'function' object has no attribute 'cache_activate'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 1562, in process_iter
yield add(pid)
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 1539, in add
proc.info = proc.as_dict(attrs=attrs, ad_value=ad_value)
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 627, in as_dict
with self.oneshot():
File "/opt/freeware/lib64/python3.7/contextlib.py", line 112, in __enter__
return next(self.gen)
File "/opt/freeware/lib64/python3.7/site-packages/psutil/__init__.py", line 601, in oneshot
self._proc.oneshot_exit()
File "/opt/freeware/lib64/python3.7/site-packages/psutil/_psaix.py", line 374, in oneshot_exit
self._proc_name_and_args.cache_deactivate(self)
AttributeError: 'function' object has no attribute 'cache_deactivate'
|
AttributeError
|
def wrap_exceptions(fun):
"""Decorator which translates bare OSError exceptions into
NoSuchProcess and AccessDenied.
"""
@functools.wraps(fun)
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except OSError as err:
if err.errno == errno.ESRCH:
raise NoSuchProcess(self.pid, self._name)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._name)
raise
except cext.ZombieProcessError:
raise ZombieProcess(self.pid, self._name, self._ppid)
return wrapper
|
def wrap_exceptions(fun):
"""Decorator which translates bare OSError exceptions into
NoSuchProcess and AccessDenied.
"""
@functools.wraps(fun)
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except OSError as err:
if err.errno == errno.ESRCH:
raise NoSuchProcess(self.pid, self._name)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._name)
raise
return wrapper
|
https://github.com/giampaolo/psutil/issues/1209
|
======================================================================
ERROR: psutil.tests.test_process.TestProcess.test_zombie_process
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Users/travis/build/giampaolo/psutil/psutil/tests/test_process.py", line 1286, in test_zombie_process
zproc.as_dict()
File "/Users/travis/build/giampaolo/psutil/psutil/__init__.py", line 528, in as_dict
ret = meth()
File "/Users/travis/build/giampaolo/psutil/psutil/__init__.py", line 1113, in memory_maps
it = self._proc.memory_maps()
File "/Users/travis/build/giampaolo/psutil/psutil/_psosx.py", line 330, in wrapper
return fun(self, *args, **kwargs)
File "/Users/travis/build/giampaolo/psutil/psutil/_psosx.py", line 571, in memory_maps
return cext.proc_memory_maps(self.pid)
OSError: [Errno 22] Invalid argument
|
OSError
|
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except OSError as err:
if err.errno == errno.ESRCH:
raise NoSuchProcess(self.pid, self._name)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._name)
raise
except cext.ZombieProcessError:
raise ZombieProcess(self.pid, self._name, self._ppid)
|
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except OSError as err:
if err.errno == errno.ESRCH:
raise NoSuchProcess(self.pid, self._name)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._name)
raise
|
https://github.com/giampaolo/psutil/issues/1209
|
======================================================================
ERROR: psutil.tests.test_process.TestProcess.test_zombie_process
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Users/travis/build/giampaolo/psutil/psutil/tests/test_process.py", line 1286, in test_zombie_process
zproc.as_dict()
File "/Users/travis/build/giampaolo/psutil/psutil/__init__.py", line 528, in as_dict
ret = meth()
File "/Users/travis/build/giampaolo/psutil/psutil/__init__.py", line 1113, in memory_maps
it = self._proc.memory_maps()
File "/Users/travis/build/giampaolo/psutil/psutil/_psosx.py", line 330, in wrapper
return fun(self, *args, **kwargs)
File "/Users/travis/build/giampaolo/psutil/psutil/_psosx.py", line 571, in memory_maps
return cext.proc_memory_maps(self.pid)
OSError: [Errno 22] Invalid argument
|
OSError
|
def threads(self):
rawlist = cext.proc_threads(self.pid)
retlist = []
for thread_id, utime, stime in rawlist:
ntuple = _common.pthread(thread_id, utime, stime)
retlist.append(ntuple)
return retlist
|
def threads(self):
with catch_zombie(self):
rawlist = cext.proc_threads(self.pid)
retlist = []
for thread_id, utime, stime in rawlist:
ntuple = _common.pthread(thread_id, utime, stime)
retlist.append(ntuple)
return retlist
|
https://github.com/giampaolo/psutil/issues/1209
|
======================================================================
ERROR: psutil.tests.test_process.TestProcess.test_zombie_process
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Users/travis/build/giampaolo/psutil/psutil/tests/test_process.py", line 1286, in test_zombie_process
zproc.as_dict()
File "/Users/travis/build/giampaolo/psutil/psutil/__init__.py", line 528, in as_dict
ret = meth()
File "/Users/travis/build/giampaolo/psutil/psutil/__init__.py", line 1113, in memory_maps
it = self._proc.memory_maps()
File "/Users/travis/build/giampaolo/psutil/psutil/_psosx.py", line 330, in wrapper
return fun(self, *args, **kwargs)
File "/Users/travis/build/giampaolo/psutil/psutil/_psosx.py", line 571, in memory_maps
return cext.proc_memory_maps(self.pid)
OSError: [Errno 22] Invalid argument
|
OSError
|
def memory_maps(self):
return cext.proc_memory_maps(self.pid)
|
def memory_maps(self):
with catch_zombie(self):
return cext.proc_memory_maps(self.pid)
|
https://github.com/giampaolo/psutil/issues/1209
|
======================================================================
ERROR: psutil.tests.test_process.TestProcess.test_zombie_process
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Users/travis/build/giampaolo/psutil/psutil/tests/test_process.py", line 1286, in test_zombie_process
zproc.as_dict()
File "/Users/travis/build/giampaolo/psutil/psutil/__init__.py", line 528, in as_dict
ret = meth()
File "/Users/travis/build/giampaolo/psutil/psutil/__init__.py", line 1113, in memory_maps
it = self._proc.memory_maps()
File "/Users/travis/build/giampaolo/psutil/psutil/_psosx.py", line 330, in wrapper
return fun(self, *args, **kwargs)
File "/Users/travis/build/giampaolo/psutil/psutil/_psosx.py", line 571, in memory_maps
return cext.proc_memory_maps(self.pid)
OSError: [Errno 22] Invalid argument
|
OSError
|
def exe(self):
# Dual implementation, see:
# https://github.com/giampaolo/psutil/pull/1413
if not IS_WIN_XP:
exe = cext.proc_exe(self.pid)
else:
if self.pid in (0, 4):
# https://github.com/giampaolo/psutil/issues/414
# https://github.com/giampaolo/psutil/issues/528
raise AccessDenied(self.pid, self._name)
exe = cext.proc_exe(self.pid)
exe = convert_dos_path(exe)
return py2_strencode(exe)
|
def exe(self):
# Note: os.path.exists(path) may return False even if the file
# is there, see:
# http://stackoverflow.com/questions/3112546/os-path-exists-lies
# see https://github.com/giampaolo/psutil/issues/414
# see https://github.com/giampaolo/psutil/issues/528
if self.pid in (0, 4):
raise AccessDenied(self.pid, self._name)
exe = cext.proc_exe(self.pid)
exe = convert_dos_path(exe)
return py2_strencode(exe)
|
https://github.com/giampaolo/psutil/issues/1394
|
Python 2.7.13 (v2.7.13:a06454b1afa1, Dec 17 2016, 20:53:40) [MSC v.1500 64 bit (AMD64)] on win32
Type "help", "copyright", "credits" or "license" for more information.
import psutil
[p for p in psutil.process_iter()]
[psutil.Process(pid=0, name='System Idle Process', started='2018-11-06 04:18:12'), psutil.Process(pid=4, name='System', started='2018-11-06 04:18:12'), Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Python27\lib\site-packages\psutil\__init__.py", line 395, in __str__
info["name"] = self.name()
File "C:\Python27\lib\site-packages\psutil\__init__.py", line 609, in name
name = self._proc.name()
File "C:\Python27\lib\site-packages\psutil\_pswindows.py", line 635, in wrapper
return fun(self, *args, **kwargs)
File "C:\Python27\lib\site-packages\psutil\_pswindows.py", line 687, in name
return py2_strencode(os.path.basename(self.exe()))
File "C:\Python27\lib\site-packages\psutil\_pswindows.py", line 635, in wrapper
return fun(self, *args, **kwargs)
File "C:\Python27\lib\site-packages\psutil\_pswindows.py", line 701, in exe
return py2_strencode(convert_dos_path(cext.proc_exe(self.pid)))
WindowsError: [Error 0] The operation completed successfully
|
WindowsError
|
def exe(self):
# Note: os.path.exists(path) may return False even if the file
# is there, see:
# http://stackoverflow.com/questions/3112546/os-path-exists-lies
# see https://github.com/giampaolo/psutil/issues/414
# see https://github.com/giampaolo/psutil/issues/528
if self.pid in (0, 4):
raise AccessDenied(self.pid, self._name)
exe = cext.proc_exe(self.pid)
exe = convert_dos_path(exe)
return py2_strencode(exe)
|
def exe(self):
# Note: os.path.exists(path) may return False even if the file
# is there, see:
# http://stackoverflow.com/questions/3112546/os-path-exists-lies
# see https://github.com/giampaolo/psutil/issues/414
# see https://github.com/giampaolo/psutil/issues/528
if self.pid in (0, 4):
raise AccessDenied(self.pid, self._name)
return py2_strencode(convert_dos_path(cext.proc_exe(self.pid)))
|
https://github.com/giampaolo/psutil/issues/1394
|
Python 2.7.13 (v2.7.13:a06454b1afa1, Dec 17 2016, 20:53:40) [MSC v.1500 64 bit (AMD64)] on win32
Type "help", "copyright", "credits" or "license" for more information.
import psutil
[p for p in psutil.process_iter()]
[psutil.Process(pid=0, name='System Idle Process', started='2018-11-06 04:18:12'), psutil.Process(pid=4, name='System', started='2018-11-06 04:18:12'), Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Python27\lib\site-packages\psutil\__init__.py", line 395, in __str__
info["name"] = self.name()
File "C:\Python27\lib\site-packages\psutil\__init__.py", line 609, in name
name = self._proc.name()
File "C:\Python27\lib\site-packages\psutil\_pswindows.py", line 635, in wrapper
return fun(self, *args, **kwargs)
File "C:\Python27\lib\site-packages\psutil\_pswindows.py", line 687, in name
return py2_strencode(os.path.basename(self.exe()))
File "C:\Python27\lib\site-packages\psutil\_pswindows.py", line 635, in wrapper
return fun(self, *args, **kwargs)
File "C:\Python27\lib\site-packages\psutil\_pswindows.py", line 701, in exe
return py2_strencode(convert_dos_path(cext.proc_exe(self.pid)))
WindowsError: [Error 0] The operation completed successfully
|
WindowsError
|
def disk_io_counters(perdisk=False):
"""Return disk I/O statistics for every disk installed on the
system as a dict of raw tuples.
"""
def read_procfs():
# OK, this is a bit confusing. The format of /proc/diskstats can
# have 3 variations.
# On Linux 2.4 each line has always 15 fields, e.g.:
# "3 0 8 hda 8 8 8 8 8 8 8 8 8 8 8"
# On Linux 2.6+ each line *usually* has 14 fields, and the disk
# name is in another position, like this:
# "3 0 hda 8 8 8 8 8 8 8 8 8 8 8"
# ...unless (Linux 2.6) the line refers to a partition instead
# of a disk, in which case the line has less fields (7):
# "3 1 hda1 8 8 8 8"
# 4.18+ has 4 fields added:
# "3 0 hda 8 8 8 8 8 8 8 8 8 8 8 0 0 0 0"
# See:
# https://www.kernel.org/doc/Documentation/iostats.txt
# https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats
with open_text("%s/diskstats" % get_procfs_path()) as f:
lines = f.readlines()
for line in lines:
fields = line.split()
flen = len(fields)
if flen == 15:
# Linux 2.4
name = fields[3]
reads = int(fields[2])
(
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields[4:14])
elif flen == 14 or flen == 18:
# Linux 2.6+, line referring to a disk
name = fields[2]
(
reads,
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields[3:14])
elif flen == 7:
# Linux 2.6+, line referring to a partition
name = fields[2]
reads, rbytes, writes, wbytes = map(int, fields[3:])
rtime = wtime = reads_merged = writes_merged = busy_time = 0
else:
raise ValueError("not sure how to interpret line %r" % line)
yield (
name,
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
)
def read_sysfs():
for block in os.listdir("/sys/block"):
for root, _, files in os.walk(os.path.join("/sys/block", block)):
if "stat" not in files:
continue
with open_text(os.path.join(root, "stat")) as f:
fields = f.read().strip().split()
name = os.path.basename(root)
(
reads,
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields)
yield (
name,
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
)
if os.path.exists("%s/diskstats" % get_procfs_path()):
gen = read_procfs()
elif os.path.exists("/sys/block"):
gen = read_sysfs()
else:
raise NotImplementedError(
"%s/diskstats nor /sys/block filesystem are available on this "
"system" % get_procfs_path()
)
retdict = {}
for entry in gen:
(
name,
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
) = entry
if not perdisk and not is_storage_device(name):
# perdisk=False means we want to calculate totals so we skip
# partitions (e.g. 'sda1', 'nvme0n1p1') and only include
# base disk devices (e.g. 'sda', 'nvme0n1'). Base disks
# include a total of all their partitions + some extra size
# of their own:
# $ cat /proc/diskstats
# 259 0 sda 10485760 ...
# 259 1 sda1 5186039 ...
# 259 1 sda2 5082039 ...
# See:
# https://github.com/giampaolo/psutil/pull/1313
continue
rbytes *= DISK_SECTOR_SIZE
wbytes *= DISK_SECTOR_SIZE
retdict[name] = (
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
)
return retdict
|
def disk_io_counters(perdisk=False):
"""Return disk I/O statistics for every disk installed on the
system as a dict of raw tuples.
"""
def read_procfs():
# OK, this is a bit confusing. The format of /proc/diskstats can
# have 3 variations.
# On Linux 2.4 each line has always 15 fields, e.g.:
# "3 0 8 hda 8 8 8 8 8 8 8 8 8 8 8"
# On Linux 2.6+ each line *usually* has 14 fields, and the disk
# name is in another position, like this:
# "3 0 hda 8 8 8 8 8 8 8 8 8 8 8"
# ...unless (Linux 2.6) the line refers to a partition instead
# of a disk, in which case the line has less fields (7):
# "3 1 hda1 8 8 8 8"
# See:
# https://www.kernel.org/doc/Documentation/iostats.txt
# https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats
with open_text("%s/diskstats" % get_procfs_path()) as f:
lines = f.readlines()
for line in lines:
fields = line.split()
flen = len(fields)
if flen == 15:
# Linux 2.4
name = fields[3]
reads = int(fields[2])
(
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields[4:14])
elif flen == 14:
# Linux 2.6+, line referring to a disk
name = fields[2]
(
reads,
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields[3:14])
elif flen == 7:
# Linux 2.6+, line referring to a partition
name = fields[2]
reads, rbytes, writes, wbytes = map(int, fields[3:])
rtime = wtime = reads_merged = writes_merged = busy_time = 0
else:
raise ValueError("not sure how to interpret line %r" % line)
yield (
name,
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
)
def read_sysfs():
for block in os.listdir("/sys/block"):
for root, _, files in os.walk(os.path.join("/sys/block", block)):
if "stat" not in files:
continue
with open_text(os.path.join(root, "stat")) as f:
fields = f.read().strip().split()
name = os.path.basename(root)
(
reads,
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields)
yield (
name,
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
)
if os.path.exists("%s/diskstats" % get_procfs_path()):
gen = read_procfs()
elif os.path.exists("/sys/block"):
gen = read_sysfs()
else:
raise NotImplementedError(
"%s/diskstats nor /sys/block filesystem are available on this "
"system" % get_procfs_path()
)
retdict = {}
for entry in gen:
(
name,
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
) = entry
if not perdisk and not is_storage_device(name):
# perdisk=False means we want to calculate totals so we skip
# partitions (e.g. 'sda1', 'nvme0n1p1') and only include
# base disk devices (e.g. 'sda', 'nvme0n1'). Base disks
# include a total of all their partitions + some extra size
# of their own:
# $ cat /proc/diskstats
# 259 0 sda 10485760 ...
# 259 1 sda1 5186039 ...
# 259 1 sda2 5082039 ...
# See:
# https://github.com/giampaolo/psutil/pull/1313
continue
rbytes *= DISK_SECTOR_SIZE
wbytes *= DISK_SECTOR_SIZE
retdict[name] = (
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
)
return retdict
|
https://github.com/giampaolo/psutil/issues/1354
|
(psutil_venv) -bash-4.2$ python
Python 3.6.2 (default, Apr 24 2018, 04:27:15)
[GCC 4.8.5 20150623 (Red Hat 4.8.5-16)] on linux
Type "help", "copyright", "credits" or "license" for more information.
import psutil
psutil.__version__
'5.4.7'
psutil.disk_io_counters()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/9ld/psutil_venv/lib/python3.6/site-packages/psutil/__init__.py", line 2017, in disk_io_counters
rawdict = _psplatform.disk_io_counters(**kwargs)
File "/home/9ld/psutil_venv/lib/python3.6/site-packages/psutil/_pslinux.py", line 1117, in disk_io_counters
for entry in gen:
File "/home/9ld/psutil_venv/lib/python3.6/site-packages/psutil/_pslinux.py", line 1090, in read_procfs
raise ValueError("not sure how to interpret line %r" % line)
ValueError: not sure how to interpret line ' 8 16 sdb 147 0 9545 368 3 0 4096 17 0 352 385 0 0 0 0\n'
|
ValueError
|
def read_procfs():
# OK, this is a bit confusing. The format of /proc/diskstats can
# have 3 variations.
# On Linux 2.4 each line has always 15 fields, e.g.:
# "3 0 8 hda 8 8 8 8 8 8 8 8 8 8 8"
# On Linux 2.6+ each line *usually* has 14 fields, and the disk
# name is in another position, like this:
# "3 0 hda 8 8 8 8 8 8 8 8 8 8 8"
# ...unless (Linux 2.6) the line refers to a partition instead
# of a disk, in which case the line has less fields (7):
# "3 1 hda1 8 8 8 8"
# 4.18+ has 4 fields added:
# "3 0 hda 8 8 8 8 8 8 8 8 8 8 8 0 0 0 0"
# See:
# https://www.kernel.org/doc/Documentation/iostats.txt
# https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats
with open_text("%s/diskstats" % get_procfs_path()) as f:
lines = f.readlines()
for line in lines:
fields = line.split()
flen = len(fields)
if flen == 15:
# Linux 2.4
name = fields[3]
reads = int(fields[2])
(
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields[4:14])
elif flen == 14 or flen == 18:
# Linux 2.6+, line referring to a disk
name = fields[2]
(
reads,
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields[3:14])
elif flen == 7:
# Linux 2.6+, line referring to a partition
name = fields[2]
reads, rbytes, writes, wbytes = map(int, fields[3:])
rtime = wtime = reads_merged = writes_merged = busy_time = 0
else:
raise ValueError("not sure how to interpret line %r" % line)
yield (
name,
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
)
|
def read_procfs():
# OK, this is a bit confusing. The format of /proc/diskstats can
# have 3 variations.
# On Linux 2.4 each line has always 15 fields, e.g.:
# "3 0 8 hda 8 8 8 8 8 8 8 8 8 8 8"
# On Linux 2.6+ each line *usually* has 14 fields, and the disk
# name is in another position, like this:
# "3 0 hda 8 8 8 8 8 8 8 8 8 8 8"
# ...unless (Linux 2.6) the line refers to a partition instead
# of a disk, in which case the line has less fields (7):
# "3 1 hda1 8 8 8 8"
# See:
# https://www.kernel.org/doc/Documentation/iostats.txt
# https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats
with open_text("%s/diskstats" % get_procfs_path()) as f:
lines = f.readlines()
for line in lines:
fields = line.split()
flen = len(fields)
if flen == 15:
# Linux 2.4
name = fields[3]
reads = int(fields[2])
(
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields[4:14])
elif flen == 14:
# Linux 2.6+, line referring to a disk
name = fields[2]
(
reads,
reads_merged,
rbytes,
rtime,
writes,
writes_merged,
wbytes,
wtime,
_,
busy_time,
_,
) = map(int, fields[3:14])
elif flen == 7:
# Linux 2.6+, line referring to a partition
name = fields[2]
reads, rbytes, writes, wbytes = map(int, fields[3:])
rtime = wtime = reads_merged = writes_merged = busy_time = 0
else:
raise ValueError("not sure how to interpret line %r" % line)
yield (
name,
reads,
writes,
rbytes,
wbytes,
rtime,
wtime,
reads_merged,
writes_merged,
busy_time,
)
|
https://github.com/giampaolo/psutil/issues/1354
|
(psutil_venv) -bash-4.2$ python
Python 3.6.2 (default, Apr 24 2018, 04:27:15)
[GCC 4.8.5 20150623 (Red Hat 4.8.5-16)] on linux
Type "help", "copyright", "credits" or "license" for more information.
import psutil
psutil.__version__
'5.4.7'
psutil.disk_io_counters()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/9ld/psutil_venv/lib/python3.6/site-packages/psutil/__init__.py", line 2017, in disk_io_counters
rawdict = _psplatform.disk_io_counters(**kwargs)
File "/home/9ld/psutil_venv/lib/python3.6/site-packages/psutil/_pslinux.py", line 1117, in disk_io_counters
for entry in gen:
File "/home/9ld/psutil_venv/lib/python3.6/site-packages/psutil/_pslinux.py", line 1090, in read_procfs
raise ValueError("not sure how to interpret line %r" % line)
ValueError: not sure how to interpret line ' 8 16 sdb 147 0 9545 368 3 0 4096 17 0 352 385 0 0 0 0\n'
|
ValueError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.