code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
"""
Implementation of interpreter-level 'sys' routines.
"""
from pypy.interpreter.error import OperationError
import sys
# ____________________________________________________________
def setbuiltinmodule(w_module, name):
    """ put a module into the modules builtin_modules dicts """
    # NOTE(review): 'builtin_modules', 'space' and 'w_modules' are not
    # defined in this module -- presumably injected globals; verify callers.
    if builtin_modules[name] is None:
        builtin_modules[name] = space.unwrap(w_module)
    else:
        # re-registering the same module object is fine; anything else is a bug
        assert builtin_modules[name] is space.unwrap(w_module), (
            "trying to change the builtin-in module %r" % (name,))
    space.setitem(w_modules, space.wrap(name), w_module)
def _getframe(space, w_depth=0):
    """Return a frame object from the call stack. If optional integer depth is
    given, return the frame object that many calls below the top of the stack.
    If that is deeper than the call stack, ValueError is raised. The default
    for depth is zero, returning the frame at the top of the call stack.
    This function should be used for internal and specialized
    purposes only."""
    depth = space.int_w(w_depth)
    ec = space.getexecutioncontext()
    try:
        frame = ec.framestack.top(depth)
    except ValueError:
        # top() rejects negative indices
        raise OperationError(space.w_ValueError,
                             space.wrap("frame index must not be negative"))
    except IndexError:
        # fewer than 'depth'+1 frames on the stack
        raise OperationError(space.w_ValueError,
                             space.wrap("call stack is not deep enough"))
    return space.wrap(frame)
# directly from the C code in ceval.c, might be moved somewhere else.
def setrecursionlimit(space, w_new_limit):
    """Set the maximum depth of the Python interpreter stack to n. This
    limit prevents infinite recursion from causing an overflow of the C
    stack and crashing Python. The highest possible limit is platform
    dependent."""
    limit = space.int_w(w_new_limit)
    if limit <= 0:
        raise OperationError(space.w_ValueError,
                             space.wrap("recursion limit must be positive"))
    # stored on the interp-level sys module object rather than in a
    # module-level global: we need to do it without writing globals.
    space.sys.recursionlimit = limit
def getrecursionlimit(space):
    """Return the current value of the recursion limit, the maximum depth
    of the Python interpreter stack. This limit prevents infinite
    recursion from causing an overflow of the C stack and crashing Python.
    """
    limit = space.sys.recursionlimit
    return space.wrap(limit)
def setcheckinterval(space, w_interval):
    """Tell the Python interpreter to check for asynchronous events every
    n instructions. This also affects how often thread switches occur."""
    interval = space.int_w(w_interval)
    space.sys.checkinterval = interval
    # reset the countdown so the new interval takes effect immediately
    space.getexecutioncontext().ticker = 0
def getcheckinterval(space):
    """Return the current check interval; see setcheckinterval()."""
    interval = space.sys.checkinterval
    return space.wrap(interval)
def exc_info(space):
    """Return the (type, value, traceback) of the most recent exception
    caught by an except clause in the current stack frame or in an older stack
    frame."""
    operror = space.getexecutioncontext().sys_exc_info()
    if operror is None:
        w_none = space.w_None
        return space.newtuple([w_none, w_none, w_none])
    return space.newtuple([operror.w_type,
                           operror.w_value,
                           space.wrap(operror.application_traceback)])
def exc_clear(space):
    """Clear global information on the current exception. Subsequent calls
    to exc_info() will return (None,None,None) until another exception is
    raised and caught in the current thread or the execution stack returns to a
    frame where another exception is being handled."""
    operror = space.getexecutioncontext().sys_exc_info()
    if operror is None:
        return
    operror.clear(space)
def pypy_getudir(space):
    """NOT_RPYTHON"""
    # imported lazily: udir is a testing/tooling facility
    from pypy.tool.udir import udir
    path = str(udir)
    return space.wrap(path)
## def getrefcount(space, w_obj):
## """getrefcount(object) -> integer
## Return the reference count of object. The count returned is generally
## one higher than you might expect, because it includes the (temporary)
## reference as an argument to getrefcount().
## """
## # From the results i get when using this i need to apply a fudge
## # value of 6 to get results comparable to cpythons. /Arre
## return space.wrap(sys.getrefcount(w_obj) - 6)
def settrace(space, w_func):
    """Set the global debug tracing function. It will be called on each
    function call. See the debugger chapter in the library manual."""
    ec = space.getexecutioncontext()
    ec.settrace(w_func)
def setprofile(space, w_func):
    """Set the profiling function. It will be called on each function call
    and return. See the profiler chapter in the library manual."""
    ec = space.getexecutioncontext()
    ec.setprofile(w_func)
def call_tracing(space, w_func, w_args):
    """Call func(*args), while tracing is enabled. The tracing state is
    saved, and restored afterwards. This is intended to be called from
    a debugger from a checkpoint, to recursively debug some other code."""
    ec = space.getexecutioncontext()
    return ec.call_tracing(w_func, w_args)
| Python |
"""
Implementation of interpreter-level 'sys' routines.
"""
import pypy
from pypy.interpreter.error import OperationError
from pypy.interpreter.gateway import ObjSpace
import sys, os, stat, errno
# ____________________________________________________________
#
class State:
    """Per-space mutable state backing the app-level 'sys' module."""

    def __init__(self, space):
        self.space = space
        self.w_modules = space.newdict()
        self.w_warnoptions = space.newlist([])
        self.w_argv = space.newlist([])
        self.setinitialpath(space)

    def setinitialpath(self, space):
        # Initialize the default path from the location of the pypy package
        pypydir = os.path.dirname(os.path.abspath(pypy.__file__))
        srcdir = os.path.dirname(pypydir)
        initial = getinitialpath(srcdir)
        self.w_path = space.newlist([space.wrap(entry) for entry in initial])
def checkdir(path):
    """Raise OSError(ENOTDIR) unless 'path' names a directory."""
    mode = os.stat(path).st_mode
    if not stat.S_ISDIR(mode):
        raise OSError(errno.ENOTDIR, path)
def getinitialpath(srcdir):
    """Build the initial sys.path entries for a PyPy source checkout.

    'srcdir' is the path of the "dist" directory of a PyPy checkout.
    Raises OSError if an expected library directory is missing.
    """
    from pypy.module.sys.version import CPYTHON_VERSION
    from pypy.rlib import ros
    dirname = '%d.%d.%d' % (CPYTHON_VERSION[0],
                            CPYTHON_VERSION[1],
                            CPYTHON_VERSION[2])
    lib_python = os.path.join(srcdir, 'lib-python')
    python_std_lib = os.path.join(lib_python, dirname)
    checkdir(python_std_lib)
    python_std_lib_modified = os.path.join(lib_python, 'modified-' + dirname)
    checkdir(python_std_lib_modified)
    pypy_lib = os.path.join(os.path.join(srcdir, 'pypy'), 'lib')
    checkdir(pypy_lib)

    importlist = ['']
    # honour $PYTHONPATH; ros.getenv is the RPython-friendly getenv
    pythonpath = ros.getenv('PYTHONPATH')
    if pythonpath:
        for entry in pythonpath.split(os.pathsep):
            if entry:
                importlist.append(entry)
    # modified stdlib shadows the plain one, pypy/lib shadows both
    importlist.append(pypy_lib)
    importlist.append(python_std_lib_modified)
    importlist.append(python_std_lib)
    return importlist
def pypy_initial_path(space, srcdir):
    """App-level sys.pypy_initial_path(srcdir): the initial path as a wrapped
    list, or None if srcdir does not look like a valid checkout."""
    try:
        path = getinitialpath(srcdir)
    except OSError:
        return space.w_None
    return space.newlist([space.wrap(p) for p in path])
pypy_initial_path.unwrap_spec = [ObjSpace, str]
def get(space):
    """Return the per-space State instance (created once, then cached)."""
    return space.fromcache(State)
class IOState:
    """Builds the app-level stdin/stdout/stderr as app-level file objects.

    Uses the app-level file.fdopen so sys.std* wrap file descriptors
    0, 1 and 2; the third fdopen argument is the buffering size.
    """
    def __init__(self, space):
        self.space = space
        w_fdopen = space.getattr(space.builtin.get('file'),
                                 space.wrap("fdopen"))
        # stdin: fd 0, mode "r", bufsize 1
        self.w_stdin = space.call_function(
            w_fdopen, space.wrap(0), space.wrap("r"),
            space.wrap(1))
        space.setattr(self.w_stdin, space.wrap("_name"),
                      space.wrap("<stdin>"))
        # stdout: fd 1, mode "w", bufsize 1
        self.w_stdout = space.call_function(
            w_fdopen, space.wrap(1), space.wrap("w"),
            space.wrap(1))
        space.setattr(self.w_stdout, space.wrap("_name"),
                      space.wrap("<stdout>"))
        # stderr: fd 2, mode "w", bufsize 0
        self.w_stderr = space.call_function(
            w_fdopen, space.wrap(2), space.wrap("w"),
            space.wrap(0))
        space.setattr(self.w_stderr, space.wrap("_name"),
                      space.wrap("<stderr>"))
def getio(space):
    """Return the per-space IOState instance (created once, then cached)."""
    return space.fromcache(IOState)
def _pypy_getudir(space):
    """NOT_RPYTHON"""
    from pypy.tool.udir import udir
    return space.wrap(str(udir))
# tell the annotator to ignore this non-RPython function entirely
_pypy_getudir._annspecialcase_ = "override:ignore"
# we need the indirection because this function will live in a dictionary with other
# RPYTHON functions and share call sites with them. Better it not be a special-case
# directly.
def pypy_getudir(space):
    """Return the path of the per-session usession directory (tests only)."""
    return _pypy_getudir(space)
| Python |
from pypy.interpreter.pyopcode import print_item_to, print_newline_to, sys_stdout
def displayhook(space, w_obj):
    """Print an object to sys.stdout and also save it in __builtin__._"""
    # like CPython, do nothing at all when the result is None
    if not space.is_w(w_obj, space.w_None):
        space.setitem(space.builtin.w_dict, space.wrap('_'), w_obj)
        # NB. this is slightly more complicated in CPython,
        # see e.g. the difference with  >>> print 5,; 8
        print_item_to(space, space.repr(w_obj), sys_stdout(space))
        print_newline_to(space, sys_stdout(space))
__displayhook__ = displayhook  # this is exactly like in CPython
| Python |
"""
Version numbers exposed by PyPy through the 'sys' module.
"""
import os
# version of CPython whose behaviour this PyPy emulates
CPYTHON_VERSION = (2, 4, 1, "alpha", 42)
CPYTHON_API_VERSION = 1012
PYPY_VERSION = (1, 0, 0, "alpha", '?')
# the last item is replaced by the svn revision ^^^
# slice off the "$HeadURL: " keyword decoration and the file-name tail
SVN_URL = "$HeadURL: https://codespeak.net/svn/pypy/branch/flex-backend/pypy/module/sys/version.py $"[10:-28]
# slice off the "$LastChangedRevision: " keyword decoration
REV = "$LastChangedRevision: 41081 $"[22:-2]
import pypy
pypydir = os.path.dirname(os.path.abspath(pypy.__file__))
del pypy
# ____________________________________________________________
def get_api_version(space):
    """Return the wrapped sys.api_version value."""
    api_version = CPYTHON_API_VERSION
    return space.wrap(api_version)
def get_version_info(space):
    """Return the wrapped sys.version_info tuple."""
    version_info = CPYTHON_VERSION
    return space.wrap(version_info)
def get_version(space):
    """Return the wrapped sys.version string."""
    text = "%d.%d.%d (pypy %d.%d.%d build %d)" % (
        CPYTHON_VERSION[0], CPYTHON_VERSION[1], CPYTHON_VERSION[2],
        PYPY_VERSION[0], PYPY_VERSION[1], PYPY_VERSION[2],
        svn_revision())
    return space.wrap(text)
def get_hexversion(space):
    """Return the wrapped sys.hexversion integer."""
    hexver = tuple2hex(CPYTHON_VERSION)
    return space.wrap(hexver)
def get_pypy_version_info(space):
    """Return sys.pypy_version_info, with the svn revision as the last item."""
    ver = PYPY_VERSION[:-1] + (svn_revision(),)
    return space.wrap(ver)
def get_svn_url(space):
    """Return the wrapped (svn URL, revision) pair for sys.pypy_svn_url."""
    url_and_rev = (SVN_URL, svn_revision())
    return space.wrap(url_and_rev)
def tuple2hex(ver):
    """Encode a (major, minor, micro, releaselevel, serial) 5-tuple as a
    CPython-style hexversion integer (cf. PY_VERSION_HEX)."""
    level2hex = {'alpha': 0xA,
                 'beta': 0xB,
                 'candidate': 0xC,
                 'final': 0xF,
                 }
    serial = ver[4]
    # the serial field only has room for one decimal digit
    if serial < 0 or serial > 9:
        serial = 0
    return (ver[0] << 24 |
            ver[1] << 16 |
            ver[2] << 8 |
            level2hex[ver[3]] << 4 |
            serial)
def svn_revision():
    "Return the last-changed svn revision number."
    # NB. we hack the number directly out of the .svn directory to avoid
    # to depend on an external 'svn' executable in the path.
    rev = int(REV)   # fallback: the $LastChangedRevision$ keyword value
    try:
        f = open(os.path.join(pypydir, '.svn', 'format'), 'r')
        format = int(f.readline().strip())
        f.close()
        if format <= 6: # Old XML-format
            # scan entries for the first committed-rev="NNN" attribute
            f = open(os.path.join(pypydir, '.svn', 'entries'), 'r')
            for line in f:
                line = line.strip()
                if line.startswith('committed-rev="') and line.endswith('"'):
                    rev = int(line[15:-1])
                    break
            f.close()
        else: # New format
            # entries: a format line, then form-feed-separated records of
            # newline-separated fields (name, kind, revision, ...)
            f = open(os.path.join(pypydir, '.svn', 'entries'), 'r')
            format = int(f.readline().strip())
            for entry in f.read().split('\f'):
                lines = entry.split('\n')
                name, kind, revstr = lines[:3]
                if name == '' and kind == 'dir': # The current directory
                    rev = int(revstr)
                    break
            f.close()
    except (IOError, OSError):
        # no .svn metadata available: keep the keyword-based fallback
        pass
    return rev
| Python |
from pypy.interpreter.mixedmodule import MixedModule
from pypy.interpreter.error import OperationError
class Module(MixedModule):
"""Sys Builtin Module. """
def __init__(self, space, w_name):
"""NOT_RPYTHON""" # because parent __init__ isn't
super(Module, self).__init__(space, w_name)
self.checkinterval = 100
self.recursionlimit = 100
interpleveldefs = {
'__name__' : '(space.wrap("sys"))',
'__doc__' : '(space.wrap("PyPy sys module"))',
'platform' : 'space.wrap(sys.platform)',
'maxint' : 'space.wrap(sys.maxint)',
'byteorder' : 'space.wrap(sys.byteorder)',
'exec_prefix' : 'space.wrap(sys.exec_prefix)',
'prefix' : 'space.wrap(sys.prefix)',
'maxunicode' : 'space.wrap(sys.maxunicode)',
'maxint' : 'space.wrap(sys.maxint)',
'stdin' : 'state.getio(space).w_stdin',
'__stdin__' : 'state.getio(space).w_stdin',
'stdout' : 'state.getio(space).w_stdout',
'__stdout__' : 'state.getio(space).w_stdout',
'stderr' : 'state.getio(space).w_stderr',
'__stderr__' : 'state.getio(space).w_stderr',
'pypy_objspaceclass' : 'space.wrap(repr(space))',
'path' : 'state.get(space).w_path',
'modules' : 'state.get(space).w_modules',
'argv' : 'state.get(space).w_argv',
'warnoptions' : 'state.get(space).w_warnoptions',
'builtin_module_names' : 'state.w_None',
'pypy_getudir' : 'state.pypy_getudir',
'pypy_initial_path' : 'state.pypy_initial_path',
'_getframe' : 'vm._getframe',
'setrecursionlimit' : 'vm.setrecursionlimit',
'getrecursionlimit' : 'vm.getrecursionlimit',
'setcheckinterval' : 'vm.setcheckinterval',
'getcheckinterval' : 'vm.getcheckinterval',
'exc_info' : 'vm.exc_info',
'exc_clear' : 'vm.exc_clear',
'settrace' : 'vm.settrace',
'setprofile' : 'vm.setprofile',
'call_tracing' : 'vm.call_tracing',
'executable' : 'space.wrap("py.py")',
'copyright' : 'space.wrap("MIT-License")',
'api_version' : 'version.get_api_version(space)',
'version_info' : 'version.get_version_info(space)',
'version' : 'version.get_version(space)',
'pypy_version_info' : 'version.get_pypy_version_info(space)',
'pypy_svn_url' : 'version.get_svn_url(space)',
'hexversion' : 'version.get_hexversion(space)',
'ps1' : 'space.wrap(">>>> ")',
'ps2' : 'space.wrap(".... ")',
'displayhook' : 'hook.displayhook',
'__displayhook__' : 'hook.__displayhook__',
'meta_path' : 'space.wrap([])',
'path_hooks' : 'space.wrap([])',
'path_importer_cache' : 'space.wrap({})',
#'subversion' : added in Python 2.5
}
appleveldefs = {
#'displayhook' : 'app.displayhook',
#'__displayhook__' : 'app.__displayhook__',
'excepthook' : 'app.excepthook',
'__excepthook__' : 'app.excepthook',
'exit' : 'app.exit',
'exitfunc' : 'app.exitfunc',
'pypy__exithandlers__' : 'app.pypy__exithandlers__', # internal
'getfilesystemencoding' : 'app.getfilesystemencoding',
'callstats' : 'app.callstats',
'getdefaultencoding' : 'app.getdefaultencoding',
'setdefaultencoding' : 'app.setdefaultencoding',
}
def setbuiltinmodule(self, w_module, name):
w_name = self.space.wrap(name)
w_modules = self.get('modules')
self.space.setitem(w_modules, w_name, w_module)
def getmodule(self, name):
space = self.space
w_modules = self.get('modules')
try:
return space.getitem(w_modules, space.wrap(name))
except OperationError, e:
if not e.match(space, space.w_KeyError):
raise
return None
def setmodule(self, w_module):
space = self.space
w_name = self.space.getattr(w_module, space.wrap('__name__'))
w_modules = self.get('modules')
self.space.setitem(w_modules, w_name, w_module)
def getdictvalue(self, space, w_attr):
""" specialize access to dynamic exc_* attributes. """
value = MixedModule.getdictvalue(self, space, w_attr)
if value is not None:
return value
attr = space.str_w(w_attr)
if attr == 'exc_type':
operror = space.getexecutioncontext().sys_exc_info()
if operror is None:
return space.w_None
else:
return operror.w_type
elif attr == 'exc_value':
operror = space.getexecutioncontext().sys_exc_info()
if operror is None:
return space.w_None
else:
return operror.w_value
elif attr == 'exc_traceback':
operror = space.getexecutioncontext().sys_exc_info()
if operror is None:
return space.w_None
else:
return space.wrap(operror.application_traceback)
return None
| Python |
"""
self cloning, automatic path configuration
copy this into any subdirectory of pypy from which scripts need
to be run, typically all of the test subdirs.
The idea is that any such script simply issues
import autopath
and this will make sure that the parent directory containing "pypy"
is in sys.path.
If you modify the master "autopath.py" version (in pypy/tool/autopath.py)
you can directly run it which will copy itself on all autopath.py files
it finds under the pypy root directory.
This module always provides these attributes:
pypydir pypy root directory path
this_dir directory where this autopath.py resides
"""
def __dirinfo(part):
    """ return (partdir, this_dir) and insert parent of partdir
    into sys.path. If the parent directories don't have the part
    an EnvironmentError is raised."""
    import sys, os
    try:
        head = this_dir = os.path.realpath(os.path.dirname(__file__))
    except NameError:
        # no __file__ (e.g. exec'd code): fall back to the script path
        head = this_dir = os.path.realpath(os.path.dirname(sys.argv[0]))
    # walk upwards until we find a directory component named 'part'
    while head:
        partdir = head
        head, tail = os.path.split(head)
        if tail == part:
            break
    else:
        raise EnvironmentError, "'%s' missing in '%r'" % (partdir, this_dir)
    pypy_root = os.path.join(head, '')
    # make sure the parent of the 'part' directory is first on sys.path
    try:
        sys.path.remove(head)
    except ValueError:
        pass
    sys.path.insert(0, head)
    # collect modules already imported from inside the pypy tree under a
    # bare name, so they can be aliased under their full dotted name
    munged = {}
    for name, mod in sys.modules.items():
        if '.' in name:
            continue
        fn = getattr(mod, '__file__', None)
        if not isinstance(fn, str):
            continue
        newname = os.path.splitext(os.path.basename(fn))[0]
        if not newname.startswith(part + '.'):
            continue
        path = os.path.join(os.path.dirname(os.path.realpath(fn)), '')
        if path.startswith(pypy_root) and newname != part:
            modpaths = os.path.normpath(path[len(pypy_root):]).split(os.sep)
            if newname != '__init__':
                modpaths.append(newname)
            modpath = '.'.join(modpaths)
            if modpath not in sys.modules:
                munged[modpath] = mod
    # register the aliases and hook them onto their parent packages
    for name, mod in munged.iteritems():
        if name not in sys.modules:
            sys.modules[name] = mod
        if '.' in name:
            prename = name[:name.rfind('.')]
            postname = name[len(prename)+1:]
            if prename not in sys.modules:
                __import__(prename)
            if not hasattr(sys.modules[prename], postname):
                setattr(sys.modules[prename], postname, mod)
    return partdir, this_dir
def __clone():
    """ clone master version of autopath.py into all subdirs """
    from os.path import join, walk
    if not this_dir.endswith(join('pypy','tool')):
        raise EnvironmentError("can only clone master version "
                               "'%s'" % join(pypydir, 'tool',_myname))

    def sync_walker(arg, dirname, fnames):
        # rewrite each autopath.py whose content differs from the master
        # NOTE(review): when syncing, the first handle is rebound without
        # being closed, and 'rwb+' is a non-standard mode string -- confirm
        if _myname in fnames:
            fn = join(dirname, _myname)
            f = open(fn, 'rwb+')
            try:
                if f.read() == arg:
                    print "checkok", fn
                else:
                    print "syncing", fn
                    f = open(fn, 'w')
                    f.write(arg)
            finally:
                f.close()
    s = open(join(pypydir, 'tool', _myname), 'rb').read()
    walk(pypydir, sync_walker, s)
# name under which copies of this file are cloned into subdirectories
_myname = 'autopath.py'

# set guaranteed attributes
pypydir, this_dir = __dirinfo('pypy')

# running the master copy directly re-clones it everywhere (see __clone)
if __name__ == '__main__':
    __clone()
| Python |
from pypy.interpreter.baseobjspace import Wrappable
from pypy.interpreter.typedef import GetSetProperty, TypeDef
from pypy.interpreter.typedef import interp_attrproperty, interp_attrproperty_w
from pypy.interpreter.gateway import interp2app, ObjSpace, W_Root
from pypy.interpreter.error import OperationError
from pypy.rlib.rarithmetic import intmask
import os
class tasklet(Wrappable):
    """Minimal app-level 'tasklet' stub (demo only)."""

    def __init__(self, space):
        self.space = space
        self.flags = 0
        self.state = None

    def descr_method__new__(space, w_subtype):
        # allocate through the space so app-level subclassing works
        instance = space.allocate_instance(tasklet, w_subtype)
        tasklet.__init__(instance, space)
        return space.wrap(instance)

    def w_demo(self):
        output("42")
# app-level type object for 'tasklet': exposes __new__ and the demo method
tasklet.typedef = TypeDef("tasklet",
    __new__ = interp2app(tasklet.descr_method__new__.im_func),
    demo = interp2app(tasklet.w_demo),
)
def output(stuff):
    """Write 'stuff' plus a newline straight to file descriptor 2 (stderr)."""
    line = stuff + '\n'
    os.write(2, line)
| Python |
class GreenletExit(Exception):
    """Raised inside a greenlet to make it exit cleanly."""
    pass
class GreenletError(Exception):
    """Raised for invalid greenlet operations."""
    pass
| Python |
"""
Coroutine implementation for application level on top
of the internal coroutines.
This is an extensible concept. Multiple implementations
of concurrency can exist together, if they follow the
basic concept of maintaining their own costate.
There is also some diversification possible by using
multiple costates for the same type. This leads to
disjoint switchable sets within the same type.
I'm not so sure to what extent the opposite is possible, too.
I.e., merging the costate of tasklets and greenlets would
allow them to be parents of each other. Needs a bit more
experience to decide where to set the limits.
"""
from pypy.interpreter.baseobjspace import Wrappable, UnpackValueError
from pypy.interpreter.argument import Arguments
from pypy.interpreter.typedef import GetSetProperty, TypeDef
from pypy.interpreter.typedef import interp_attrproperty, interp_attrproperty_w
from pypy.interpreter.gateway import interp2app, ObjSpace, W_Root
from pypy.interpreter.error import OperationError
from pypy.interpreter.function import StaticMethod
from pypy.module._stackless.stackless_flags import StacklessFlags
from pypy.module._stackless.interp_coroutine import Coroutine, BaseCoState, AbstractThunk
from pypy.rlib import rstack # for resume points
from pypy.tool import stdlib_opcode as pythonopcode
class _AppThunk(AbstractThunk):
    """Thunk that calls an app-level callable with packed arguments and
    leaves the result in the costate's w_tempval for w_switch() to pick up."""

    def __init__(self, space, costate, w_obj, args):
        self.space = space
        self.costate = costate
        # reject non-callables eagerly, at bind() time rather than switch time
        if not space.is_true(space.callable(w_obj)):
            raise OperationError(
                space.w_TypeError,
                space.mod(space.wrap('object %r is not callable'),
                          space.newtuple([w_obj])))
        self.w_func = w_obj
        self.args = args

    def call(self):
        costate = self.costate
        w_result = self.space.call_args(self.w_func, self.args)
        # resume point must directly follow the call it resumes (rstack)
        rstack.resume_point("appthunk", costate, returns=w_result)
        costate.w_tempval = w_result
class AppCoroutine(Coroutine): # XXX, StacklessFlags):
    """Application-level 'coroutine' object of the _stackless module.

    Wraps the interp-level Coroutine with an app-level interface and gives
    each coroutine its own execution subcontext (frame stack).
    """

    def __init__(self, space, state=None):
        self.space = space
        if state is None:
            state = AppCoroutine._get_state(space)
        Coroutine.__init__(self, state)
        self.flags = 0
        self.newsubctx()

    def newsubctx(self):
        # a private execution subcontext: own framestack etc.
        ec = self.space.getexecutioncontext()
        self.subctx = ec.Subcontext()

    def descr_method__new__(space, w_subtype):
        co = space.allocate_instance(AppCoroutine, w_subtype)
        AppCoroutine.__init__(co, space)
        return space.wrap(co)

    def _get_state(space):
        # per-space AppCoState, created lazily and cached
        return space.fromcache(AppCoState)
    _get_state = staticmethod(_get_state)

    def w_bind(self, w_func, __args__):
        """app-level bind(func, *args, **kwds): attach the callable to run."""
        space = self.space
        if self.frame is not None:
            raise OperationError(space.w_ValueError, space.wrap(
                "cannot bind a bound Coroutine"))
        state = self.costate
        thunk = _AppThunk(space, state, w_func, __args__)
        self.bind(thunk)

    def w_switch(self):
        """app-level switch(): transfer control to this coroutine."""
        space = self.space
        if self.frame is None:
            raise OperationError(space.w_ValueError, space.wrap(
                "cannot switch to an unbound Coroutine"))
        state = self.costate
        self.switch()
        rstack.resume_point("w_switch", state, space)
        # pick up the value the other coroutine left in w_tempval
        w_ret, state.w_tempval = state.w_tempval, space.w_None
        return w_ret

    def w_finished(self, w_excinfo):
        # default app-level 'finished' hook; meant to be overridden
        pass

    def finish(self, operror=None):
        """Called when the coroutine ends; reports exc info to 'finished'."""
        space = self.space
        if isinstance(operror, OperationError):
            w_exctype = operror.w_type
            w_excvalue = operror.w_value
            w_exctraceback = operror.application_traceback
            w_excinfo = space.newtuple([w_exctype, w_excvalue, w_exctraceback])
        else:
            w_N = space.w_None
            w_excinfo = space.newtuple([w_N, w_N, w_N])
        return space.call_method(space.wrap(self),'finished', w_excinfo)

    def hello(self):
        # entering this coroutine: install its subcontext
        ec = self.space.getexecutioncontext()
        self.subctx.enter(ec)

    def goodbye(self):
        # leaving this coroutine: save and detach its subcontext
        ec = self.space.getexecutioncontext()
        self.subctx.leave(ec)

    def w_kill(self):
        self.kill()

    def _userdel(self):
        # run the app-level __del__ machinery only once
        if self.get_is_zombie():
            return
        self.set_is_zombie(True)
        self.space.userdel(self.space.wrap(self))

    def w_getcurrent(space):
        return space.wrap(AppCoroutine._get_state(space).current)
    w_getcurrent = staticmethod(w_getcurrent)

    # pickling interface
    def descr__reduce__(self, space):
        # this is trying to be simplistic at the moment.
        # we neither allow to pickle main (which can become a mess
        # since it has some deep anchestor frames)
        # nor we allow to pickle the current coroutine.
        # rule: switch before pickling.
        # you cannot construct the tree that you are climbing.
        from pypy.interpreter.mixedmodule import MixedModule
        w_mod = space.getbuiltinmodule('_stackless')
        mod = space.interp_w(MixedModule, w_mod)
        w_mod2 = space.getbuiltinmodule('_pickle_support')
        mod2 = space.interp_w(MixedModule, w_mod2)   # NOTE(review): unused here
        w_new_inst = mod.get('coroutine')
        w = space.wrap
        nt = space.newtuple
        ec = self.space.getexecutioncontext()
        if self is self.costate.main:
            # main is reconstructed via the _return_main helper
            return nt([mod.get('_return_main'), nt([])])
        thunk = self.thunk
        if isinstance(thunk, _AppThunk):
            w_args, w_kwds = thunk.args.topacked()
            w_thunk = nt([thunk.w_func, w_args, w_kwds])
        else:
            w_thunk = space.w_None
        tup_base = [
            ]
        tup_state = [
            w(self.flags),
            self.subctx.getstate(space),
            w_thunk,
            w(self.parent),
            ]
        return nt([w_new_inst, nt(tup_base), nt(tup_state)])

    def descr__setstate__(self, space, w_args):
        """Restore (flags, subctx state, thunk, parent) from a pickle."""
        try:
            w_flags, w_state, w_thunk, w_parent = space.unpackiterable(w_args,
                                                             expected_length=4)
        except UnpackValueError, e:
            raise OperationError(space.w_ValueError, space.wrap(e.msg))
        self.flags = space.int_w(w_flags)
        self.parent = space.interp_w(AppCoroutine, w_parent, can_be_None=True)
        ec = self.space.getexecutioncontext()
        self.subctx.setstate(self.space, w_state)
        self.reconstruct_framechain()
        if space.is_w(w_thunk, space.w_None):
            self.thunk = None
        else:
            try:
                w_func, w_args, w_kwds = space.unpackiterable(w_thunk,
                                                             expected_length=3)
            except UnpackValueError, e:
                raise OperationError(space.w_ValueError, space.wrap(e.msg))
            args = Arguments.frompacked(space, w_args, w_kwds)
            self.bind(_AppThunk(space, self.costate, w_func, args))

    def reconstruct_framechain(self):
        """Rebuild the rstack resume-state chain for an unpickled coroutine.

        Mirrors, frame by frame, the resume points that would have been
        created by actually running bind()/switch(); the labels and argument
        lists must match the resume_point() calls in the interpreter exactly.
        """
        from pypy.interpreter.pyframe import PyFrame
        from pypy.rlib.rstack import resume_state_create
        if self.subctx.framestack.empty():
            self.frame = None
            return

        space = self.space
        ec = space.getexecutioncontext()
        costate = self.costate
        # now the big fun of recreating tiny things...
        bottom = resume_state_create(None, "yield_current_frame_to_caller_1")
        # ("coroutine__bind", state)
        _bind_frame = resume_state_create(bottom, "coroutine__bind", costate)
        # ("appthunk", costate, returns=w_result)
        appthunk_frame = resume_state_create(_bind_frame, "appthunk", costate)
        chain = appthunk_frame
        for frame in self.subctx.framestack.items:
            assert isinstance(frame, PyFrame)
            # ("execute_frame", self, executioncontext, returns=w_exitvalue)
            chain = resume_state_create(chain, "execute_frame", frame, ec)
            code = frame.pycode.co_code
            # ("dispatch", self, co_code, ec, returns=next_instr)
            chain = resume_state_create(chain, "dispatch", frame, code, ec)
            # ("handle_bytecode", self, co_code, ec, returns=next_instr)
            chain = resume_state_create(chain, "handle_bytecode", frame, code,
                                        ec)
            instr = frame.last_instr
            opcode = ord(code[instr])
            # a pickled coroutine can only be suspended inside a call
            assert opcode == pythonopcode.opmap['CALL_FUNCTION']
            # ("dispatch_call", self, co_code, next_instr, ec)
            chain = resume_state_create(chain, "dispatch_call", frame, code,
                                        instr+3, ec)
            instr += 1
            # 16-bit little-endian oparg of the CALL_FUNCTION opcode
            oparg = ord(code[instr]) | ord(code[instr + 1]) << 8
            if (oparg >> 8) & 0xff == 0:
                # Only positional arguments
                nargs = oparg & 0xff
                # case1: ("CALL_FUNCTION", f, nargs, returns=w_result)
                chain = resume_state_create(chain, 'CALL_FUNCTION', frame,
                                            nargs)
            else:
                # case2: ("call_function", f, returns=w_result)
                chain = resume_state_create(chain, 'call_function', frame)

        # ("w_switch", state, space)
        w_switch_frame = resume_state_create(chain, 'w_switch', costate, space)
        # ("coroutine_switch", state, returns=incoming_frame)
        switch_frame = resume_state_create(w_switch_frame, "coroutine_switch", costate)
        self.frame = switch_frame
# _mixin_ did not work
# manually copy the StacklessFlags methods onto AppCoroutine instead
for methname in StacklessFlags.__dict__:
    meth = getattr(StacklessFlags, methname)
    if hasattr(meth, 'im_func'):
        setattr(AppCoroutine, meth.__name__, meth.im_func)
del meth, methname
def w_get_is_zombie(space, self):
    """Getter for the app-level 'is_zombie' property."""
    return space.wrap(self.get_is_zombie())
AppCoroutine.w_get_is_zombie = w_get_is_zombie
def w_get_is_alive(space, self):
    """Getter for the app-level 'is_alive' property."""
    return space.wrap(self.is_alive())
AppCoroutine.w_get_is_alive = w_get_is_alive
def w_descr__framestack(space, self):
    """Getter for '_framestack': this coroutine's frames as a tuple."""
    assert isinstance(self, AppCoroutine)
    # main's framestack is set to None in AppCoState.post_install()
    if self.subctx.framestack is not None:
        items = [space.wrap(item) for item in self.subctx.framestack.items]
        return space.newtuple(items)
    else:
        return space.newtuple([])
def makeStaticMethod(module, classname, funcname):
    """Rebind attribute 'funcname' of app-level class 'classname' (from
    'module') as a staticmethod, by temporarily making the type mutable."""
    space = module.space
    w_klass = space.getattr(space.wrap(module), space.wrap(classname))
    # HACK HACK HACK
    # make the typeobject mutable for a while
    from pypy.objspace.std.typeobject import _HEAPTYPE, W_TypeObject
    assert isinstance(w_klass, W_TypeObject)
    old_flags = w_klass.__flags__
    w_klass.__flags__ |= _HEAPTYPE

    space.appexec([w_klass, space.wrap(funcname)], """
        (klass, funcname):
            func = getattr(klass, funcname)
            setattr(klass, funcname, staticmethod(func.im_func))
    """)
    # restore the original (immutable) flags
    w_klass.__flags__ = old_flags
def post_install(module):
    """Finish setting up the '_stackless' module once the space exists."""
    makeStaticMethod(module, 'coroutine', 'getcurrent')
    space = module.space
    AppCoroutine._get_state(space).post_install()
    # space.appexec("""() :
    # maybe use __spacebind__ for postprocessing
# app-level type object for 'coroutine', including the pickling protocol
AppCoroutine.typedef = TypeDef("coroutine",
    __new__ = interp2app(AppCoroutine.descr_method__new__.im_func),
    bind = interp2app(AppCoroutine.w_bind,
                      unwrap_spec=['self', W_Root, Arguments]),
    switch = interp2app(AppCoroutine.w_switch),
    kill = interp2app(AppCoroutine.w_kill),
    finished = interp2app(AppCoroutine.w_finished),
    is_alive = GetSetProperty(AppCoroutine.w_get_is_alive),
    is_zombie = GetSetProperty(AppCoroutine.w_get_is_zombie,
      doc=AppCoroutine.get_is_zombie.__doc__), #--- this flag is a bit obscure
      # and not useful (it's totally different from Coroutine.is_zombie(), too)
      # but lib/stackless.py uses it
    _framestack = GetSetProperty(w_descr__framestack),
    getcurrent = interp2app(AppCoroutine.w_getcurrent),
    __reduce__ = interp2app(AppCoroutine.descr__reduce__,
                            unwrap_spec=['self', ObjSpace]),
    __setstate__ = interp2app(AppCoroutine.descr__setstate__,
                              unwrap_spec=['self', ObjSpace, W_Root]),
    __module__ = '_stackless',
)
class AppCoState(BaseCoState):
    """Costate for app-level coroutines; one instance per object space."""

    def __init__(self, space):
        BaseCoState.__init__(self)
        # scratch slot used to pass values across switches (see w_switch)
        self.w_tempval = space.w_None
        self.space = space

    def post_install(self):
        # the already-running program becomes both 'current' and 'main'
        self.current = self.main = AppCoroutine(self.space, state=self)
        self.main.subctx.framestack = None    # wack
def return_main(space):
    """Unpickling helper: stands in for the main coroutine of this space."""
    return AppCoroutine._get_state(space).main
return_main.unwrap_spec = [ObjSpace]
| Python |
"""
Basic Concept:
--------------
All concurrency is expressed by some means of coroutines.
This is the lowest possible exposable interface.
A coroutine is a structure that controls a sequence
of continuations in time. It contains a frame object
that is a restartable stack chain. This frame object
is updated on every switch.
The frame can be None. Either the coroutine is not yet
bound, or it is the current coroutine of some costate.
See below. XXX rewrite a definition of these terms.
There is always a notation of a "current" and a "last"
coroutine. Current has no frame and represents the
running program. last is needed to keep track of the
coroutine that receives a new frame chain after a switch.
A costate object holds last and current.
There are different coroutine concepts existing in
parallel, like plain interp-level coroutines and
app-level structures like coroutines, greenlets and
tasklets.
Every concept is associated with its own costate object.
This allows for peaceful co-existence of many concepts.
The type of a switch is determined by the target's costate.
"""
from pypy.interpreter.baseobjspace import Wrappable
from pypy.rlib.rstack import yield_current_frame_to_caller, resume_point
from pypy.rlib.objectmodel import we_are_translated
try:
    from py.magic import greenlet
    main_greenlet = greenlet.getcurrent()
except (ImportError, ValueError):
    # untranslated and without greenlets: install a placeholder that fails
    # loudly only if the greenlet emulation is actually used
    def greenlet(*args, **kwargs):
        raise NotImplementedError("need either greenlets or a translated version of pypy")
class FrameChain(object):
    """Greenlet-based emulation of the primitive rstack 'frames' of RPython"""

    def __init__(self, thunk=None):
        # with a thunk: a fresh greenlet to run it; without: the current one
        if thunk:
            self.greenlet = greenlet(thunk)
        else:
            self.greenlet = greenlet.getcurrent()

    def switch(self):
        # hand the chain we are leaving over to the target greenlet
        last = FrameChain()
        return self.greenlet.switch(last)
import sys, os
class BaseCoState(object):
    """Common base of costates: tracks the 'current' and 'main' coroutine
    of one disjoint switchable set."""

    def __init__(self):
        self.current = self.main = None

    def __repr__(self):
        "NOT_RPYTHON"
        # for debugging only
        return '<%s current=%r>' % (self.__class__.__name__, self.current)

    def update(self, new):
        # record leaving/entering for SyncState.switched() bookkeeping
        syncstate.leaving = self.current
        syncstate.entering = new
        self.current = new
        # take the target's frame; the one we leave gets its frame assigned
        # later, in SyncState.switched()
        frame, new.frame = new.frame, None
        return frame
class CoState(BaseCoState):
    """Default costate for plain interp-level coroutines."""
    def __init__(self):
        BaseCoState.__init__(self)
        # the running program itself acts as the 'main' coroutine
        self.current = self.main = Coroutine(self)
class CoroutineDamage(SystemError):
    """Raised on invalid coroutine operations, e.g. re-binding a bound one."""
    pass
class SyncState(object):
    """Global bookkeeping shared by all coroutine switches.

    Tracks which coroutine is leaving/entering during a switch, plus
    deferred work (an exception to inject, postponed deletions) that is
    carried out right after a switch completes.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """(Re-)initialize to the pristine state."""
        self.default_costate = None
        self.leaving = None
        self.entering = None
        self.things_to_do = False
        self.temp_exc = None
        self.to_delete = []

    def switched(self, incoming_frame):
        """Finish a switch: give the incoming frame to the coroutine that
        was left, fire its goodbye/hello hooks, then run deferred work."""
        left = syncstate.leaving
        entered = syncstate.entering
        syncstate.leaving = syncstate.entering = None
        if left is not None:   # mostly to work around an annotation problem;
                               # should not really be None
            left.frame = incoming_frame
            left.goodbye()
        if entered is not None:
            entered.hello()
        if self.things_to_do:
            self._do_things_to_do()

    def push_exception(self, exc):
        """Schedule 'exc' to be raised in the coroutine switched to next."""
        self.things_to_do = True
        self.temp_exc = exc

    def check_for_zombie(self, obj):
        """Return True if 'obj' is queued for postponed deletion."""
        # bug fix: this used to test 'co in self.to_delete', but no name
        # 'co' exists here -- the parameter is 'obj' (NameError at runtime)
        return obj in self.to_delete

    def postpone_deletion(self, obj):
        """Queue 'obj' to be killed at a safe point after the next switch."""
        self.to_delete.append(obj)
        self.things_to_do = True

    def _do_things_to_do(self):
        if self.temp_exc is not None:
            # somebody left an unhandled exception and switched to us.
            # this both provides default exception handling and the
            # way to inject an exception, like CoroutineExit.
            e, self.temp_exc = self.temp_exc, None
            # deletions (if any) remain pending for the next opportunity
            self.things_to_do = bool(self.to_delete)
            raise e
        while self.to_delete:
            delete, self.to_delete = self.to_delete, []
            for obj in delete:
                obj.parent = obj.costate.current
                obj._kill_finally()
        else:
            self.things_to_do = False
syncstate = SyncState()
class CoroutineExit(SystemExit):
    """Injected into a coroutine to make it shut down cleanly."""
    # XXX SystemExit's __init__ creates problems in bookkeeper.
    def __init__(self):
        pass
class AbstractThunk(object):
    """Interface for the one-shot callable a coroutine is bound to."""
    def call(self):
        raise NotImplementedError("abstract base class")
class Coroutine(Wrappable):
    """Interpreter-level coroutine.

    A coroutine owns a suspended rstack 'frame' (or a greenlet-backed
    FrameChain when running untranslated), a costate tracking which
    coroutine of the family is running, a parent to return to when it
    finishes, and a one-shot thunk supplying the code to run.
    """
    def __init__(self, state=None):
        self.frame = None
        if state is None:
            state = self._get_default_costate()
        self.costate = state
        self.parent = None
        self.thunk = None
    def __repr__(self):
        'NOT_RPYTHON'
        # just for debugging
        if hasattr(self, '__name__'):
            return '<Coro %s frame=%r %s>' % (self.__name__, self.frame, self.thunk is not None)
        else:
            return '<coro frame=%r %s>' % (self.frame, self.thunk is not None)
    def _get_default_costate():
        # lazily create the single process-wide default CoState
        state = syncstate.default_costate
        if state is None:
            state = syncstate.default_costate = CoState()
        return state
    _get_default_costate = staticmethod(_get_default_costate)
    def _get_default_parent(self):
        # default parent: whoever is currently running in our costate
        return self.costate.current
    def bind(self, thunk):
        """Attach `thunk` and create the frame that will run it.
        Raises CoroutineDamage if already bound."""
        assert isinstance(thunk, AbstractThunk)
        if self.frame is not None:
            raise CoroutineDamage
        if self.parent is None:
            self.parent = self._get_default_parent()
        assert self.parent is not None
        self.thunk = thunk
        if we_are_translated():
            self.frame = self._bind()
        else:
            self.frame = self._greenlet_bind()
    def _greenlet_bind(self):
        # untranslated emulation of _bind() on top of greenlets;
        # `weak` lets the inner function drop its reference to self
        weak = [self]
        def _greenlet_execute(incoming_frame):
            try:
                chain2go2next = weak[0]._execute(incoming_frame)
            except:
                # no exception is supposed to get out of _execute()
                # better report it directly into the main greenlet then,
                # and hidden to prevent catching
                main_greenlet.throw(AssertionError(
                    "unexpected exception out of Coroutine._execute()",
                    *sys.exc_info()))
                assert 0
            del weak[0]
            greenlet.getcurrent().parent = chain2go2next.greenlet
            return None # as the result of the FrameChain.switch()
        chain = FrameChain(_greenlet_execute)
        return chain
    def _bind(self):
        # translated version: capture the current stack as our frame
        state = self.costate
        incoming_frame = yield_current_frame_to_caller()
        self = state.current
        return self._execute(incoming_frame)
    def _execute(self, incoming_frame):
        """Run the thunk, then transfer control to a live parent."""
        state = self.costate
        try:
            try:
                try:
                    exc = None
                    thunk = self.thunk
                    self.thunk = None
                    syncstate.switched(incoming_frame)
                    thunk.call()
                    resume_point("coroutine__bind", state)
                except Exception, e:
                    exc = e
                    raise
            finally:
                # warning! we must reload the 'self' from the costate,
                # because after a clone() the 'self' of both copies
                # point to the original!
                self = state.current
                self.finish(exc)
        except CoroutineExit:
            # ignore a shutdown exception
            pass
        except Exception, e:
            # redirect all unhandled exceptions to the parent
            syncstate.push_exception(e)
        while self.parent is not None and self.parent.frame is None:
            # greenlet behavior is fine: skip dead parents
            self.parent = self.parent.parent
        return state.update(self.parent)
    def switch(self):
        """Transfer execution to this coroutine."""
        if self.frame is None:
            # considered a programming error.
            # greenlets and tasklets have different ideas about this.
            raise CoroutineDamage
        state = self.costate
        incoming_frame = state.update(self).switch()
        resume_point("coroutine_switch", state, returns=incoming_frame)
        syncstate.switched(incoming_frame)
    def kill(self):
        """Shut this coroutine down by injecting CoroutineExit into it."""
        if self.frame is None:
            return
        state = self.costate
        syncstate.push_exception(CoroutineExit())
        # careful here - if setting self.parent to state.current would
        # create a loop, break it. The assumption is that 'self'
        # will die, so that state.current's chain of parents can be
        # modified to skip 'self' without too many people noticing.
        p = state.current
        if p is self or self.parent is None:
            pass # killing the current of the main - don't change any parent
        else:
            while p.parent is not None:
                if p.parent is self:
                    p.parent = self.parent
                    break
                p = p.parent
            self.parent = state.current
        self.switch()
    def _kill_finally(self):
        # deletion-time kill: run the user destructor, then shut down
        try:
            self._userdel()
        except Exception:
            pass # maybe print a warning?
        self.kill()
    def __del__(self):
        # provide the necessary clean-up if this coro is left
        # with a frame.
        # note that AppCoroutine has to take care about this
        # as well, including a check for user-supplied __del__.
        # Additionally note that in the context of __del__, we are
        # not in the position to issue a switch.
        # we defer it completely.
        if self.frame is not None and syncstate is not None:
            syncstate.postpone_deletion(self)
    def _userdel(self):
        # override this for exposed coros
        pass
    def is_alive(self):
        # alive = suspended with a frame, or currently running
        return self.frame is not None or self is self.costate.current
    def is_zombie(self):
        return self.frame is not None and syncstate.check_for_zombie(self)
    def getcurrent():
        # currently-running coroutine of the default costate
        costate = Coroutine._get_default_costate()
        return costate.current
    getcurrent = staticmethod(getcurrent)
    def getmain():
        costate = Coroutine._get_default_costate()
        return costate.main
    getmain = staticmethod(getmain)
    def hello(self):
        "Called when execution is transferred into this coroutine."
    def goodbye(self):
        "Called just after execution is transferred away from this coroutine."
    def finish(self, exc=None):
        "stephan forgot me"
# _________________________________________________
# ---- file boundary (extraction artifact) ----
"""
basic definitions for tasklet flags.
For simplicity and compatibility,
they are defined the same for coroutines,
even if they are not used.
taken from tasklet_structs.h
----------------------------
/***************************************************************************
Tasklet Flag Definition
-----------------------
blocked: The tasklet is either waiting in a channel for
writing (1) or reading (-1) or not blocked (0).
Maintained by the channel logic. Do not change.
atomic: If true, schedulers will never switch. Driven by
the code object or dynamically, see below.
ignore_nesting: Allows auto-scheduling, even if nesting_level
is not zero.
autoschedule: The tasklet likes to be auto-scheduled. User driven.
block_trap: Debugging aid. Whenever the tasklet would be
blocked by a channel, an exception is raised.
is_zombie: This tasklet is almost dead, its deallocation has
started. The tasklet *must* die at some time, or the
process can never end.
pending_irq: If set, an interrupt was issued during an atomic
operation, and should be handled when possible.
Policy for atomic/autoschedule and switching:
---------------------------------------------
A tasklet switch can always be done explicitly by calling schedule().
Atomic and schedule are concerned with automatic features.
atomic autoschedule
1 any Neither a scheduler nor a watchdog will
try to switch this tasklet.
0 0 The tasklet can be stopped on desire, or it
can be killed by an exception.
0 1 Like above, plus auto-scheduling is enabled.
Default settings:
-----------------
All flags are zero by default.
***************************************************************************/
typedef struct _tasklet_flags {
int blocked: 2;
unsigned int atomic: 1;
unsigned int ignore_nesting: 1;
unsigned int autoschedule: 1;
unsigned int block_trap: 1;
unsigned int is_zombie: 1;
unsigned int pending_irq: 1;
} PyTaskletFlagStruc;
"""
from pypy.rlib.rarithmetic import LONG_BIT, intmask
class BitSetDef(object):
    """Collects named bitfield definitions, preserving insertion order.

    Assigning any attribute not in __slots__ records it as a field;
    iterating yields (name, value) pairs in definition order.
    """
    __slots__ = "_names __dict__ _attrname".split()

    def __init__(self, _attrname):
        self._names = []
        self._attrname = _attrname

    def __setattr__(self, key, value):
        # attributes outside __slots__ are field definitions:
        # remember their order and refuse duplicates
        if key not in self.__slots__:
            assert key not in self.__dict__
            self._names.append(key)
        object.__setattr__(self, key, value)

    def __iter__(self):
        return self._enum_objects()

    def _enum_objects(self):
        for fieldname in self._names:
            yield fieldname, getattr(self, fieldname)
# The tasklet/coroutine flag layout: each entry is (bit_width, docstring).
# negative values are user-writable
flags = BitSetDef("flags")
flags.blocked = 2, """writing (1) or reading (-1) or not blocked (0)"""
flags.atomic = -1, """If true, schedulers will never switch"""
flags.ignore_nesting = -1, """allow auto-scheduling in nested interpreters"""
flags.autoschedule = -1, """enable auto-scheduling"""
flags.block_trap = -1, """raise an exception instead of blocking"""
flags.is_zombie = 1, """__del__ is in progress"""
flags.pending_irq = 1, """an interrupt occured while being atomic"""
def make_get_bits(name, bits, shift):
    """ return a bool for single bits, signed int otherwise """
    if bits == 1:
        # single flag: test its one bit directly
        return "bool(%s & 0x%x)" % (name, 1 << shift)
    # multi-bit field: shift left so the field's sign bit lands at the
    # word's top bit, then arithmetic-shift back to sign-extend
    return "intmask(%s << (LONG_BIT-%d)) >> (LONG_BIT-%d)" % (
        name, bits + shift, bits)
def make_set_bits(name, bits, shift):
    """Return source text that stores `value` into a `bits`-wide field at `shift`."""
    datamask = (1 << bits) - 1
    # clear the field, then OR in the masked, shifted value
    return "%s & ~0x%x | (value & 0x%x) << %d" % (
        name, datamask << shift, datamask, shift)
def gen_code():
    """Generate the source of the StacklessFlags mixin below.

    Walks the `flags` bit-set definition, emitting a get_/set_ accessor
    pair per field.  Run this module as a script and paste the output
    between the BEGIN/END generated-code markers.
    """
    from cStringIO import StringIO
    f = StringIO()
    print >> f, "class StacklessFlags(object):"
    print >> f, "    _mixin_ = True"
    shift = 0
    field = "self.%s" % flags._attrname
    for name, (bits, doc) in flags:
        # a negative bit-width marks the flag as user-writable
        write, bits = bits < 0, abs(bits)
        print >> f
        print >> f, '    def get_%s(self):' % name
        print >> f, '        """%s"""' % doc
        print >> f, '        return %s' % make_get_bits(field, bits, shift)
        print >> f, '    def set_%s(self, value):' % name
        print >> f, '        """%s"""' % doc
        print >> f, '        %s = %s' % (field, make_set_bits(field, bits, shift))
        print >> f, '    set_%s._public = %s' % (name, write)
        shift += bits
    return f.getvalue()
# BEGIN generated code
class StacklessFlags(object):
    # NOTE: generated by gen_code() above -- regenerate instead of editing.
    _mixin_ = True
    def get_blocked(self):
        """writing (1) or reading (-1) or not blocked (0)"""
        return intmask(self.flags << (LONG_BIT-2)) >> (LONG_BIT-2)
    def set_blocked(self, value):
        """writing (1) or reading (-1) or not blocked (0)"""
        self.flags = self.flags & ~0x3 | (value & 0x3) << 0
    set_blocked._public = False
    def get_atomic(self):
        """If true, schedulers will never switch"""
        return bool(self.flags & 0x4)
    def set_atomic(self, value):
        """If true, schedulers will never switch"""
        self.flags = self.flags & ~0x4 | (value & 0x1) << 2
    set_atomic._public = True
    def get_ignore_nesting(self):
        """allow auto-scheduling in nested interpreters"""
        return bool(self.flags & 0x8)
    def set_ignore_nesting(self, value):
        """allow auto-scheduling in nested interpreters"""
        self.flags = self.flags & ~0x8 | (value & 0x1) << 3
    set_ignore_nesting._public = True
    def get_autoschedule(self):
        """enable auto-scheduling"""
        return bool(self.flags & 0x10)
    def set_autoschedule(self, value):
        """enable auto-scheduling"""
        self.flags = self.flags & ~0x10 | (value & 0x1) << 4
    set_autoschedule._public = True
    def get_block_trap(self):
        """raise an exception instead of blocking"""
        return bool(self.flags & 0x20)
    def set_block_trap(self, value):
        """raise an exception instead of blocking"""
        self.flags = self.flags & ~0x20 | (value & 0x1) << 5
    set_block_trap._public = True
    def get_is_zombie(self):
        """__del__ is in progress"""
        return bool(self.flags & 0x40)
    def set_is_zombie(self, value):
        """__del__ is in progress"""
        self.flags = self.flags & ~0x40 | (value & 0x1) << 6
    set_is_zombie._public = False
    def get_pending_irq(self):
        """an interrupt occured while being atomic"""
        return bool(self.flags & 0x80)
    def set_pending_irq(self, value):
        """an interrupt occured while being atomic"""
        self.flags = self.flags & ~0x80 | (value & 0x1) << 7
    set_pending_irq._public = False
# END generated code
if __name__ == '__main__':
    # regenerate the StacklessFlags mixin above;
    # paste this into the file
    print gen_code()
# ---- file boundary (extraction artifact) ----
from pypy.interpreter.argument import Arguments
from pypy.interpreter.typedef import GetSetProperty, TypeDef
from pypy.interpreter.gateway import interp2app, ObjSpace, W_Root
from pypy.interpreter.gateway import NoneNotWrapped
from pypy.interpreter.error import OperationError
from pypy.module._stackless.interp_coroutine import Coroutine, BaseCoState
from pypy.module._stackless.interp_coroutine import AbstractThunk, syncstate
from pypy.module._stackless.coroutine import makeStaticMethod
class GreenletThunk(AbstractThunk):
    """Thunk that runs an AppGreenlet's applevel 'run' callable."""
    def __init__(self, greenlet):
        self.greenlet = greenlet
    def call(self):
        greenlet = self.greenlet
        greenlet.active = True
        try:
            space = greenlet.space
            # arguments were stashed on the costate by w_switch()
            args_w = greenlet.costate.args_w
            __args__ = Arguments(space, args_w)
            try:
                w_run = space.getattr(space.wrap(greenlet), space.wrap('run'))
                # drop the callable so the greenlet counts as started
                greenlet.w_callable = None
                w_result = space.call_args(w_run, __args__)
            except OperationError, operror:
                # GreenletExit terminates the greenlet normally,
                # returning the exception value
                if not operror.match(space, greenlet.costate.w_GreenletExit):
                    raise
                w_result = operror.w_value
        finally:
            greenlet.active = False
        # the result is handed back through the costate
        greenlet.costate.args_w = [w_result]
class AppGreenletCoState(BaseCoState):
    """Costate for application-level greenlets; one per object space."""
    def __init__(self, space):
        BaseCoState.__init__(self)
        # args_w carries switch() arguments/results between greenlets
        self.args_w = None
        self.space = space
        self.w_GreenletExit = get(space, "GreenletExit")
        self.w_GreenletError = get(space, "GreenletError")
    def post_install(self):
        # deferred: creating the main greenlet needs a working space
        self.current = self.main = AppGreenlet(self.space, is_main=True)
class AppGreenlet(Coroutine):
    """Application-level greenlet, implemented on top of Coroutine."""
    def __init__(self, space, w_callable=None, is_main=False):
        Coroutine.__init__(self, self._get_state(space))
        self.space = space
        self.w_callable = w_callable  # run target; cleared once started
        self.active = is_main         # True while run() is executing
        self.subctx = space.getexecutioncontext().Subcontext()
        if is_main:
            self.subctx.framestack = None # wack
        else:
            self.bind(GreenletThunk(self))
    def descr_method__new__(space, w_subtype, __args__):
        co = space.allocate_instance(AppGreenlet, w_subtype)
        AppGreenlet.__init__(co, space)
        return space.wrap(co)
    def descr_method__init__(self, w_run=NoneNotWrapped,
                             w_parent=NoneNotWrapped):
        if w_run is not None:
            self.set_run(w_run)
        if w_parent is not None:
            self.set_parent(w_parent)
    def _get_state(space):
        # one AppGreenletCoState per space, cached on the space
        return space.fromcache(AppGreenletCoState)
    _get_state = staticmethod(_get_state)
    def hello(self):
        # entering this greenlet: install its execution subcontext
        ec = self.space.getexecutioncontext()
        self.subctx.enter(ec)
    def goodbye(self):
        # leaving this greenlet: save its execution subcontext
        ec = self.space.getexecutioncontext()
        self.subctx.leave(ec)
    def w_getcurrent(space):
        return space.wrap(AppGreenlet._get_state(space).current)
    w_getcurrent = staticmethod(w_getcurrent)
    def w_switch(self, args_w):
        """Applevel switch(): transfer to self (or its nearest live parent)."""
        # Find the switch target - it might be a parent greenlet
        space = self.space
        costate = self.costate
        target = self
        while target.isdead():
            target = target.parent
            assert isinstance(target, AppGreenlet)
        # Switch to it
        costate.args_w = args_w
        if target is not costate.current:
            target.switch()
        else:
            # case not handled in Coroutine.switch()
            syncstate._do_things_to_do()
        result_w = costate.args_w
        costate.args_w = None
        # costate.args_w can be set to None above for throw(), but then
        # switch() should have raised.  At this point costate.args_w != None.
        assert result_w is not None
        # Return the result of a switch, packaging it in a tuple if
        # there is more than one value.
        if len(result_w) == 1:
            return result_w[0]
        return space.newtuple(result_w)
    def w_throw(self, w_type=None, w_value=None, w_traceback=None):
        """Applevel throw(): raise an exception at the switch point of self."""
        space = self.space
        if space.is_w(w_type, space.w_None):
            w_type = self.costate.w_GreenletExit
        # Code copied from RAISE_VARARGS but slightly modified.  Not too nice.
        operror = OperationError(w_type, w_value)
        operror.normalize_exception(space)
        if not space.is_w(w_traceback, space.w_None):
            from pypy.interpreter import pytraceback
            tb = space.interpclass_w(w_traceback)
            if tb is None or not space.is_true(space.isinstance(tb,
                space.gettypeobject(pytraceback.PyTraceback.typedef))):
                raise OperationError(space.w_TypeError,
                      space.wrap("throw: arg 3 must be a traceback or None"))
            operror.application_traceback = tb
        # Dead greenlet: turn GreenletExit into a regular return
        if self.isdead() and operror.match(space, self.costate.w_GreenletExit):
            args_w = [operror.w_value]
        else:
            syncstate.push_exception(operror)
            args_w = None
        return self.w_switch(args_w)
    def _userdel(self):
        # let the applevel __del__, if any, run
        self.space.userdel(self.space.wrap(self))
    def isdead(self):
        # dead = already started (thunk consumed) and no longer running
        return self.thunk is None and not self.active
    def w_get_is_dead(space, self):
        return space.newbool(self.isdead())
    def descr__nonzero__(self):
        return self.space.newbool(self.active)
    def w_get_run(space, self):
        w_run = self.w_callable
        if w_run is None:
            raise OperationError(space.w_AttributeError, space.wrap("run"))
        return w_run
    def set_run(self, w_run):
        space = self.space
        # thunk is None once the greenlet has started; forbid then
        if self.thunk is None:
            raise OperationError(space.w_AttributeError,
                                 space.wrap("run cannot be set "
                                            "after the start of the greenlet"))
        self.w_callable = w_run
    def w_set_run(space, self, w_run):
        self.set_run(w_run)
    def w_del_run(space, self):
        if self.w_callable is None:
            raise OperationError(space.w_AttributeError, space.wrap("run"))
        self.w_callable = None
    def w_get_parent(space, self):
        return space.wrap(self.parent)
    def set_parent(self, w_parent):
        space = self.space
        newparent = space.interp_w(AppGreenlet, w_parent)
        # parents must share a costate and must not form a cycle
        if newparent.costate is not self.costate:
            raise OperationError(self.costate.w_GreenletError,
                                 space.wrap("invalid foreign parent"))
        curr = newparent
        while curr:
            if curr is self:
                raise OperationError(space.w_ValueError,
                                     space.wrap("cyclic parent chain"))
            curr = curr.parent
        self.parent = newparent
    def w_set_parent(space, self, w_parent):
        self.set_parent(w_parent)
    def w_get_frame(space, self):
        # only a started, suspended greenlet exposes a frame
        if not self.active or self.costate.current is self:
            f = None
        else:
            try:
                f = self.subctx.framestack.top(0)
            except IndexError:
                f = None
        return space.wrap(f)
def get(space, name):
    """Look up `name` in the applevel '_stackless' module."""
    w_mod = space.getbuiltinmodule('_stackless')
    return space.getattr(w_mod, space.wrap(name))
def post_install(module):
    """Finish setting up the applevel 'greenlet' type after space creation."""
    makeStaticMethod(module, 'greenlet', 'getcurrent')
    space = module.space
    state = AppGreenlet._get_state(space)
    state.post_install()
    w_greenlet = get(space, 'greenlet')
    # HACK HACK HACK
    # make the typeobject mutable for a while
    from pypy.objspace.std.typeobject import _HEAPTYPE, W_TypeObject
    assert isinstance(w_greenlet, W_TypeObject)
    old_flags = w_greenlet.__flags__
    w_greenlet.__flags__ |= _HEAPTYPE
    # attach the exception classes as attributes of the greenlet type
    space.appexec([w_greenlet,
                   state.w_GreenletExit,
                   state.w_GreenletError], """
    (greenlet, exit, error):
        greenlet.GreenletExit = exit
        greenlet.error = error
    """)
    w_greenlet.__flags__ = old_flags
# Applevel type definition: exposes the greenlet API (switch, throw,
# dead/run/parent properties, getcurrent, gr_frame, truth value).
AppGreenlet.typedef = TypeDef("greenlet",
    __new__ = interp2app(AppGreenlet.descr_method__new__.im_func,
                         unwrap_spec=[ObjSpace, W_Root, Arguments]),
    __init__ = interp2app(AppGreenlet.descr_method__init__),
    switch = interp2app(AppGreenlet.w_switch,
                        unwrap_spec=['self', 'args_w']),
    dead = GetSetProperty(AppGreenlet.w_get_is_dead),
    run = GetSetProperty(AppGreenlet.w_get_run,
                         AppGreenlet.w_set_run,
                         AppGreenlet.w_del_run),
    parent = GetSetProperty(AppGreenlet.w_get_parent,
                            AppGreenlet.w_set_parent),
    getcurrent = interp2app(AppGreenlet.w_getcurrent),
    throw = interp2app(AppGreenlet.w_throw),
    gr_frame = GetSetProperty(AppGreenlet.w_get_frame),
    __nonzero__ = interp2app(AppGreenlet.descr__nonzero__),
    __module__ = '_stackless',
)
# ---- file boundary (extraction artifact) ----
from pypy.module._stackless.interp_coroutine import AbstractThunk, Coroutine
from pypy.rlib.rgc import gc_swap_pool, gc_clone
from pypy.rlib.objectmodel import we_are_translated
from pypy.interpreter.error import OperationError
class InterpClonableMixin:
    """Mixin giving a coroutine its own GC pool so its frame can be cloned."""
    local_pool = None
    _mixin_ = True
    def hello_local_pool(self):
        # entering: switch the GC allocation pool to this coroutine's own
        if we_are_translated():
            self.saved_pool = gc_swap_pool(self.local_pool)
    def goodbye_local_pool(self):
        # leaving: restore the previous pool, keeping ours for later
        if we_are_translated():
            self.local_pool = gc_swap_pool(self.saved_pool)
            self.saved_pool = None
    def clone_into(self, copy, extradata=None):
        """Clone this coroutine's frame (and `extradata`) into `copy`.
        Only works translated; returns the cloned extradata."""
        if not we_are_translated():
            raise NotImplementedError
        # cannot gc_clone() directly self, because it is not in its own
        # local_pool.  Moreover, it has a __del__, which cloning doesn't
        # support properly at the moment.
        copy.parent = self.parent
        # the hello/goodbye pair has two purposes: it forces
        # self.local_pool to be computed even if it was None up to now,
        # and it puts the 'data' tuple in the correct pool to be cloned.
        self.hello_local_pool()
        data = (self.frame, extradata)
        self.goodbye_local_pool()
        # clone!
        data, copy.local_pool = gc_clone(data, self.local_pool)
        copy.frame, extradata = data
        copy.thunk = self.thunk # in case we haven't switched to self yet
        return extradata
class InterpClonableCoroutine(Coroutine, InterpClonableMixin):
    """Interp-level coroutine that can be duplicated with clone()."""
    def hello(self):
        self.hello_local_pool()
    def goodbye(self):
        self.goodbye_local_pool()
    def clone(self):
        """Return a copy of this (suspended) coroutine."""
        # hack, this is overridden in AppClonableCoroutine
        if self.getcurrent() is self:
            raise RuntimeError("clone() cannot clone the current coroutine; "
                               "use fork() instead")
        copy = InterpClonableCoroutine(self.costate)
        self.clone_into(copy)
        return copy
class ForkThunk(AbstractThunk):
    """Thunk run inside a helper coroutine to produce the clone for fork()."""

    def __init__(self, coroutine):
        self.coroutine = coroutine
        self.newcoroutine = None

    def call(self):
        # drop our reference before cloning, then record the child,
        # whose parent is the coroutine that called fork()
        forked, self.coroutine = self.coroutine, None
        child = forked.clone()
        child.parent = forked
        self.newcoroutine = child
def fork():
    """Fork, as in the Unix fork(): the call returns twice, and the return
    value of the call is either the new 'child' coroutine object (if returning
    into the parent), or None (if returning into the child). This returns
    into the parent first, which can switch to the child later.
    """
    current = InterpClonableCoroutine.getcurrent()
    if not isinstance(current, InterpClonableCoroutine):
        raise RuntimeError("fork() in a non-clonable coroutine")
    # run the clone from a helper coroutine, because the current one
    # cannot clone itself while running
    thunk = ForkThunk(current)
    coro_fork = InterpClonableCoroutine()
    coro_fork.bind(thunk)
    coro_fork.switch()
    # we resume here twice. The following would need explanations about
    # why it returns the correct thing in both the parent and the child...
    return thunk.newcoroutine
## from pypy.rpython.lltypesystem import lltype, lloperation
## lloperation.llop.debug_view(lltype.Void, current, thunk,
## lloperation.llop.gc_x_size_header(lltype.Signed))
# ---- file boundary (extraction artifact) ----
# Package initialisation
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
    """
    This module implements Stackless for applications.
    """

    appleveldefs = {
        'GreenletExit' : 'app_greenlet.GreenletExit',
        'GreenletError' : 'app_greenlet.GreenletError',
    }

    interpleveldefs = {
        'tasklet'    : 'interp_stackless.tasklet',
        'coroutine'  : 'coroutine.AppCoroutine',
        'greenlet'   : 'interp_greenlet.AppGreenlet',
        'usercostate': 'composable_coroutine.W_UserCoState',
        '_return_main' : 'coroutine.return_main',
    }

    def setup_after_space_initialization(self):
        # post-installing classmethods/staticmethods which
        # are not yet directly supported
        from pypy.module._stackless.coroutine import post_install as post_install_coro
        post_install_coro(self)
        from pypy.module._stackless.interp_greenlet import post_install as post_install_greenlet
        post_install_greenlet(self)

        # clonable coroutines need a moving/framework GC
        if self.space.config.translation.gc in ('framework', 'stacklessgc'):
            from pypy.module._stackless.clonable import post_install as post_install_clonable
            self.extra_interpdef('clonable', 'clonable.AppClonableCoroutine')
            self.extra_interpdef('fork',     'clonable.fork')
            post_install_clonable(self)
# ---- file boundary (extraction artifact) ----
from pypy.interpreter.baseobjspace import Wrappable
from pypy.interpreter.typedef import TypeDef, interp2app
from pypy.module._stackless.coroutine import AppCoState, AppCoroutine
class W_UserCoState(Wrappable):
    """Applevel object wrapping a private costate: an independent
    family of coroutines with its own 'current' and 'main'."""
    def __init__(self, space):
        self.costate = AppCoState(space)
        self.costate.post_install()
    def descr_method__new__(space, w_subtype):
        costate = space.allocate_instance(W_UserCoState, w_subtype)
        W_UserCoState.__init__(costate, space)
        return space.wrap(costate)
    def w_getcurrent(self):
        # currently-running coroutine of this costate
        space = self.costate.space
        return space.wrap(self.costate.current)
    def w_spawn(self, w_subtype=None):
        """Create a new, unbound coroutine belonging to this costate."""
        space = self.costate.space
        if space.is_w(w_subtype, space.w_None):
            w_subtype = space.gettypeobject(AppCoroutine.typedef)
        co = space.allocate_instance(AppCoroutine, w_subtype)
        AppCoroutine.__init__(co, space, state=self.costate)
        return space.wrap(co)
# Applevel type definition for the usercostate factory object.
W_UserCoState.typedef = TypeDef("usercostate",
    __new__ = interp2app(W_UserCoState.descr_method__new__.im_func),
    __module__ = '_stackless',
    getcurrent = interp2app(W_UserCoState.w_getcurrent),
    spawn = interp2app(W_UserCoState.w_spawn),
)
# ---- file boundary (extraction artifact) ----
from pypy.interpreter.error import OperationError
from pypy.interpreter.typedef import TypeDef
from pypy.interpreter.gateway import interp2app, ObjSpace, W_Root
from pypy.module._stackless.coroutine import AppCoroutine, AppCoState
from pypy.module._stackless.coroutine import makeStaticMethod
from pypy.module._stackless.interp_coroutine import AbstractThunk
from pypy.module._stackless.interp_clonable import InterpClonableMixin
class AppClonableCoroutine(AppCoroutine, InterpClonableMixin):
    """Applevel coroutine that supports clone(), using a per-coroutine GC pool."""

    def newsubctx(self):
        # allocate the execution subcontext inside our own pool so that
        # it gets cloned along with the frame
        self.hello_local_pool()
        AppCoroutine.newsubctx(self)
        self.goodbye_local_pool()

    def hello(self):
        self.hello_local_pool()
        AppCoroutine.hello(self)

    def goodbye(self):
        AppCoroutine.goodbye(self)
        self.goodbye_local_pool()

    def descr_method__new__(space, w_subtype):
        co = space.allocate_instance(AppClonableCoroutine, w_subtype)
        costate = AppClonableCoroutine._get_state(space)
        AppClonableCoroutine.__init__(co, space, state=costate)
        return space.wrap(co)

    def _get_state(space):
        # clonables get their own costate, separate from plain coroutines
        return space.fromcache(AppClonableCoState)
    _get_state = staticmethod(_get_state)

    def w_getcurrent(space):
        return space.wrap(AppClonableCoroutine._get_state(space).current)
    w_getcurrent = staticmethod(w_getcurrent)

    def w_clone(self):
        """Applevel clone(): duplicate this suspended coroutine."""
        space = self.space
        costate = self.costate
        if costate.current is self:
            raise OperationError(space.w_RuntimeError,
                                 space.wrap("clone() cannot clone the "
                                            "current coroutine"
                                            "; use fork() instead"))
        copy = AppClonableCoroutine(space, state=costate)
        copy.subctx = self.clone_into(copy, self.subctx)
        return space.wrap(copy)

    def descr__reduce__(self, space):
        raise OperationError(space.w_TypeError,
                             space.wrap("_stackless.clonable instances are "
                                        "not picklable"))
# Applevel type 'clonable': inherits the coroutine API, adds clone().
AppClonableCoroutine.typedef = TypeDef("clonable", AppCoroutine.typedef,
    __new__ = interp2app(AppClonableCoroutine.descr_method__new__.im_func),
    getcurrent = interp2app(AppClonableCoroutine.w_getcurrent),
    clone = interp2app(AppClonableCoroutine.w_clone),
    __reduce__ = interp2app(AppClonableCoroutine.descr__reduce__,
                            unwrap_spec=['self', ObjSpace]),
)
class AppClonableCoState(AppCoState):
    """Costate dedicated to clonable coroutines."""
    def post_install(self):
        self.current = self.main = AppClonableCoroutine(self.space, state=self)
        self.main.subctx.framestack = None     # wack
def post_install(module):
    """Finish setting up the applevel 'clonable' type after space creation."""
    makeStaticMethod(module, 'clonable', 'getcurrent')
    space = module.space
    AppClonableCoroutine._get_state(space).post_install()
# ____________________________________________________________
class ForkThunk(AbstractThunk):
    """Thunk run inside a helper coroutine to produce the clone for fork()."""

    def __init__(self, coroutine):
        self.coroutine = coroutine
        self.newcoroutine = None

    def call(self):
        # drop our reference before cloning, then build the child as a
        # fresh AppClonableCoroutine in the same space and costate
        forked, self.coroutine = self.coroutine, None
        child = AppClonableCoroutine(forked.space, state=forked.costate)
        child.subctx = forked.clone_into(child, forked.subctx)
        child.parent = forked
        self.newcoroutine = child
def fork(space):
    """Fork, as in the Unix fork(): the call returns twice, and the return
    value of the call is either the new 'child' coroutine object (if returning
    into the parent), or None (if returning into the child). This returns
    into the parent first, which can switch to the child later.
    """
    costate = AppClonableCoroutine._get_state(space)
    current = costate.current
    # the main coroutine has no clonable frame to fork from
    if current is costate.main:
        raise OperationError(space.w_RuntimeError,
                             space.wrap("cannot fork() in the main "
                                        "clonable coroutine"))
    # run the clone from a helper coroutine, because the current one
    # cannot clone itself while running
    thunk = ForkThunk(current)
    coro_fork = AppClonableCoroutine(space, state=costate)
    coro_fork.bind(thunk)
    coro_fork.switch()
    # we resume here twice. The following would need explanations about
    # why it returns the correct thing in both the parent and the child...
    return space.wrap(thunk.newcoroutine)
fork.unwrap_spec = [ObjSpace]
# ---- file boundary (extraction artifact) ----
from pypy.conftest import gettestobjspace
# app-level testing of coroutine pickling
class AppTest_Pickle:
    """App-level test: a suspended coroutine survives a pickle round-trip
    and resumes at the point where it switched away."""

    def setup_class(cls):
        space = gettestobjspace(usemodules=('_stackless',))
        cls.space = space

    def test_simple_ish(self):
        output = []
        import _stackless
        def f(coro, n, x):
            if n == 0:
                # suspend here; the appends below run only after resuming
                coro.switch()
                return
            f(coro, n-1, 2*x)
            output.append(x)

        def example():
            main_coro = _stackless.coroutine.getcurrent()
            sub_coro = _stackless.coroutine()
            sub_coro.bind(f, main_coro, 5, 1)
            sub_coro.switch()

            # pickle the suspended coroutine and resume the copy
            import pickle
            pckl = pickle.dumps(sub_coro)
            new_coro = pickle.loads(pckl)

            new_coro.switch()

        example()
        # the recursion unwinds from the deepest call outwards
        assert output == [16, 8, 4, 2, 1]
# ---- file boundary (extraction artifact) ----
# | Python |
""" The ffi for rpython, need to be imported for side effects
"""
import sys
from pypy.rpython.lltypesystem import rffi
from pypy.rpython.lltypesystem import lltype
from pypy.rpython.extfunc import register_external
from pypy.rpython.extregistry import ExtRegistryEntry
from pypy.module._curses import interp_curses
from pypy.rpython.lltypesystem import llmemory
# Low-level declarations for the three curses/terminfo entry points we need.
# waaa...
includes = ['curses.h', 'term.h']
libs = ['curses']

INT = rffi.INT
INTP = lltype.Ptr(lltype.Array(INT, hints={'nolength':True}))
c_setupterm = rffi.llexternal('setupterm', [rffi.CCHARP, INT, INTP], INT,
                              includes=includes, libraries=libs)
c_tigetstr = rffi.llexternal('tigetstr', [rffi.CCHARP], rffi.CCHARP,
                             includes=includes, libraries=libs)
# tparm is declared varargs in C; we pass a fixed 9+ integer parameters
c_tparm = rffi.llexternal('tparm', [rffi.CCHARP, INT, INT, INT, INT, INT,
                                    INT, INT, INT, INT, INT], rffi.CCHARP,
                          includes=includes, libraries=libs)

ERR = rffi.CConstant('ERR', INT)
OK = rffi.CConstant('OK', INT)
def curses_setupterm(term, fd):
    """Call C setupterm(); raise curses_error with a specific message
    on failure, otherwise record that setup has happened."""
    # errret out-parameter for setupterm
    intp = lltype.malloc(INTP.TO, 1, flavor='raw')
    err = c_setupterm(term, fd, intp)
    try:
        if err == ERR:
            # errret values per terminfo: 0 = no terminal, -1 = no database
            if intp[0] == 0:
                msg = "setupterm: could not find terminal"
            elif intp[0] == -1:
                msg = "setupterm: could not find terminfo database"
            else:
                msg = "setupterm: unknown error"
            raise interp_curses.curses_error(msg)
        interp_curses.module_info.setupterm_called = True
    finally:
        lltype.free(intp, flavor='raw')
def curses_setupterm_null_llimpl(fd):
    # NULL terminal name: setupterm uses the TERM environment variable
    curses_setupterm(lltype.nullptr(rffi.CCHARP.TO), fd)
def curses_setupterm_llimpl(term, fd):
    # copy the RPython string to a C string for setupterm, freeing it after
    ll_s = rffi.str2charp(term)
    try:
        curses_setupterm(ll_s, fd)
    finally:
        rffi.free_charp(ll_s)
# Hook the ll implementations up as the translated versions of the
# NOT_RPYTHON helpers in interp_curses.
register_external(interp_curses._curses_setupterm_null,
                  [int], llimpl=curses_setupterm_null_llimpl,
                  export_name='_curses.setupterm_null')
register_external(interp_curses._curses_setupterm,
                  [str, int], llimpl=curses_setupterm_llimpl,
                  export_name='_curses.setupterm')
def check_setup_invoked():
    """Raise curses_error unless setupterm() has already been called."""
    if interp_curses.module_info.setupterm_called:
        return
    raise interp_curses.curses_error("must call (at least) setupterm() first")
def tigetstr_llimpl(cap):
    """Low-level tigetstr(): return the capability string for `cap`.
    Raises TermError when the capability is absent or not a string."""
    check_setup_invoked()
    ll_cap = rffi.str2charp(cap)
    try:
        ll_res = c_tigetstr(ll_cap)
        # tigetstr returns 0 (not a string capability) or -1 (unknown
        # capability) cast to a pointer
        num = lltype.cast_ptr_to_int(ll_res)
        if num == 0 or num == -1:
            raise interp_curses.TermError()
        res = rffi.charp2str(ll_res)
        return res
    finally:
        rffi.free_charp(ll_cap)
# translated implementation for interp_curses._curses_tigetstr
register_external(interp_curses._curses_tigetstr, [str], str,
                  export_name='_curses.tigetstr', llimpl=tigetstr_llimpl)
def tparm_llimpl(s, args):
    """Low-level tparm(): instantiate the parameterized capability string
    `s` with up to 10 integer arguments (missing ones default to 0).

    Raises curses_error (via check_setup_invoked) when setupterm() was
    never called.
    """
    check_setup_invoked()
    # pass a fixed number of numeric parameters, padding with zeros
    # XXX nasty trick stolen from CPython
    l = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    for i in range(min(len(args), 10)):
        l[i] = args[i]
    ll_s = rffi.str2charp(s)
    try:
        ll_res = c_tparm(ll_s, l[0], l[1], l[2], l[3], l[4], l[5], l[6],
                         l[7], l[8], l[9])
        return rffi.charp2str(ll_res)
    finally:
        # free the C copy of `s` even if tparm/charp2str fails,
        # matching tigetstr_llimpl above (was leaked on error before)
        rffi.free_charp(ll_s)
# translated implementation for interp_curses._curses_tparm
register_external(interp_curses._curses_tparm, [str, [int]], str,
                  export_name='_curses.tparm', llimpl=tparm_llimpl)
# ---- file boundary (extraction artifact) ----
class error(Exception):
    """Application-level exception class exported as _curses.error."""
    pass
# ---- file boundary (extraction artifact) ----
from pypy.interpreter.baseobjspace import ObjSpace, W_Root
from pypy.interpreter.error import OperationError
import _curses
class ModuleInfo:
    """Mutable module state: remembers whether setupterm() succeeded."""
    def __init__(self):
        self.setupterm_called = False

# singleton instance shared by interp_curses and fficurses
module_info = ModuleInfo()
class curses_error(Exception):
    """Interp-level curses failure; converted to applevel _curses.error."""
    def __init__(self, msg):
        self.msg = msg
from pypy.annotation.classdef import FORCE_ATTRIBUTES_INTO_CLASSES
from pypy.annotation.model import SomeString

# tell the annotator that curses_error always has a string 'msg'
# this is necessary due to annmixlevel
FORCE_ATTRIBUTES_INTO_CLASSES[curses_error] = {'msg': SomeString()}
def convert_error(space, error):
    """Turn an interp-level curses_error into an OperationError wrapping
    the applevel _curses.error exception class."""
    w_module = space.getbuiltinmodule('_curses')
    w_exception_class = space.getattr(w_module, space.wrap('error'))
    w_msg = space.wrap(error.msg)
    w_exception = space.call_function(w_exception_class, w_msg)
    return OperationError(w_exception_class, w_exception)
def _curses_setupterm_null(fd):
    # NOT_RPYTHON
    # untranslated fallback: delegate to CPython's _curses with TERM env var
    try:
        _curses.setupterm(None, fd)
    except _curses.error, e:
        raise curses_error(e.args[0])

def _curses_setupterm(termname, fd):
    # NOT_RPYTHON
    # untranslated fallback: delegate to CPython's _curses
    try:
        _curses.setupterm(termname, fd)
    except _curses.error, e:
        raise curses_error(e.args[0])
def setupterm(space, w_termname=None, fd=-1):
    """Initialize the terminal.  fd == -1 means "use sys.stdout's
    fileno"; a missing/None termname selects the default terminal."""
    if fd == -1:
        # default to sys.stdout's file descriptor, like CPython's _curses
        w_stdout = space.getattr(space.getbuiltinmodule('sys'),
                                 space.wrap('stdout'))
        fd = space.int_w(space.call_function(space.getattr(w_stdout,
                                             space.wrap('fileno'))))
    try:
        if space.is_w(w_termname, space.w_None) or w_termname is None:
            _curses_setupterm_null(fd)
        else:
            _curses_setupterm(space.str_w(w_termname), fd)
    except curses_error, e:
        # translate to the app-level _curses.error
        raise convert_error(space, e)
setupterm.unwrap_spec = [ObjSpace, W_Root, int]
class TermError(Exception):
    """Internal marker raised when a terminfo capability is missing."""
def _curses_tigetstr(capname):
    # NOT_RPYTHON
    # Return the string capability; TermError for a missing one (None
    # result), curses_error on any other host _curses failure.
    try:
        res = _curses.tigetstr(capname)
    except _curses.error, e:
        raise curses_error(e.args[0])
    if res is None:
        raise TermError
    return res
def _curses_tparm(s, args):
    # NOT_RPYTHON
    # Instantiate the parameterized capability string with *args*.
    try:
        return _curses.tparm(s, *args)
    except _curses.error, e:
        raise curses_error(e.args[0])
def tigetstr(space, capname):
    """App-level tigetstr(): the capability string, or None when the
    capability is absent."""
    try:
        result = _curses_tigetstr(capname)
    except TermError:
        # a missing capability is not an app-level error, just None
        return space.w_None
    except curses_error, e:
        raise convert_error(space, e)
    return space.wrap(result)
tigetstr.unwrap_spec = [ObjSpace, str]
def tparm(space, s, args_w):
    """App-level tparm(): substitute the wrapped int arguments into the
    capability string *s*."""
    args = [space.int_w(a) for a in args_w]
    try:
        return space.wrap(_curses_tparm(s, args))
    except curses_error, e:
        raise convert_error(space, e)
tparm.unwrap_spec = [ObjSpace, str, 'args_w']
| Python |
from pypy.interpreter.mixedmodule import MixedModule
from pypy.module._curses import fficurses
from pypy.module._curses import interp_curses
from pypy.rlib.nonconst import NonConstant
import _curses
class Module(MixedModule):
    """ Low-level interface for curses module,
    not meant to be used directly
    """
    applevel_name = "_curses"
    appleveldefs = {
        'error' : 'app_curses.error',
    }
    interpleveldefs = {
        'setupterm' : 'interp_curses.setupterm',
        'tigetstr' : 'interp_curses.tigetstr',
        'tparm' : 'interp_curses.tparm',
    }
import _curses
# re-export every upper-case integer constant of the host _curses module
# as an interp-level definition (evaluated at space-creation time)
for i in dir(_curses):
    val = getattr(_curses, i)
    if i.isupper() and type(val) is int:
        Module.interpleveldefs[i] = "space.wrap(%s)" % val
| Python |
import time
from pypy.interpreter.gateway import ObjSpace
def clock(space):
    """Return the CPU time or real time since the start of the process or
    since the first call to clock(). This returns a floating point measured
    in seconds with as much precision as the system records."""
    # thin wrapper around the host time.clock()
    return space.wrap(time.clock())
def time_(space):
    """Return the current time in seconds since the Epoch. Fractions of a
    second may be present if the system clock provides them."""
    now = time.time()
    return space.wrap(now)
def sleep(space, seconds):
    """Delay execution for a given number of seconds. The argument may
    be a floating point number for subsecond precision."""
    # XXX Temporary hack: we need to make sure the GIL is released while
    # sleeping. XXX should be done differently !!!
    GIL = space.threadlocals.getGIL()
    if GIL is not None: GIL.release()
    try:
        time.sleep(seconds)
    finally:
        # bug fix: re-acquire even when time.sleep() raises (e.g. an
        # interrupted sleep) -- the original would continue running
        # interpreter code without holding the GIL
        if GIL is not None: GIL.acquire(True)
sleep.unwrap_spec = [ObjSpace, float]
| Python |
# Package initialisation
from pypy.interpreter.mixedmodule import MixedModule
import time
class Module(MixedModule):
    """time module"""
    appleveldefs = {
    }
    # interp-level implementations live in interp_time
    interpleveldefs = {
    'clock' : 'interp_time.clock',
    'time' : 'interp_time.time_',
    'sleep' : 'interp_time.sleep',
}
| Python |
from pypy.interpreter.error import OperationError
from pypy.interpreter.gateway import ObjSpace
from pypy.rlib.objectmodel import we_are_translated
def internal_repr(space, w_object):
    """Return the interp-level repr() of a wrapped object as an app-level
    string (debugging aid)."""
    text = '%r' % (w_object,)
    return space.wrap(text)
def isfake(space, w_obj):
    """Return whether the argument is faked (stolen from CPython). This is
    always False after translation."""
    if we_are_translated():
        return space.w_False
    # relies on every typedef carrying a fakedcpytype attribute untranslated
    return space.wrap(bool(w_obj.typedef.fakedcpytype))
    #return space.wrap(bool(getattr(w_obj.typedef, 'fakedcpytype', None)))
def interp_pdb(space):
    """Run an interp-level pdb.
    This is not available in translated versions of PyPy."""
    assert not we_are_translated()
    import pdb
    pdb.set_trace()
def method_cache_counter(space, name):
    """Return a tuple (method_cache_hits, method_cache_misses) for calls to
    methods with the name."""
    # only meaningful when built with objspace.std.withmethodcachecounter
    assert space.config.objspace.std.withmethodcachecounter
    ec = space.getexecutioncontext()
    return space.newtuple([space.newint(ec.method_cache_hits.get(name, 0)),
                           space.newint(ec.method_cache_misses.get(name, 0)),])
method_cache_counter.unwrap_spec = [ObjSpace, str]
def reset_method_cache_counter(space):
    """Reset the method cache counter to zero for all method names."""
    assert space.config.objspace.std.withmethodcachecounter
    ec = space.getexecutioncontext()
    ec.method_cache_misses = {}
    ec.method_cache_hits = {}
| Python |
# Package initialisation
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
    """The __pypy__ module: PyPy-specific introspection helpers."""
    appleveldefs = {
    }
    interpleveldefs = {
        'internal_repr' : 'interp_magic.internal_repr',
    }
    def setup_after_space_initialization(self):
        """Register extra helpers that only exist untranslated or under
        specific translation options."""
        if not self.space.config.translating:
            self.extra_interpdef('isfake', 'interp_magic.isfake')
            self.extra_interpdef('interp_pdb', 'interp_magic.interp_pdb')
        if self.space.config.objspace.std.withmethodcachecounter:
            self.extra_interpdef('method_cache_counter',
                                 'interp_magic.method_cache_counter')
            self.extra_interpdef('reset_method_cache_counter',
                                 'interp_magic.reset_method_cache_counter')
| Python |
#
| Python |
"""Compatibility layer for CPython's parser module"""
from pypy.interpreter.pyparser.tuplebuilder import TupleBuilder
from pythonparse import make_pyparser
from pythonutil import pypy_parse
import symbol # XXX use PYTHON_PARSER.symbols ?
from compiler import transformer, compile as pycompile
PYTHON_PARSER = make_pyparser()
def suite( source ):
    """Parse *source* in 'exec' mode and return the parse tree as nested
    tuples: (symbol.encoding_decl, tree, encoding) when the source has an
    encoding declaration, (None, tree, None) otherwise."""
    # bug fix: dropped the unused per-call 'strings' list and the
    # unreachable trailing 'return nested_tuples' (both if/else branches
    # already return)
    builder = TupleBuilder(PYTHON_PARSER)
    PYTHON_PARSER.parse_source(source, 'exec', builder)
    nested_tuples = builder.stack[-1].as_tuple()
    if builder.source_encoding is not None:
        return (symbol.encoding_decl, nested_tuples, builder.source_encoding)
    else:
        return (None, nested_tuples, None)
def expr( source ):
    """Parse *source* in 'eval' mode and return the parse tree as nested
    tuples: (symbol.encoding_decl, tree, encoding) when the source has an
    encoding declaration, (None, tree, None) otherwise."""
    # same dead-code cleanup as suite() above
    builder = TupleBuilder(PYTHON_PARSER)
    PYTHON_PARSER.parse_source(source, 'eval', builder)
    nested_tuples = builder.stack[-1].as_tuple()
    if builder.source_encoding is not None:
        return (symbol.encoding_decl, nested_tuples, builder.source_encoding)
    else:
        return (None, nested_tuples, None)
def ast2tuple(node, line_info=False):
    """Quick dummy implementation of parser.ast2tuple(tree) function"""
    # delegate straight to the node's own tuple conversion
    return node.totuple(line_info)
| Python |
# ______________________________________________________________________
# ParserError exception
class ParserError(Exception):
    """Exception raised for errors encountered while parsing."""
class ASTVisitor(object):
    """Visitor base class providing the visit method in replacement of the
    former visitor.visit = walker.dispatch.  Every visitXXX method simply
    delegates to default(), which descends into the child nodes; subclasses
    override the node kinds they care about."""
    def default(self, node):
        for child in node.getChildNodes():
            child.accept(self)
        return node

# The AST node kinds; each gets an identical delegating visitXXX method
# installed below, so the class behaves exactly as if each method had been
# written out by hand.
_AST_NODE_NAMES = """
    Expression EmptyNode AbstractFunction AbstractTest Add And AssAttr
    AssList AssName AssSeq AssTuple Assert Assign AugAssign Backquote
    BinaryOp BitOp Bitand Bitor Bitxor Break CallFunc Class Compare
    CondExpr Const Continue Decorators Dict Discard Div Ellipsis Exec
    FloorDiv For From Function GenExpr GenExprFor GenExprIf GenExprInner
    Getattr Global If Import Invert Keyword Lambda LeftShift List ListComp
    ListCompFor ListCompIf Mod Module Mul Name NoneConst Not Or Pass Power
    Print Printnl Raise Return RightShift Slice Sliceobj Stmt Sub Subscript
    TryExcept TryFinally Tuple UnaryAdd UnaryOp UnarySub While With Yield
    """.split()

def _make_default_visit(node_name):
    # one fresh function per name, all forwarding to self.default(node)
    def visit(self, node):
        return self.default(node)
    visit.__name__ = 'visit' + node_name
    return visit

for _node_name in _AST_NODE_NAMES:
    setattr(ASTVisitor, 'visit' + _node_name,
            _make_default_visit(_node_name))
del _node_name
class ASTMutator(ASTVisitor):
    """Variant of ASTVisitor driven through node.mutate(self) instead of
    node.accept(self); the visitXXX methods of a subclass are expected to
    return the (possibly replaced) node."""
    def default(self, node):
        # identity mutation: hand the node back untouched
        return node
| Python |
"""this one logs simple assignments and somewhat clearly shows
that we need a nice API to define "joinpoints". Maybe a SAX-like
(i.e. event-based) API ?
XXX: crashes on everything else than simple assignment (AssAttr, etc.)
"""
from parser import ASTPrintnl, ASTConst, ASTName, ASTAssign, ASTMutator
from parser import install_compiler_hook, source2ast
# source templates instantiated with the assigned variable's name
# (5 resp. 2 '%s' slots); parsed into statements by get_statements()
BEFORE_LOG_SOURCE = """if '%s' in locals() or '%s' in globals():
    print '(before) %s <--', locals().get('%s', globals().get('%s', '<XXX>'))
"""
AFTER_LOG_SOURCE = "print '(after) %s <--', %s"
def get_statements(source):
    """Parse *source* and return the statement nodes of its module body."""
    module = source2ast(source)
    return module.node.nodes
class Tracer(ASTMutator):
    """Mutator that wraps every simple assignment with before/after
    logging statements."""
    def visitAssName(self, assname):
        # climb to the enclosing Assign, then to its statement list;
        # relies on .parent links being present on the nodes
        assign = assname
        while not isinstance(assign, ASTAssign):
            assign = assign.parent
        stmt = assign.parent
        varname = assname.name
        before_stmts = get_statements(BEFORE_LOG_SOURCE % ((varname,) * 5))
        after_stmts = get_statements(AFTER_LOG_SOURCE % (varname, varname))
        stmt.insert_before(assign, before_stmts)
        stmt.insert_after(assign, after_stmts)
        return assname
def _trace(ast, enc, filename):
    # compiler-hook entry point: rewrite the AST with Tracer
    return ast.accept(Tracer())
install_compiler_hook(_trace)
# demo: each assignment below gets before/after logging injected
code = """
a = 3
b = 2
a = 1
"""
exec code
| Python |
import parser
class ConstMutator(parser.ASTMutator):
    """Demo mutator: rewrites every constant 3 into a 2."""
    def visitConst(self, node):
        if node.value == 3:
            node.value = 2
        return node
def threebecomestwo(ast, enc, filename):
    # compiler-hook entry point
    ast.mutate(ConstMutator())
    return ast
# install the hook
parser.install_compiler_hook(threebecomestwo)
print eval('3*2')  # prints 4: the literal 3 was rewritten to 2
| Python |
# Copyright (c) 2000-2003 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
"""
"""
__revision__ = "$Id: $"
import pythonutil
from compiler.visitor import ASTVisitor
from compiler.bytecode import *
def compile(source, filename, mode, flags=None, dont_inherit=None):
    """Replacement for builtin compile() function"""
    # source to ast conversion
    if mode == "single":
        tree = pythonutil.ast_single_input( source )
    elif mode == "eval":
        tree = pythonutil.ast_eval_input( source )
    elif mode == "exec":
        tree = pythonutil.ast_srcfile_input( source, filename )
    else:
        # bug fix: the original message was a meaningless "Not found"
        raise RuntimeError("compile() mode must be 'exec', 'eval' or 'single'")
    # bug fix: forward the caller's flags/dont_inherit -- the original
    # unconditionally passed None for both, silently dropping them
    return compile_ast(tree, filename, flags=flags, dont_inherit=dont_inherit)
def compile_ast( tree, filename, flags=None, dont_inherit=None ):
    """Drive CompilerVisitor over *tree*; returns (tree, visitor)."""
    v = CompilerVisitor( filename, flags, dont_inherit )
    tree.visit(v)
    return tree, v
class PrintContext(object):
    """Debug emission context: prints instructions (prefixed with the
    current line number) instead of assembling bytecode."""
    def __init__(self):
        self.lineno = 0
    def emit(self, insn):
        print "% 5d %s" % (self.lineno, insn)
    def emit_arg(self, insn, arg ):
        print "% 5d %8s %s" % (self.lineno, insn, arg)
    def set_lineno(self, lineno):
        self.lineno = lineno
class Block(object):
    """A straight-line run of bytecode instructions.

    Accumulates instructions in emission order and records the block's
    byte offset within the final code object.
    """
    def __init__(self):
        self.insns = []       # instructions, in emission order
        self.start_pos = 0    # byte offset of this block's first insn
    def append(self, instruction):
        """Add one instruction at the end of the block."""
        self.insns.append(instruction)
    def get_size(self):
        """Total encoded size in bytes of all the instructions."""
        return sum([insn.size() for insn in self.insns])
    def set_start_pos(self, offset):
        """Record the block's byte offset within the code object."""
        self.start_pos = offset
###########################
### XXX MUST FIX SET_LINENO
###########################
class CompilerVisitor(ASTVisitor):
    """Basic code generator for Python Bytecode"""
    # kinds of setup-blocks tracked on self.setups
    LOOP = 1
    EXCEPT = 2
    TRY_FINALLY = 3
    END_FINALLY = 4
    def __init__(self, filename, flags, dont_inherit ):
        self.scopes = []
        self.blocks = []
        self.setups = []   # for loops and try/finally
        self.code = None
        self.current_block = None
        self.ctx = PrintContext()

    ### Visitor functions
    def visitModule( self, node ):
        # setup doc
        self.newBlock()
        node.node.visit( self )
        # build code object
        for block in self.blocks:
            pass
    def visitExpression(self, node):
        pass
    def visitFunction(self, node):
        pass
    def visitIf(self, node):
        end = Block()
        for test, suite in node.tests:
            if is_constant_false(test):
                continue
            test.visit(self)   # emit test code in current block
            nextTest = Block()
            self.emit(CondJump('IF_FALSE', nextTest))
            self.nextBlock()
            self.emit(PopTop())
            suite.visit(self)
            self.emit(CondJump('FWD', end))
            self.startBlock(nextTest)
            self.emit(PopTop())
        if node.else_:
            node.else_.visit(self)
        self.nextBlock(end)
    def visitWhile(self, node):
        # XXX emit LINENO here ?
        loop = self.newBlock()
        else_ = self.newBlock()
        after = self.newBlock()
        self.emit(SetupLoop(after))
        self.nextBlock(loop)
        self.setups.append((self.LOOP, loop))
        node.test.visit(self)
        # bug fix: was CondJump('IF_FALSE', else_or, after) with the
        # undefined name 'else_or'; jump to the else_ block on a false
        # test, matching the two-argument form used by visitIf
        self.emit(CondJump('IF_FALSE', else_))
        self.nextBlock()
        self.emit(PopTop())
        node.body.visit(self)
        # bug fix: 'ABOSLUTE' was a typo for 'ABSOLUTE' (cf. visitFor)
        self.emit(CondJump('ABSOLUTE', loop))
        self.startBlock(else_)   # or just the POPs if not else clause
        self.emit(PopTop())
        self.emit(PopBlock())
        self.setups.pop()
        if node.else_:
            node.else_.visit(self)
        self.nextBlock(after)
    def visitFor(self, node):
        start = self.newBlock()
        anchor = self.newBlock()
        after = self.newBlock()
        self.setups.append((self.LOOP, start))
        self.emit(SetupLoop(after))
        node.list.visit(self)
        self.emit(GetIter())
        self.nextBlock(start)
        # NOTE(review): set_lineno is not defined on this class -- the
        # call only works if a base class provides it; confirm
        self.set_lineno(node, force=1)
        self.emit(ForIter(anchor))
        node.assign.visit(self)
        node.body.visit(self)
        self.emit(CondJump('ABSOLUTE', start))
        self.nextBlock(anchor)
        self.emit(PopBlock())
        self.setups.pop()
        if node.else_:
            # bug fix: 'visist' was a typo for 'visit'
            node.else_.visit(self)
        self.nextBlock(after)
    def visitBreak(self, node):
        if not self.setups:
            raise SyntaxError("'break' outside loop (%s, %d)" % \
                              (node.filename, node.lineno))
        # self.set_lineno(node)
        self.emit(BreakLoop())

    ## Shortcut methods
    def emit(self, bytecode):
        """Emit one instruction through the current context."""
        bytecode.emit(self.ctx)

    ### Block handling functions
    def newBlock(self):
        """Create a new block and make it current"""
        b = Block()
        self.blocks.append(b)
        # self.current_block = b
        return b
    def nextBlock(self, block=None):
        """goto next block in the flow graph"""
        if block is None:
            block = self.newBlock()   # newBlock() already registers it
        elif block not in self.blocks:
            # bug fix: the original unconditionally appended, so blocks
            # created by newBlock() ended up in self.blocks twice
            self.blocks.append(block)
        self.startBlock(block)
    def startBlock(self, block):
        self.current_block = block
if __name__ == "__main__":
    # smoke test: compile this very file and dump the visitor's output
    testf = file("pycodegen2.py").read()
    ast, v = compile(testf,"pycodegen2.py","exec")
    print ast
    print v
| Python |
# Emulation layer for the recparser module
# make it so that pyparser matches the 'parser' module interface
from pypy.interpreter.baseobjspace import ObjSpace, Wrappable, W_Root
from pypy.interpreter.gateway import interp2app, applevel
from pypy.interpreter.error import OperationError
from pypy.interpreter.typedef import TypeDef
from pypy.interpreter.typedef import interp_attrproperty, GetSetProperty
from pypy.interpreter.pycode import PyCode
from pypy.interpreter.pyparser.syntaxtree import TokenNode, SyntaxNode, AbstractSyntaxVisitor
from pypy.interpreter.pyparser.pythonparse import make_pyparser
from pypy.interpreter.pyparser.error import SyntaxError
from pypy.interpreter.pyparser import grammar, symbol, pytoken
from pypy.interpreter.argument import Arguments
# backward compat (temp)
PYTHON_PARSER = make_pyparser()
__all__ = [ "ASTType", "STType", "suite", "expr" ]
class SyntaxToTupleVisitor(AbstractSyntaxVisitor):
    """Flattens a syntax tree into the nested-tuple form of CPython's
    parser module, using an explicit stack of wrapped tuples."""
    def __init__(self, space, line_info):
        self.space = space
        self.line_info = line_info   # include line numbers in token tuples?
        self.tuple_stack_w = []
    def w_result( self ):
        # after the full traversal, the single remaining entry is the
        # tuple for the whole tree
        return self.tuple_stack_w[-1]
    def visit_syntaxnode( self, node ):
        space = self.space
        # visiting in depth first order
        for n in node.nodes:
            n.visit(self)
        n = len(node.nodes)
        start = len(self.tuple_stack_w) - n
        assert start >= 0 # annotator hint
        # collapse the children's tuples into one (name, child...) tuple
        l = [ space.wrap( node.name ) ] + self.tuple_stack_w[start:]
        del self.tuple_stack_w[start:]
        self.tuple_stack_w.append( space.newtuple( l ) )
    def visit_tempsyntaxnode( self, node ):
        assert False, "Should not come here"
    def visit_tokennode( self, node ):
        space = self.space
        tokens = space.default_compiler.parser.tokens
        num = node.name
        lineno = node.lineno
        if node.value is not None:
            val = node.value
        else:
            # value-less tokens get their canonical text, except the
            # purely structural ones, which stay empty
            if num != tokens['NEWLINE'] and \
               num != tokens['INDENT'] and \
               num != tokens['DEDENT'] and \
               num != tokens['ENDMARKER']:
                val = space.default_compiler.parser.tok_rvalues[num]
            else:
                val = node.value or ''
        if self.line_info:
            self.tuple_stack_w.append( space.newtuple( [space.wrap(num),
                                                        space.wrap(val),
                                                        space.wrap(lineno)]))
        else:
            self.tuple_stack_w.append( space.newtuple( [space.wrap(num),
                                                        space.wrap(val)]))
class STType (Wrappable):
    """Class STType
    """
    def __init__ (self, space, syntaxnode ):
        """STType.__init__()
        Wrapper for parse tree data returned by parse_python_source.
        This encapsulate the syntaxnode at the head of the syntax tree
        """
        self.space = space
        self.node = syntaxnode
    def descr_totuple(self, line_info = True):
        """STType.totuple()
        Convert the ST object into a tuple representation.
        """
        visitor = SyntaxToTupleVisitor(self.space, line_info )
        self.node.visit( visitor )
        return visitor.w_result()
    descr_totuple.unwrap_spec=['self', int]
    def tolist(self, line_info = True):
        """STType.tolist()
        Convert the ST object into a list representation.
        """
        return self.node.tolist( line_info )
    def isexpr(self):
        """STType.isexpr()
        Returns true if the root node in the syntax tree is an expr node,
        false otherwise.
        """
        return self.node.name == symbol.eval_input
    def issuite(self):
        """STType.issuite()
        Returns true if the root node in the syntax tree is a suite node,
        false otherwise.
        """
        return self.node.name == symbol.file_input
    def descr_compile(self, w_filename = "<syntax_tree>"):
        """STType.compile()
        """
        # We use the compiler module for that
        # NOTE(review): the default w_filename is an unwrapped
        # interp-level string -- presumably the gateway wraps it; confirm
        space = self.space
        w_tup = self.descr_totuple(line_info=True)
        w_compileAST = mycompile(space, w_tup, w_filename)
        if self.isexpr():
            return exprcompile(space, w_compileAST)
        else:
            return modcompile(space, w_compileAST)
# historical alias
ASTType = STType
app = applevel("""
def mycompile(tup, filename):
import compiler
transformer = compiler.transformer.Transformer()
compileAST = transformer.compile_node(tup)
compiler.misc.set_filename(filename, compileAST)
return compileAST
def exprcompile(compileAST):
import compiler
gen = compiler.pycodegen.ExpressionCodeGenerator(compileAST)
return gen.getCode()
def modcompile(compileAST):
import compiler
gen = compiler.pycodegen.ModuleCodeGenerator(compileAST)
return gen.getCode()
""", filename=__file__)
mycompile = app.interphook("mycompile")
exprcompile = app.interphook("exprcompile")
modcompile = app.interphook("modcompile")
STType.typedef = TypeDef("parser.st",
compile = interp2app(STType.descr_compile),
totuple = interp2app(STType.descr_totuple),
)
def parse_python_source(space, source, mode):
    """Parse *source* with the shared PYTHON_PARSER in the given mode
    ('exec' or 'eval') and return the root syntax node; interp-level
    SyntaxErrors become app-level SyntaxError."""
    builder = grammar.BaseGrammarBuilder(debug=False, parser=PYTHON_PARSER)
    builder.space = space
    try:
        PYTHON_PARSER.parse_source(source, mode, builder )
        return builder.stack[-1]
    except SyntaxError, e:
        raise OperationError(space.w_SyntaxError,
                             e.wrap_info(space, '<string>'))
def suite( space, source ):
    """Parse *source* as a whole module ('exec' mode) and return a
    wrapped STType holding the syntax tree."""
    # make the annotator life easier (don't use str.splitlines())
    tree = parse_python_source(space, source, "exec")
    return space.wrap(STType(space, tree))
suite.unwrap_spec = [ObjSpace, str]
def expr( space, source ):
    """Parse *source* as a single expression ('eval' mode) and return a
    wrapped STType holding the syntax tree."""
    # make the annotator life easier (don't use str.splitlines())
    tree = parse_python_source(space, source, "eval")
    return space.wrap(STType(space, tree))
expr.unwrap_spec = [ObjSpace, str]
def ast2tuple(space, node, line_info=0):
    """Quick dummy implementation of parser.ast2tuple(tree) function"""
    return node.descr_totuple( line_info )
ast2tuple.unwrap_spec = [ObjSpace, STType, int]
def unwrap_syntax_tree( space, w_sequence ):
    """Rebuild an interp-level syntax tree from its app-level nested
    sequence form (the inverse of SyntaxToTupleVisitor)."""
    items = space.unpackiterable( w_sequence )
    parser = space.default_compiler.parser
    nodetype = space.int_w( items[0] )
    if parser.is_base_token(nodetype):
        # non-terminal: children follow the node type
        nodes = []
        for w_node in items[1:]:
            node = unwrap_syntax_tree( space, w_node )
            nodes.append( node )
        return SyntaxNode( nodetype, nodes )
    else:
        # terminal: value, optionally followed by a line number
        value = space.str_w( items[1] )
        lineno = -1
        if len(items)>2:
            lineno = space.int_w( items[2] )
        return TokenNode( nodetype, value, lineno )
def sequence2st(space, w_sequence):
    """Turn a nested app-level sequence back into a wrapped STType."""
    syntaxtree = unwrap_syntax_tree( space, w_sequence )
    return space.wrap( STType(space, syntaxtree) )
def source2ast(space, source):
    """PyPy extension: parse *source* and return the wrapped AST."""
    # local import deliberately shadows this function's own name
    from pypy.interpreter.pyparser.pythonutil import source2ast
    return space.wrap(source2ast(source, 'exec', space=space))
source2ast.unwrap_spec = [ObjSpace, str]
def decode_string_literal(space, s, w_encoding=None):
    """PyPy extension: decode a string literal the way the parser does,
    honouring an optional source encoding."""
    from pypy.interpreter.pyparser.parsestring import parsestr
    if space.is_true(w_encoding):
        encoding = space.str_w(w_encoding)
    else:
        encoding = None
    return parsestr(space, encoding, s)
decode_string_literal.unwrap_spec = [ObjSpace, str, W_Root]
# append typedefs to the grammar objects
from pypy.interpreter.pyparser.grammar import GrammarElement, Alternative
from pypy.interpreter.pyparser.grammar import Sequence, KleeneStar, Token
def descr_grammarelement_repr( self, space ):
    """TODO: make __repr__ RPython"""
    import symbol
    return space.wrap( self.display(0, symbol.sym_name) )
def descr_grammarelement_get_children( self, space ):
    """Return a wrapped list of this element's child grammar rules."""
    return space.newlist( [ space.wrap(it) for it in self.args ] )
# patch the descriptors onto GrammarElement and expose them app-level
GrammarElement.descr_grammarelement_repr = descr_grammarelement_repr
GrammarElement.descr_grammarelement_get_children = descr_grammarelement_get_children
GrammarElement.typedef = TypeDef( "GrammarElement",
    #__repr__ = interp2app(GrammarElement.descr_grammarelement_repr,
    #                      unwrap_spec=['self', ObjSpace] ),
    get_children = interp2app(GrammarElement.descr_grammarelement_get_children,
                              unwrap_spec=['self', ObjSpace] ),
    )
def descr_alternative_append( self, space, w_rule ):
    """Append a grammar rule as a new child of this element."""
    rule = space.interp_w(GrammarElement, w_rule)
    self.args.append( rule )
def descr_alternative___getitem__(self, space, idx ):
    """Return the child rule at *idx*, wrapped."""
    return space.wrap(self.args[idx])
def descr_alternative___setitem__(self, space, idx, w_rule ):
    """Replace the child rule at *idx* with *w_rule*.

    Bug fix: the original unwrapped the new rule but never stored it
    (it returned the old child instead), so app-level item assignment
    silently left the grammar unchanged.
    """
    rule = space.interp_w(GrammarElement, w_rule)
    self.args[idx] = rule
def descr_alternative___delitem__(self, space, idx ):
    """Delete the child rule at *idx*."""
    del self.args[idx]
def descr_alternative_insert(self, space, idx, w_rule ):
    """Insert a grammar rule at position *idx* (bounds-checked)."""
    rule = space.interp_w(GrammarElement, w_rule)
    if idx<0 or idx>len(self.args):
        raise OperationError( space.w_IndexError, space.wrap("Invalid index") )
    self.args.insert( idx, rule )
# Alternative and Sequence share the same child-list descriptors
Alternative.descr_alternative_append = descr_alternative_append
Alternative.descr_alternative_insert = descr_alternative_insert
Alternative.descr_alternative___getitem__ = descr_alternative___getitem__
Alternative.descr_alternative___setitem__ = descr_alternative___setitem__
Alternative.descr_alternative___delitem__ = descr_alternative___delitem__
Alternative.typedef = TypeDef("Alternative", GrammarElement.typedef,
    __getitem__ = interp2app( Alternative.descr_alternative___getitem__,
                              unwrap_spec=['self',ObjSpace,int]),
    __setitem__ = interp2app( Alternative.descr_alternative___setitem__,
                              unwrap_spec=['self',ObjSpace,int,W_Root]),
    __delitem__ = interp2app( Alternative.descr_alternative___delitem__,
                              unwrap_spec=['self',ObjSpace,int]),
    insert = interp2app( Alternative.descr_alternative_insert,
                         unwrap_spec = ['self', ObjSpace, int, W_Root ] ),
    append = interp2app( Alternative.descr_alternative_append,
                         unwrap_spec = ['self', ObjSpace, W_Root ] ),
    )
Sequence.descr_alternative_append = descr_alternative_append
Sequence.descr_alternative_insert = descr_alternative_insert
Sequence.descr_alternative___getitem__ = descr_alternative___getitem__
Sequence.descr_alternative___setitem__ = descr_alternative___setitem__
Sequence.descr_alternative___delitem__ = descr_alternative___delitem__
Sequence.typedef = TypeDef("Sequence", GrammarElement.typedef,
    __getitem__ = interp2app( Sequence.descr_alternative___getitem__,
                              unwrap_spec=['self',ObjSpace,int]),
    __setitem__ = interp2app( Sequence.descr_alternative___setitem__,
                              unwrap_spec=['self',ObjSpace,int,W_Root]),
    __delitem__ = interp2app( Sequence.descr_alternative___delitem__,
                              unwrap_spec=['self',ObjSpace,int]),
    insert = interp2app( Sequence.descr_alternative_insert,
                         unwrap_spec = ['self', ObjSpace, int, W_Root ] ),
    append = interp2app( Sequence.descr_alternative_append,
                         unwrap_spec = ['self', ObjSpace, W_Root ] ),
    )
# KleeneStar has exactly one child, hence the dedicated accessors
def descr_kleenestar___getitem__(self, space, idx ):
    """Return the single child rule (idx must be 0)."""
    if idx!=0:
        raise OperationError( space.w_ValueError, space.wrap("KleeneStar only support one child"))
    return space.wrap(self.args[idx])
def descr_kleenestar___setitem__(self, space, idx, w_rule ):
    """Replace the single child rule (idx must be 0)."""
    if idx!=0:
        raise OperationError( space.w_ValueError, space.wrap("KleeneStar only support one child"))
    rule = space.interp_w(GrammarElement, w_rule)
    self.args[idx] = rule
KleeneStar.descr_kleenestar___getitem__ = descr_kleenestar___getitem__
KleeneStar.descr_kleenestar___setitem__ = descr_kleenestar___setitem__
KleeneStar.typedef = TypeDef("KleeneStar", GrammarElement.typedef,
    __getitem__ = interp2app(KleeneStar.descr_kleenestar___getitem__,
                             unwrap_spec=[ 'self', ObjSpace, int]),
    __setitem__ = interp2app(KleeneStar.descr_kleenestar___setitem__,
                             unwrap_spec=[ 'self', ObjSpace, int, W_Root ]),
    )
Token.typedef = TypeDef("Token", GrammarElement.typedef )
| Python |
from pypy.interpreter.error import OperationError, debug_print
import pypy.interpreter.pyparser.pythonparse
from pypy.interpreter.mixedmodule import MixedModule
# Forward imports so they run at startup time
import pyparser
import pypy.interpreter.pyparser.pythonlexer
import pypy.interpreter.pyparser.pythonparse
import pypy.interpreter.pycompiler
class Module(MixedModule):
    """The builtin parser module.
    """
    applevel_name = 'parser'
    appleveldefs = {
        'ParserError' : 'app_class.ParserError',
        'ASTVisitor': 'app_class.ASTVisitor',
        'ASTMutator': 'app_class.ASTMutator',
        }
    interpleveldefs = {
        '__name__' : '(space.wrap("parser"))',
        '__doc__' : '(space.wrap("parser (recparser version) module"))',
        'suite' : 'pyparser.suite',
        'expr' : 'pyparser.expr',
        'STType' : 'pyparser.STType',
        'ast2tuple' : 'pyparser.ast2tuple',
## # 'ASTType' : 'pyparser.STType',
        'sequence2st' : 'pyparser.sequence2st',
## #'eval_input' : 'pyparser.eval_input',
## #'file_input' : 'pyparser.file_input',
## #'compileast' : 'pyparser.compileast',
## #'st2tuple' : 'pyparser.st2tuple',
## #'st2list' : 'pyparser.st2list',
## #'issuite' : 'pyparser.issuite',
## #'ast2tuple' : 'pyparser.ast2tuple',
## #'tuple2st' : 'pyparser.tuple2st',
## #'isexpr' : 'pyparser.isexpr',
## #'ast2list' : 'pyparser.ast2list',
## #'sequence2ast' : 'pyparser.sequence2ast',
## #'tuple2ast' : 'pyparser.tuple2ast',
## #'_pickler' : 'pyparser._pickler',
## #'compilest' : 'pyparser.compilest',
        # PyPy extension
        'source2ast' : "pyparser.source2ast",
        'decode_string_literal': 'pyparser.decode_string_literal',
        'install_compiler_hook' : 'pypy.interpreter.pycompiler.install_compiler_hook',
        }
# Automatically exports each AST class
from pypy.interpreter.astcompiler.ast import nodeclasses
for klass_name in nodeclasses:
    Module.interpleveldefs['AST' + klass_name] = 'pypy.interpreter.astcompiler.ast.%s' % klass_name
# also export every compiler-flag constant not already defined above
from pypy.interpreter.astcompiler import consts
for name in dir(consts):
    if name.startswith('__') or name in Module.interpleveldefs:
        continue
    Module.interpleveldefs[name] = ("space.wrap(%s)" %
                                    (getattr(consts, name), ))
| Python |
# Package initialisation
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
    """
    This module implements marshal at interpreter level.
    """
    appleveldefs = {
    }
    # all entry points live at interp-level in interp_marshal
    interpleveldefs = {
        'dump' : 'interp_marshal.dump',
        'dumps' : 'interp_marshal.dumps',
        'load' : 'interp_marshal.load',
        'loads' : 'interp_marshal.loads',
        'version' : 'space.wrap(interp_marshal.Py_MARSHAL_VERSION)',
    }
| Python |
from pypy.interpreter.baseobjspace import ObjSpace
from pypy.interpreter.error import OperationError
from pypy.rlib.rarithmetic import intmask
import sys
# Py_MARSHAL_VERSION = 2
# this is from Python 2.5
# already implemented, but for compatibility,
# we default to version 1. Version 2 can be
# tested, anyway, by using the optional parameter.
# XXX auto-configure this by inspecting the
# Python version we emulate. How to do this?
Py_MARSHAL_VERSION = 1
def dump(space, w_data, w_f, w_version=Py_MARSHAL_VERSION):
"""Write the 'data' object into the open file 'f'."""
writer = FileWriter(space, w_f)
# note: bound methods are currently not supported,
# so we have to pass the instance in, instead.
##m = Marshaller(space, writer.write, space.int_w(w_version))
m = Marshaller(space, writer, space.int_w(w_version))
m.put_w_obj(w_data)
def dumps(space, w_data, w_version=Py_MARSHAL_VERSION):
"""Return the string that would have been written to a file
by dump(data, file)."""
m = StringMarshaller(space, space.int_w(w_version))
m.put_w_obj(w_data)
return space.wrap(m.get_value())
def load(space, w_f):
"""Read one value from the file 'f' and return it."""
reader = FileReader(space, w_f)
u = Unmarshaller(space, reader)
return u.get_w_obj(False)
def loads(space, w_str):
    """Convert a string back to a value.  Extra characters in the string are
    ignored."""
    unmarshaller = StringUnmarshaller(space, w_str)
    return unmarshaller.get_w_obj(False)
class FileWriter(object):
    """Adapter handing the Marshaller a write() backed by an app-level
    file-like object (its 'write' attribute is fetched once, up front)."""
    def __init__(self, space, w_f):
        self.space = space
        try:
            self.func = space.getattr(w_f, space.wrap('write'))
            # XXX how to check if it is callable?
        except OperationError, e:
            if not e.match(space, space.w_AttributeError):
                raise
            # no 'write' attribute: mirror CPython's TypeError message
            raise OperationError(space.w_TypeError, space.wrap(
                'marshal.dump() 2nd arg must be file-like object'))
    def raise_eof(self):
        # NOTE(review): unused on the writer side -- appears copied from
        # FileReader; kept for symmetry.
        space = self.space
        raise OperationError(space.w_EOFError, space.wrap(
            'EOF read where object expected'))
    def write(self, data):
        # forward an interp-level string to the app-level file
        space = self.space
        space.call_function(self.func, space.wrap(data))
class FileReader(object):
    """Adapter handing the Unmarshaller a read() backed by an app-level
    file-like object (its 'read' attribute is fetched once, up front)."""
    def __init__(self, space, w_f):
        self.space = space
        try:
            self.func = space.getattr(w_f, space.wrap('read'))
            # XXX how to check if it is callable?
        except OperationError, e:
            if not e.match(space, space.w_AttributeError):
                raise
            # no 'read' attribute: mirror CPython's TypeError message
            raise OperationError(space.w_TypeError, space.wrap(
                'marshal.load() arg must be file-like object'))
    def read(self, n):
        """Read exactly n bytes; a short read is treated as EOF."""
        space = self.space
        w_ret = space.call_function(self.func, space.wrap(n))
        ret = space.str_w(w_ret)
        if len(ret) != n:
            self.raise_eof()
        return ret
    def raise_eof(self):
        space = self.space
        raise OperationError(space.w_EOFError, space.wrap(
            'EOF read where object expected'))
MAX_MARSHAL_DEPTH = 5000
# the above is unfortunately necessary because CPython
# relies on it without run-time checking.
# PyPy is currently in much bigger trouble, because the
# multimethod dispatches cause deeper stack nesting.
class _Base(object):
    """Shared base of Marshaller and Unmarshaller: error-reporting helper."""
    def raise_exc(self, msg):
        # raise an app-level ValueError carrying 'msg'
        space = self.space
        raise OperationError(space.w_ValueError, space.wrap(msg))
class Marshaller(_Base):
    """Serializes wrapped objects into the CPython marshal byte format,
    emitting bytes through a writer object (see FileWriter).  Integers are
    written little-endian; a nesting counter guards against overflowing
    the C stack on deeply nested structures."""
    # _annspecialcase_ = "specialize:ctr_location" # polymorphic
    # does not work with subclassing
    def __init__(self, space, writer, version):
        self.space = space
        ## self.put = putfunc
        self.writer = writer
        self.version = version
        self.nesting = 0 # contribution to compatibility
        self.stringtable = {}
    ## currently we cannot use a put that is a bound method
    ## from outside. Same holds for get.
    def put(self, s):
        # emit a raw byte string
        self.writer.write(s)
    def put1(self, c):
        # emit a single byte (a 1-char string)
        self.writer.write(c)
    def atom(self, typecode):
        #assert type(typecode) is str and len(typecode) == 1
        # type(char) not supported
        self.put1(typecode)
    def atom_int(self, typecode, x):
        # typecode byte followed by x as a 32-bit little-endian integer
        a = chr(x & 0xff)
        x >>= 8
        b = chr(x & 0xff)
        x >>= 8
        c = chr(x & 0xff)
        x >>= 8
        d = chr(x & 0xff)
        self.put(typecode + a + b + c + d)
    def atom_int64(self, typecode, x):
        # low 32 bits via atom_int, then the high 32 bits
        self.atom_int(typecode, x)
        self.put_int(x>>32)
    def atom_str(self, typecode, x):
        # typecode, 32-bit length, then the raw bytes
        self.atom_int(typecode, len(x))
        self.put(x)
    def atom_strlist(self, typecode, tc2, x):
        # typecode, 32-bit count, then each item as an atom_str with tc2
        self.atom_int(typecode, len(x))
        atom_str = self.atom_str
        for item in x:
            # type(str) seems to be forbidden
            #if type(item) is not str:
            #    self.raise_exc('object with wrong type in strlist')
            atom_str(tc2, item)
    def start(self, typecode):
        # type(char) not supported
        self.put(typecode)
    def put_short(self, x):
        # 16-bit little-endian
        a = chr(x & 0xff)
        x >>= 8
        b = chr(x & 0xff)
        self.put(a + b)
    def put_int(self, x):
        # 32-bit little-endian, no typecode
        a = chr(x & 0xff)
        x >>= 8
        b = chr(x & 0xff)
        x >>= 8
        c = chr(x & 0xff)
        x >>= 8
        d = chr(x & 0xff)
        self.put(a + b + c + d)
    def put_pascal(self, x):
        # one length byte followed by up to 255 data bytes
        lng = len(x)
        if lng > 255:
            self.raise_exc('not a pascal string')
        self.put(chr(lng))
        self.put(x)
    def put_w_obj(self, w_obj):
        """Marshal one wrapped object, enforcing the depth limit."""
        self.nesting += 1
        if self.nesting < MAX_MARSHAL_DEPTH:
            self.space.marshal_w(w_obj, self)
        else:
            self._overflow()
        self.nesting -= 1
    # this function is inlined below
    def put_list_w(self, list_w, lng):
        self.nesting += 1
        self.put_int(lng)
        idx = 0
        while idx < lng:
            self.put_w_obj(list_w[idx])
            idx += 1
        self.nesting -= 1
    # inlined variant (intentionally redefines the one above): checks the
    # depth limit once for the whole list instead of once per element
    def put_list_w(self, list_w, lng):
        self.nesting += 1
        self.put_int(lng)
        idx = 0
        space = self.space
        if self.nesting < MAX_MARSHAL_DEPTH:
            while idx < lng:
                w_obj = list_w[idx]
                self.space.marshal_w(w_obj, self)
                idx += 1
        else:
            self._overflow()
        self.nesting -= 1
    def _overflow(self):
        # depth limit exceeded: report like CPython does
        self.raise_exc('object too deeply nested to marshal')
class StringMarshaller(Marshaller):
    """Marshaller that accumulates output into a growable list of
    characters instead of writing to a file; the result is fetched with
    get_value().  The buffer doubles in size ('buflis *= 2' duplicates the
    list contents, only the length matters) whenever it would overflow."""
    def __init__(self, space, version):
        Marshaller.__init__(self, space, None, version)
        # pre-sized char buffer and the index of the next free slot
        self.buflis = [chr(0)] * 128
        self.bufpos = 0
    def put(self, s):
        # copy an arbitrary-length string into the buffer
        pos = self.bufpos
        lng = len(s)
        newpos = pos + lng
        while len(self.buflis) < newpos:
            self.buflis *= 2
        idx = 0
        while idx < lng:
            self.buflis[pos + idx] = s[idx]
            idx += 1
        self.bufpos = newpos
    def put1(self, c):
        # fast path for a single character
        pos = self.bufpos
        newpos = pos + 1
        if len(self.buflis) < newpos:
            self.buflis *= 2
        self.buflis[pos] = c
        self.bufpos = newpos
    def atom_int(self, typecode, x):
        # inlined typecode + 32-bit little-endian write (5 bytes)
        a = chr(x & 0xff)
        x >>= 8
        b = chr(x & 0xff)
        x >>= 8
        c = chr(x & 0xff)
        x >>= 8
        d = chr(x & 0xff)
        pos = self.bufpos
        newpos = pos + 5
        if len(self.buflis) < newpos:
            self.buflis *= 2
        self.buflis[pos] = typecode
        self.buflis[pos+1] = a
        self.buflis[pos+2] = b
        self.buflis[pos+3] = c
        self.buflis[pos+4] = d
        self.bufpos = newpos
    def put_short(self, x):
        # inlined 16-bit little-endian write
        a = chr(x & 0xff)
        x >>= 8
        b = chr(x & 0xff)
        pos = self.bufpos
        newpos = pos + 2
        if len(self.buflis) < newpos:
            self.buflis *= 2
        self.buflis[pos] = a
        self.buflis[pos+1] = b
        self.bufpos = newpos
    def put_int(self, x):
        # inlined 32-bit little-endian write
        a = chr(x & 0xff)
        x >>= 8
        b = chr(x & 0xff)
        x >>= 8
        c = chr(x & 0xff)
        x >>= 8
        d = chr(x & 0xff)
        pos = self.bufpos
        newpos = pos + 4
        if len(self.buflis) < newpos:
            self.buflis *= 2
        self.buflis[pos] = a
        self.buflis[pos+1] = b
        self.buflis[pos+2] = c
        self.buflis[pos+3] = d
        self.bufpos = newpos
    def get_value(self):
        """Return everything written so far as one string."""
        return ''.join(self.buflis[:self.bufpos])
def invalid_typecode(space, u, tc):
    """Default dispatch entry: report an unknown typecode byte 'tc' via
    u.raise_exc().  The quoted, printable rendering of the byte is built
    by hand because %r is not supported in RPython."""
    # %r not supported in rpython
    #u.raise_exc('invalid typecode in unmarshal: %r' % tc)
    c = ord(tc)
    if c < 32 or c > 126:
        # non-printable: render as \xNN.  (The previous '\\x' + hex(c)
        # produced the garbled form '\x0x..' because hex() already
        # prefixes '0x'.)
        s = '\\x%02x' % (c,)
    elif tc == '\\':
        s = r'\\'
    else:
        s = tc
    # pick the quote character that does not clash with the rendering
    q = "'"
    if s[0] == "'":
        q = '"'
    u.raise_exc('invalid typecode in unmarshal: ' + q + s + q)
def register(codes, func):
    """NOT_RPYTHON"""
    # import-time helper: install 'func' in Unmarshaller's 256-entry
    # dispatch table for every typecode character in 'codes'
    for code in codes:
        Unmarshaller._dispatch[ord(code)] = func
class Unmarshaller(_Base):
    """Deserializes the CPython marshal byte format, pulling bytes from a
    reader object (see FileReader).  Each typecode byte is dispatched
    through the class-level _dispatch table, which register() fills in at
    import time; unknown typecodes hit invalid_typecode."""
    _dispatch = [invalid_typecode] * 256
    def __init__(self, space, reader):
        self.space = space
        self.reader = reader
        self.nesting = 0         # depth guard, mirrors Marshaller
        self.stringtable_w = []  # interned-string back references
    def get(self, n):
        # read exactly n bytes
        assert n >= 0
        return self.reader.read(n)
    def get1(self):
        # convince typer to use a char
        return chr(ord(self.get(1)))
    def atom_str(self, typecode):
        # expect 'typecode', then a 32-bit length, then that many bytes
        self.start(typecode)
        lng = self.get_lng()
        return self.get(lng)
    def atom_lng(self, typecode):
        # expect 'typecode', then return its 32-bit length field
        self.start(typecode)
        return self.get_lng()
    def atom_strlist(self, typecode, tc2):
        # expect 'typecode', a count, then that many tc2-typed strings
        self.start(typecode)
        lng = self.get_lng()
        res = [None] * lng
        idx = 0
        while idx < lng:
            res[idx] = self.atom_str(tc2)
            idx += 1
        return res
    def start(self, typecode):
        # consume one byte and verify it is the expected typecode
        tc = self.get1()
        if tc != typecode:
            self.raise_exc('invalid marshal data')
    def get_short(self):
        """Read a signed 16-bit little-endian integer."""
        s = self.get(2)
        a = ord(s[0])
        b = ord(s[1])
        x = a | (b << 8)
        if x & 0x8000:
            x = x - 0x10000
        return x
    def get_int(self):
        """Read a signed 32-bit little-endian integer."""
        s = self.get(4)
        a = ord(s[0])
        b = ord(s[1])
        c = ord(s[2])
        d = ord(s[3])
        if d & 0x80:
            d -= 0x100
        x = a | (b<<8) | (c<<16) | (d<<24)
        return intmask(x)
    def get_lng(self):
        """Read a 32-bit length field; negative values are corrupt data."""
        s = self.get(4)
        a = ord(s[0])
        b = ord(s[1])
        c = ord(s[2])
        d = ord(s[3])
        x = a | (b<<8) | (c<<16) | (d<<24)
        if x >= 0:
            return x
        else:
            self.raise_exc('bad marshal data')
    def get_pascal(self):
        # one length byte, then up to 255 data bytes
        lng = ord(self.get1())
        return self.get(lng)
    def get_str(self):
        # 32-bit length, then the raw bytes
        lng = self.get_lng()
        return self.get(lng)
    # this function is inlined below
    def get_list_w(self):
        self.nesting += 1
        lng = self.get_lng()
        res_w = [None] * lng
        idx = 0
        while idx < lng:
            res_w[idx] = self.get_w_obj(False)
            idx += 1
        self.nesting -= 1
        return res_w
    def get_w_obj(self, allow_null):
        """Unmarshal one wrapped object, enforcing the depth limit.
        A None result is only legal when allow_null is true."""
        self.nesting += 1
        space = self.space
        w_ret = space.w_None # something not None
        if self.nesting < MAX_MARSHAL_DEPTH:
            tc = self.get1()
            w_ret = self._dispatch[ord(tc)](space, self, tc)
            if w_ret is None and not allow_null:
                raise OperationError(space.w_TypeError, space.wrap(
                    'NULL object in marshal data'))
        else:
            self._overflow()
        self.nesting -= 1
        return w_ret
    # inlined version to save a nesting level
    def get_list_w(self):
        self.nesting += 1
        lng = self.get_lng()
        res_w = [None] * lng
        idx = 0
        space = self.space
        w_ret = space.w_None # something not None
        if self.nesting < MAX_MARSHAL_DEPTH:
            while idx < lng:
                tc = self.get1()
                w_ret = self._dispatch[ord(tc)](space, self, tc)
                if w_ret is None:
                    break
                res_w[idx] = w_ret
                idx += 1
        else:
            self._overflow()
        if w_ret is None:
            raise OperationError(space.w_TypeError, space.wrap(
                'NULL object in marshal data'))
        self.nesting -= 1
        return res_w
    def _overflow(self):
        # depth limit exceeded: report like CPython does
        self.raise_exc('object too deeply nested to unmarshal')
class StringUnmarshaller(Unmarshaller):
    # Unmarshaller with inlined buffer string
    def __init__(self, space, w_str):
        Unmarshaller.__init__(self, space, None)
        try:
            self.bufstr = space.str_w(w_str)
        except OperationError, e:
            if not e.match(space, space.w_TypeError):
                raise
            raise OperationError(space.w_TypeError, space.wrap(
                'marshal.loads() arg must be string'))
        # read cursor and end-of-buffer sentinel
        self.bufpos = 0
        self.limit = len(self.bufstr)
    def raise_eof(self):
        space = self.space
        raise OperationError(space.w_EOFError, space.wrap(
            'EOF read where object expected'))
    def get(self, n):
        # slice n bytes off the buffer, EOF-checking first
        pos = self.bufpos
        newpos = pos + n
        if newpos > self.limit:
            self.raise_eof()
        self.bufpos = newpos
        return self.bufstr[pos : newpos]
    def get1(self):
        # single-character fast path
        pos = self.bufpos
        if pos >= self.limit:
            self.raise_eof()
        self.bufpos = pos + 1
        return self.bufstr[pos]
    def get_int(self):
        # inlined signed 32-bit little-endian read
        pos = self.bufpos
        newpos = pos + 4
        if newpos > self.limit:
            self.raise_eof()
        self.bufpos = newpos
        a = ord(self.bufstr[pos])
        b = ord(self.bufstr[pos+1])
        c = ord(self.bufstr[pos+2])
        d = ord(self.bufstr[pos+3])
        if d & 0x80:
            d -= 0x100
        x = a | (b<<8) | (c<<16) | (d<<24)
        return intmask(x)
    def get_lng(self):
        # inlined length read; negative values are corrupt data
        pos = self.bufpos
        newpos = pos + 4
        if newpos > self.limit:
            self.raise_eof()
        self.bufpos = newpos
        a = ord(self.bufstr[pos])
        b = ord(self.bufstr[pos+1])
        c = ord(self.bufstr[pos+2])
        d = ord(self.bufstr[pos+3])
        x = a | (b<<8) | (c<<16) | (d<<24)
        if x >= 0:
            return x
        else:
            self.raise_exc('bad marshal data')
| Python |
TESTCASES = """\
None
False
True
StopIteration
Ellipsis
42
-17
sys.maxint
-1.25
-1.25 #2
2+5j
2+5j #2
42L
-1234567890123456789012345678901234567890L
hello # not interned
"hello"
()
(1, 2)
[]
[3, 4]
{}
{5: 6, 7: 8}
func.func_code
scopefunc.func_code
u'hello'
buffer(hello)
buffer(u'unicode, too')
set()
set([1, 2])
frozenset()
frozenset([3, 4])
""".strip().split('\n')
def readable(s):
    """Turn an arbitrary test-case expression into a valid identifier
    fragment: known punctuation becomes a mnemonic like '_plus_', any
    other non-alphanumeric character becomes a plain underscore."""
    mnemonics = {
        "'": '_quote_', '"': '_Quote_', ':': '_colon_', '.': '_dot_',
        '[': '_list_',  ']': '_tsil_',  '{': '_dict_',  '}': '_tcid_',
        '-': '_minus_', '+': '_plus_',
        ',': '_comma_', '(': '_brace_', ')': '_ecarb_',
    }
    pieces = []
    for ch in s:
        if ch in mnemonics:
            pieces.append(mnemonics[ch])
        elif ch.isalnum() or ch == '_':
            pieces.append(ch)
        else:
            pieces.append('_')
    return ''.join(pieces)
print """class AppTestMarshal:
"""
for line in TESTCASES:
line = line.strip()
name = readable(line)
version = ''
extra = ''
if line.endswith('#2'):
version = ', 2'
extra = '; assert len(s) in (9, 17)'
src = '''\
def test_%(name)s(self):
import sys
hello = "he"
hello += "llo"
def func(x):
return lambda y: x+y
scopefunc = func(42)
import marshal, StringIO
case = %(line)s
print "case: %%-30s func=%(name)s" %% (case, )
s = marshal.dumps(case%(version)s)%(extra)s
x = marshal.loads(s)
assert x == case
f = StringIO.StringIO()
marshal.dump(case, f)
f.seek(0)
x = marshal.load(f)
assert x == case
''' % {'name': name, 'line': line, 'version' : version, 'extra': extra}
print src
| Python |
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import W_Root, ObjSpace
from pypy.interpreter.miscutils import Action
import signal as cpy_signal
def setup():
    """Copy every integer SIG* constant from CPython's signal module into
    this module's globals, yielding each copied name (consumed once at
    import time to build signal_names)."""
    for name, value in cpy_signal.__dict__.items():
        if not name.startswith('SIG'):
            continue
        if not isinstance(value, int):
            continue
        globals()[name] = value
        yield name
NSIG = cpy_signal.NSIG
SIG_DFL = cpy_signal.SIG_DFL
SIG_IGN = cpy_signal.SIG_IGN
signal_names = list(setup())
class CheckSignalAction(Action):
    """A repeatitive action at the space level, checking if the
    signal_occurred flag is set and if so, scheduling ReportSignal actions.
    """
    repeat = True

    def __init__(self, space):
        self.space = space
        # signum -> app-level handler (wrapped callable or SIG_DFL/SIG_IGN)
        self.handlers_w = {}

    def perform(self):
        # drain all pending signals (pypysig_poll returns -1 when empty)
        # and queue one ReportSignal per signal on the main thread's
        # execution context
        while True:
            n = pypysig_poll()
            if n < 0:
                break
            main_ec = self.space.threadlocals.getmainthreadvalue()
            main_ec.add_pending_action(ReportSignal(self, n))

    def get(space):
        """Find the space's unique CheckSignalAction instance."""
        for action in space.pending_actions:
            if isinstance(action, CheckSignalAction):
                return action
        raise OperationError(space.w_RuntimeError,
                             space.wrap("lost CheckSignalAction"))
    get = staticmethod(get)
class ReportSignal(Action):
    """A one-shot action for the main thread's execution context."""
    def __init__(self, action, signum):
        self.action = action   # the owning CheckSignalAction
        self.signum = signum

    def perform(self):
        """Invoke the registered app-level handler for self.signum, if any,
        passing it the signal number and the topmost frame (or None)."""
        try:
            w_handler = self.action.handlers_w[self.signum]
        except KeyError:
            return    # no handler, ignore signal
        # re-install signal handler, for OSes that clear it
        pypysig_setflag(self.signum)
        # invoke the app-level handler
        space = self.action.space
        ec = space.getexecutioncontext()
        try:
            w_frame = ec.framestack.top()
        except IndexError:
            w_frame = space.w_None
        space.call_function(w_handler, space.wrap(self.signum), w_frame)
def getsignal(space, signum):
    """
    getsignal(sig) -> action

    Return the current action for the given signal.  The return value can be:
    SIG_IGN -- if the signal is being ignored
    SIG_DFL -- if the default action for the signal is in effect
    None -- if an unknown handler is in effect (XXX UNIMPLEMENTED)
    anything else -- the callable Python object used as a handler
    """
    action = CheckSignalAction.get(space)
    if signum in action.handlers_w:
        return action.handlers_w[signum]
    # never registered through signal(): the default action applies
    return space.wrap(SIG_DFL)
getsignal.unwrap_spec = [ObjSpace, int]
def signal(space, signum, w_handler):
    """
    signal(sig, action) -> action

    Set the action for the given signal.  The action can be SIG_DFL,
    SIG_IGN, or a callable Python object.  The previous action is
    returned.  See getsignal() for possible return values.

    *** IMPORTANT NOTICE ***
    A signal handler function is called with two arguments:
    the first is the signal number, the second is the interrupted stack frame.
    """
    ec      = space.getexecutioncontext()
    main_ec = space.threadlocals.getmainthreadvalue()

    # fetch the previous handler before any mutation so it can be returned
    old_handler = getsignal(space, signum)

    # like CPython, only the main thread may install handlers
    if ec is not main_ec:
        raise OperationError(space.w_ValueError,
                             space.wrap("signal() must be called from the "
                                        "main thread"))
    action = CheckSignalAction.get(space)
    if space.eq_w(w_handler, space.wrap(SIG_DFL)):
        pypysig_default(signum)
        action.handlers_w[signum] = w_handler
    elif space.eq_w(w_handler, space.wrap(SIG_IGN)):
        pypysig_ignore(signum)
        action.handlers_w[signum] = w_handler
    else:
        if not space.is_true(space.callable(w_handler)):
            raise OperationError(space.w_TypeError,
                                 space.wrap("'handler' must be a callable "
                                            "or SIG_DFL or SIG_IGN"))
        # arrange for pypysig_poll() to report this signal when it fires
        pypysig_setflag(signum)
        action.handlers_w[signum] = w_handler
    return old_handler
signal.unwrap_spec = [ObjSpace, int, W_Root]
# ____________________________________________________________
# CPython and LLTypeSystem implementations
from pypy.rpython.extregistry import ExtRegistryEntry
# Pure-Python emulation of the low-level signal primitives, used only when
# running on top of CPython (py.py); translated pypy-c's use the C versions
# registered through the ExtRegistryEntry classes below.
signal_queue = []     # only for py.py, not for translated pypy-c's

def pypysig_poll():
    "NOT_RPYTHON"
    # pop the oldest queued signal number, or -1 if none is pending
    if signal_queue:
        return signal_queue.pop(0)
    else:
        return -1

def pypysig_default(signum):
    "NOT_RPYTHON"
    cpy_signal.signal(signum, cpy_signal.SIG_DFL)   # XXX error handling

def pypysig_ignore(signum):
    "NOT_RPYTHON"
    cpy_signal.signal(signum, cpy_signal.SIG_IGN)   # XXX error handling

def _queue_handler(signum, frame):
    # CPython-level handler: remember the signal for the next poll
    if signum not in signal_queue:
        signal_queue.append(signum)

def pypysig_setflag(signum):
    "NOT_RPYTHON"
    cpy_signal.signal(signum, _queue_handler)
# lltyping - direct mapping to the C functions defined in
# translator/c/src/signals.h
class Entry(ExtRegistryEntry):
    """Maps pypysig_poll() onto the C function of the same name from
    translator/c/src/signals.h when translating."""
    _about_ = pypysig_poll

    def compute_result_annotation(self):
        # the C function returns a plain signed integer
        from pypy.annotation import model as annmodel
        return annmodel.SomeInteger()

    def specialize_call(self, hop):
        from pypy.rpython.lltypesystem import lltype
        hop.exception_cannot_occur()
        return hop.llops.gencapicall("pypysig_poll", [], lltype.Signed,
                                     includes=('src/signals.h',))
# Register the three void(int) primitives the same way: each gets an
# ExtRegistryEntry mapping it to its C namesake in src/signals.h.
for _fn in [pypysig_default, pypysig_ignore, pypysig_setflag]:

    class Entry(ExtRegistryEntry):
        _about_ = _fn
        funcname = _fn.func_name   # captured per-iteration as a class attr

        def compute_result_annotation(self, s_signum):
            return None   # the C functions return void

        def specialize_call(self, hop):
            from pypy.rpython.lltypesystem import lltype
            vlist = hop.inputargs(lltype.Signed)
            hop.exception_cannot_occur()
            hop.llops.gencapicall(self.funcname, vlist,
                                  includes=('src/signals.h',))
del _fn
| Python |
def default_int_handler(signum, frame):
    """
    default_int_handler(...)

    The default handler for SIGINT installed by Python.
    It raises KeyboardInterrupt.
    """
    # both arguments are deliberately ignored
    raise KeyboardInterrupt()
| Python |
from pypy.rpython.rctypes.tool import ctypes_platform
from pypy.rpython.rctypes.tool.libc import libc
from ctypes import *
# Dead module: the assert below fires at import time on purpose, this
# ctypes-based approach to signal handling was abandoned.
assert 0, "not used so far =============================================="

signal_names = ['SIGINT', 'SIGTERM', 'SIGKILL',
                # ...
                ]

# C signature: void (*sighandler_t)(int)
sighandler_t = CFUNCTYPE(None, c_int)
signal = libc.signal
signal.restype = sighandler_t
signal.argtypes = [c_int, sighandler_t]

class CConfig:
    # platform probe: resolve each SIG* name to its numeric value (or None)
    _includes_ = ('signal.h',)
##    struct_sigaction = ctypes_platform.Struct('struct sigaction',
##                                      [('sa_handler', sighandler_t)])

for name in signal_names:
    setattr(CConfig, name, ctypes_platform.DefinedConstantInteger(name))

globals().update(ctypes_platform.configure(CConfig))

##sigaction = libc.sigaction
##sigaction.restype = c_int
##sigaction.argtypes = [c_int, POINTER(struct_sigaction),
##                      POINTER(struct_sigaction)]
| Python |
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
interpleveldefs = {
'signal': 'interp_signal.signal',
'getsignal': 'interp_signal.getsignal',
'NSIG': 'space.wrap(interp_signal.NSIG)',
'SIG_DFL': 'space.wrap(interp_signal.SIG_DFL)',
'SIG_IGN': 'space.wrap(interp_signal.SIG_IGN)',
}
appleveldefs = {
'default_int_handler': 'app_signal.default_int_handler',
}
def buildloaders(cls):
from pypy.module.signal import interp_signal
for name in interp_signal.signal_names:
signum = getattr(interp_signal, name)
if signum is not None:
Module.interpleveldefs[name] = 'space.wrap(%d)' % (signum,)
super(Module, cls).buildloaders()
buildloaders = classmethod(buildloaders)
def __init__(self, space, *args):
"NOT_RPYTHON"
from pypy.module.signal.interp_signal import CheckSignalAction
MixedModule.__init__(self, space, *args)
# add the signal-checking callback as an action on the space
space.pending_actions.append(CheckSignalAction(space))
| Python |
# NOT_RPYTHON
from cclp import _make_expression
def make_expression(variables, formula):
    """Build a constraint from 'formula' (a Python expression string) over
    'variables' by compiling it into a lambda keyed on the variable names."""
    # NOTE(review): 'name_of' is not imported in this module (only
    # _make_expression is) -- this looks like it would raise NameError;
    # confirm whether name_of is injected into the app-level namespace.
    func = 'lambda %s:%s' % (','.join([name_of(var)
                                       for var in variables]),
                             formula)
    return _make_expression(variables, formula, eval(func))
| Python |
from pypy.interpreter import baseobjspace, gateway, typedef
from pypy.interpreter.error import OperationError
from pypy.module._stackless.clonable import AppClonableCoroutine
from pypy.module.cclp.misc import w, AppCoroutine, get_current_cspace
from pypy.module.cclp.global_state import sched
from pypy.rlib.rgc import gc_swap_pool, gc_clone
from pypy.rlib.objectmodel import we_are_translated
W_Root = baseobjspace.W_Root
#-- Variables types ----------------------------------------
class W_Var(W_Root):
    """A logic variable: free while w_bound_to points inside a ring of
    aliased W_Vars, bound once w_bound_to holds a non-Var value."""
    def __init__(w_self, space):
        # ring of aliases or bound value
        w_self.w_bound_to = w_self
        w_self.entails = {}
        # byneed flag
        w_self.needed = False

    def __repr__(w_self):
        # free variables print as <?@addr>, bound ones show their value
        if isinstance(w_self.w_bound_to, W_Var):
            return '<?@%x>' % id(w_self)
        return '<%s@%x>' % (w_self.w_bound_to,
                            id(w_self))

    def _same_as(w_self, w_var):
        # identity test helper
        assert isinstance(w_var, W_Var)
        return w_self is w_var
    __str__ = __repr__
class W_Future(W_Var):
    "a read-only-by-its-consummer variant of logic. var"
    def __init__(w_self, space):
        W_Var.__init__(w_self, space)
        # remember which coroutine created (and may bind) this future
        w_self._client = AppCoroutine.w_getcurrent(space)
        w("FUT", str(w_self))
from pypy.module._cslib import fd
class W_CVar(W_Var):
    """A constraint variable: a logic variable restricted to a finite
    domain; registers itself with the current computation space."""
    def __init__(self, space, w_dom, w_name):
        assert isinstance(w_dom, fd.W_FiniteDomain)
        W_Var.__init__(self, space)
        self.w_dom = w_dom
        self.name = space.str_w(w_name)   # unwrapped name, for RPython use
        self.w_nam = w_name               # wrapped name, for app-level use
        get_current_cspace(space).register_var(self)

    def copy(self, space):
        # fresh variable over a copy of the domain, same name
        return W_CVar(space, self.w_dom.copy(), self.w_nam)

    def name_w(self):
        return self.name

    def w_name(self):
        return self.w_nam

    def assign(self, w_var):
        # BUG(review): 'w_val' is undefined here -- any call raises
        # NameError.  The body reads as "assign value w_val to w_var",
        # suggesting a missing parameter; confirm intent before fixing.
        if not w_var.w_dom.contains(w_val):
            raise ValueError, "assignment out of domain"
        w_var.w_bound_to = w_val
def domain_of(space, w_v):
    """Return the finite domain of a constraint variable (app-level helper)."""
    if not isinstance(w_v, W_CVar):
        raise OperationError(space.w_TypeError,
                             space.wrap("domain_of takes a constraint variable"))
    return w_v.w_dom
app_domain_of = gateway.interp2app(domain_of)
def name_of(space, w_v):
    """Return the wrapped name of a constraint variable (app-level helper)."""
    if not isinstance(w_v, W_CVar):
        raise OperationError(space.w_TypeError,
                             space.wrap("name_of takes a constraint variable"))
    return w_v.w_name()
app_name_of = gateway.interp2app(name_of)
#-- Exception types ----------------------------------------
class W_FailedValue(W_Root):
    """wraps an exception raised in some coro, to be re-raised in
    some dependant coro sometime later
    """
    def __init__(w_self, exc):
        w_self.exc = exc

# interp-level control-flow exceptions used by the constraint machinery
class ConsistencyError(Exception): pass

class Solution(Exception): pass

class FailedSpace(Exception): pass
#-- Ring (used by scheduling entities)
class RingMixin(object):
    _mixin_ = True
    """
    useless till we can give a type parameter
    """
    def init_head(self, head):
        # make 'head' a singleton ring and remember it
        head._next = head._prev = head
        self._head = head
        self._count = 1

    def chain_insert(self, obj):
        # splice 'obj' in just before the head, i.e. at the ring's tail
        tail = self._head._prev
        obj._prev = tail
        obj._next = self._head
        tail._next = obj
        self._head._prev = obj

    def remove(self, obj):
        # unlink 'obj' from the ring; advance the head if obj was the
        # head, and drop the head when the ring becomes empty
        before, after = obj._prev, obj._next
        before._next = after
        after._prev = before
        if self._head == obj:
            self._head = after
        if after == obj:
            # obj was the last element: the group is about to die
            self._head = None
        obj._next = obj._prev = None
#-- Misc ---------------------------------------------------
def deref(space, w_var):
    "gets the value/next alias of a variable"
    assert isinstance(w_var, W_Var)
    return w_var.w_bound_to
def aliases(space, w_var):
    """return the aliases of a var, including itself"""
    # walk the w_bound_to ring until it closes back on w_var; only valid
    # for free variables (every link must still be a W_Var)
    assert isinstance(w_var, W_Var)
    assert isinstance(w_var.w_bound_to, W_Var)
    al = []
    w_curr = w_var
    while 1:
        w_next = w_curr.w_bound_to
        assert isinstance(w_next, W_Var)
        al.append(w_curr)
        if space.is_true(space.is_nb_(w_next, w_var)):
            break
        w_curr = w_next
    return al
| Python |
from pypy.interpreter import gateway, baseobjspace
from pypy.objspace.std.listobject import W_ListObject
from pypy.objspace.std.stringobject import W_StringObject
from pypy.module.cclp.types import deref, W_Var, W_CVar
from pypy.module.cclp.variable import bind_mm, raise_unification_failure, _alias, \
_assign_aliases, bind__Var_Root, bind__Var_Var
from pypy.module.cclp.misc import w
from pypy.module._cslib import fd
W_Root = baseobjspace.W_Root
def domain(space, w_values, w_name):
    """App-level constructor: build a named constraint variable ranging
    over the finite domain given by the list of values."""
    assert isinstance(w_values, W_ListObject)
    assert isinstance(w_name, W_StringObject)
    w_dom = fd.W_FiniteDomain(w_values, None)
    w_var = W_CVar(space, w_dom, w_name)
    w("CVAR", str(w_var))
    return w_var
app_domain = gateway.interp2app(domain)
def bind__CVar_Root(space, w_cvar, w_obj):
    """Bind a constraint variable to a plain value, which must be a
    member of the variable's domain."""
    #XXX we should (want to) be able to test membership
    #    in a wrapped against wrappeds into a non-wrapped dict
    if [True for elt in w_cvar.w_dom.domain.get_wvalues_in_rlist()
        if space.is_true(space.eq(w_obj, elt))]:
        return bind__Var_Root(space, w_cvar, w_obj)
    raise_unification_failure(space, "value not in variable domain")
def bind__CVar_CVar(space, w_cvar1, w_cvar2):
    """Unify two constraint variables through their domain intersection:
    a singleton intersection binds both to that value, a larger one makes
    both share the narrowed domain and aliases them, an empty one fails."""
    d1 = w_cvar1.w_dom.domain
    d2 = w_cvar2.w_dom.domain
    dinter = d1.intersect(d2)
    if dinter.size() > 0:
        if dinter.size() == 1:
            # unique common value: both variables become bound to it
            w_value = dinter.get_wvalues_in_rlist()[0]
            _assign_aliases(space, w_cvar1, w_value)
            _assign_aliases(space, w_cvar2, w_value)
        else:
            # share the intersection domain and alias the two variables
            w_interdom = fd.W_FiniteDomain(space.newlist([]), None)
            w_interdom.domain = dinter
            w_cvar1.w_dom = w_cvar2.w_dom = w_interdom
            _alias(space, w_cvar1, w_cvar2)
    else:
        raise_unification_failure(space, "incompatible domains")
def bind__CVar_Var(space, w_cvar, w_var):
    """Bind a constraint variable to a logic variable: treat a bound one
    as its value, otherwise fall back to plain Var-Var aliasing."""
    if space.is_true(space.is_bound(w_var)):
        return bind__CVar_Root(space, w_cvar, w_var)
    return bind__Var_Var(space, w_cvar, w_var)

# register the W_CVar cases on the 'bind' multimethod
bind_mm.register(bind__CVar_CVar, W_CVar, W_CVar)
bind_mm.register(bind__CVar_Root, W_CVar, W_Root)
bind_mm.register(bind__CVar_Var, W_CVar, W_Var)
| Python |
from pypy.interpreter import baseobjspace
from pypy.interpreter.function import Function
from pypy.interpreter.error import OperationError
from pypy.objspace.std.listobject import W_ListObject
from pypy.objspace.std.stringobject import W_StringObject
from pypy.module.cclp.types import W_CVar
def check_variables(space, w_variables, min_nb):
    """Validate that w_variables is a wrapped list holding at least
    min_nb entries; return it unchanged."""
    if not isinstance(w_variables, W_ListObject):
        raise OperationError(space.w_TypeError,
                             space.wrap("variables must be in a list or tuple."))
    assert isinstance(w_variables, W_ListObject)
    if len(w_variables.wrappeditems) < min_nb:
        raise OperationError(space.w_RuntimeError,
                             space.wrap("there must be at least %s variables." % min_nb))
    return w_variables
def cvars_to_names(cvars):
    """Collect the wrapped name objects of a list of W_CVar instances."""
    names = []
    for w_cv in cvars:
        assert isinstance(w_cv, W_CVar)
        names.append(w_cv.w_nam)
    return names
from pypy.module._cslib.constraint import interp_make_expression, \
make_alldistinct as mkalldiff
def _make_expression(space, w_variables, w_formula, w_filter_func):
    """create a new constraint of type Expression or BinaryExpression
    The chosen class depends on the number of variables in the constraint"""
    # validate inputs, then delegate to _cslib with the variables' names
    w_variables = check_variables(space, w_variables, 1)
    assert isinstance(w_filter_func, Function)
    if not isinstance(w_formula, W_StringObject):
        raise OperationError(space.w_TypeError,
                             space.wrap("formula must be a string."))
    variables = cvars_to_names(w_variables.wrappeditems)
    return interp_make_expression(space, space.newlist(variables),
                                  w_formula, w_filter_func)
_make_expression.unwrap_spec = [baseobjspace.ObjSpace,
                                baseobjspace.W_Root,
                                baseobjspace.W_Root,
                                baseobjspace.W_Root]
def make_alldistinct(space, w_variables):
    """Build an all-distinct constraint over at least two variables."""
    w_variables = check_variables(space, w_variables, 2)
    variables = cvars_to_names(w_variables.wrappeditems)
    return mkalldiff(space, space.newlist(variables))
make_alldistinct.unwrap_spec = [baseobjspace.ObjSpace,
                                baseobjspace.W_Root]
| Python |
#
| Python |
from pypy.module.cclp.types import W_Var
from pypy.module.cclp.interp_var import interp_bind, interp_free
from pypy.module._cslib import fd
class _DorkFiniteDomain(fd._FiniteDomain):
    """
    this variant accomodates synchronization needs
    of the dorkspace
    """
    def __init__(self, space, w_values, values):
        fd._FiniteDomain.__init__(self, w_values, values)
        self.space = space
        # logic variable that gets bound whenever the domain shrinks,
        # letting waiters synchronize on domain changes
        self._changevar = W_Var(space)

    def clear_change(self):
        "create a fresh change synchonizer"
        # only legal once the previous one has been bound (i.e. fired)
        assert not interp_free(self._changevar)
        self._changevar = W_Var(self.space)

    def one_shot_watcher(self):
        # current synchronizer; bound exactly once, on the next removal
        return self._changevar

    def _value_removed(self):
        # fire the watcher, then immediately install a fresh one
        fd._FiniteDomain._value_removed(self)
        interp_bind(self._changevar, self.space.w_True)
        self.clear_change()
| Python |
from pypy.interpreter import gateway, baseobjspace, argument
from pypy.rlib.objectmodel import we_are_translated
from pypy.module.cclp.types import W_Var, W_Future, W_FailedValue
from pypy.module.cclp.misc import w, v, AppCoroutine, get_current_cspace
from pypy.module.cclp.thunk import FutureThunk, ProcedureThunk
from pypy.module.cclp.global_state import sched
#-- Future --------------------------------------------------
def future(space, w_callable, __args__):
    """returns a future result"""
    #XXX we could be much more lazy wrt coro creation
    args = __args__.normalize()
    coro = AppCoroutine(space)
    # make the coroutine a singleton ring (see RingMixin conventions)
    coro._next = coro._prev = coro
    w_Future = W_Future(space)
    # the thunk will run w_callable and bind its result into w_Future
    thunk = FutureThunk(space, w_callable, args, w_Future, coro)
    coro.bind(thunk)
    coro._cspace = get_current_cspace(space)
    if not we_are_translated():
        w("FUTURE", str(id(coro)), "for", str(w_callable.name))
    sched.uler.add_new_thread(coro)
    return w_Future
future.unwrap_spec = [baseobjspace.ObjSpace,
                      baseobjspace.W_Root,
                      argument.Arguments]
#-- plain Coroutine -----------------------------------------
def stacklet(space, w_callable, __args__):
    """returns a coroutine object"""
    args = __args__.normalize()
    coro = AppCoroutine(space)
    # make the coroutine a singleton ring (see RingMixin conventions)
    coro._next = coro._prev = coro
    thunk = ProcedureThunk(space, w_callable, args, coro)
    coro.bind(thunk)
    coro._cspace = get_current_cspace(space)
    if not we_are_translated():
        w("STACKLET", str(id(coro)), "for", str(w_callable.name))
    sched.uler.add_new_thread(coro)
    # unlike future(), eagerly give the new thread a chance to run
    sched.uler.schedule()
    return coro
stacklet.unwrap_spec=[baseobjspace.ObjSpace,
                      baseobjspace.W_Root,
                      argument.Arguments]
def this_thread(space):
    """Return the currently running coroutine."""
    return AppCoroutine.w_getcurrent(space)
this_thread.unwrap_spec = [baseobjspace.ObjSpace]
| Python |
from pypy.interpreter import gateway, baseobjspace
from pypy.interpreter.error import OperationError
from pypy.objspace.std.model import StdObjSpaceMultiMethod
from pypy.objspace.std.listobject import W_ListObject, W_TupleObject
from pypy.objspace.std.dictobject import W_DictObject
from pypy.objspace.std.stringobject import W_StringObject
from pypy.module.cclp.misc import w, v, AppCoroutine
from pypy.module.cclp.global_state import sched
from pypy.module.cclp.types import deref, W_Var, W_CVar, W_Future, W_FailedValue
from pypy.rlib.objectmodel import we_are_translated
W_Root = baseobjspace.W_Root
all_mms = {}
def newvar(space):
    """Create a fresh, free logic variable (app-level entry point)."""
    w_v = W_Var(space)
    w("VAR", w_v.__repr__())
    return w_v
app_newvar = gateway.interp2app(newvar)
#-- Wait -------------------------------------------------
def wait__Root(space, w_obj):
    """A non-Var object is already a value: return it unchanged."""
    return w_obj
def wait__Var(space, w_var):
    """Block the current thread until w_var is bound, then return its
    value; a W_FailedValue binding re-raises the wrapped exception."""
    #w("###:wait", str(id(AppCoroutine.w_getcurrent(space))))
    if space.is_true(space.is_free(w_var)):
        # wake any by-need waiters, park ourselves on w_var, reschedule
        sched.uler.unblock_byneed_on(w_var)
        sched.uler.add_to_blocked_on(w_var)
        sched.uler.schedule()
        assert space.is_true(space.is_bound(w_var))
    w_ret = w_var.w_bound_to
    if isinstance(w_ret, W_FailedValue):
        w(".. reraising Failed Value")
        raise w_ret.exc
    return w_ret
def wait(space, w_obj):
    """Dispatch through the 'wait' multimethod (see wait__Var/wait__Root)."""
    if not we_are_translated():
        assert isinstance(w_obj, W_Root)
    return space.wait(w_obj)
app_wait = gateway.interp2app(wait)

wait_mm = StdObjSpaceMultiMethod('wait', 1)
wait_mm.register(wait__Var, W_Var)
wait_mm.register(wait__Root, W_Root)
all_mms['wait'] = wait_mm
#-- Wait_needed --------------------------------------------
def wait_needed__Var(space, w_var):
    """Block until some other thread needs (waits on) w_var; returns
    immediately if the variable is already marked as needed."""
    #w(":wait_needed", str(id(AppCoroutine.w_getcurrent(space))))
    if space.is_true(space.is_free(w_var)):
        if w_var.needed:
            return
        sched.uler.add_to_blocked_byneed(w_var)
        sched.uler.schedule()
    else:
        raise OperationError(space.w_TypeError,
                             space.wrap("wait_needed only supported on unbound variables"))
def wait_needed(space, w_var):
    """Dispatch through the 'wait_needed' multimethod (Vars only)."""
    assert isinstance(w_var, W_Var)
    return space.wait_needed(w_var)
app_wait_needed = gateway.interp2app(wait_needed)

wait_needed_mm = StdObjSpaceMultiMethod('wait_needed', 1)
wait_needed_mm.register(wait_needed__Var, W_Var)
all_mms['wait_needed'] = wait_needed_mm
#-- PREDICATES (is_bound, is_free, is_aliased, alias_of) ---------
def is_aliased(space, w_var): # XXX: this appear(ed) to block (long ago)
    """True iff w_var's alias ring contains more than itself."""
    assert isinstance(w_var, W_Var)
    if space.is_true(space.is_nb_(deref(space, w_var), w_var)):
        return space.newbool(False)
    return space.newbool(True)
app_is_aliased = gateway.interp2app(is_aliased)
def is_free(space, w_obj):
    """Dispatch through the 'is_free' multimethod."""
    assert isinstance(w_obj, W_Root)
    return space.is_free(w_obj)
app_is_free = gateway.interp2app(is_free)

def is_free__Root(space, w_obj):
    # non-Var objects are values, hence never free
    return space.newbool(False)

def is_free__Var(space, w_var):
    # free iff still pointing into a ring of W_Vars
    return space.newbool(isinstance(w_var.w_bound_to, W_Var))

is_free_mm = StdObjSpaceMultiMethod('is_free', 1)
is_free_mm.register(is_free__Root, W_Root)
is_free_mm.register(is_free__Var, W_Var)
all_mms['is_free'] = is_free_mm
def is_bound(space, w_obj):
    """Dispatch through the 'is_bound' multimethod."""
    assert isinstance(w_obj, W_Root)
    return space.is_bound(w_obj)
app_is_bound = gateway.interp2app(is_bound)

def is_bound__Root(space, w_obj):
    # non-Var objects are values, hence always bound
    return space.newbool(True)

def is_bound__Var(space, w_var):
    # bound iff w_bound_to holds a non-Var value
    return space.newbool(not isinstance(w_var.w_bound_to, W_Var))

is_bound_mm = StdObjSpaceMultiMethod('is_bound', 1)
is_bound_mm.register(is_bound__Root, W_Root)
is_bound_mm.register(is_bound__Var, W_Var)
all_mms['is_bound'] = is_bound_mm
def alias_of(space, w_var1, w_var2):
    """True iff the two free variables belong to the same alias ring.

    Walks w_var1's ring; finding w_var2 proves aliasing, coming back
    to w_var1 proves the contrary. Raises LogicError on bound vars."""
    assert isinstance(w_var1, W_Var)
    assert isinstance(w_var2, W_Var)
    if not (space.is_true(space.is_free(w_var1)) and \
            space.is_true(space.is_free(w_var2))):
        raise OperationError(space.w_LogicError,
                             space.wrap("don't call alias_of on bound variables"))
    w_curr = w_var1
    while 1:
        w_next = w_curr.w_bound_to
        assert isinstance(w_next, W_Var)
        if w_next is w_var2:
            return space.newbool(True)
        if w_next is w_var1:
            break
        w_curr = w_next
    return space.newbool(False)
app_alias_of = gateway.interp2app(alias_of)
#-- HELPERS ----------------------
def get_ring_tail(space, w_start):
    "returns the last var of a ring of aliases"
    assert isinstance(w_start, W_Var)
    w_curr = w_start
    while 1:
        w_next = w_curr.w_bound_to
        assert isinstance(w_next, W_Var)
        # the tail is the var whose successor is the start again
        if space.is_true(space.is_nb_(w_next, w_start)):
            return w_curr
        w_curr = w_next
def raise_unification_failure(space, comment="Unification failure"):
    """raises a specific exception for bind/unify
    should fail the current comp. space at some point"""
    raise OperationError(space.w_UnificationError,
                         space.wrap(comment))
# to signal a future binding exception
def raise_future_binding(space):
    """Raise FutureBindingError: futures are read-only for their client."""
    raise OperationError(space.w_FutureBindingError,
                         space.wrap("This future is read-only for you, pal"))
#-- BIND, ENTAIL----------------------------
def bind(space, w_var, w_obj):
    """1. aliasing of unbound variables
    2. assign bound var to unbound var
    3. assign value to unbound var
    """
    v(" :bind")
    assert isinstance(w_var, W_Var)
    assert isinstance(w_obj, W_Root)
    space.bind(w_var, w_obj)
app_bind = gateway.interp2app(bind)
def entail(space, w_v1, w_v2):
    "X -> Y : binding X will propagate its value to Y"
    assert isinstance(w_v1, W_Var)
    assert isinstance(w_v2, W_Var)
    space.entail(w_v1, w_v2)
app_entail = gateway.interp2app(entail)
def bind__Var_Root(space, w_var, w_obj):
    """case 3 of bind(): assign a value to a variable; re-binding is only
    tolerated when the new value equals the current one."""
    #w("var val", str(id(w_var)))
    # 3. var and value
    if space.is_true(space.is_free(w_var)):
        return _assign_aliases(space, w_var, w_obj)
    if space.is_true(space.eq(w_var.w_bound_to, w_obj)):
        return
    raise OperationError(space.w_RebindingError,
                         space.wrap("Cannot bind twice but two identical values"))
def bind__Future_Root(space, w_fut, w_obj):
    """like bind__Var_Root, but a future may not be bound by its client."""
    #v("future val", str(id(w_fut)))
    if w_fut._client == AppCoroutine.w_getcurrent(space):
        raise_future_binding(space)
    return bind__Var_Root(space, w_fut, w_obj) # call-next-method ?
def bind__Var_Var(space, w_v1, w_v2):
    """cases 1 & 2 of bind(): variable-variable binding."""
    #w("var var")
    if space.is_true(space.is_bound(w_v1)):
        if space.is_true(space.is_bound(w_v2)):
            # we allow re-binding to same value, see 3.
            return unify(space,
                         deref(space, w_v1),
                         deref(space, w_v2))
        # 2. a (obj unbound, var bound)
        return _assign_aliases(space, w_v2, deref(space, w_v1))
    elif space.is_true(space.is_bound(w_v2)):
        # 2. b (var unbound, obj bound)
        return _assign_aliases(space, w_v1, deref(space, w_v2))
    else: # 1. both are unbound
        return _alias(space, w_v1, w_v2)
def bind__Future_Var(space, w_fut, w_var):
    """future-to-variable bind: refused for the future's own client."""
    #v("future var")
    if w_fut._client == AppCoroutine.w_getcurrent(space):
        raise_future_binding(space)
    return bind__Var_Var(space, w_fut, w_var)
def bind__Var_Future(space, w_var, w_fut):
    """variable-to-future bind: a bound future behaves like a value."""
    if space.is_true(space.is_bound(w_fut)): #XXX write a test for me !
        return bind__Var_Root(space, w_var, deref(space, w_fut))
    if w_fut._client == AppCoroutine.w_getcurrent(space):
        raise_future_binding(space)
    return bind__Var_Var(space, w_var, w_fut) #and for me ...
# dispatch table: futures get the restricted implementations
bind_mm = StdObjSpaceMultiMethod('bind', 2)
bind_mm.register(bind__Var_Root, W_Var, W_Root)
bind_mm.register(bind__Var_Var, W_Var, W_Var)
bind_mm.register(bind__Future_Root, W_Future, W_Root)
bind_mm.register(bind__Future_Var, W_Future, W_Var)
bind_mm.register(bind__Var_Future, W_Var, W_Future)
all_mms['bind'] = bind_mm
def entail__Var_Var(space, w_v1, w_v2):
    """Entailment w_v1 -> w_v2: if w_v1 is already bound, propagate its
    value to w_v2 immediately (unifying when w_v2 is bound too),
    otherwise record the entailment for later propagation."""
    #w("   :entail Var Var")
    if space.is_true(space.is_bound(w_v1)):
        if space.is_true(space.is_bound(w_v2)):
            return unify(space,
                         deref(space, w_v1),
                         deref(space, w_v2))
        return _assign_aliases(space, w_v2, deref(space, w_v1))
    else:
        return _entail(space, w_v1, w_v2)
entail_mm = StdObjSpaceMultiMethod('entail', 2)
entail_mm.register(entail__Var_Var, W_Var, W_Var)
all_mms['entail'] = entail_mm
def _entail(space, w_v1, w_v2):
    """Record that binding w_v1 must later propagate its value to w_v2."""
    assert isinstance(w_v1, W_Var)
    assert isinstance(w_v2, W_Var)
    w_v1.entails[w_v2] = True
    return space.w_None
def _assign_aliases(space, w_var, w_val):
    """Assign w_val to every variable in w_var's alias ring, waking up
    threads blocked on each of them, then propagate to entailed vars."""
    #w("  :assign")
    assert isinstance(w_var, W_Var)
    #assert isinstance(w_val, W_Root)
    w_curr = w_var
    while 1:
        w_next = w_curr.w_bound_to
        assert isinstance(w_next, W_Var)
        _assign(space, w_curr, w_val)
        # notify the blocked threads
        sched.uler.unblock_on(w_curr)
        if space.is_true(space.is_nb_(w_next, w_var)):
            break
        # switch to next
        w_curr = w_next
    _assign_entailed(space, w_var, w_val)
    #w("  :assigned")
    return space.w_None
def _assign_entailed(space, w_var, w_val):
    """Propagate w_val to every variable entailed by w_var: free ones get
    assigned, bound ones are unified with w_val."""
    #w("  :assign entailed")
    for var in w_var.entails:
        if space.is_true(space.is_free(var)):
            _assign_aliases(space, var, w_val)
        else:
            unify(space, deref(space, var), w_val)
def _assign(space, w_var, w_val):
    """Low-level single-variable assignment; constraint variables first
    check that the value lies in their finite domain."""
    assert isinstance(w_var, W_Var)
    if isinstance(w_var, W_CVar):
        if not w_val in w_var.w_dom.domain.get_wvalues_in_rlist():
            raise_unification_failure(space, "assignment out of domain")
    w_var.w_bound_to = w_val
def _alias(space, w_v1, w_v2):
    """appends one var to the alias chain of another
    user must ensure freeness of both vars"""
    assert isinstance(w_v1, W_Var)
    assert isinstance(w_v2, W_Var)
    #w("    :alias", str(id(w_v1)), str(id(w_v2)))
    if space.is_true(space.is_nb_(w_v1, w_v2)):
        return space.w_None
    # four cases depending on which vars already live in a ring
    if space.is_true(is_aliased(space, w_v1)):
        if space.is_true(is_aliased(space, w_v2)):
            return _merge_aliases(space, w_v1, w_v2)
        return _add_to_aliases(space, w_v1, w_v2)
    if space.is_true(is_aliased(space, w_v2)):
        return _add_to_aliases(space, w_v2, w_v1)
    # we have two unaliased vars: make a two-element ring
    w_v1.w_bound_to = w_v2
    w_v2.w_bound_to = w_v1
    return space.w_None
def _add_to_aliases(space, w_v1, w_v2):
    """Splice the lone variable w_v2 into w_v1's existing ring."""
    assert isinstance(w_v1, W_Var)
    assert isinstance(w_v2, W_Var)
    #w("    :add to aliases")
    w_tail = w_v1.w_bound_to
    w_v1.w_bound_to = w_v2
    w_v2.w_bound_to = w_tail
    return space.w_None
def _merge_aliases(space, w_v1, w_v2):
    """Fuse two alias rings into one by cross-linking their tails."""
    assert isinstance(w_v1, W_Var)
    assert isinstance(w_v2, W_Var)
    #w("    :merge aliases")
    w_tail1 = get_ring_tail(space, w_v1)
    w_tail2 = get_ring_tail(space, w_v2)
    w_tail1.w_bound_to = w_v2
    w_tail2.w_bound_to = w_v1
    return space.w_None
#-- UNIFY -------------------------
def unify(space, w_x, w_y):
    """Applevel entry point: structural unification of two objects,
    dispatched through the space's multimethod."""
    assert isinstance(w_x, W_Root)
    assert isinstance(w_y, W_Root)
    #w(":unify ", str(id(w_x)), str(id(w_y)))
    return space.unify(w_x, w_y)
app_unify = gateway.interp2app(unify)
def unify__Root_Root(space, w_x, w_y):
    """Fallback: plain values unify iff equal, or recursively through
    their instance dicts when both have one."""
    if not space.eq_w(w_x, w_y):
        w_d1 = w_x.getdict() #returns wrapped dict or unwrapped None ...
        w_d2 = w_y.getdict()
        if None in [w_d1, w_d2]:
            raise_unification_failure(space, str(w_x) + " != " + str(w_y))
        else:
            return unify__Dict_Dict(space, w_d1, w_d2)
    return space.w_None
def unify__Var_Var(space, w_x, w_y):
    """Var-var unification: recurse on values when both bound, otherwise
    fall back to bind (which aliases two free vars)."""
    #w(":unify var var", str(id(w_x)), str(id(w_y)))
    if space.is_true(space.is_bound(w_x)):
        if space.is_true(space.is_bound(w_y)):
            return space.unify(deref(space, w_x),
                               deref(space, w_y))
        return space.bind(w_y, w_x)
    # binding or aliasing x & y
    else:
        return space.bind(w_x, w_y)
def unify__Var_Root(space, w_x, w_y):
    """Var-value unification: recurse when bound, else bind the value."""
    #w(" :unify var val", str(id(w_x)), str(w_y))
    if space.is_true(space.is_bound(w_x)):
        return space.unify(deref(space, w_x), w_y)
    return space.bind(w_x, w_y)
def unify__Root_Var(space, w_x, w_y):
    # unification is symmetric: delegate to the Var-first case
    return space.unify(w_y, w_x)
def unify__Tuple_Tuple(space, w_i1, w_i2):
    """Element-wise unification of two tuples of equal length.
    NOTE: kept structurally identical to unify__List_List on purpose
    (separate functions are needed for multimethod registration)."""
    if len(w_i1.wrappeditems) != len(w_i2.wrappeditems):
        raise_unification_failure(space, "tuples of different lengths.")
    idx, top = (-1, space.int_w(space.len(w_i1))-1)
    while idx < top:
        idx += 1
        w_xi = space.getitem(w_i1, space.newint(idx))
        w_yi = space.getitem(w_i2, space.newint(idx))
        if space.is_true(space.is_nb_(w_xi, w_yi)):
            continue
        unify(space, w_xi, w_yi)
    return space.w_None
def unify__List_List(space, w_i1, w_i2):
    """Element-wise unification of two lists of equal length."""
    if len(w_i1.wrappeditems) != len(w_i2.wrappeditems):
        raise_unification_failure(space, "lists of different lengths.")
    idx, top = (-1, space.int_w(space.len(w_i1))-1)
    while idx < top:
        idx += 1
        w_xi = space.getitem(w_i1, space.newint(idx))
        w_yi = space.getitem(w_i2, space.newint(idx))
        if space.is_true(space.is_nb_(w_xi, w_yi)):
            continue
        unify(space, w_xi, w_yi)
    return space.w_None
def unify__Dict_Dict(space, w_m1, w_m2):
    """Key-wise unification; iterates w_m1's keys and unifies the
    corresponding values of both dicts."""
    assert isinstance(w_m1, W_DictObject)
    assert isinstance(w_m2, W_DictObject)
    #print " :unify mappings", w_m1, w_m2
    for w_xk in space.unpackiterable(w_m1):
        w_xi = space.getitem(w_m1, w_xk)
        w_yi = space.getitem(w_m2, w_xk)
        if space.is_true(space.is_nb_(w_xi, w_yi)):
            continue
        space.unify(w_xi, w_yi)
    return space.w_None
unify_mm = StdObjSpaceMultiMethod('unify', 2)
unify_mm.register(unify__Root_Root, W_Root, W_Root)
unify_mm.register(unify__Var_Var, W_Var, W_Var)
unify_mm.register(unify__Var_Root, W_Var, W_Root)
unify_mm.register(unify__Root_Var, W_Root, W_Var)
unify_mm.register(unify__Tuple_Tuple, W_TupleObject, W_TupleObject)
unify_mm.register(unify__List_List, W_ListObject, W_ListObject)
unify_mm.register(unify__Dict_Dict, W_DictObject, W_DictObject)
all_mms['unify'] = unify_mm
| Python |
# mutable global holder: the scheduler instance is attached later as
# sched.uler, which avoids module-level import cycles between the
# scheduler and the modules that use it.
class State: pass
sched = State()
| Python |
from pypy.interpreter import gateway, baseobjspace
from pypy.rlib.objectmodel import we_are_translated
# commonly imported there, used from types, variable, thread
from pypy.module._stackless.coroutine import AppCoroutine
import os
# global on/off switch for the w()/v() debug output below;
# toggled at applevel through switch_debug_info()
class State: pass
NO_DEBUG_INFO = State()
NO_DEBUG_INFO.state = True
def w(*msgs):
    """writeln: emit the messages then a newline, unless debug is off"""
    if NO_DEBUG_INFO.state: return
    v(*msgs)
    os.write(1, ' \n')
def v(*msgs):
    """write: emit the messages separated by spaces, unless debug is off"""
    if NO_DEBUG_INFO.state: return
    for msg in list(msgs):
        os.write(1, msg)
        os.write(1, ' ')
def get_current_cspace(space):
    """Return the computation space of the currently running coroutine.

    NOTE(review): may return None when the coroutine belongs to no
    cspace; untranslated runs drop into pdb in that case -- confirm
    callers tolerate a None result."""
    curr = AppCoroutine.w_getcurrent(space)
    assert isinstance(curr, AppCoroutine)
    if curr._cspace is None:
        if not we_are_translated():
            import pdb
            pdb.set_trace()
    return curr._cspace
def interp_id(space, w_obj):
    "debugging purposes only: expose the interp-level id() of w_obj"
    assert isinstance(w_obj, baseobjspace.W_Root)
    return space.newint(id(w_obj))
app_interp_id = gateway.interp2app(interp_id)
def switch_debug_info(space):
    """Toggle the NO_DEBUG_INFO flag driving w()/v() output."""
    NO_DEBUG_INFO.state = not NO_DEBUG_INFO.state
switch_debug_info.unwrap_spec = [baseobjspace.ObjSpace]
| Python |
from pypy.module._stackless.coroutine import _AppThunk, AppCoroutine
from pypy.module._stackless.interp_coroutine import AbstractThunk
from pypy.module.cclp.misc import w, get_current_cspace
from pypy.module.cclp.global_state import sched
from pypy.module.cclp.types import W_Var, W_CVar, W_Future, W_FailedValue, \
ConsistencyError, FailedSpace, Solution
from pypy.module.cclp.interp_var import interp_bind, interp_free, interp_wait_or
from pypy.objspace.std.listobject import W_ListObject
from pypy.objspace.std.listobject import W_TupleObject
from pypy.rlib.objectmodel import we_are_translated
def logic_args(args):
    "returns logic vars found in unpacked normalized args"
    assert isinstance(args, tuple)
    positional = args[0] or [] # pos is not an empty tuple but None
    keyword = args[1]
    found = []
    for item in positional:
        if isinstance(item, W_Var):
            found.append(item)
    for item in keyword.keys():
        if isinstance(item, W_Var):
            found.append(item)
    return found
#-- Thunk -----------------------------------------
class ProcedureThunk(_AppThunk):
    "used by thread.stacklet"
    def __init__(self, space, w_callable, args, coro):
        _AppThunk.__init__(self, space, coro.costate, w_callable, args)
        self._coro = coro
    def call(self):
        """Run the procedure; on any exception, poison the logic
        variables traced for this thread with a W_FailedValue; always
        unregister the thread and reschedule."""
        w(".! initial (returnless) thunk CALL in", str(id(self._coro)))
        sched.uler.trace_vars(self._coro, logic_args(self.args.unpack()))
        try:
            try:
                _AppThunk.call(self)
            except Exception, exc:
                w(".! exceptional EXIT of procedure", str(id(self._coro)), "with", str(exc))
                if not we_are_translated():
                    import traceback
                    traceback.print_exc()
                sched.uler.dirty_traced_vars(self._coro, W_FailedValue(exc))
            else:
                w(".! clean EXIT of procedure", str(id(self._coro)))
        finally:
            sched.uler.remove_thread(self._coro)
            sched.uler.schedule()
class FutureThunk(_AppThunk):
    """Thunk whose result is delivered through the logic variable
    w_Result: bound to the callable's value on success, to a
    W_FailedValue on failure."""
    def __init__(self, space, w_callable, args, w_Result, coro):
        _AppThunk.__init__(self, space, coro.costate, w_callable, args)
        self.w_Result = w_Result
        self._coro = coro
    def call(self):
        """Run the callable, then bind/unify w_Result accordingly;
        always unregister the thread and reschedule."""
        w(".! initial thunk CALL in", str(id(self._coro)))
        sched.uler.trace_vars(self._coro, logic_args(self.args.unpack()))
        try:
            try:
                _AppThunk.call(self)
            except Exception, exc:
                w(".! exceptional EXIT of future", str(id(self._coro)), "with", str(exc))
                failed_val = W_FailedValue(exc)
                self.space.bind(self.w_Result, failed_val)
                sched.uler.dirty_traced_vars(self._coro, failed_val)
            else:
                w(".! clean EXIT of future", str(id(self._coro)),
                  "-- setting future result", str(self.w_Result), "to",
                  str(self.costate.w_tempval))
                self.space.unify(self.w_Result, self.costate.w_tempval)
        finally:
            sched.uler.remove_thread(self._coro)
            sched.uler.schedule()
class CSpaceThunk(_AppThunk):
    "for a constraint script/logic program"
    def __init__(self, space, w_callable, args, coro):
        _AppThunk.__init__(self, space, coro.costate, w_callable, args)
        #self._coro = coro
    def call(self):
        """Run the distributor script of a computation space.

        On failure the space's _solution is bound to a W_FailedValue
        and the space is failed; on success the script's return value
        (a list or tuple) becomes the solution and _choices is bound
        to 1 to wake an asker. _finished is always bound at the end."""
        space = self.space
        coro = AppCoroutine.w_getcurrent(space)
        assert isinstance(coro, AppCoroutine)
        cspace = coro._cspace
        w("-- initial DISTRIBUTOR thunk CALL in", str(id(coro)))
        sched.uler.trace_vars(coro, logic_args(self.args.unpack()))
        try:
            try:
                try:
                    _AppThunk.call(self)
                finally:
                    # re-fetch: the coroutine/space may have been cloned
                    coro = AppCoroutine.w_getcurrent(space)
                    assert isinstance(coro, AppCoroutine)
                    cspace = coro._cspace
            except FailedSpace, exc:
                w("-- EXIT of DISTRIBUTOR %s, space is FAILED with %s" % (id(coro),
                                                                         str(exc)))
                failed_value = W_FailedValue(exc)
                interp_bind(cspace._solution, failed_value)
            except Exception, exc:
                # maybe app_level let something buble up ...
                w("-- exceptional EXIT of DISTRIBUTOR %s with %s" % (id(coro),
                                                                    str(exc)))
                if not we_are_translated():
                    import traceback
                    traceback.print_exc()
                failed_value = W_FailedValue(exc)
                sched.uler.dirty_traced_vars(coro, failed_value)
                interp_bind(cspace._solution, failed_value)
                cspace.fail()
            else:
                w("-- clean EXIT of DISTRIBUTOR (success)", str(id(coro)))
                sol = cspace._solution
                assert isinstance(sol, W_Var)
                if interp_free(sol): # returning from a constraint/logic script
                    interp_bind(sol, self.costate.w_tempval)
                    outcome = sol.w_bound_to
                    if not (isinstance(outcome, W_ListObject) or \
                            isinstance(outcome, W_TupleObject)):
                        w("WARNING: return value type of the script was not a list or tuple, we fail the space.")
                        cspace.fail()
                        return
                assert interp_free(cspace._choices)
                interp_bind(cspace._choices, space.wrap(1))
        finally:
            interp_bind(cspace._finished, self.space.w_True)
            sched.uler.remove_thread(coro)
            sched.uler.schedule()
class PropagatorThunk(AbstractThunk):
    """Thread body of one concurrent constraint propagator: repeatedly
    revise the constraint until it is entailed or its space dies."""
    def __init__(self, space, w_constraint, coro):
        self.space = space
        self.coro = coro # XXX remove me
        self.const = w_constraint
    def call(self):
        """Revise in a loop; between revisions, block until one of the
        constraint's domains is pruned or the space finishes.
        ConsistencyError fails the space; always deregister at the end."""
        #coro = AppCoroutine.w_getcurrent(self.space)
        try:
            space = self.space
            cspace = self.coro._cspace
            const = self.const
            try:
                while 1:
                    if not interp_free(cspace._finished):
                        break
                    entailed = const.revise(cspace._domains)
                    if entailed:
                        break
                    # we will block on domains being pruned
                    wait_list = []
                    _doms = [cspace._domains[var]
                             for var in const._variables]
                    for dom in _doms:
                        #assert isinstance(dom, W_AbstractDomain)
                        wait_list.append(dom.one_shot_watcher())
                    #or the cspace being dead
                    wait_list.append(cspace._finished)
                    interp_wait_or(space, wait_list)
                    cspace = get_current_cspace(space) # might have been cloned
            except ConsistencyError:
                cspace.fail()
            except Exception: # rpython doesn't like just except:\n ...
                if not we_are_translated():
                    import traceback
                    traceback.print_exc()
        finally:
            sched.uler.remove_thread(self.coro)
            sched.uler.schedule()
| Python |
from pypy.module.cclp.variable import wait__Var, _assign_aliases, _entail
from pypy.module.cclp.types import W_Root, W_Var, W_CVar
from pypy.module.cclp.global_state import sched
from pypy.module.cclp.misc import w
def interp_free(w_var):
    """interp-level twin of is_free: a free var points into a W_Var ring"""
    assert isinstance(w_var, W_Var)
    return isinstance(w_var.w_bound_to, W_Var)
def interp_wait(space, obj):
    """interp-level wait: block until obj is bound, return its value"""
    return wait__Var(space, obj)
class RebindingError(Exception): pass
def interp_bind(w_var, w_obj):
    """interp-level bind: assign w_obj to w_var's whole alias ring;
    re-binding is only tolerated to the very same value."""
    # w_obj is NOT a W_Var
    if interp_free(w_var):
        return interp_assign_aliases(w_var, w_obj)
    if w_var.w_bound_to == w_obj:
        return
    raise RebindingError
class EntailmentError(Exception): pass
def interp_entail(w_v1, w_v2):
    """interp-level entailment w_v1 -> w_v2: propagate w_v1's value now
    if it has one, otherwise record the entailment for later."""
    assert isinstance(w_v1, W_Var)
    assert isinstance(w_v2, W_Var)
    w_v1val = w_v1.w_bound_to
    w_v2val = w_v2.w_bound_to
    if not interp_free(w_v1):
        if not interp_free(w_v2):
            # let's be simpler than unify
            if w_v1val != w_v2val:
                raise EntailmentError
        # NOTE(review): when w_v2 is already bound (and equal) this
        # still walks w_v2 as an alias ring -- confirm that path is
        # never taken with a value in w_v2.w_bound_to
        return interp_assign_aliases(w_v2, w_v1val)
    else:
        w_v1.entails[w_v2] = True
def interp_assign_aliases(w_var, w_val):
    """interp-level twin of _assign_aliases: assign w_val around w_var's
    alias ring, unblocking waiters, then propagate entailments."""
    #w("  :assign")
    assert isinstance(w_var, W_Var)
    w_curr = w_var
    while 1:
        w_next = w_curr.w_bound_to
        assert isinstance(w_next, W_Var)
        _assign(w_curr, w_val)
        # notify the blocked threads
        sched.uler.unblock_on(w_curr)
        if w_next is w_var:
            break
        # switch to next
        w_curr = w_next
    _assign_entailed(w_var, w_val)
    #w("  :assigned")
def _assign_entailed(w_var, w_val):
    """Propagate an assignment to every variable entailed by w_var.

    Free entailed variables get w_val assigned through their own alias
    ring; already-bound ones must carry the same value, otherwise the
    entailment is violated and EntailmentError is raised."""
    #w("  :assign entailed")
    for var in w_var.entails:
        assert isinstance(var, W_Var)
        if interp_free(var):
            interp_assign_aliases(var, w_val)
        else:
            # bug fix: check the *entailed* variable's value, not
            # w_var's -- the caller just assigned w_val to w_var's
            # ring, so the old test (w_var.w_bound_to != w_val) could
            # never fire.  mirrors the applevel twin in variable.py,
            # which unifies deref(var) with w_val.
            if var.w_bound_to != w_val:
                raise EntailmentError
def _assign(w_var, w_val):
    """interp-level single-variable assignment; constraint variables
    first check the value against their finite domain."""
    assert isinstance(w_var, W_Var)
    if isinstance(w_var, W_CVar):
        if not w_val in w_var.w_dom.domain.vlist:
            raise ValueError, "assignment out of domain"
    w_var.w_bound_to = w_val
def interp_wait_or(space, lvars):
    """Disjunctive wait: block until any one of lvars gets bound.
    Implemented by entailing every var into a fresh var O and waiting
    on O -- the first binding propagates to O and wakes us up."""
    assert isinstance(lvars, list)
    O = W_Var(space)
    for V in lvars:
        interp_entail(V, O)
    return interp_wait(space, O)
| Python |
# Package initialisation
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
    """
    This module implements concurrent constraint logic programming for applications.
    """
    # applevel helpers (defined in app.py)
    appleveldefs = {
        'make_expression':'app.make_expression'
        }
    # interp-level entry points, grouped by implementation module
    interpleveldefs = {
        'switch_debug_info':'misc.switch_debug_info',
        'future':'thread.future',
        'stacklet':'thread.stacklet',
        'this_thread':'thread.this_thread',
        'sched_info':'scheduler.sched_info',
        'schedule':'scheduler.schedule',
        'reset_scheduler':'scheduler.reset_scheduler',
        'newspace':'cspace.newspace',
        'dorkspace':'cspace.dorkspace',
        'choose':'cspace.choose',
        'tell':'cspace.tell',
        'distribute':'cspace.distribute',
        '_make_expression':'constraint.constraint._make_expression',
        'all_diff': 'constraint.constraint.make_alldistinct'
        }
| Python |
from pypy.rlib.objectmodel import we_are_translated
from pypy.interpreter import baseobjspace, gateway, argument, typedef
from pypy.interpreter.error import OperationError
from pypy.objspace.std.intobject import W_IntObject
from pypy.objspace.std.listobject import W_ListObject, W_TupleObject
from pypy.objspace.std.stringobject import W_StringObject
from pypy.module.cclp.misc import get_current_cspace, w, v
from pypy.module.cclp.thunk import CSpaceThunk, PropagatorThunk
from pypy.module.cclp.global_state import sched
from pypy.module.cclp.variable import newvar
from pypy.module.cclp.types import FailedSpace, ConsistencyError, W_Var, W_CVar
from pypy.module.cclp.interp_var import interp_bind, interp_free
from pypy.module.cclp.scheduler import W_ThreadGroupScheduler
from pypy.module._cslib import fd
from pypy.rlib.cslib import rdistributor as rd
from pypy.module._stackless.coroutine import AppCoroutine
import pypy.rlib.rgc as rgc
def gc_swap_pool(pool):
    """Swap the active GC pool (translated runs only); returns the
    previously active pool. Untranslated, a no-op returning None."""
    if we_are_translated():
        return rgc.gc_swap_pool(pool)
def gc_clone(data, pool):
    """Deep-clone 'data' together with its GC pool (translated only)."""
    if we_are_translated():
        return rgc.gc_clone(data, pool)
def newspace(space, w_callable, __args__):
    "application level creation of a new computation space"
    args = __args__.normalize()
    # allocate in a new pool so the space can later be cloned wholesale
    saved_pool = gc_swap_pool(None)
    dist_thread = AppCoroutine(space)
    thunk = CSpaceThunk(space, w_callable, args, dist_thread)
    dist_thread.bind(thunk)
    w_space = W_CSpace(space, dist_thread, saved_pool)
    w_space.goodbye_local_pool()
    # /allocate
    if not we_are_translated():
        w("NEWSPACE, (distributor) thread %d for %s" %
          ( id(dist_thread), str(w_callable.name) ) )
    return w_space
newspace.unwrap_spec=[baseobjspace.ObjSpace,
                      baseobjspace.W_Root,
                      argument.Arguments]
def dorkspace(space, w_callable, __args__):
    "application level creation of a new complicated computation space"
    args = __args__.normalize()
    dist_thread = AppCoroutine(space)
    thunk = CSpaceThunk(space, w_callable, args, dist_thread)
    dist_thread.bind(thunk)
    # allocate the space in its own pool so it can be cloned later
    saved_pool = gc_swap_pool(None)
    try:
        w_space = W_ComplicatedSpace(space, dist_thread, saved_pool)
        w_space.goodbye_local_pool()
    except Exception:
        # 'except Exception' instead of a bare except: rpython rejects
        # the bare form (see PropagatorThunk) and this also avoids
        # swallowing SystemExit/KeyboardInterrupt; restore the previous
        # pool before reporting failure.
        gc_swap_pool(saved_pool)
        raise OperationError(space.w_RuntimeError,
                             space.wrap("Unknown error in dorkspace"))
    if not we_are_translated():
        w("NEWSPACE, (distributor) thread %d for %s" %
          ( id(dist_thread), str(w_callable.name) ) )
    return w_space
dorkspace.unwrap_spec=[baseobjspace.ObjSpace,
                       baseobjspace.W_Root,
                       argument.Arguments]
def choose(space, w_n):
"non deterministic choice from within a c.space"
if not isinstance(w_n, W_IntObject):
raise OperationError(space.w_TypeError,
space.wrap('choose only accepts an integer.'))
n = space.int_w(w_n)
if n < 2:
raise OperationError(space.w_ValueError,
space.wrap("choose takes an int > 1"))
# XXX sanity check for 1 <= n <= last_choice
cspace = get_current_cspace(space)
if not isinstance(cspace, W_CSpace):
raise OperationError(space.w_TypeError,
space.wrap('choose does not work from within '
'the top-level computatoin space.'))
if not interp_free(cspace._finished):
raise OperationError(space.w_RuntimeError,
space.wrap("this space is finished"))
try:
return cspace.choose(n)
except Exception, e:
if not we_are_translated():
import traceback
traceback.print_exc()
w('whack whack whack')
raise OperationError(space.w_RuntimeError,
space.wrap("something wacky happened %s" % e))
choose.unwrap_spec = [baseobjspace.ObjSpace,
baseobjspace.W_Root]
from pypy.module._cslib.constraint import W_AbstractConstraint
def tell(space, w_constraint):
    "adding a constraint to a c.space (from within)"
    if not isinstance(w_constraint, W_AbstractConstraint):
        raise OperationError(space.w_TypeError,
                             space.wrap('Tell only accepts object of '
                                        '(sub-)types Constraint.'))
    # hand the interp-level constraint to the current space's store
    get_current_cspace(space).tell(w_constraint.constraint)
tell.unwrap_spec = [baseobjspace.ObjSpace,
                    baseobjspace.W_Root]
def distribute(space, w_strategy):
    """Run the current space's propagation/distribution main loop with
    the named strategy ('dichotomy' or 'allornothing')."""
    assert isinstance(w_strategy, W_StringObject)
    strat = space.str_w(w_strategy)
    cspace = get_current_cspace(space)
    # constraint distributor thread main loop
    cspace.distribute(strat)
distribute.unwrap_spec = [baseobjspace.ObjSpace,
                          baseobjspace.W_Root]
# base space
# non concurrent propagators
# hence much less weird synchronization stuff
# a specific pool object
# XXX maybe use a descr_method__new__ to create the pool before allocation
class W_CSpace(W_ThreadGroupScheduler):
local_pool = None
def dump(self):
w('-- DUMPing C.Space data --')
w(':local pool %s' % id(self.local_pool))
w(':saved pool %s' % id(self.saved_pool))
v(':threads :')
curr = stop = self._head
while 1:
v('%s ' % id(curr))
curr = curr._next
if curr == stop:
break
w('')
v(':blocked :')
for th in self._blocked.keys():
v('%s ' % id(th))
w('')
w(':blocked_on')
for var, thl in self._blocked_on.items():
v(' var %s : ' % id(var))
for th in thl:
v('%s ' % id(th))
w('')
w(':blocked_byneed')
for var, thl in self._blocked_byneed.items():
v(' var %s : ' % id(var))
for th in thl:
v('%s ' % id(th))
w('')
w(':traced vars')
for th, varl in self._traced.items():
v(' thread %s : ' % id(th))
for var in varl:
v('%s ' % id(var))
w('')
w('-- /DUMP --')
def __init__(self, space, dist_thread, saved_pool):
W_ThreadGroupScheduler.__init__(self, space)
# pool
self.local_pool = None
self.saved_pool = saved_pool
# /pool
# thread ring
dist_thread._cspace = self
self._init_head(dist_thread)
# /ring
sched.uler.add_new_group(self)
# choice mgmt
self._choices = newvar(space)
self._committed = newvar(space)
# status, merging
self._solution = newvar(space)
self._failed = False
self._merged = False
self._finished = newvar(space)
# constraint store ...
self._constraints = []
self._domains = {} # varname -> domain
self._variables = [] # varnames
self._varconst = {} # varname -> constraints
self._cqueue = [] # constraint queue to be processed
#-- POOL & cloning stuff
def hello_local_pool(self):
if we_are_translated():
self.saved_pool = gc_swap_pool(self.local_pool)
def goodbye_local_pool(self):
if we_are_translated():
self.local_pool = gc_swap_pool(self.saved_pool)
self.saved_pool = None
def w_clone(self):
# all this stuff is created in the local pool so that
# gc_clone can clone it. every object we want to clone
# must be reachable through objects allocated in this
# local pool via the data tuple.
self.report_bad_condition_to_applevel()
head = curr = self._head
new_shells = []
# within new POOL
self.hello_local_pool()
coroutines_to_clone = []
while 1:
coroutines_to_clone.append((curr, curr.frame, curr.subctx))
self.goodbye_local_pool()
# outside new POOL, build new fresh coro shells
new = AppCoroutine(self.space, state = curr.costate)
new.parent = curr.parent
new_shells.append(new)
self.hello_local_pool()
# /outside
curr = curr._next
if curr == head:
break
data = (self, coroutines_to_clone)
# /within new POOL
self.goodbye_local_pool()
(copy, copied_coros), copypool = gc_clone(data, self.local_pool)
copy.local_pool = copypool
copy.finalize_cloning(copied_coros, new_shells)
sched.uler.add_new_group(copy)
self.dump()
copy.dump()
return self.space.wrap(copy)
def finalize_cloning(self, copied_coros, new_shells):
# We need to walk all threads references from this cloned
# space and replace
# 1. our cloned thread gets a new thread ID
w('finalize cloning in c.space %s' % id(self))
self._head = None
for i in range(len(copied_coros)):
coro, cloned_frame, cloned_subctx = copied_coros[i]
# bolt cloned stuff on new coro shells
cloned_coro = new_shells[i]
cloned_coro.frame = cloned_frame
cloned_coro.subctx = cloned_subctx
cloned_coro._cspace = self
cloned_coro.thunk = coro.thunk
self.replace_thread(coro, cloned_coro)
def replace_thread(self, old, new):
# walk the list of _blocked threads:
if old in self._blocked.keys():
w('blocked : %s replaced %s' % (id(new), id(old)))
del self._blocked[old]
self._blocked[new] = True
# walk the mappings var->threads
for w_var in self._blocked_on:
threads = self._blocked_on[w_var]
for k in range(len(threads)):
if threads[k] is old:
w('blocked_on : %s replaced %s' % (id(new), id(old)))
threads[k] = new
for w_var in self._blocked_byneed:
threads = self._blocked_byneed[w_var]
for k in range(len(threads)):
if threads[k] is old:
w('blocked_byneed : %s replaced %s' % (id(new), id(old)))
threads[k] = new
# handled traced thread
for th in self._traced.keys():
if th is old:
lvars = self._traced[th]
del self._traced[th]
self._traced[new] = lvars
# insert the thread in the linked list
if self._head is None:
w('head was initialized with %s' % id(new))
self._head = new._next = new._prev = new
else:
w('%s was inserted in the runqueue' % id(new))
r = self._head
l = r._prev
l._next = new
r._prev = new
new._prev = l
new._next = r
assert new._next is not new
assert new._prev is not new
def _newvar(self):
"""
ensure a new space-local variable is allocated
in the right space/pool
"""
self.hello_local_pool()
var = newvar(self.space)
self.goodbye_local_pool()
return var
#-- / POOL
def register_var(self, cvar):
name = cvar.name
dom = cvar.w_dom.domain
self._domains[name] = dom
self._varconst[name] = []
def tell(self, rconst):
w('telling %s' % rconst)
self._constraints.append(rconst)
for var in rconst._variables:
self._varconst[var].append(rconst)
def untell(self, constraint):
"entailed constraint are allowed to go away"
self._constraints.remove(constraint)
for var in constraint._variables:
self._varconst[var].remove(constraint)
def distributable(self):
for dom in self._domains.values():
if dom.size() > 1:
return True
return False
def distribute(self, strat):
w('SP:start constraint propagation & distribution loop')
space = self.space
if strat == 'dichotomy':
dist = rd.DichotomyDistributor()
elif strat == 'allornothing':
dist = rd.AllOrNothingDistributor()
else:
raise OperationError(space.w_RuntimeError,
space.wrap("please pick a strategy in "
"(allornothing, dichotomy)."))
# initialize constraint queue
self._cqueue = [(constr.estimate_cost(self._domains), constr)
for constr in self._constraints]
self.wait_stable() # hmm
self.propagate()
while self.distributable():
w('SP:distribute loop')
w_choice = self.choose(2) # yes, two, always, all the time
choice = space.int_w(w_choice)
small_dom_var = dist.find_smallest_domain(self._domains)
dom = self._domains[small_dom_var]
dist._distribute_on_choice(dom, choice)
for constraint in self._varconst[small_dom_var]:
self._cqueue.append((0, constraint)) # *uck the cost
dom._changed = False
self.propagate()
def propagate(self): # XXX pasted from rlib.cslib.rpropagation, mixin me
"""Prunes the domains of the variables
This method calls constraint.narrow() and queues constraints
that are affected by recent changes in the domains.
Returns True if a solution was found"""
# XXX : _queue.sort()
w('SP:propagating')
_queue = self._cqueue
_affected_constraints = {}
while True:
if not _queue:
# refill the queue if some constraints have been affected
_queue = [(constr.estimate_cost(self._domains), constr)
for constr in _affected_constraints]
if not _queue:
break
# XXX _queue.sort()
_affected_constraints.clear()
cost, constraint = _queue.pop(0)
entailed = constraint.revise(self._domains)
for var in constraint._variables:
# affected constraints are listeners of
# affected variables of this constraint
dom = self._domains[var]
if not dom._changed: # XXX
continue
for constr in self._varconst[var]:
if constr is not constraint:
_affected_constraints[constr] = True
dom._changed = False
if entailed:
self.untell(constraint)
if constraint in _affected_constraints:
del _affected_constraints[constraint]
for domain in self._domains.values():
if domain.size() != 1:
return 0
return 1
#-- Public ops
def report_bad_condition_to_applevel(self):
"""
a couple of checks for methods on spaces
but forbidden within
"""
currspace = get_current_cspace(self.space)
if currspace is self:
raise OperationError(self.space.w_RuntimeError,
self.space.wrap("you can't do this operation"
"on the current computation space"))
if not interp_free(self._finished):
raise OperationError(self.space.w_RuntimeError,
self.space.wrap("space is finished"))
def w_ask(self):
try:
self.report_bad_condition_to_applevel()
except: # we're dead, let's avoid wait_stable ...
return self.space.wrap(self._last_choice)
self.wait_stable()
self.space.wait(self._choices)
choices = self._choices.w_bound_to
self._choices = self._newvar()
assert isinstance(choices, W_IntObject)
self._last_choice = self.space.int_w(choices)
return choices
def choose(self, n):
assert interp_free(self._choices)
assert interp_free(self._committed)
# XXX we wrap it a bit prematurely, so as to satisfy
# type requirements (logic variables only accept W_Roots)
interp_bind(self._choices, self.space.wrap(n)) # unblock the solver
# now we wait on a solver commit
self.space.wait(self._committed)
committed = self._committed.w_bound_to
self._committed = newvar(self.space)
return committed
def w_commit(self, w_n):
self.report_bad_condition_to_applevel()
if not isinstance(w_n, W_IntObject):
raise OperationError(self.space.w_TypeError,
self.space.wrap('commit accepts only ints.'))
n = self.space.int_w(w_n)
assert interp_free(self._committed)
if n < 1 or n > self._last_choice:
raise OperationError(self.space.w_ValueError,
self.space.wrap("need 0<commit<=%d" %
self._last_choice))
interp_bind(self._committed, w_n)
def fail(self):
self._failed = True
space = self.space
interp_bind(self._finished, space.w_True)
interp_bind(self._choices, space.wrap(0))
def w_fail(self):
self.fail()
def is_failed(self):
return self._failed
def w_merge(self):
if self._merged:
return self._solution
# let's bind the solution variables
self._merged = True
sol = self._solution.w_bound_to
if isinstance(sol, W_ListObject):
self._bind_solution_variables(sol.wrappeditems)
elif isinstance(sol, W_TupleObject):
self._bind_solution_variables(sol.wrappeditems)
return self._solution
#-- / Public ops
def __ne__(self, other):
if other is self:
return False
return True
    def _bind_solution_variables(self, solution):
        """Bind each constraint variable of 'solution' to the single value
        left in its finite domain (domains must be reduced to size 1)."""
        if contains_cvar(solution): # was a constraint script
            for var in solution:
                assert isinstance(var, W_CVar)
                dom = self._domains[var.name]
                assert isinstance(dom, fd._FiniteDomain)
                assert dom.size() == 1
                interp_bind(var, dom.get_wvalues_in_rlist()[0])
def contains_cvar(lst):
    """Return True if 'lst' (a solution list) holds constraint variables.

    Only the first element is inspected: a solution either consists of
    W_CVars (constraint script) or contains none at all.  An empty
    solution trivially contains none (previously this raised IndexError).
    """
    if not lst:
        return False
    return isinstance(lst[0], W_CVar)
# Expose W_CSpace to application level; only these five methods are
# reachable from app-level code.
W_CSpace.typedef = typedef.TypeDef("W_CSpace",
    ask = gateway.interp2app(W_CSpace.w_ask),
    commit = gateway.interp2app(W_CSpace.w_commit),
    clone = gateway.interp2app(W_CSpace.w_clone),
    merge = gateway.interp2app(W_CSpace.w_merge),
    fail = gateway.interp2app(W_CSpace.w_fail))
import pypy.rlib.cslib.rdistributor as rd
from pypy.module.cclp.constraint.domain import _DorkFiniteDomain
class W_ComplicatedSpace(W_CSpace):
    """A computation space that runs one concurrent propagator thread per
    constraint.  Kept for reference: it performs poorly and is needlessly
    complicated compared to the plain W_CSpace."""

    def __init__(self, space, dist_thread, saved_pool):
        # note: deliberately skips W_CSpace.__init__ and goes straight to
        # the thread-group base class
        W_ThreadGroupScheduler.__init__(self, space)
        # thread ring
        dist_thread._cspace = self
        self._init_head(dist_thread)
        # /ring
        # pool
        self.local_pool = None
        self.saved_pool = saved_pool
        # /pool
        sched.uler.add_new_group(self)
        self.dist = None # dist instance != thread
        # choice mgmt
        self._choices = newvar(space)
        self._committed = newvar(space)
        # status, merging
        self._solution = newvar(space)
        self._failed = False
        self._merged = False
        self._finished = newvar(space)
        # constraint store ...
        self._store = {} # name -> var
        self._domains = {} # varname -> domain

    def register_var(self, cvar):
        """Record a constraint variable and swap its domain for a
        'dork-style' finite domain copy."""
        name = cvar.name
        self._store[name] = cvar
        # let's switch to dork-style finite domains
        basic_dom = cvar.w_dom.domain
        dom = _DorkFiniteDomain(self.space,
                                basic_dom.vlist,
                                basic_dom._values)
        cvar.w_dom.domain = dom
        self._domains[name] = dom

    def distribute(self, strat):
        """Drive the search: repeatedly wait for stability, ask the user
        for a binary choice, and apply it to the smallest domain.
        'strat' selects the rlib distributor implementation."""
        space = self.space
        if strat == 'dichotomy':
            dist = rd.DichotomyDistributor()
        elif strat == 'allornothing':
            dist = rd.AllOrNothingDistributor()
        else:
            raise OperationError(space.w_RuntimeError,
                                 space.wrap("please pick a strategy in "
                                            "(allornothing, dichotomy)."))
        self.wait_stable()
        while self.distributable():
            w_choice = self.choose(2)
            choice = space.int_w(w_choice)
            small_dom_var = dist.find_smallest_domain(self._domains)
            dom = self._domains[small_dom_var]
            w('ABOUT TO DISTRIBUTE')
            dist._distribute_on_choice(dom, choice)
            self.wait_stable()

    def tell(self, constraint):
        """Install 'constraint' as a dedicated propagator coroutine in
        this space's thread ring."""
        space = self.space
        w_coro = AppCoroutine(space)
        # make the coroutine a self-ring before insertion
        w_coro._next = w_coro._prev = w_coro
        w_coro._cspace = self
        thunk = PropagatorThunk(space, constraint, w_coro)
        w_coro.bind(thunk)
        self.add_new_thread(w_coro)

    def _bind_solution_variables(self, solution):
        # unlike the base class, look the variable up in our own store
        # and use its (dork) domain
        if contains_cvar(solution): # was a constraint script
            for var in solution:
                assert isinstance(var, W_CVar)
                realvar = self._store[var.name]
                dom = realvar.w_dom.domain
                assert isinstance(dom, fd._FiniteDomain)
                assert dom.size() == 1
                interp_bind(var, dom.get_wvalues_in_rlist()[0])
| Python |
from pypy.rlib.objectmodel import we_are_translated
from pypy.interpreter.error import OperationError
from pypy.interpreter import gateway, baseobjspace
from pypy.objspace.std.listobject import W_ListObject
from pypy.module.cclp.types import W_Var, W_FailedValue, aliases
from pypy.module.cclp.misc import w, v, AppCoroutine, get_current_cspace
from pypy.module.cclp.global_state import sched
#-- Singleton scheduler ------------------------------------------------
# every coroutine carries a pointer to its home computation space;
# default it to None at class level
AppCoroutine._cspace = None
class TopLevelScheduler(object):
    """Round-robin scheduler over computation spaces (thread groups),
    kept as a doubly-linked ring rooted at self._head.  Also tracks, per
    group, the set of threads waiting for that group's stability."""
    # we are dealing with cspaces

    def __init__(self, space, top_level_space):
        w("NEW TOPLEVEL SCHEDULER", str(id(self)), "with", str(id(top_level_space)))
        self.space = space
        sched.main_thread._cspace = top_level_space
        self._switch_count = 0
        # ring of one: the top-level space points at itself
        self._head = top_level_space
        self._head._next = self._head._prev = self._head
        # asking for stability
        self._asking = {} # cspace -> thread set
        self._asking[top_level_space] = {} # XXX

    def _chain_insert(self, group):
        """Insert 'group' just before the head (i.e. at the ring's end)."""
        assert isinstance(group, W_ThreadGroupScheduler), "type error"
        assert isinstance(group._next, W_ThreadGroupScheduler), "type error"
        assert isinstance(group._prev, W_ThreadGroupScheduler), "type error"
        r = self._head
        l = r._prev
        l._next = group
        r._prev = group
        group._prev = l
        group._next = r

    def schedule(self):
        """Pick the next runnable group and switch to it, swapping the
        per-space object pools on a real switch."""
        running = self._head
        to_be_run = self._select_next()
        #w(".. SWITCHING (spaces)", str(id(get_current_cspace(self.space))), "=>", str(id(to_be_run)))
        self._switch_count += 1
        if to_be_run != running:
            running.goodbye_local_pool()
            to_be_run.hello_local_pool()
        to_be_run.schedule()

    def _select_next(self):
        """Walk the ring from the head looking for a non-blocked (or at
        least runnable) group; raise AllBlockedError after a full lap."""
        to_be_run = self._head
        sentinel = to_be_run
        while to_be_run.is_blocked():
            # check stability + asking status, give a chance to run
            if to_be_run.is_runnable():
                break
            to_be_run = to_be_run._next
            if to_be_run == sentinel:
                # full lap without progress: deadlock; reset and report
                reset_scheduler(self.space)
                w(".. SCHEDULER reinitialized")
                raise OperationError(self.space.w_AllBlockedError,
                                     self.space.wrap("can't schedule, probable deadlock in sight"))
        self._head = to_be_run
        return to_be_run

    def add_new_group(self, group):
        "insert 'group' at end of running queue"
        assert isinstance(group, W_ThreadGroupScheduler), "type error"
        w(".. ADDING group : %d" % id(group))
        self._asking[group] = {}
        self._chain_insert(group)

    def remove_group(self, group):
        """Unlink 'group' from the ring and unblock every thread that was
        waiting for its stability."""
        assert isinstance(group, W_ThreadGroupScheduler), "type error"
        w(".. REMOVING group %d" % id(group) )
        l = group._prev
        r = group._next
        l._next = r
        r._prev = l
        self._head = r
        if r == group:
            # IS AN ERROR: removing the last group of the ring
            if not we_are_translated():
                import traceback
                traceback.print_exc()
            raise OperationError(self.space.w_RuntimeError,
                                 self.space.wrap("BUG in remove_group"))
        group._next = group._prev = None
        # unblock all threads asking stability of this group
        for th in self._asking[group]:
            del th._cspace._blocked[th]
            th._cspace.blocked_count -= 1
        del self._asking[group]

    def add_to_blocked_on(self, w_var):
        """Block the current thread on w_var (delegated to its space)."""
        assert isinstance(w_var, W_Var), "type error"
        thread = AppCoroutine.w_getcurrent(self.space)
        get_current_cspace(self.space).add_to_blocked_on(w_var, thread)

    def unblock_on(self, w_var):
        """Wake, in every group of the ring, the threads blocked on w_var."""
        #XXX optimize me
        curr = stop = self._head
        while 1:
            curr.unblock_on(w_var)
            curr = curr._next
            if curr == stop:
                break

    #XXX sync the un/block byneed stuff with above, later
    def add_to_blocked_byneed(self, w_var):
        """Block the current thread until w_var becomes needed."""
        assert isinstance(w_var, W_Var), "type error"
        thread = AppCoroutine.w_getcurrent(self.space)
        get_current_cspace(self.space).add_to_blocked_byneed(w_var, thread)

    def unblock_byneed_on(self, w_var):
        """Signal that w_var is needed (only in the current space)."""
        get_current_cspace(self.space).unblock_byneed(w_var)

    # delegated to thread group
    def add_new_thread(self, thread):
        tg = get_current_cspace(self.space)
        tg.add_new_thread(thread)

    def remove_thread(self, thread):
        tg = get_current_cspace(self.space)
        tg.remove_thread(thread)

    def trace_vars(self, thread, lvars):
        tg = get_current_cspace(self.space)
        tg.trace_vars(thread, lvars)

    def dirty_traced_vars(self, thread, failed_value):
        tg = get_current_cspace(self.space)
        tg.dirty_traced_vars(thread, failed_value)

    def wait_stable(self):
        tg = get_current_cspace(self.space)
        tg.wait_stable()

    # statistics
    def sched_info(self):
        """Build an app-level dict: id(group) -> group stats, plus
        'blocked', 'blocked_on' and 'blocked_byneed' summaries."""
        s = self.space
        si = self.space.setitem
        w_all = s.newdict()
        si(w_all, s.newint(id(self._head)), self._head.group_info())
        assert isinstance(self._head, W_ThreadGroupScheduler), "type error"
        curr = self._head._next
        while curr != self._head:
            assert isinstance(curr, W_ThreadGroupScheduler), "type error"
            si(w_all, s.newint(id(curr)), curr.group_info())
            curr = curr._next
        si(w_all, s.wrap('blocked'), self.w_blocked())
        si(w_all, s.wrap('blocked_on'), self.w_blocked_on())
        si(w_all, s.wrap('blocked_byneed'), self.w_blocked_byneed())
        return w_all

    def all_blocked(self):
        """Collect the blocked threads of every group in the ring."""
        curr = stop = self._head
        blist = []
        while 1:
            blist.extend(curr._blocked.keys())
            curr = curr._next
            if curr == stop:
                break
        return blist

    def w_blocked(self):
        s = self.space
        w_b = W_ListObject([s.newint(id(th))
                            for th in self.all_blocked()])
        return w_b

    def all_blocked_on(self):
        """Collect (var, [threads]) pairs for variable-blocked threads."""
        curr = stop = self._head
        blist = []
        while 1:
            blist.extend(curr._blocked_on.items())
            curr = curr._next
            if curr == stop:
                break
        return blist

    def w_blocked_on(self):
        s = self.space
        si = s.setitem
        w_bo = s.newdict()
        for var, thl in self.all_blocked_on():
            w_l = W_ListObject([s.newint(id(th))
                                for th in thl])
            si(w_bo, s.wrap(var.__repr__()), w_l)
        return w_bo

    def all_blocked_byneed(self):
        """Collect (var, [threads]) pairs for byneed-blocked threads."""
        curr = stop = self._head
        blist = []
        while 1:
            blist.extend(curr._blocked_byneed.items())
            curr = curr._next
            if curr == stop:
                break
        return blist

    def w_blocked_byneed(self):
        s = self.space
        si = s.setitem
        w_bb = s.newdict()
        for var, thl in self.all_blocked_byneed():
            w_l = W_ListObject([s.newint(id(th))
                                for th in thl])
            si(w_bb, s.wrap(var.__repr__()), w_l)
        return w_bb
#-- Thread Group scheduler --------------------------------------
class W_ThreadGroupScheduler(baseobjspace.Wrappable):
    """A group of coroutines (a computation space's threads) kept in a
    doubly-linked ring, with bookkeeping of blocked threads.  A group is
    'stable' when every thread is blocked (or the group failed)."""

    def __init__(self, space):
        self.space = space
        self._switch_count = 0
        self._traced = {} # thread -> vars
        self.thread_count = 1
        self.blocked_count = 0
        # head thread
        self._head = None
        # thread group ring
        self._next = self
        self._prev = self
        # accounting for blocked stuff
        self._blocked = {} # thread -> True
        self._blocked_on = {}      # var -> [threads] waiting for binding
        self._blocked_byneed = {}  # var -> [threads] waiting for neededness

    def _init_head(self, thread):
        "sets the initial ring head"
        self._head = thread
        thread._next = thread._prev = thread
        w("HEAD (main) THREAD = ", str(id(self._head)))

    def _chain_insert(self, thread):
        """Insert 'thread' just before the head (ring's end)."""
        assert isinstance(thread, AppCoroutine), "type error"
        assert isinstance(thread._next, AppCoroutine), "type error"
        assert isinstance(thread._prev, AppCoroutine), "type error"
        r = self._head
        l = r._prev
        l._next = thread
        r._prev = thread
        thread._prev = l
        thread._next = r

    def hello_local_pool(self):
        # no per-space pool at this level; subclasses may override
        pass

    def goodbye_local_pool(self):
        pass

    def register_var(self, var):
        """Constraint variables are only legal in child spaces."""
        space = self.space
        raise OperationError(space.w_AssertionError,
                             space.wrap('You cannot create a constraint variable '
                                        'in the top-level computation space.'))

    def is_blocked(self):
        return self.thread_count == self.blocked_count

    def is_failed(self):
        # the top-level group never fails; overridden by spaces
        return False

    def is_stable(self):
        # second approx.
        return self.is_blocked() or self.is_failed()

    def is_runnable(self):
        """Runnable if not stable, or if some thread of this very group
        is waiting for its stability (it can now be resumed)."""
        if not self.is_stable():
            return True
        asking_from_within = [th for th in sched.uler._asking[self]
                              if th._cspace == self]
        return len(asking_from_within)

    def wait_stable(self):
        """Block the current thread until this group becomes stable."""
        w("WAIT_STABLE on space %d from space %d" % (id(self),
                                                     id(get_current_cspace(self.space))))
        if self.is_stable():
            return
        curr = AppCoroutine.w_getcurrent(self.space)
        if not isinstance(curr, AppCoroutine):
            w("current coro is not an AppCoroutine ???")
            assert False, "type error"
        asking = sched.uler._asking
        if self in asking:
            asking[self][curr] = True
        else:
            asking[self] = {curr:True}
        curr._cspace._blocked[curr] = True #XXX semantics please ?
        curr._cspace.blocked_count += 1
        sched.uler.schedule()

    def schedule(self):
        """Switch to the next runnable thread of this group (no-op if it
        is the current one)."""
        to_be_run = self._select_next()
        if to_be_run == AppCoroutine.w_getcurrent(self.space):
            return
        assert isinstance(to_be_run, AppCoroutine), "type error"
        #w(".. SWITCHING (treads)", str(id(AppCoroutine.w_getcurrent(self.space))), "=>", str(id(to_be_run)))
        self._switch_count += 1
        to_be_run.w_switch()

    def _select_next(self):
        """Walk the thread ring (starting after the head) for a
        non-blocked thread; when the group is stable, unblock the
        threads asking for its stability instead."""
        to_be_run = self._head._next
        sentinel = to_be_run
        while to_be_run in self._blocked:
            if self.is_stable() and to_be_run in sched.uler._asking[self]:
                for th in sched.uler._asking[self]:
                    del th._cspace._blocked[th]
                    th._cspace.blocked_count -= 1
                sched.uler._asking[self] = {}
                break
            assert isinstance(to_be_run, AppCoroutine), "type error"
            to_be_run = to_be_run._next
            if to_be_run == sentinel:
                # NOTE(review): a full lap with nothing runnable drops
                # into the debugger when untranslated, then keeps
                # looping — confirm this is unreachable by construction
                if not we_are_translated():
                    import pdb
                    pdb.set_trace()
        self._head = to_be_run
        return to_be_run

    def add_to_blocked_on(self, w_var, thread):
        """Register 'thread' as blocked until w_var gets bound."""
        w(".. we BLOCK thread", str(id(thread)), "on var", w_var.__repr__())
        assert isinstance(thread, AppCoroutine), "type error"
        assert thread not in self._blocked
        if w_var in self._blocked_on:
            blocked = self._blocked_on[w_var]
        else:
            blocked = []
            self._blocked_on[w_var] = blocked
        blocked.append(thread)
        self._blocked[thread] = True
        # stability, accounting, etc
        self._post_blocking(thread)

    def unblock_on(self, w_var):
        """Wake every thread of this group blocked on w_var."""
        assert isinstance(w_var, W_Var), "type error"
        blocked = []
        if w_var in self._blocked_on:
            blocked = self._blocked_on[w_var]
            del self._blocked_on[w_var]
        else:
            return
        w(".. we UNBLOCK threads dependants of var", w_var.__repr__(),
          str([id(thr) for thr in blocked]))
        for thr in blocked:
            del self._blocked[thr]
            thr._cspace.blocked_count -= 1

    def add_to_blocked_byneed(self, w_var, thread):
        """Register 'thread' as blocked until w_var becomes needed."""
        assert isinstance(thread, AppCoroutine), "type error"
        if w_var in self._blocked_byneed:
            blocked = self._blocked_byneed[w_var]
        else:
            blocked = []
            self._blocked_byneed[w_var] = blocked
        blocked.append(thread)
        self._blocked[thread] = True
        self._post_blocking(thread)

    def unblock_byneed(self, w_var):
        """Mark w_var (and all its aliases) needed and wake the threads
        blocked byneed on any of them."""
        assert isinstance(w_var, W_Var), "type error"
        blocked = []
        for w_alias in aliases(self.space, w_var):
            if w_alias in self._blocked_byneed:
                blocked += self._blocked_byneed[w_alias]
                del self._blocked_byneed[w_alias]
            w_alias.needed = True
        if not blocked:
            return
        w(".. we UNBLOCK BYNEED dependants of var", w_var.__repr__(),
          str([id(thr) for thr in blocked]))
        for thr in blocked:
            del self._blocked[thr]
            thr._cspace.blocked_count -= 1

    def _post_blocking(self, thread):
        # check that those asking for stability in the home space
        # of the thread can be unblocked
        home = thread._cspace
        home.blocked_count += 1
        if home.is_stable():
            v('(post-blocking) may UNBLOCK asker: ')
            for th in sched.uler._asking[home].keys():
                # these asking threads must be unblocked, in their
                # respective home spaces
                v(str(id(th)))
                del th._cspace._blocked[th]
                th._cspace.blocked_count -= 1
            w('')
            sched.uler._asking[home] = {}

    def distribute(self, dist):
        raise OperationError(self.space.w_RuntimeError,
                             self.space.wrap("You can't distribute a top-level space."))

    def add_new_thread(self, thread):
        "insert 'thread' at end of running queue"
        w(".. ADDING thread %d to group %d" % ( id(thread), id(self)))
        assert isinstance(thread, AppCoroutine), "type error"
        self._chain_insert(thread)
        self.thread_count += 1

    def remove_thread(self, thread):
        """Unlink 'thread' from the ring; dispose of the whole group when
        its last thread goes away."""
        assert isinstance(thread, AppCoroutine)
        w(".. REMOVING thread %d" % id(thread))
        assert thread not in thread._cspace._blocked
        try:
            del self._traced[thread]
        except KeyError:
            w(".. removing non-traced thread")
        l = thread._prev
        r = thread._next
        l._next = r
        r._prev = l
        self._head = r
        if r == thread:
            # that means thread was the last one
            # the group is about to die
            pass
        thread._next = thread._prev = None
        self.thread_count -= 1
        if self.thread_count == 0:
            sched.uler.remove_group(self)

    # Logic Variables tracing, "accelerates" exception propagation
    # amongst threads
    def trace_vars(self, thread, lvars):
        assert isinstance(thread, AppCoroutine), "type error"
        assert isinstance(lvars, list), "type error"
        #w(".. TRACING logic vars.", str(lvars), "for", str(id(thread)))
        self._traced[thread] = lvars

    def dirty_traced_vars(self, thread, failed_value):
        """Bind every still-free variable traced by 'thread' to the
        failure value, so readers see the failure promptly."""
        assert isinstance(thread, AppCoroutine)
        assert isinstance(failed_value, W_FailedValue)
        #w(".. DIRTYING traced vars")
        for w_var in self._traced[thread]:
            if self.space.is_true(self.space.is_free(w_var)):
                self.space.bind(w_var, failed_value)

    def w_threads(self):
        """App-level list of the ids of this group's threads."""
        s = self.space
        thl = [s.newint(id(self._head))]
        assert isinstance(self._head, AppCoroutine)
        curr = self._head._next
        while curr != self._head:
            assert isinstance(curr, AppCoroutine)
            thl.append(s.newint(id(curr)))
            curr = curr._next
        w_t = W_ListObject(thl)
        return w_t

    def w_asking(self):
        """App-level list of ids of threads asking for our stability,
        or None when there are none."""
        asking = sched.uler._asking.get(self, None)
        if not asking:
            return self.space.w_None
        return W_ListObject([self.space.newint(id(th))
                             for th in asking.keys()])

    def group_info(self):
        """App-level dict with switch count, threads and askers."""
        s = self
        si = self.space.setitem
        sw = self.space.wrap
        w_ret = self.space.newdict()
        si(w_ret, sw('switches'), self.space.newint(s._switch_count))
        si(w_ret, sw('threads'), s.w_threads())
        si(w_ret, sw('asking'), s.w_asking())
        return w_ret
#-- Misc --------------------------------------------------
def reset_scheduler(space):
    """Re-create the singleton scheduler from scratch, with a fresh
    top-level thread group headed by the main thread."""
    tg = W_ThreadGroupScheduler(space)
    sched.uler = TopLevelScheduler(space, tg)
    tg._init_head(sched.main_thread)
reset_scheduler.unwrap_spec = [baseobjspace.ObjSpace]
def sched_info(space):
    """App-level entry point: scheduler statistics as a dict."""
    return sched.uler.sched_info()
sched_info.unwrap_spec = [baseobjspace.ObjSpace]
def schedule(space):
    "useful until we get preemptive scheduling deep into the vm"
    sched.uler.schedule()
schedule.unwrap_spec = [baseobjspace.ObjSpace]
| Python |
# NOT_RPYTHON
from _structseq import structseqtype, structseqfield
# app-level posix.error is an alias of the builtin OSError
error = OSError
class stat_result:
    """App-level stat result: a structseq (tuple-like, with named
    fields) matching CPython's os.stat_result layout for the first
    ten fields."""
    __metaclass__ = structseqtype

    st_mode  = structseqfield(0, "protection bits")
    st_ino   = structseqfield(1, "inode")
    st_dev   = structseqfield(2, "device")
    st_nlink = structseqfield(3, "number of hard links")
    st_uid   = structseqfield(4, "user ID of owner")
    st_gid   = structseqfield(5, "group ID of owner")
    st_size  = structseqfield(6, "total size, in bytes")
    st_atime = structseqfield(7, "time of last access (XXX as an int)")
    st_mtime = structseqfield(8, "time of last modification (XXX as an int)")
    st_ctime = structseqfield(9, "time of last change (XXX as an int)")
    # XXX no extra fields for now
def fdopen(fd, mode='r', buffering=-1):
    """fdopen(fd [, mode='r' [, buffering]]) -> file_object

    Return an open file object connected to a file descriptor."""
    # delegates to the (PyPy-specific) 'file.fdopen' classmethod
    return file.fdopen(fd, mode, buffering)
# __________ only if we have os.fork() __________
class popenfile(file):
    """File subclass returned by popen(): closing it also reaps the
    child process with waitpid()."""
    _childpid = None

    def close(self):
        import os
        file.close(self)
        pid = self._childpid
        if pid is not None:
            # clear first so a second close() does not wait twice
            self._childpid = None
            os.waitpid(pid, 0)
    __del__ = close     # as in CPython, __del__ may call os.waitpid()
def popen(command, mode='r', bufsize=-1):
    """popen(command [, mode='r' [, bufsize]]) -> pipe

    Open a pipe to/from a command returning a file object.
    Implemented with fork/exec: the child runs '/bin/sh -c command'
    with one end of a pipe dup'ed onto its stdin or stdout."""
    from popen2 import MAXFD
    import os

    def try_close(fd):
        # best-effort close: the fd may not be open
        try:
            os.close(fd)
        except OSError:
            pass

    if not mode.startswith('r') and not mode.startswith('w'):
        raise ValueError("invalid mode %r" % (mode,))
    read_end, write_end = os.pipe()
    try:
        childpid = os.fork()
        if childpid == 0:
            # in the child
            try:
                if mode.startswith('r'):
                    os.dup2(write_end, 1)
                    os.close(read_end)
                else:
                    os.dup2(read_end, 0)
                    os.close(write_end)
                # close every other inherited descriptor
                for i in range(3, MAXFD):
                    try_close(i)
                cmd = ['/bin/sh', '-c', command]
                os.execvp(cmd[0], cmd)
            finally:
                # reached only if execvp failed
                os._exit(1)

        # in the parent: keep only our end of the pipe
        if mode.startswith('r'):
            os.close(write_end)
            fd = read_end
        else:
            os.close(read_end)
            fd = write_end
        g = popenfile.fdopen(fd, mode, bufsize)
        g._childpid = childpid
        return g

    except Exception, e:
        try_close(write_end)
        try_close(read_end)
        raise Exception, e     # bare 'raise' does not work here :-(
| Python |
from pypy.rpython.rctypes.tool import ctypes_platform
from ctypes import *
from pypy.rpython.rctypes.tool import util # ctypes.util from 0.9.9.6
from pypy.rpython.rctypes.aerrno import geterrno
# headers fed to the ctypes_platform configure step below
includes = ['unistd.h', 'sys/types.h']

dllname = util.find_library('c')
assert dllname is not None
libc = cdll.LoadLibrary(dllname)

# uname() is optional; probe the C library for it
HAVE_UNAME = hasattr(libc, 'uname')
if HAVE_UNAME:
    includes.append('sys/utsname.h')
class CConfig:
    """Input to ctypes_platform.configure(): platform-probed types.
    The 'c_char * 0' field sizes are placeholders — configure()
    determines the real struct layout from the headers."""
    _header_ = ''.join(['#include <%s>\n' % filename for filename in includes])
    uid_t = ctypes_platform.SimpleType('uid_t')
    if HAVE_UNAME:
        utsname_t = ctypes_platform.Struct('struct utsname',
                                           [('sysname', c_char * 0),
                                            ('nodename', c_char * 0),
                                            ('release',c_char * 0),
                                            ('version',c_char * 0),
                                            ('machine', c_char *0),])

# injects 'uid_t' (and 'utsname_t' when available) into this module
globals().update(ctypes_platform.configure(CConfig))
# C prototypes: uid_t getuid(void); uid_t geteuid(void);
# BUGFIX: the attribute ctypes inspects is 'argtypes' (plural);
# the previous 'argtype' assignments were silently ignored.
getuid = libc.getuid
getuid.argtypes = []
getuid.restype = uid_t

geteuid = libc.geteuid
geteuid.argtypes = []
geteuid.restype = uid_t
if HAVE_UNAME:
    # C prototype: int uname(struct utsname *);
    # BUGFIX: ctypes inspects 'argtypes' (plural); the previous
    # 'argtype' assignment was silently ignored.
    libc.uname.argtypes = [POINTER(utsname_t)]
    libc.uname.restype = c_int

    def uname():
        """Return [sysname, nodename, release, version, machine],
        raising OSError (with the C errno) on failure."""
        result = utsname_t()
        retC = libc.uname(pointer(result))
        if retC == -1:
            raise OSError(geterrno())
        return [result.sysname,
                result.nodename,
                result.release,
                result.version,
                result.machine,]
| Python |
# Package initialisation
from pypy.interpreter.mixedmodule import MixedModule
from pypy.rpython.module.ll_os import w_star
#Turned off for now. posix must support targets without ctypes.
#from pypy.module.posix import ctypes_posix
import os
# bind the host's low-level OS module ('posix', 'nt', ...) as 'posix'
exec 'import %s as posix' % os.name
class Module(MixedModule):
    """This module provides access to operating system functionality that is
    standardized by the C Standard and the POSIX standard (a thinly
    disguised Unix interface).  Refer to the library manual and
    corresponding Unix manual entries for more information on calls."""

    # expose under the host OS's module name ('posix', 'nt', ...)
    applevel_name = os.name

    # implemented in app_posix.py (pure app-level Python)
    appleveldefs = {
    'error'      : 'app_posix.error',
    'stat_result': 'app_posix.stat_result',
    'fdopen'     : 'app_posix.fdopen',
    }

    # implemented at interpreter level in interp_posix.py
    interpleveldefs = {
    'open'      : 'interp_posix.open',
    'lseek'     : 'interp_posix.lseek',
    'write'     : 'interp_posix.write',
    'isatty'    : 'interp_posix.isatty',
    'read'      : 'interp_posix.read',
    'close'     : 'interp_posix.close',
    'fstat'     : 'interp_posix.fstat',
    'stat'      : 'interp_posix.stat',
    'lstat'     : 'interp_posix.lstat',
    'dup'       : 'interp_posix.dup',
    'dup2'      : 'interp_posix.dup2',
    'access'    : 'interp_posix.access',
    'system'    : 'interp_posix.system',
    'unlink'    : 'interp_posix.unlink',
    'remove'    : 'interp_posix.remove',
    'getcwd'    : 'interp_posix.getcwd',
    'chdir'     : 'interp_posix.chdir',
    'mkdir'     : 'interp_posix.mkdir',
    'rmdir'     : 'interp_posix.rmdir',
    'environ'   : 'interp_posix.get(space).w_environ',
    'listdir'   : 'interp_posix.listdir',
    'strerror'  : 'interp_posix.strerror',
    'pipe'      : 'interp_posix.pipe',
    'chmod'     : 'interp_posix.chmod',
    'rename'    : 'interp_posix.rename',
    'umask'     : 'interp_posix.umask',
    '_exit'     : 'interp_posix._exit',
    #'getuid'    : 'interp_posix.getuid',
    #'geteuid'   : 'interp_posix.geteuid',
    'utime'     : 'interp_posix.utime',
    }

    # the rest is exposed only when the host OS supports it
    if hasattr(os, 'ftruncate'):
        interpleveldefs['ftruncate'] = 'interp_posix.ftruncate'
    if hasattr(os, 'putenv'):
        interpleveldefs['putenv'] = 'interp_posix.putenv'
    if hasattr(posix, 'unsetenv'): # note: emulated in os
        interpleveldefs['unsetenv'] = 'interp_posix.unsetenv'
    if hasattr(os, 'kill'):
        interpleveldefs['kill'] = 'interp_posix.kill'
    if hasattr(os, 'getpid'):
        interpleveldefs['getpid'] = 'interp_posix.getpid'
    if hasattr(os, 'link'):
        interpleveldefs['link'] = 'interp_posix.link'
    if hasattr(os, 'symlink'):
        interpleveldefs['symlink'] = 'interp_posix.symlink'
    if hasattr(os, 'readlink'):
        interpleveldefs['readlink'] = 'interp_posix.readlink'
    if hasattr(os, 'fork'):
        interpleveldefs['fork'] = 'interp_posix.fork'
        # popen is built on fork, hence the nesting
        appleveldefs['popen'] = 'app_posix.popen'
    if hasattr(os, 'waitpid'):
        interpleveldefs['waitpid'] = 'interp_posix.waitpid'
    if hasattr(os, 'execv'):
        interpleveldefs['execv'] = 'interp_posix.execv'
    if hasattr(os, 'execve') and 0: # XXX XXX in-progress
        interpleveldefs['execve'] = 'interp_posix.execve'
    #if hasattr(ctypes_posix, 'uname'):
    #    interpleveldefs['uname'] = 'interp_posix.uname'
    if hasattr(os, 'ttyname'):
        interpleveldefs['ttyname'] = 'interp_posix.ttyname'

    # wait-status helpers (WIFEXITED & co.) mirrored from ll_os
    for name in w_star:
        if hasattr(os, name):
            interpleveldefs[name] = 'interp_posix.' + name

    def setup_after_space_initialization(self):
        """NOT_RPYTHON"""
        space = self.space
        config = space.config
        # XXX execve does not work under ootypesystem yet :-(
        # YYY nor does it anywhere else
        #if config.translating and config.translation.type_system != "lltype":
        #    space.delattr(self, space.wrap("execve"))
        if config.translating and config.translation.backend == "llvm":
            space.delattr(self, space.wrap("execv"))

    def startup(self, space):
        # populate posix.environ lazily, at space startup
        from pypy.module.posix import interp_posix
        interp_posix.get(space).startup(space)
# mirror the host os module's integer constants (O_RDONLY, WNOHANG, ...)
# as interp-level definitions; 'type(value) is int' deliberately
# excludes bools and longs
for constant in dir(os):
    value = getattr(os, constant)
    if constant.isupper() and type(value) is int:
        Module.interpleveldefs[constant] = "space.wrap(%s)" % value
| Python |
from pypy.interpreter.baseobjspace import ObjSpace, W_Root
from pypy.rlib.rarithmetic import intmask
from pypy.rlib import ros
from pypy.interpreter.error import OperationError, wrap_oserror
from pypy.rpython.module.ll_os import w_star, w_star_returning_int
import os
# Each wrapper below follows the same RPython pattern: call the host os
# function, translate OSError into an app-level OSError via
# wrap_oserror, and wrap the result.  The trailing unwrap_spec tells the
# gateway how to unwrap app-level arguments.

def open(space, fname, flag, mode=0777):
    """Open a file (for low level IO).
Return a file descriptor (a small integer)."""
    try:
        fd = os.open(fname, flag, mode)
    except OSError, e:
        raise wrap_oserror(space, e)
    return space.wrap(fd)
open.unwrap_spec = [ObjSpace, str, int, int]

def lseek(space, fd, pos, how):
    """Set the current position of a file descriptor.  Return the new position.
If how == 0, 'pos' is relative to the start of the file; if how == 1, to the
current position; if how == 2, to the end."""
    try:
        pos = os.lseek(fd, pos, how)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return space.wrap(pos)
lseek.unwrap_spec = [ObjSpace, int, int, int]
def isatty(space, fd):
    """Return True if 'fd' is an open file descriptor connected to the
slave end of a terminal."""
    try:
        res = os.isatty(fd)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return space.wrap(res)
isatty.unwrap_spec = [ObjSpace, int]

def read(space, fd, buffersize):
    """Read data from a file descriptor."""
    try:
        s = os.read(fd, buffersize)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return space.wrap(s)
read.unwrap_spec = [ObjSpace, int, int]

def write(space, fd, data):
    """Write a string to a file descriptor.  Return the number of bytes
actually written, which may be smaller than len(data)."""
    try:
        res = os.write(fd, data)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return space.wrap(res)
write.unwrap_spec = [ObjSpace, int, str]

def close(space, fd):
    """Close a file descriptor (for low level IO)."""
    try:
        os.close(fd)
    except OSError, e:
        raise wrap_oserror(space, e)
close.unwrap_spec = [ObjSpace, int]
def ftruncate(space, fd, length):
    """Truncate a file to a specified length."""
    try:
        os.ftruncate(fd, length)
    except OSError, e:
        raise wrap_oserror(space, e)
ftruncate.unwrap_spec = [ObjSpace, int, int]

def build_stat_result(space, st):
    """Turn a host stat tuple into an app-level stat_result instance
    (looked up on the app-level posix module)."""
    # cannot index tuples with a variable...
    lst = [st[0], st[1], st[2], st[3], st[4],
           st[5], st[6], st[7], st[8], st[9]]
    w_tuple = space.newtuple([space.wrap(intmask(x)) for x in lst])
    w_stat_result = space.getattr(space.getbuiltinmodule(os.name),
                                  space.wrap('stat_result'))
    return space.call_function(w_stat_result, w_tuple)

def fstat(space, fd):
    """Perform a stat system call on the file referenced to by an open
file descriptor."""
    try:
        st = os.fstat(fd)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return build_stat_result(space, st)
fstat.unwrap_spec = [ObjSpace, int]
def stat(space, path):
    """Perform a stat system call on the given path.  Return an object
with (at least) the following attributes:
    st_mode
    st_ino
    st_dev
    st_nlink
    st_uid
    st_gid
    st_size
    st_atime
    st_mtime
    st_ctime
"""
    try:
        st = os.stat(path)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return build_stat_result(space, st)
stat.unwrap_spec = [ObjSpace, str]

def lstat(space, path):
    "Like stat(path), but do no follow symbolic links."
    try:
        st = os.lstat(path)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return build_stat_result(space, st)
lstat.unwrap_spec = [ObjSpace, str]
def dup(space, fd):
    """Create a copy of the file descriptor.  Return the new file
descriptor."""
    try:
        newfd = os.dup(fd)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return space.wrap(newfd)
dup.unwrap_spec = [ObjSpace, int]

def dup2(space, old_fd, new_fd):
    """Duplicate a file descriptor."""
    try:
        os.dup2(old_fd, new_fd)
    except OSError, e:
        raise wrap_oserror(space, e)
dup2.unwrap_spec = [ObjSpace, int, int]

def access(space, path, mode):
    """
    access(path, mode) -> 1 if granted, 0 otherwise

    Use the real uid/gid to test for access to a path.  Note that most
    operations will use the effective uid/gid, therefore this routine can
    be used in a suid/sgid environment to test if the invoking user has the
    specified access to the path.  The mode argument can be F_OK to test
    existence, or the inclusive-OR of R_OK, W_OK, and X_OK.
    """
    try:
        ok = os.access(path, mode)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return space.wrap(ok)
access.unwrap_spec = [ObjSpace, str, int]
def system(space, cmd):
    """Execute the command (a string) in a subshell."""
    try:
        rc = os.system(cmd)
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return space.wrap(rc)
system.unwrap_spec = [ObjSpace, str]

def unlink(space, path):
    """Remove a file (same as remove(path))."""
    try:
        os.unlink(path)
    except OSError, e:
        raise wrap_oserror(space, e)
unlink.unwrap_spec = [ObjSpace, str]

def remove(space, path):
    """Remove a file (same as unlink(path))."""
    # same implementation as unlink(), exposed under both names
    try:
        os.unlink(path)
    except OSError, e:
        raise wrap_oserror(space, e)
remove.unwrap_spec = [ObjSpace, str]

def getcwd(space):
    """Return the current working directory."""
    try:
        cur = os.getcwd()
    except OSError, e:
        raise wrap_oserror(space, e)
    else:
        return space.wrap(cur)
getcwd.unwrap_spec = [ObjSpace]
def chdir(space, path):
    """Change the current working directory to the specified path."""
    try:
        os.chdir(path)
    except OSError, e:
        raise wrap_oserror(space, e)
chdir.unwrap_spec = [ObjSpace, str]

def mkdir(space, path, mode=0777):
    """Create a directory."""
    try:
        os.mkdir(path, mode)
    except OSError, e:
        raise wrap_oserror(space, e)
mkdir.unwrap_spec = [ObjSpace, str, int]

def rmdir(space, path):
    """Remove a directory."""
    try:
        os.rmdir(path)
    except OSError, e:
        raise wrap_oserror(space, e)
rmdir.unwrap_spec = [ObjSpace, str]

def strerror(space, errno):
    """Translate an error code to a message string."""
    # os.strerror raises ValueError (not OSError) for bad codes
    try:
        text = os.strerror(errno)
    except ValueError:
        raise OperationError(space.w_ValueError,
                             space.wrap("strerror() argument out of range"))
    return space.wrap(text)
strerror.unwrap_spec = [ObjSpace, int]
# this is a particular case, because we need to supply
# the storage for the environment variables, at least
# for some OSes.
# this is a particular case, because we need to supply
# the storage for the environment variables, at least
# for some OSes.
class State:
    """Per-space posix state: the app-level 'environ' dict and the
    keep-alive storage for strings handed to the C putenv()."""
    def __init__(self, space):
        self.posix_putenv_garbage = {}   # name -> "name=value" string kept alive
        self.w_environ = space.newdict()
    def startup(self, space):
        _convertenviron(space, self.w_environ)

def get(space):
    """Return the space's cached State instance."""
    return space.fromcache(State)

def _convertenviron(space, w_env):
    """Copy the process environment into the app-level dict w_env."""
    idx = 0
    while 1:
        s = ros.environ(idx)
        if s is None:
            break
        p = s.find('=')
        if p >= 0:
            key = s[:p]
            value = s[p+1:]
            space.setitem(w_env, space.wrap(key), space.wrap(value))
        idx += 1

def putenv(space, name, value):
    """Change or add an environment variable."""
    txt = '%s=%s' % (name, value)
    ros.putenv(txt)
    # Install the first arg and newstr in posix_putenv_garbage;
    # this will cause previous value to be collected.  This has to
    # happen after the real putenv() call because the old value
    # was still accessible until then.
    get(space).posix_putenv_garbage[name] = txt
putenv.unwrap_spec = [ObjSpace, str, str]

def unsetenv(space, name):
    """Delete an environment variable."""
    # only unset variables we set ourselves (matches CPython's posixmodule)
    if name in get(space).posix_putenv_garbage:
        os.unsetenv(name)
        # Remove the key from posix_putenv_garbage;
        # this will cause it to be collected.  This has to
        # happen after the real unsetenv() call because the
        # old value was still accessible until then.
        del get(space).posix_putenv_garbage[name]
unsetenv.unwrap_spec = [ObjSpace, str]
def enumeratedir(space, dir):
    """Drain an ros directory handle into a wrapped list of names,
    skipping '.' and '..'."""
    result = []
    while True:
        nextentry = dir.readdir()
        if nextentry is None:
            break
        if nextentry not in ('.' , '..'):
            result.append(space.wrap(nextentry))
    return space.newlist(result)

def listdir(space, dirname):
    """Return a list containing the names of the entries in the directory.

\tpath: path of directory to list

The list is in arbitrary order.  It does not include the special
entries '.' and '..' even if they are present in the directory."""
    try:
        dir = ros.opendir(dirname)
    except OSError, e:
        raise wrap_oserror(space, e)
    try:
        # sub-function call to make sure that 'try:finally:' will catch
        # everything including MemoryErrors
        return enumeratedir(space, dir)
    finally:
        dir.closedir()
listdir.unwrap_spec = [ObjSpace, str]
def pipe(space):
"Create a pipe. Returns (read_end, write_end)."
try:
fd1, fd2 = os.pipe()
except OSError, e:
raise wrap_oserror(space, e)
return space.newtuple([space.wrap(fd1), space.wrap(fd2)])
pipe.unwrap_spec = [ObjSpace]
def chmod(space, path, mode):
"Change the access permissions of a file."
try:
os.chmod(path, mode)
except OSError, e:
raise wrap_oserror(space, e)
chmod.unwrap_spec = [ObjSpace, str, int]
def rename(space, old, new):
"Rename a file or directory."
try:
os.rename(old, new)
except OSError, e:
raise wrap_oserror(space, e)
rename.unwrap_spec = [ObjSpace, str, str]
def umask(space, mask):
"Set the current numeric umask and return the previous umask."
prevmask = os.umask(mask)
return space.wrap(prevmask)
umask.unwrap_spec = [ObjSpace, int]
def getpid(space):
    "Return the current process id."
    # kept symmetrical with the other wrappers even though os.getpid()
    # is not expected to fail
    try:
        pid = os.getpid()
    except OSError, e:
        raise wrap_oserror(space, e)
    return space.wrap(pid)
getpid.unwrap_spec = [ObjSpace]
def kill(space, pid, sig):
    "Kill a process with a signal."
    # plain delegation; OSError is re-raised at app-level
    try:
        os.kill(pid, sig)
    except OSError, e:
        raise wrap_oserror(space, e)
kill.unwrap_spec = [ObjSpace, int, int]
def link(space, src, dst):
    "Create a hard link to a file."
    # plain delegation; OSError is re-raised at app-level
    try:
        os.link(src, dst)
    except OSError, e:
        raise wrap_oserror(space, e)
link.unwrap_spec = [ObjSpace, str, str]
def symlink(space, src, dst):
    "Create a symbolic link pointing to src named dst."
    # plain delegation; OSError is re-raised at app-level
    try:
        os.symlink(src, dst)
    except OSError, e:
        raise wrap_oserror(space, e)
symlink.unwrap_spec = [ObjSpace, str, str]
def readlink(space, path):
    "Return a string representing the path to which the symbolic link points."
    # plain delegation; OSError is re-raised at app-level
    try:
        result = os.readlink(path)
    except OSError, e:
        raise wrap_oserror(space, e)
    return space.wrap(result)
readlink.unwrap_spec = [ObjSpace, str]
def fork(space):
    # Fork the process; returns 0 in the child, the child's pid in the
    # parent.  No unwrap_spec here: registered elsewhere or default.
    try:
        pid = os.fork()
    except OSError, e:
        raise wrap_oserror(space, e)
    return space.wrap(pid)
def waitpid(space, pid, options):
    # Wait for completion of the given child process; returns a wrapped
    # (pid, status) tuple like CPython's os.waitpid().
    try:
        pid, status = os.waitpid(pid, options)
    except OSError, e:
        raise wrap_oserror(space, e)
    return space.newtuple([space.wrap(pid), space.wrap(status)])
waitpid.unwrap_spec = [ObjSpace, int, int]
def _exit(space, status):
    # exit immediately, without running any cleanup handlers
    os._exit(status)
_exit.unwrap_spec = [ObjSpace, int]
def getuid(space):
    # real user id; intmask clamps the C unsigned result into a
    # signed machine integer
    return space.wrap(intmask(_c.getuid()))
getuid.unwrap_spec = [ObjSpace]
def geteuid(space):
    # effective user id; see getuid() above for the intmask rationale
    return space.wrap(intmask(_c.geteuid()))
geteuid.unwrap_spec = [ObjSpace]
def execv(space, command, w_args):
    """ execv(path, args)
Execute an executable path with arguments, replacing current process.
        path: path of executable file
        args: iterable of strings
    """
    try:
        os.execv(command, [space.str_w(i) for i in space.unpackiterable(w_args)])
    except OperationError, e:
        # a TypeError here means w_args was not iterable / not strings;
        # re-raise it with a friendlier message, let everything else through
        if not e.match(space, space.w_TypeError):
            raise
        msg = "execv() arg 2 must be an iterable of strings"
        raise OperationError(space.w_TypeError, space.wrap(str(msg)))
    except OSError, e:
        raise wrap_oserror(space, e)
execv.unwrap_spec = [ObjSpace, str, W_Root]
def execve(space, command, w_args, w_env):
    """ execve(path, args, env)
Execute a path with arguments and environment, replacing current process.
        path: path of executable file
        args: iterable of arguments
        env: dictionary of strings mapping to strings
    """
    try:
        args = [space.str_w(i) for i in space.unpackiterable(w_args)]
        # build an interp-level dict by iterating the mapping's keys();
        # this accepts any object with a keys() method, like CPython
        env = {}
        keys = space.call_function(space.getattr(w_env, space.wrap('keys')))
        for key in space.unpackiterable(keys):
            value = space.getitem(w_env, key)
            env[space.str_w(key)] = space.str_w(value)
        os.execve(command, args, env)
    except ValueError, e:
        raise OperationError(space.w_ValueError, space.wrap(str(e)))
    except OSError, e:
        raise wrap_oserror(space, e)
execve.unwrap_spec = [ObjSpace, str, W_Root, W_Root]
def uname(space):
    """ uname() -> (sysname, nodename, release, version, machine)
Return a tuple identifying the current operating system.
    """
    # _c.uname() returns a sequence of plain strings; wrap each one
    try:
        result = _c.uname()
    except OSError, e:
        raise wrap_oserror(space, e)
    return space.newtuple([space.wrap(ob) for ob in result])
uname.unwrap_spec = [ObjSpace]
def utime(space, path, w_tuple):
    """ utime(path, (atime, mtime))
utime(path, None)
Set the access and modified time of the file to the given values.  If the
second form is used, set the access and modified times to the current time.
    """
    # None means "touch with the current time"
    if space.is_w(w_tuple, space.w_None):
        try:
            ros.utime_null(path)
            return
        except OSError, e:
            raise wrap_oserror(space, e)
    try:
        msg = "utime() arg 2 must be a tuple (atime, mtime) or None"
        args_w = space.unpackiterable(w_tuple)
        if len(args_w) != 2:
            raise OperationError(space.w_TypeError, space.wrap(msg))
        actime = space.float_w(args_w[0])
        modtime = space.float_w(args_w[1])
        ros.utime_tuple(path, (actime, modtime))
    except OSError, e:
        raise wrap_oserror(space, e)
    except OperationError, e:
        # any TypeError raised while unpacking/converting the tuple is
        # replaced by the uniform message above
        if not e.match(space, space.w_TypeError):
            raise
        raise OperationError(space.w_TypeError, space.wrap(msg))
utime.unwrap_spec = [ObjSpace, str, W_Root]
def declare_new_w_star(name):
    # Factory for the interp-level wrappers of the os.W* status
    # inspection functions (WIFEXITED, WEXITSTATUS, ...).  Functions in
    # w_star_returning_int produce a wrapped int, the rest a bool.
    returns_int = name in w_star_returning_int
    if returns_int:
        def WSTAR(space, status):
            return space.wrap(getattr(os, name)(status))
    else:
        def WSTAR(space, status):
            return space.newbool(getattr(os, name)(status))
    WSTAR.unwrap_spec = [ObjSpace, int]
    WSTAR.func_name = name
    return WSTAR
# generate and publish one wrapper per os.W* helper listed in w_star
for name in w_star:
    func = declare_new_w_star(name)
    globals()[name] = func
def ttyname(space, fd):
    # return the terminal device attached to the file descriptor
    try:
        return space.wrap(os.ttyname(fd))
    except OSError, e:
        raise wrap_oserror(space, e)
ttyname.unwrap_spec = [ObjSpace, int]
| Python |
from pypy.rpython.rctypes.tool import ctypes_platform
from pypy.rpython.rctypes.tool.libc import libc
import pypy.rpython.rctypes.implementation # this defines rctypes magic
from pypy.rpython.rctypes.aerrno import geterrno
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import W_Root, ObjSpace, Wrappable
from pypy.interpreter.typedef import TypeDef
from pypy.interpreter.gateway import interp2app
from ctypes import *
import sys
import os
import platform
import stat
# platform discrimination flags used throughout this module
_POSIX = os.name == "posix"        # any POSIX system
_MS_WINDOWS = os.name == "nt"      # native Windows
_LINUX = "linux" in sys.platform   # Linux specifically
_64BIT = "64bit" in platform.architecture()[0]   # 64-bit pointers
class CConfig:
    # Input for the ctypes_platform configuration step below: the C
    # compiler is asked for the real layout of these types.
    _includes_ = ("sys/types.h",)
    size_t = ctypes_platform.SimpleType("size_t", c_long)
    off_t = ctypes_platform.SimpleType("off_t", c_long)
constants = {}
if _POSIX:
    CConfig._includes_ += ("sys/mman.h",)
    # constants, look in sys/mman.h and platform docs for the meaning
    # some constants are linux only so they will be correctly exposed outside
    # depending on the OS
    constant_names = ['MAP_SHARED', 'MAP_PRIVATE',
                      'PROT_READ', 'PROT_WRITE',
                      'MS_SYNC']
    # these may be missing on some platforms: probed as "defined" constants
    opt_constant_names = ['MAP_ANON', 'MAP_ANONYMOUS',
                          'PROT_EXEC',
                          'MAP_DENYWRITE', 'MAP_EXECUTABLE']
    for name in constant_names:
        setattr(CConfig, name, ctypes_platform.ConstantInteger(name))
    for name in opt_constant_names:
        setattr(CConfig, name, ctypes_platform.DefinedConstantInteger(name))
    has_mremap = hasattr(libc, "mremap")
    if has_mremap:
        CConfig.MREMAP_MAYMOVE = (
            ctypes_platform.DefinedConstantInteger("MREMAP_MAYMOVE"))
elif _MS_WINDOWS:
    CConfig._includes_ += ("windows.h",)
    constant_names = ['PAGE_READONLY', 'PAGE_READWRITE', 'PAGE_WRITECOPY',
                      'FILE_MAP_READ', 'FILE_MAP_WRITE', 'FILE_MAP_COPY',
                      'DUPLICATE_SAME_ACCESS']
    for name in constant_names:
        setattr(CConfig, name, ctypes_platform.ConstantInteger(name))
# export the constants inside and outside. see __init__.py
constants.update(ctypes_platform.configure(CConfig))
if _POSIX:
    # MAP_ANONYMOUS is not always present but it's always available at CPython level
    if constants["MAP_ANONYMOUS"] is None:
        constants["MAP_ANONYMOUS"] = constants["MAP_ANON"]
    assert constants["MAP_ANONYMOUS"] is not None
    constants["MAP_ANON"] = constants["MAP_ANONYMOUS"]
# make every configured constant a module-level name as well
locals().update(constants)
# access modes; _ACCESS_DEFAULT means "not explicitly requested"
_ACCESS_DEFAULT, ACCESS_READ, ACCESS_WRITE, ACCESS_COPY = range(4)
PTR = POINTER(c_char)    # cannot use c_void_p as return value of functions :-(
memmove_ = libc.memmove
memmove_.argtypes = [PTR, PTR, size_t]
if _POSIX:
    # declare the C-level signatures so ctypes converts arguments and
    # results correctly (PTR instead of the default int)
    libc.mmap.argtypes = [PTR, size_t, c_int, c_int, c_int, off_t]
    libc.mmap.restype = PTR
    libc.mmap.includes = ("sys/mman.h",)
    libc.munmap.argtypes = [PTR, size_t]
    libc.munmap.restype = c_int
    libc.munmap.includes = ("sys/mman.h",)
    libc.msync.argtypes = [PTR, size_t, c_int]
    libc.msync.restype = c_int
    libc.msync.includes = ("sys/mman.h",)
    if has_mremap:
        libc.mremap.argtypes = [PTR, size_t, size_t, c_ulong]
        libc.mremap.restype = PTR
        libc.mremap.includes = ("sys/mman.h",)
    def _get_page_size():
        return libc.getpagesize()
    def _get_error_msg():
        # errno-based error text for the last failed libc call
        errno = geterrno()
        return os.strerror(errno)
elif _MS_WINDOWS:
    from ctypes import wintypes
    WORD = wintypes.WORD
    DWORD = wintypes.DWORD
    BOOL = wintypes.BOOL
    LONG = wintypes.LONG
    LPVOID = PTR
    LPCVOID = LPVOID
    DWORD_PTR = DWORD
    c_int = wintypes.c_int
    INVALID_c_int_VALUE = c_int(-1).value
    class SYSINFO_STRUCT(Structure):
        _fields_ = [("wProcessorArchitecture", WORD),
                    ("wReserved", WORD)]
    class SYSINFO_UNION(Union):
        _fields_ = [("dwOemId", DWORD),
                    ("struct", SYSINFO_STRUCT)]
    class SYSTEM_INFO(Structure):
        # mirrors the win32 SYSTEM_INFO structure for GetSystemInfo()
        _fields_ = [("union", SYSINFO_UNION),
                    ("dwPageSize", DWORD),
                    ("lpMinimumApplicationAddress", LPVOID),
                    ("lpMaximumApplicationAddress", LPVOID),
                    ("dwActiveProcessorMask", DWORD_PTR),
                    ("dwNumberOfProcessors", DWORD),
                    ("dwProcessorType", DWORD),
                    ("dwAllocationGranularity", DWORD),
                    ("wProcessorLevel", WORD),
                    ("wProcessorRevision", WORD)]
    windll.kernel32.GetSystemInfo.argtypes = [POINTER(SYSTEM_INFO)]
    GetFileSize = windll.kernel32.GetFileSize
    GetFileSize.argtypes = [c_int, POINTER(c_int)]
    GetFileSize.restype = c_int
    GetCurrentProcess = windll.kernel32.GetCurrentProcess
    GetCurrentProcess.restype = c_int
    DuplicateHandle = windll.kernel32.DuplicateHandle
    DuplicateHandle.argtypes = [c_int, c_int, c_int, POINTER(c_int), DWORD,
                                BOOL, DWORD]
    DuplicateHandle.restype = BOOL
    CreateFileMapping = windll.kernel32.CreateFileMappingA
    CreateFileMapping.argtypes = [c_int, PTR, c_int, c_int, c_int,
                                  c_char_p]
    CreateFileMapping.restype = c_int
    MapViewOfFile = windll.kernel32.MapViewOfFile
    MapViewOfFile.argtypes = [c_int, DWORD, DWORD, DWORD, DWORD]
    MapViewOfFile.restype = PTR
    CloseHandle = windll.kernel32.CloseHandle
    CloseHandle.argtypes = [c_int]
    CloseHandle.restype = BOOL
    UnmapViewOfFile = windll.kernel32.UnmapViewOfFile
    UnmapViewOfFile.argtypes = [LPCVOID]
    UnmapViewOfFile.restype = BOOL
    FlushViewOfFile = windll.kernel32.FlushViewOfFile
    FlushViewOfFile.argtypes = [LPCVOID, c_int]
    FlushViewOfFile.restype = BOOL
    SetFilePointer = windll.kernel32.SetFilePointer
    SetFilePointer.argtypes = [c_int, c_int, POINTER(c_int), c_int]
    SetEndOfFile = windll.kernel32.SetEndOfFile
    SetEndOfFile.argtypes = [c_int]
    # _get_osfhandle lives in the CRT, not in kernel32
    msvcr71 = cdll.LoadLibrary("msvcr71.dll")
    msvcr71._get_osfhandle.argtypes = [c_int]
    msvcr71._get_osfhandle.restype = c_int
    # libc._lseek.argtypes = [c_int, c_int, c_int]
    # libc._lseek.restype = c_int
    def _get_page_size():
        si = SYSTEM_INFO()
        windll.kernel32.GetSystemInfo(byref(si))
        return int(si.dwPageSize)
    def _get_file_size(space, handle):
        # XXX use native Windows types like WORD
        high = c_int(0)
        low = c_int(windll.kernel32.GetFileSize(c_int(handle.value), byref(high)))
        # low might just happen to have the value INVALID_FILE_SIZE
        # so we need to check the last error also
        INVALID_FILE_SIZE = -1
        NO_ERROR = 0
        dwErr = GetLastError()
        if low.value == INVALID_FILE_SIZE and dwErr != NO_ERROR:
            raise OperationError(space.w_EnvironmentError,
                                 space.wrap(os.strerror(dwErr)))
        return low.value, high.value
    def _get_error_msg():
        errno = GetLastError()
        return os.strerror(errno)
PAGESIZE = _get_page_size()
NULL = PTR()
# a zero-length char array used to represent "no data" after close()
EMPTY_DATA = (c_char * 0)()
NODATA = cast(pointer(EMPTY_DATA), PTR)
# ____________________________________________________________
# XXX the methods should take unsigned int arguments instead of int
class W_MMap(Wrappable):
    # Interp-level implementation of the app-level mmap object.
    # Platform state: Windows keeps a (map_handle, file_handle) pair,
    # POSIX keeps a dup'ed fd and a 'closed' flag.
    def __init__(self, space, access):
        self.space = space
        self.size = 0
        self.pos = 0        # current read/write position
        self.access = access
        if _MS_WINDOWS:
            self.map_handle = wintypes.c_int()
            self.file_handle = wintypes.c_int()
            self.tagname = ""
        elif _POSIX:
            self.fd = -1
            self.closed = False
    def check_valid(self):
        # raise app-level ValueError if the mapping was already closed
        if _MS_WINDOWS:
            to_close = self.map_handle.value == INVALID_c_int_VALUE
        elif _POSIX:
            to_close = self.closed
        if to_close:
            raise OperationError(self.space.w_ValueError,
                self.space.wrap("map closed or invalid"))
    def check_writeable(self):
        # any access mode except ACCESS_READ allows writing
        if not (self.access != ACCESS_READ):
            raise OperationError(self.space.w_TypeError,
                self.space.wrap("mmap can't modify a readonly memory map."))
    def check_resizeable(self):
        # only writable (non copy-on-write) maps may be resized
        if not (self.access == ACCESS_WRITE or self.access == _ACCESS_DEFAULT):
            raise OperationError(self.space.w_TypeError,
                self.space.wrap(
                    "mmap can't resize a readonly or copy-on-write memory map."))
    def setdata(self, data, size):
        """Set the internal data and map size from a PTR."""
        assert size >= 0
        arraytype = c_char * size
        self.data = cast(data, POINTER(arraytype)).contents
        self.size = size
    def close(self):
        # release the view and all OS resources; safe to call twice
        if _MS_WINDOWS:
            if self.size > 0:
                self.unmapview()
                self.setdata(NODATA, 0)
            if self.map_handle.value != INVALID_c_int_VALUE:
                CloseHandle(self.map_handle)
            self.map_handle.value = INVALID_c_int_VALUE
            if self.file_handle.value != INVALID_c_int_VALUE:
                CloseHandle(self.file_handle)
            self.file_handle.value = INVALID_c_int_VALUE
        elif _POSIX:
            self.closed = True
            if self.fd != -1:
                os.close(self.fd)
                self.fd = -1
            if self.size > 0:
                libc.munmap(self.getptr(0), self.size)
                self.setdata(NODATA, 0)
    close.unwrap_spec = ['self']
    def unmapview(self):
        UnmapViewOfFile(self.getptr(0))
    def read_byte(self):
        # return the single character at the current position and advance
        self.check_valid()
        if self.pos < self.size:
            value = self.data[self.pos]
            self.pos += 1
            return self.space.wrap(value)
        else:
            raise OperationError(self.space.w_ValueError,
                self.space.wrap("read byte out of range"))
    read_byte.unwrap_spec = ['self']
    def readline(self):
        # read up to and including the next '\n', or to the end of map
        self.check_valid()
        data = self.data
        for pos in xrange(self.pos, self.size):
            if data[pos] == '\n':
                eol = pos + 1 # we're interested in the position after new line
                break
        else: # no '\n' found
            eol = self.size
        res = data[self.pos:eol]
        self.pos += len(res)
        return self.space.wrap(res)
    readline.unwrap_spec = ['self']
    def read(self, num=-1):
        # read 'num' bytes (negative means "the rest") from the position
        self.check_valid()
        if num < 0:
            # read all
            eol = self.size
        else:
            eol = self.pos + num
            # silently adjust out of range requests
            if eol > self.size:
                eol = self.size
        res = self.data[self.pos:eol]
        self.pos += len(res)
        return self.space.wrap(res)
    read.unwrap_spec = ['self', int]
    def find(self, tofind, start=0):
        self.check_valid()
        # XXX naive! how can we reuse the rstr algorithm?
        if start < 0:
            start += self.size
            if start < 0:
                start = 0
        data = self.data
        for p in xrange(start, self.size - len(tofind) + 1):
            for q in range(len(tofind)):
                if data[p+q] != tofind[q]:
                    break # position 'p' is not a match
            else:
                # full match
                return self.space.wrap(p)
        # failure
        return self.space.wrap(-1)
    find.unwrap_spec = ['self', str, int]
    def seek(self, pos, whence=0):
        # reposition like file.seek(); whence: 0=start, 1=current, 2=end
        self.check_valid()
        dist = pos
        how = whence
        if how == 0: # relative to start
            where = dist
        elif how == 1: # relative to current position
            where = self.pos + dist
        elif how == 2: # relative to the end
            where = self.size + dist
        else:
            raise OperationError(self.space.w_ValueError,
                self.space.wrap("unknown seek type"))
        if not (0 <= where <= self.size):
            raise OperationError(self.space.w_ValueError,
                self.space.wrap("seek out of range"))
        self.pos = where
    seek.unwrap_spec = ['self', 'index', int]
    def tell(self):
        self.check_valid()
        return self.space.wrap(self.pos)
    tell.unwrap_spec = ['self']
    def descr_size(self):
        # size of the underlying file (not of the mapped window)
        self.check_valid()
        size = self.size
        if _MS_WINDOWS:
            if self.file_handle.value != INVALID_c_int_VALUE:
                low, high = _get_file_size(self.space, self.file_handle)
                if not high and low <= sys.maxint:
                    return self.space.wrap(low)
                size = c_int((high << 32) + low).value
        elif _POSIX:
            st = os.fstat(self.fd)
            size = st[stat.ST_SIZE]
            if size > sys.maxint:
                size = sys.maxint
            else:
                size = int(size)
        return self.space.wrap(size)
    descr_size.unwrap_spec = ['self']
    def write(self, data):
        # copy 'data' into the map at the current position
        self.check_valid()
        self.check_writeable()
        data_len = len(data)
        if self.pos + data_len > self.size:
            raise OperationError(self.space.w_ValueError,
                self.space.wrap("data out of range"))
        internaldata = self.data
        start = self.pos
        for i in range(data_len):
            internaldata[start+i] = data[i]
        self.pos = start + data_len
    write.unwrap_spec = ['self', str]
    def write_byte(self, byte):
        self.check_valid()
        if len(byte) != 1:
            raise OperationError(self.space.w_TypeError,
                self.space.wrap("write_byte() argument must be char"))
        self.check_writeable()
        self.data[self.pos] = byte[0]
        self.pos += 1
    write_byte.unwrap_spec = ['self', str]
    def getptr(self, offset):
        # raw PTR to the mapped memory at 'offset'
        if offset > 0:
            # XXX 64-bit support for pointer arithmetic!
            dataptr = cast(pointer(self.data), c_void_p)
            dataptr = c_void_p(dataptr.value + offset)
            return cast(dataptr, PTR)
        else:
            return cast(pointer(self.data), PTR)
    def flush(self, offset=0, size=0):
        # write pending changes back to the file; size 0 means "all"
        self.check_valid()
        if size == 0:
            size = self.size
        if offset < 0 or size < 0 or offset + size > self.size:
            raise OperationError(self.space.w_ValueError,
                self.space.wrap("flush values out of range"))
        else:
            start = self.getptr(offset)
            if _MS_WINDOWS:
                res = FlushViewOfFile(start, size)
                # XXX res == 0 means that an error occurred, but in CPython
                # this is not checked
                return self.space.wrap(res)
            elif _POSIX:
##                XXX why is this code here?  There is no equivalent in CPython
##                if _LINUX:
##                    # alignment of the address
##                    value = cast(self.data, c_void_p).value
##                    aligned_value = value & ~(PAGESIZE - 1)
##                    # the size should be increased too. otherwise the final
##                    # part is not "msynced"
##                    new_size = size + value & (PAGESIZE - 1)
                res = libc.msync(start, size, MS_SYNC)
                if res == -1:
                    raise OperationError(self.space.w_EnvironmentError,
                        self.space.wrap(_get_error_msg()))
        return self.space.wrap(0)
    flush.unwrap_spec = ['self', int, int]
    def move(self, dest, src, count):
        # memmove 'count' bytes inside the map; ranges may overlap
        self.check_valid()
        self.check_writeable()
        # check boundings
        if (src < 0 or dest < 0 or count < 0 or
            src + count > self.size or dest + count > self.size):
            raise OperationError(self.space.w_ValueError,
                self.space.wrap("source or destination out of range"))
        datasrc = self.getptr(src)
        datadest = self.getptr(dest)
        memmove_(datadest, datasrc, count)
    move.unwrap_spec = ['self', int, int, int]
    def resize(self, newsize):
        self.check_valid()
        self.check_resizeable()
        if _POSIX:
            if not has_mremap:
                msg = "mmap: resizing not available -- no mremap()"
                raise OperationError(self.space.w_EnvironmentError,
                    self.space.wrap(msg))
            # resize the underlying file first
            try:
                os.ftruncate(self.fd, newsize)
            except OSError, e:
                raise OperationError(self.space.w_EnvironmentError,
                    self.space.wrap(os.strerror(e.errno)))
            # now resize the mmap
            newdata = libc.mremap(self.getptr(0), self.size, newsize,
                                  MREMAP_MAYMOVE or 0)
            self.setdata(newdata, newsize)
        elif _MS_WINDOWS:
            # disconnect the mapping
            self.unmapview()
            CloseHandle(self.map_handle)
            # move to the desired EOF position
            if _64BIT:
                newsize_high = DWORD(newsize >> 32)
                newsize_low = DWORD(newsize & 0xFFFFFFFF)
            else:
                newsize_high = c_int(0)
                newsize_low = c_int(newsize)
            FILE_BEGIN = c_int(0)
            SetFilePointer(self.file_handle, newsize_low, byref(newsize_high),
                           FILE_BEGIN)
            # resize the file
            SetEndOfFile(self.file_handle)
            # create another mapping object and remap the file view
            res = CreateFileMapping(self.file_handle, NULL, PAGE_READWRITE,
                                    newsize_high, newsize_low, self.tagname)
            self.map_handle = c_int(res)
            dwErrCode = DWORD(0)
            if self.map_handle:
                data = MapViewOfFile(self.map_handle, FILE_MAP_WRITE,
                                     0, 0, 0)
                if data:
                    self.setdata(data, newsize)
                    return
                else:
                    dwErrCode = GetLastError()
            else:
                dwErrCode = GetLastError()
            raise OperationError(self.space.w_EnvironmentError,
                self.space.wrap(os.strerror(dwErrCode)))
    resize.unwrap_spec = ['self', int]
    def __len__(self):
        self.check_valid()
        return self.space.wrap(self.size)
    __len__.unwrap_spec = ['self']
    def descr_getitem(self, w_index):
        # m[i] or m[i:j]; extended slices (step != 1) are rejected
        self.check_valid()
        space = self.space
        start, stop, step = space.decode_index(w_index, self.size)
        if step == 0:  # index only
            return space.wrap(self.data[start])
        elif step == 1:
            if 0 <= start <= stop:
                res = self.data[start:stop]
            else:
                res = ''
            return space.wrap(res)
        else:
            raise OperationError(space.w_ValueError,
                space.wrap("mmap object does not support slicing with a step"))
    descr_getitem.unwrap_spec = ['self', W_Root]
    def descr_setitem(self, w_index, value):
        # m[i] = c or m[i:j] = s; the replacement must have the same length
        self.check_valid()
        self.check_writeable()
        space = self.space
        start, stop, step = space.decode_index(w_index, self.size)
        if step == 0:  # index only
            if len(value) != 1:
                raise OperationError(space.w_ValueError,
                    space.wrap("mmap assignment must be "
                               "single-character string"))
            self.data[start] = value[0]
        elif step == 1:
            length = stop - start
            if start < 0 or length < 0:
                length = 0
            if len(value) != length:
                raise OperationError(space.w_ValueError,
                    space.wrap("mmap slice assignment is wrong size"))
            for i in range(length):
                self.data[start + i] = value[i]
        else:
            raise OperationError(space.w_ValueError,
                space.wrap("mmap object does not support slicing with a step"))
    descr_setitem.unwrap_spec = ['self', W_Root, str]
# app-level type: exposes the interp-level methods above through interp2app
W_MMap.typedef = TypeDef("mmap",
    close = interp2app(W_MMap.close),
    read_byte = interp2app(W_MMap.read_byte),
    readline = interp2app(W_MMap.readline),
    read = interp2app(W_MMap.read),
    find = interp2app(W_MMap.find),
    seek = interp2app(W_MMap.seek),
    tell = interp2app(W_MMap.tell),
    size = interp2app(W_MMap.descr_size),
    write = interp2app(W_MMap.write),
    write_byte = interp2app(W_MMap.write_byte),
    flush = interp2app(W_MMap.flush),
    move = interp2app(W_MMap.move),
    resize = interp2app(W_MMap.resize),
    __module__ = "mmap",
    __len__ = interp2app(W_MMap.__len__),
    __getitem__ = interp2app(W_MMap.descr_getitem),
    __setitem__ = interp2app(W_MMap.descr_setitem),
)
def _check_map_size(space, size):
    # negative sizes are rejected outright; sizes that do not survive a
    # round-trip through the platform size_t would overflow the C calls
    if size < 0:
        raise OperationError(space.w_TypeError,
            space.wrap("memory mapped size must be positive"))
    if size_t(size).value != size:
        raise OperationError(space.w_OverflowError,
            space.wrap("memory mapped size is too large (limited by C int)"))
if _POSIX:
    def mmap(space, fileno, length, flags=MAP_SHARED,
             prot=PROT_WRITE | PROT_READ, access=_ACCESS_DEFAULT):
        # app-level mmap.mmap(fileno, length[, flags[, prot[, access]]])
        fd = fileno
        # check size boundaries
        _check_map_size(space, length)
        map_size = length
        # check access is not there when flags and prot are there
        if access != _ACCESS_DEFAULT and ((flags != MAP_SHARED) or\
                                          (prot != (PROT_WRITE | PROT_READ))):
            raise OperationError(space.w_ValueError,
                space.wrap("mmap can't specify both access and flags, prot."))
        # translate the high-level access mode into flags/prot pairs
        if access == ACCESS_READ:
            flags = MAP_SHARED
            prot = PROT_READ
        elif access == ACCESS_WRITE:
            flags = MAP_SHARED
            prot = PROT_READ | PROT_WRITE
        elif access == ACCESS_COPY:
            flags = MAP_PRIVATE
            prot = PROT_READ | PROT_WRITE
        elif access == _ACCESS_DEFAULT:
            pass
        else:
            raise OperationError(space.w_ValueError,
                space.wrap("mmap invalid access parameter."))
        # check file size
        try:
            st = os.fstat(fd)
        except OSError:
            pass     # ignore errors and trust map_size
        else:
            mode = st[stat.ST_MODE]
            size = st[stat.ST_SIZE]
            if size > sys.maxint:
                size = sys.maxint
            else:
                size = int(size)
            if stat.S_ISREG(mode):
                if map_size == 0:
                    map_size = size
                elif map_size > size:
                    raise OperationError(space.w_ValueError,
                        space.wrap("mmap length is greater than file size"))
        m = W_MMap(space, access)
        if fd == -1:
            # Assume the caller wants to map anonymous memory.
            # This is the same behaviour as Windows.  mmap.mmap(-1, size)
            # on both Windows and Unix map anonymous memory.
            m.fd = -1
            flags |= MAP_ANONYMOUS
        else:
            try:
                m.fd = os.dup(fd)
            except OSError, e:
                raise OperationError(space.w_EnvironmentError,
                    space.wrap(os.strerror(e.errno)))
        res = libc.mmap(NULL, map_size, prot, flags, fd, 0)
        if cast(res, c_void_p).value == -1:
            raise OperationError(space.w_EnvironmentError,
                space.wrap(_get_error_msg()))
        m.setdata(res, map_size)
        return space.wrap(m)
    mmap.unwrap_spec = [ObjSpace, int, 'index', int, int, int]
elif _MS_WINDOWS:
def mmap(space, fileno, length, tagname="", access=_ACCESS_DEFAULT):
# check size boundaries
_check_map_size(space, length)
map_size = length
flProtect = 0
dwDesiredAccess = 0
fh = 0
if access == ACCESS_READ:
flProtect = PAGE_READONLY
dwDesiredAccess = FILE_MAP_READ
elif access == _ACCESS_DEFAULT or access == ACCESS_WRITE:
flProtect = PAGE_READWRITE
dwDesiredAccess = FILE_MAP_WRITE
elif access == ACCESS_COPY:
flProtect = PAGE_WRITECOPY
dwDesiredAccess = FILE_MAP_COPY
else:
raise OperationError(space.w_ValueError,
space.wrap("mmap invalid access parameter."))
# assume -1 and 0 both mean invalid file descriptor
# to 'anonymously' map memory.
if fileno != -1 and fileno != 0:
fh = msvcr71._get_osfhandle(fileno)
if fh == -1:
raise OperationError(space.w_EnvironmentError,
space.wrap(_get_error_msg()))
# Win9x appears to need us seeked to zero
# SEEK_SET = 0
# libc._lseek(fileno, 0, SEEK_SET)
m = W_MMap(space, access)
# XXX the following two attributes should be plain RPython ints
m.file_handle = c_int(INVALID_c_int_VALUE)
m.map_handle = c_int(INVALID_c_int_VALUE)
if fh:
# it is necessary to duplicate the handle, so the
# Python code can close it on us
res = DuplicateHandle(GetCurrentProcess(), # source process handle
fh, # handle to be duplicated
GetCurrentProcess(), # target process handle
byref(m.file_handle), # result
0, # access - ignored due to options value
False, # inherited by child procs?
DUPLICATE_SAME_ACCESS) # options
if not res:
raise OperationError(space.w_EnvironmentError,
space.wrap(_get_error_msg()))
if not map_size:
low, high = _get_file_size(space, c_int(fh))
if _64BIT:
map_size = c_int((low << 32) + 1).value
else:
if high:
# file is too large to map completely
map_size = -1
else:
map_size = low
if tagname:
m.tagname = tagname
# DWORD is a 4-byte int. If int > 4-byte it must be divided
if _64BIT:
size_hi = DWORD(map_size >> 32)
size_lo = DWORD(map_size & 0xFFFFFFFF)
else:
size_hi = c_int(0)
size_lo = c_int(map_size)
m.map_handle = c_int(CreateFileMapping(m.file_handle, NULL, flProtect,
size_hi, size_lo, m.tagname))
if m.map_handle:
res = MapViewOfFile(m.map_handle, dwDesiredAccess,
0, 0, 0)
if res:
m.setdata(res, map_size)
return space.wrap(m)
else:
dwErr = GetLastError()
else:
dwErr = GetLastError()
raise OperationError(space.w_EnvironmentError,
space.wrap(os.strerror(dwErr)))
mmap.unwrap_spec = [ObjSpace, int, 'index', str, int]
| Python |
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
    # Mixed-module glue for the app-level 'mmap' module: interp-level
    # entry points plus a few app-level constants.
    interpleveldefs = {
        'PAGESIZE': 'space.wrap(interp_mmap.PAGESIZE)',
        'mmap': 'interp_mmap.mmap'
    }
    appleveldefs = {
        'ACCESS_READ': 'app_mmap.ACCESS_READ',
        'ACCESS_WRITE': 'app_mmap.ACCESS_WRITE',
        'ACCESS_COPY': 'app_mmap.ACCESS_COPY',
        'error': 'app_mmap.error'
    }
    def buildloaders(cls):
        # export every integer constant found by ctypes_platform
        # (MAP_*, PROT_*, PAGE_*, ...) as an interp-level definition
        from pypy.module.mmap import interp_mmap
        for constant, value in interp_mmap.constants.iteritems():
            if isinstance(value, int):
                Module.interpleveldefs[constant] = "space.wrap(%r)" % value
        super(Module, cls).buildloaders()
    buildloaders = classmethod(buildloaders)
| Python |
# app-level part of the mmap module: access-mode constants and the
# exception type, mirroring CPython's mmap module
ACCESS_READ = 1    # read-only mapping
ACCESS_WRITE = 2   # read-write, changes flushed to the file
ACCESS_COPY = 3    # copy-on-write, changes stay private
error = EnvironmentError
| Python |
from pypy.interpreter.nestedscope import Cell
from pypy.interpreter.pycode import PyCode
from pypy.interpreter.function import Function, Method
from pypy.interpreter.module import Module
from pypy.interpreter.pyframe import PyFrame
from pypy.interpreter.pytraceback import PyTraceback
from pypy.interpreter.generator import GeneratorIterator
from pypy.rlib.objectmodel import instantiate
from pypy.interpreter.argument import Arguments
from pypy.interpreter.baseobjspace import ObjSpace, W_Root
from pypy.objspace.std.dicttype import dictiter_typedef
from pypy.objspace.std.iterobject import W_SeqIterObject, W_ReverseSeqIterObject
#note: for now we don't use the actual value when creating the Cell.
# (i.e. we assume it will be handled by __setstate__)
# Stackless does use this so it might be needed here as well.
def cell_new(space):
    # fresh empty Cell; its content is filled in later by __setstate__
    return space.wrap(instantiate(Cell))
def code_new(space, __args__):
    # rebuild a code object by calling the app-level visible PyCode type
    w_type = space.gettypeobject(PyCode.typedef)
    return space.call_args(w_type, __args__)
code_new.unwrap_spec = [ObjSpace, Arguments]
def func_new(space):
    # Recreate an empty Function shell; its real state is supplied
    # afterwards by __setstate__ during unpickling.
    empty_func = instantiate(Function)
    empty_func.w_func_dict = space.newdict()
    return space.wrap(empty_func)
func_new.unwrap_spec = [ObjSpace]
def module_new(space, w_name, w_dict):
    # rebuild a module directly from its name and dict
    new_mod = Module(space, w_name, w_dict)
    return space.wrap(new_mod)
def method_new(space, __args__):
    # rebuild a bound method by calling the app-level Method type
    w_type = space.gettypeobject(Method.typedef)
    return space.call_args(w_type, __args__)
method_new.unwrap_spec = [ObjSpace, Arguments]
def builtin_method_new(space, w_instance, w_name):
    # bound built-in methods unpickle as a plain getattr on the instance
    return space.getattr(w_instance, w_name)
def dictiter_surrogate_new(space, w_lis):
    # we got a listobject.
    # simply create an iterator and that's it.
    return space.iter(w_lis)
dictiter_surrogate_new.unwrap_spec = [ObjSpace, W_Root]
def seqiter_new(space, w_seq, w_index):
    # recreate a sequence iterator positioned at w_index
    return W_SeqIterObject(w_seq, space.int_w(w_index))
def reverseseqiter_new(space, w_seq, w_index):
    # A reversed-sequence iterator stores its position relative to the
    # end of the sequence, so convert the absolute pickled index.
    length = space.int_w(space.len(w_seq))
    relative_index = space.int_w(w_index) - length
    return W_ReverseSeqIterObject(space, w_seq, relative_index)
def frame_new(space, __args__):
    # Recreate an empty PyFrame for unpickling; the real state arrives
    # later via __setstate__.
    args_w, kwds_w = __args__.unpack()
    w_pycode, = args_w
    # interp_w is kept purely for its side effect: it raises an app-level
    # error if the argument is not a PyCode.  (The previous version also
    # bound unused locals 'pycode' and 'w'; they are removed here.)
    space.interp_w(PyCode, w_pycode)
    new_frame = instantiate(space.FrameClass)   # XXX fish
    return space.wrap(new_frame)
frame_new.unwrap_spec = [ObjSpace, Arguments]
def traceback_new(space):
    # empty traceback shell; state is filled in by __setstate__
    tb = instantiate(PyTraceback)
    return space.wrap(tb)
traceback_new.unwrap_spec = [ObjSpace]
def generator_new(space, __args__):
    # rebuild a generator-iterator from its (frame, running) state
    args_w, kwds_w = __args__.unpack()    #stolen from std/fake.py
    w_frame, w_running = args_w
    frame = space.interp_w(PyFrame, w_frame)
    running = space.int_w(w_running)
    new_generator = GeneratorIterator(frame)
    new_generator.running = running
    return space.wrap(new_generator)
generator_new.unwrap_spec = [ObjSpace, Arguments]
def xrangeiter_new(space, current, remaining, step):
    # local import, presumably to avoid a circular import at module
    # load time -- TODO confirm
    from pypy.module.__builtin__.functional import W_XRangeIterator
    new_iter = W_XRangeIterator(space, current, remaining, step)
    return space.wrap(new_iter)
xrangeiter_new.unwrap_spec = [ObjSpace, int, int, int]
# ___________________________________________________________________
# Helper functions for internal use
# adopted from prickelpit.c (but almost completely different)
def slp_into_tuple_with_nulls(space, seq_w):
    """Pack a list of wrapped-or-None objects into a wrapped tuple.
    Element 0 of the result is a tuple of the positions that held None;
    those slots themselves are filled with w_None placeholders.
    """
    null_positions_w = []
    items_w = [space.w_None]        # slot 0 is patched below
    for w_item in seq_w:
        if w_item is None:
            null_positions_w.append(space.wrap(len(items_w) - 1))
            w_item = space.w_None
        items_w.append(w_item)
    items_w[0] = space.newtuple(null_positions_w)
    return space.newtuple(items_w)
def slp_from_tuple_with_nulls(space, w_tup):
    """Inverse of slp_into_tuple_with_nulls: unpack the tuple and put
    None back into every position recorded in its first element."""
    items_w = space.unpackiterable(w_tup)
    w_positions = items_w.pop(0)
    for w_pos in space.unpackiterable(w_positions):
        items_w[space.int_w(w_pos)] = None
    return items_w
| Python |
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
    """Built-in functions, exceptions, and other objects."""
    # internal helper module used by pickling: each entry is a factory
    # that rebuilds one kind of interpreter-level object
    appleveldefs = {
    }
    interpleveldefs = {
        'cell_new'     : 'maker.cell_new',
        'code_new'     : 'maker.code_new',
        'func_new'     : 'maker.func_new',
        'module_new'   : 'maker.module_new',
        'method_new'   : 'maker.method_new',
        'builtin_method_new'     : 'maker.builtin_method_new',
        'dictiter_surrogate_new' : 'maker.dictiter_surrogate_new',
        'seqiter_new'  : 'maker.seqiter_new',
        'reverseseqiter_new' : 'maker.reverseseqiter_new',
        'frame_new'    : 'maker.frame_new',
        'traceback_new' : 'maker.traceback_new',
        'generator_new' : 'maker.generator_new',
        'xrangeiter_new': 'maker.xrangeiter_new',
    }
| Python |
"""
Plain Python definition of the builtin functions oriented towards
functional programming.
"""
from __future__ import generators
def sum(sequence, total=0):
    """sum(sequence, start=0) -> value
Returns the sum of a sequence of numbers (NOT strings) plus the value
of parameter 'start'.  When the sequence is empty, returns start."""
    # strings are explicitly rejected, matching CPython's built-in sum()
    if isinstance(total, str):
        raise TypeError
    acc = total
    for element in sequence:
        acc = acc + element
    return acc
# ____________________________________________________________
def apply(function, args=(), kwds={}):
    """call a function (or other callable object) and return its result"""
    # the mutable default kwds={} is safe here: it is only **-unpacked
    # (which copies it), never mutated
    return function(*args, **kwds)
def map(function, *collections):
    """does 3 separate things, hence this enormous docstring.
       1.  if function is None, return a list of tuples, each with one
           item from each collection.  If the collections have different
           lengths,  shorter ones are padded with None.
       2.  if function is not None, and there is only one collection,
           apply function to every item in the collection and return a
           list of the results.
       3.  if function is not None, and there are several collections,
           repeatedly call the function with one argument from each
           collection.  If the collections have different lengths,
           shorter ones are padded with None
    """
    if len(collections) == 0:
        raise TypeError, "map() requires at least one sequence"
    if len(collections) == 1:
        #it's the most common case, so make it faster
        if function is None:
            return list(collections[0])
        return [function(x) for x in collections[0]]
    # general case: iterate all collections in lockstep, padding the
    # exhausted ones with None until every one is exhausted
    iterators = [ iter(collection) for collection in collections ]
    res = []
    while 1:
        cont = False     #is any collection not empty?
        args = []
        for iterator in iterators:
            try:
                elem = iterator.next()
                cont = True
            except StopIteration:
                elem = None
            args.append(elem)
        if cont:
            if function is None:
                res.append(tuple(args))
            else:
                res.append(function(*args))
        else:
            return res
def filter(function, collection):
    """construct a list of those elements of collection for which function
       is True. If function is None, then return the items in the sequence
       which are True.

       For str/unicode inputs the result is a string of the same type;
       every item produced by __getitem__ must itself be of that type."""
    # detect the string special case: filtering a str/unicode must
    # return a str/unicode, built character by character
    str_type = None
    if isinstance(collection, str):
        str_type = str
    elif isinstance(collection, unicode):
        str_type = unicode
    if str_type is not None:
        if function is None and type(collection) is str_type:
            # nothing can be filtered out and no subclass is involved:
            # the collection itself is already the answer
            return collection
        res = []
        for i in xrange(len(collection)):
            c = collection[i]
            if function is None or function(c):
                if not isinstance(c, str_type):
                    # bug fix: the message used to be passed unformatted,
                    # with the two names as extra TypeError arguments
                    raise TypeError("can't filter %s to %s: __getitem__ returned different type" % (str_type.__name__, str_type.__name__))
                res.append(c)
        return str_type('').join(res) #added '' to make the annotator happy
    if function is None:
        res = [item for item in collection if item]
    else:
        res = [item for item in collection if function(item)]
    # a tuple input yields a tuple result; everything else yields a list
    if isinstance(collection, tuple):
        return tuple(res)
    else:
        return res
def zip(*collections):
    """return a list of tuples, where the nth tuple contains every
       nth item of each collection. If the collections have different
       lengths, zip returns a list as long as the shortest collection,
       ignoring the trailing items in the other collections."""
    if not collections:
        # zip() with no argument: an error before Python 2.4, an empty
        # list from 2.4 onwards
        import sys
        if sys.version_info < (2, 4):
            raise TypeError("zip() requires at least one sequence")
        return []
    iterators = [iter(seq) for seq in collections]
    result = []
    while True:
        row = []
        try:
            for it in iterators:
                row.append(it.next())
        except StopIteration:
            # the shortest collection is exhausted: stop here
            return result
        result.append(tuple(row))
def reduce(function, seq, *initialt):
    """ Apply function of two arguments cumulatively to the items of
        sequence, from left to right, so as to reduce the sequence to a
        single value. Optionally begin with an initial value."""
    seqiter = iter(seq)
    if initialt:
        # an explicit start value was given as a third positional argument
        initial, = initialt
    else:
        # no start value: the first item of the sequence serves as one
        try:
            initial = seqiter.next()
        except StopIteration:
            raise TypeError, "reduce() of empty sequence with no initial value"
    # fold the remaining items into the accumulator, left to right
    while 1:
        try:
            arg = seqiter.next()
        except StopIteration:
            break
        initial = function(initial, arg)
    return initial
# ____________________________________________________________
"""
The following is a nice example of collaboration between
interp-level and app-level.
range is primarily implemented in functional.py for the integer case.
On every error or different data types, it redirects to the applevel
implementation below. functional.py uses this source via the inspect
module and uses gateway.applevel. This is also an alternative to
writing longer functions in strings.
"""
def range(x, y=None, step=1):
    """ returns a list of integers in arithmetic progression from start (defaults
        to zero) to stop - 1 by step (defaults to 1). Use a negative step to
        get a list in descending order."""
    # one argument means range(stop); two mean range(start, stop)
    if y is None:
        start = 0
        stop = x
    else:
        start = x
        stop = y
    if not isinstance(start, (int, long)):
        raise TypeError('range() integer start argument expected, got %s' % type(start))
    if not isinstance(stop, (int, long)):
        raise TypeError('range() integer stop argument expected, got %s' % type(stop))
    if not isinstance(step, (int, long)):
        raise TypeError('range() integer step argument expected, got %s' % type(step))
    if step == 0:
        raise ValueError, 'range() arg 3 must not be zero'
    elif step > 0:
        if stop <= start: # no work for us
            return []
        # integer ceil((stop - start) / step)
        howmany = (stop - start + step - 1)/step
    else:  # step must be < 0, or we would have raised ValueError
        if stop >= start: # no work for us
            return []
        # integer ceil((start - stop) / -step)
        howmany = (start - stop - step  - 1)/-step
    arr = [None] * howmany   # this is to avoid using append.
    i = start
    n = 0
    while n < howmany:
        arr[n] = i
        i += step
        n += 1
    return arr
# ____________________________________________________________
def _identity(arg):
return arg
def min(*arr, **kwargs):
    """return the smallest number in a list,
       or its smallest argument if more than one is given."""
    # 'gt' makes min_max replace the current best whenever the candidate
    # compares smaller
    from operator import gt as replace_when
    return min_max(replace_when, "min", *arr, **kwargs)
def min_max(comp, funcname, *arr, **kwargs):
    # Shared implementation of min() and max(): 'comp' is the comparison
    # that, when true for (current_best_key, candidate_key), makes the
    # candidate replace the current best; 'funcname' is only used in
    # error messages.
    key = kwargs.pop("key", _identity)
    if len(kwargs):
        raise TypeError, '%s() got an unexpected keyword argument' % funcname
    if not arr:
        raise TypeError, '%s() takes at least one argument' % funcname
    if len(arr) == 1:
        # a single argument is an iterable to walk
        arr = arr[0]
    iterator = iter(arr)
    try:
        min_max_val = iterator.next()
    except StopIteration:
        raise ValueError, '%s() arg is an empty sequence' % funcname
    # keep both the best value and its key, so key() runs once per item
    keyed_min_max_val = key(min_max_val)
    for i in iterator:
        keyed = key(i)
        if comp(keyed_min_max_val, keyed):
            min_max_val = i
            keyed_min_max_val = keyed
    return min_max_val
def max(*arr, **kwargs):
    """return the largest number in a list,
       or its largest argument if more than one is given."""
    # 'lt' makes min_max replace the current best whenever the candidate
    # compares larger
    from operator import lt as replace_when
    return min_max(replace_when, "max", *arr, **kwargs)
class enumerate(object):
"""enumerate(iterable) -> iterator for (index, value) of iterable.
Return an enumerate object. iterable must be an other object that supports
iteration. The enumerate object yields pairs containing a count (from
zero) and a value yielded by the iterable argument. enumerate is useful
for obtaining an indexed list: (0, seq[0]), (1, seq[1]), (2, seq[2]), ..."""
def __init__(self, collection):
self._iter = iter(collection)
self._index = 0
def next(self):
try:
next = self._iter.next
except AttributeError:
# CPython raises a TypeError when next() is not defined
raise TypeError('%s object has no next() method' %
(type(self._iter).__name__,))
result = self._index, next()
self._index += 1
return result
def __iter__(self):
return self
# ____________________________________________________________
def sorted(lst, cmp=None, key=None, reverse=None):
    "sorted(iterable, cmp=None, key=None, reverse=False) --> new sorted list"
    # copy first so the argument itself is never mutated, then sort the
    # copy in place with the same three arguments
    result = list(lst)
    result.sort(cmp, key, reverse)
    return result
def reversed(sequence):
    "reversed(sequence) -> reverse iterator over values of the sequence"
    # a __reversed__ hook on the object wins over the generic protocol;
    # the sentinel keeps the exact hasattr()-then-call behaviour even
    # for an attribute whose value is None
    _missing = []
    hook = getattr(sequence, '__reversed__', _missing)
    if hook is not _missing:
        return hook()
    # generic fallback: index from the end, which needs __getitem__
    if not hasattr(sequence, '__getitem__'):
        raise TypeError("argument to reversed() must be a sequence")
    return reversed_iterator(sequence)
class reversed_iterator(object):
def __init__(self, seq):
self.seq = seq
self.remaining = len(seq)
def __iter__(self):
return self
def next(self):
if self.remaining > len(self.seq):
self.remaining = 0
i = self.remaining
if i > 0:
i -= 1
item = self.seq[i]
self.remaining = i
return item
raise StopIteration
def __len__(self):
if self.remaining > len(self.seq):
self.remaining = 0
return self.remaining
def __reduce__(self):
tup = (self.seq, self.remaining)
return (make_reversed_iterator, tup)
def make_reversed_iterator(seq, remaining):
    # Rebuild a reversed_iterator without going through __init__, which
    # would call len(seq) again; used when unpickling.
    it = reversed_iterator.__new__(reversed_iterator)
    it.seq = seq
    it.remaining = remaining
    return it
def _install_pickle_support_for_reversed_iterator():
    # Publish make_reversed_iterator through the '_pickle_support' helper
    # module so that pickled reversed_iterator instances can be rebuilt
    # (reversed_iterator.__reduce__ refers to this factory).
    import _pickle_support
    make_reversed_iterator.__module__ = '_pickle_support'
    _pickle_support.make_reversed_iterator = make_reversed_iterator
| Python |
"""
Implementation of the interpreter-level compile/eval builtins.
"""
from pypy.interpreter.pycode import PyCode
from pypy.interpreter.baseobjspace import W_Root, ObjSpace
from pypy.interpreter.error import OperationError
from pypy.interpreter.gateway import NoneNotWrapped
def compile(space, w_source, filename, mode, flags=0, dont_inherit=0):
    """Compile the source string (a Python module, statement or expression)
into a code object that can be executed by the exec statement or eval().
The filename will be used for run-time error messages.
The mode must be 'exec' to compile a module, 'single' to compile a
single (interactive) statement, or 'eval' to compile an expression.
The flags argument, if present, controls which future statements influence
the compilation of the code.
The dont_inherit argument, if non-zero, stops the compilation inheriting
the effects of any future statements in effect in the code calling
compile; if absent or zero these statements do influence the compilation,
in addition to any features explicitly specified.
"""
    # Unicode source cannot go to the compiler directly: encode as UTF-8
    # and prepend a UTF-8 BOM so the tokenizer picks the right encoding.
    if space.is_true(space.isinstance(w_source, space.w_unicode)):
        # hack: encode the unicode string as UTF-8 and attach
        # a BOM at the start
        w_source = space.call_method(w_source, 'encode', space.wrap('utf-8'))
        str_ = space.str_w(w_source)
        str_ = '\xEF\xBB\xBF' + str_
    else:
        str_ = space.str_w(w_source)
    ec = space.getexecutioncontext()
    # unless dont_inherit is set, inherit the __future__ flags of the
    # frame calling compile(), if there is one
    if not dont_inherit:
        try:
            caller = ec.framestack.top()
        except IndexError:
            pass
        else:
            flags |= ec.compiler.getcodeflags(caller.getcode())
    if mode not in ('exec', 'eval', 'single'):
        raise OperationError(space.w_ValueError,
                             space.wrap("compile() arg 3 must be 'exec' "
                                        "or 'eval' or 'single'"))
    code = ec.compiler.compile(str_, filename, mode, flags)
    return space.wrap(code)
#
compile.unwrap_spec = [ObjSpace,W_Root,str,str,int,int]
def eval(space, w_code, w_globals=NoneNotWrapped, w_locals=NoneNotWrapped):
    """Evaluate the source in the context of globals and locals.
The source may be a string representing a Python expression
or a code object as returned by compile(). The globals and locals
are dictionaries, defaulting to the current globals and locals.
If only globals is given, locals defaults to it.
"""
    w = space.wrap
    # string source: strip leading whitespace and compile it in 'eval' mode
    if (space.is_true(space.isinstance(w_code, space.w_str)) or
        space.is_true(space.isinstance(w_code, space.w_unicode))):
        try:
            w_code = compile(space,
                             space.call_method(w_code, 'lstrip',
                                               space.wrap(' \t')),
                             "<string>", "eval")
        except OperationError, e:
            if e.match(space, space.w_SyntaxError):
                # rebuild the SyntaxError value so that the filename in
                # its location tuple is replaced by None
                e_value_w = space.unpacktuple(e.w_value)
                if len(e_value_w) == 2:
                    e_loc_w = space.unpacktuple(e_value_w[1])
                    e.w_value = space.newtuple([e_value_w[0],
                                                space.newtuple([space.w_None]+
                                                               e_loc_w[1:])])
                raise e
            else:
                raise
    codeobj = space.interpclass_w(w_code)
    if not isinstance(codeobj, PyCode):
        raise OperationError(space.w_TypeError,
              w('eval() arg 1 must be a string or code object'))
    # the calling frame (if any) provides the default namespaces
    try:
        caller = space.getexecutioncontext().framestack.top()
    except IndexError:
        caller = None
    if w_globals is None or space.is_w(w_globals, space.w_None):
        if caller is None:
            w_globals = w_locals = space.newdict()
        else:
            w_globals = caller.w_globals
            w_locals = caller.getdictscope()
    elif w_locals is None:
        # only globals given: locals defaults to it
        w_locals = w_globals
    # inject __builtins__ if the caller-supplied globals lack it
    try:
        space.getitem(w_globals, space.wrap('__builtins__'))
    except OperationError, e:
        if not e.match(space, space.w_KeyError):
            raise
        if caller is not None:
            w_builtin = space.builtin.pick_builtin(caller.w_globals)
            space.setitem(w_globals, space.wrap('__builtins__'), w_builtin)
    return codeobj.exec_code(space, w_globals, w_locals)
| Python |
# NOT_RPYTHON (but maybe soon)
"""
Plain Python definition of the builtin I/O-related functions.
"""
import sys
def execfile(filename, glob=None, loc=None):
    """execfile(filename[, globals[, locals]])
Read and execute a Python script from a file.
The globals and locals are dictionaries, defaulting to the current
globals and locals. If only globals is given, locals defaults to it."""
    if glob is None:
        # Warning this is at hidden_applevel
        glob = globals()
        if loc is None:
            loc = locals()
    elif loc is None:
        loc = glob
    # read the whole file; 'rU' gives universal-newline translation
    f = file(filename, 'rU')
    try:
        source = f.read()
    finally:
        f.close()
    #Don't exec the source directly, as this loses the filename info
    co = compile(source.rstrip()+"\n", filename, 'exec')
    exec co in glob, loc
def raw_input(prompt=None):
    """raw_input([prompt]) -> string
Read a string from standard input. The trailing newline is stripped.
If the user hits EOF (Unix: Ctl-D, Windows: Ctl-Z+Return), raise EOFError.
On Unix, GNU readline is used if enabled. The prompt string, if given,
is printed without a trailing newline before reading."""
    try:
        stdin = sys.stdin
    except AttributeError:
        raise RuntimeError("[raw_]input: lost sys.stdin")
    try:
        stdout = sys.stdout
    except AttributeError:
        raise RuntimeError("[raw_]input: lost sys.stdout")
    # hook for the readline module
    # (only usable when stdin/stdout are the real interactive terminal:
    # file objects on fds 0 and 1, and stdin is a tty)
    if (hasattr(sys, '__raw_input__') and
        isinstance(stdin, file) and stdin.fileno() == 0 and stdin.isatty() and
        isinstance(stdout, file) and stdout.fileno() == 1):
        if prompt is None:
            prompt = ''
        return sys.__raw_input__(prompt)
    if prompt is not None:
        stdout.write(prompt)
        # flush so the prompt is visible before blocking on readline();
        # tolerate stream-like objects without a flush method
        try:
            flush = stdout.flush
        except AttributeError:
            pass
        else:
            flush()
    line = stdin.readline()
    if not line:    # inputting an empty line gives line == '\n'
        raise EOFError
    if line[-1] == '\n':
        return line[:-1]
    return line
def input(prompt=None):
    """Equivalent to eval(raw_input(prompt))."""
    # reads a line, then evaluates it as a Python expression
    return eval(raw_input(prompt))
| Python |
"""
Implementation of the interpreter-level default import logic.
"""
import sys, os, stat
from pypy.interpreter.module import Module
from pypy.interpreter import gateway
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import W_Root, ObjSpace
from pypy.interpreter.eval import Code
from pypy.rlib import streamio
from pypy.rlib.rarithmetic import intmask
NOFILE = 0
PYFILE = 1
PYCFILE = 2
def info_modtype(space, filepart):
    """
    calculate whether the .py file exists, the .pyc file exists
    and whether the .pyc file has the correct mtime entry.
    The latter is only true if the .py file exists.
    The .pyc file is only considered existing if it has a valid
    magic number.

    Returns a (pyfile_exist, pycfile_exists, pycfile_ts_valid) triple of
    booleans for the extension-less base path 'filepart'.
    """
    pyfile = filepart + ".py"
    # fix: pyfile_exist used to be pre-initialized to False here, which
    # was redundant since both branches below assign it
    if os.path.exists(pyfile):
        pyfile_ts = os.stat(pyfile)[stat.ST_MTIME]
        pyfile_exist = True
    else:
        pyfile_ts = 0
        pyfile_exist = False
    pycfile = filepart + ".pyc"
    # .pyc files are only considered at all when enabled in the config
    if space.config.objspace.usepycfiles and os.path.exists(pycfile):
        # check_compiled_module: -1 = bad magic, 0 = stale mtime, 1 = valid
        pyc_state = check_compiled_module(space, pyfile, pyfile_ts, pycfile)
        pycfile_exists = pyc_state >= 0
        # a stale timestamp is still "valid" when there is no .py at all
        pycfile_ts_valid = pyc_state > 0 or (pyc_state == 0 and not pyfile_exist)
    else:
        pycfile_exists = False
        pycfile_ts_valid = False
    return pyfile_exist, pycfile_exists, pycfile_ts_valid
def find_modtype(space, filepart):
    """ This is the way pypy does it. A pyc is only used if the py file exists AND
    the pyc file contains the timestamp of the py. """
    exists_py, exists_pyc, pyc_uptodate = info_modtype(space, filepart)
    if pyc_uptodate:
        return PYCFILE
    if exists_py:
        return PYFILE
    return NOFILE
def find_modtype_cpython(space, filepart):
    """ This is the way cpython does it (where the py file doesnt exist but there
    is a valid pyc file. """
    exists_py, exists_pyc, pyc_uptodate = info_modtype(space, filepart)
    if pyc_uptodate:
        return PYCFILE
    if exists_py:
        return PYFILE
    # unlike find_modtype, an orphaned but well-formed .pyc is accepted
    if exists_pyc:
        return PYCFILE
    return NOFILE
def try_import_mod(space, w_modulename, filepart, w_parent, w_name, pkgdir=None):
    # Try to import 'filepart' (extension-less path) as the module named
    # w_modulename; returns the module or None if no file is found.
    # decide what type we want (pyc/py)
    modtype = find_modtype(space, filepart)
    if modtype == NOFILE:
        return None
    w = space.wrap
    w_mod = w(Module(space, w_modulename))
    e = None
    if modtype == PYFILE:
        filename = filepart + ".py"
        stream = streamio.open_file_as_stream(filename, "r")
    else:
        assert modtype == PYCFILE
        filename = filepart + ".pyc"
        stream = streamio.open_file_as_stream(filename, "rb")
    # register the (still empty) module in sys.modules *before* running
    # its code, so recursive imports see it
    space.sys.setmodule(w_mod)
    space.setattr(w_mod, w('__file__'), space.wrap(filename))
    space.setattr(w_mod, w('__doc__'), space.w_None)
    if pkgdir is not None:
        space.setattr(w_mod, w('__path__'), space.newlist([w(pkgdir)]))
    try:
        try:
            if modtype == PYFILE:
                load_source_module(space, w_modulename, w_mod, filename, stream)
            else:
                load_compiled_module(space, w_modulename, w_mod, filename, stream)
        finally:
            stream.close()
    except OperationError, e:
        # execution failed: undo the sys.modules registration
        w_mods = space.sys.get('modules')
        space.call_method(w_mods,'pop', w_modulename, space.w_None)
        raise
    # re-fetch from sys.modules: the module code may have replaced itself
    w_mod = check_sys_modules(space, w_modulename)
    if w_mod is not None and w_parent is not None:
        space.setattr(w_parent, w_name, w_mod)
    # NOTE(review): 'e' can only be non-None after the except clause,
    # which re-raises, so this final check looks unreachable dead code
    if e:
        raise e
    return w_mod
def try_getattr(space, w_obj, w_name):
    # getattr() that returns None instead of propagating any error
    try:
        return space.getattr(w_obj, w_name)
    except OperationError, e:
        # ugh, but blame CPython :-/ this is supposed to emulate
        # hasattr, which eats all exceptions.
        return None
def try_getitem(space, w_obj, w_key):
    # getitem() that turns a KeyError into None; other errors propagate
    try:
        return space.getitem(w_obj, w_key)
    except OperationError, e:
        if not e.match(space, space.w_KeyError):
            raise
        return None
def check_sys_modules(space, w_modulename):
    # Return the module cached under w_modulename in sys.modules, or
    # None if there is no entry.
    w_modules = space.sys.get('modules')
    try:
        w_mod = space.getitem(w_modules, w_modulename)
    except OperationError, e:
        # note: 'e' deliberately survives the except block (Python 2
        # scoping) and is inspected below
        pass
    else:
        return w_mod
    # only a missing key means "not cached"; anything else propagates
    if not e.match(space, space.w_KeyError):
        raise
    return None
def importhook(space, modulename, w_globals=None,
               w_locals=None, w_fromlist=None):
    # Interp-level implementation of __import__: try a relative import
    # derived from the caller's __name__/__path__ first, then fall back
    # to an absolute import.
    if not isinstance(modulename, str):
        try:
            helper = ', not ' + modulename.__class__.__name__
        except AttributeError:
            helper = ''
        raise OperationError(space.w_TypeError,
              space.wrap("__import__() argument 1 must be string" + helper))
    w = space.wrap
    ctxt_name = None
    # extract the importing context from the caller's globals, if given
    if w_globals is not None and not space.is_w(w_globals, space.w_None):
        ctxt_w_name = try_getitem(space, w_globals, w('__name__'))
        ctxt_w_path = try_getitem(space, w_globals, w('__path__'))
        if ctxt_w_name is not None:
            try:
                ctxt_name = space.str_w(ctxt_w_name)
            except OperationError, e:
                if not e.match(space, space.w_TypeError):
                    raise
    else:
        ctxt_w_path = None
    rel_modulename = None
    if ctxt_name is not None:
        # compute the candidate name for a relative import
        ctxt_name_prefix_parts = ctxt_name.split('.')
        if ctxt_w_path is None: # context is a plain module
            ctxt_name_prefix_parts = ctxt_name_prefix_parts[:-1]
            if ctxt_name_prefix_parts:
                rel_modulename = '.'.join(ctxt_name_prefix_parts+[modulename])
        else: # context is a package module
            rel_modulename = ctxt_name+'.'+modulename
        if rel_modulename is not None:
            w_mod = check_sys_modules(space, w(rel_modulename))
            # a cached w_None marks a previously failed relative import
            if (w_mod is None or
                not space.is_w(w_mod, space.w_None)):
                w_mod = absolute_import(space, rel_modulename,
                                        len(ctxt_name_prefix_parts),
                                        w_fromlist, tentative=1)
                if w_mod is not None:
                    return w_mod
            else:
                rel_modulename = None
    w_mod = absolute_import(space, modulename, 0, w_fromlist, tentative=0)
    # cache the failed relative name as w_None to skip it next time
    if rel_modulename is not None:
        space.setitem(space.sys.get('modules'), w(rel_modulename),space.w_None)
    return w_mod
#
importhook.unwrap_spec = [ObjSpace,str,W_Root,W_Root,W_Root]
def absolute_import(space, modulename, baselevel, w_fromlist, tentative):
    # Import dotted 'modulename' part by part.  'baselevel' is the index
    # of the part whose module object should be returned when there is
    # no fromlist; with a fromlist the innermost module is returned.
    w = space.wrap
    w_mod = None
    parts = modulename.split('.')
    prefix = []
    # it would be nice if we could do here: w_path = space.sys.w_path
    # instead:
    w_path = space.sys.get('path')
    first = None
    level = 0
    for part in parts:
        w_mod = load_part(space, w_path, prefix, part, w_mod,
                          tentative=tentative)
        if w_mod is None:
            return None
        if baselevel == level:
            # remember the module to return; deeper parts must succeed
            first = w_mod
            tentative = 0
        prefix.append(part)
        # descend: the package's __path__ becomes the new search path
        w_path = try_getattr(space, w_mod, w('__path__'))
        level += 1
    if w_fromlist is not None and space.is_true(w_fromlist):
        if w_path is not None:
            fromlist_w = space.unpackiterable(w_fromlist)
            # 'from pkg import *' expands to the package's __all__
            if len(fromlist_w) == 1 and space.eq_w(fromlist_w[0],w('*')):
                w_all = try_getattr(space, w_mod, w('__all__'))
                if w_all is not None:
                    fromlist_w = space.unpackiterable(w_all)
            for w_name in fromlist_w:
                # import submodules named in the fromlist that are not
                # already attributes of the package
                if try_getattr(space, w_mod, w_name) is None:
                    load_part(space, w_path, prefix, space.str_w(w_name), w_mod,
                              tentative=1)
        return w_mod
    else:
        return first
def load_part(space, w_path, prefix, partname, w_parent, tentative):
    # Import the single dotted component 'partname' below the already
    # imported chain 'prefix', searching the directories in 'w_path'.
    w = space.wrap
    modulename = '.'.join(prefix + [partname])
    w_modulename = w(modulename)
    w_mod = check_sys_modules(space, w_modulename)
    if w_mod is not None:
        # a cached w_None marks a known-failed import
        if not space.is_w(w_mod, space.w_None):
            return w_mod
    else:
        w_mod = space.sys.getmodule(modulename)
        if w_mod is not None:
            return w_mod
    # Examine import hooks (PEP302) before doing the import
    if w_path is not None:
        w_loader = find_module(space, w_modulename, w_path)
    else:
        w_loader = find_module(space, w_modulename, space.w_None)
    if not space.is_w(w_loader, space.w_None):
        # a hook claimed the module: delegate loading to it
        w_mod = space.call_method(w_loader, "load_module", w_modulename)
        #w_mod_ = check_sys_modules(space, w_modulename)
        if w_mod is not None and w_parent is not None:
            space.setattr(w_parent, w(partname), w_mod)
        return w_mod
    # normal filesystem lookup along w_path
    if w_path is not None:
        for path in space.unpackiterable(w_path):
            dir = os.path.join(space.str_w(path), partname)
            if os.path.isdir(dir):
                # package directory: import its __init__
                fn = os.path.join(dir, '__init__')
                w_mod = try_import_mod(space, w_modulename, fn, w_parent,
                                       w(partname), pkgdir=dir)
                if w_mod is not None:
                    return w_mod
            # plain module: <path>/<partname>.py or .pyc
            fn = os.path.join(space.str_w(path), partname)
            w_mod = try_import_mod(space, w_modulename, fn, w_parent,
                                   w(partname))
            if w_mod is not None:
                return w_mod
    if tentative:
        return None
    else:
        # ImportError
        msg = "No module named %s" % modulename
        raise OperationError(space.w_ImportError, w(msg))
# __________________________________________________________________
#
# .pyc file support
"""
Magic word to reject .pyc files generated by other Python versions.
It should change for each incompatible change to the bytecode.
The value of CR and LF is incorporated so if you ever read or write
a .pyc file in text mode the magic number will be wrong; also, the
Apple MPW compiler swaps their values, botching string constants.
The magic numbers must be spaced apart by at least 2 values, as the
-U interpreter flag will cause MAGIC+1 being used. They have been
odd numbers for some time now.
There were a variety of old schemes for setting the magic number.
The current working scheme is to increment the previous value by
10.
Known values:
Python 1.5: 20121
Python 1.5.1: 20121
Python 1.5.2: 20121
Python 2.0: 50823
Python 2.0.1: 50823
Python 2.1: 60202
Python 2.1.1: 60202
Python 2.1.2: 60202
Python 2.2: 60717
Python 2.3a0: 62011
Python 2.3a0: 62021
Python 2.3a0: 62011 (!)
Python 2.4a0: 62041
Python 2.4a3: 62051
Python 2.4b1: 62061
Python 2.5a0: 62071
"""
# we decided to use the magic of 2.4.1
#
# In addition, for now, the presence of special bytecodes bumps the
# magic number:
#
# * CALL_LIKELY_BUILTIN +2
# * CALL_METHOD +4
#
# this is a bit of a hack waiting for a nicer general solution.
# Adding another bytecode is already a problem: if we bump the
# number by a total of +10 we collide with CPython's own magic
# number for 2.5a0.
#
MAGIC = 62061 | (ord('\r')<<16) | (ord('\n')<<24)
def get_pyc_magic(space):
    # Start from the base MAGIC and bump it for each optional bytecode
    # family enabled in the objspace configuration, so that .pyc files
    # compiled with different opcode sets never mix.
    magic = MAGIC
    opcodes = space.config.objspace.opcodes
    if opcodes.CALL_LIKELY_BUILTIN:
        magic += 2
    if opcodes.CALL_METHOD:
        magic += 4
    return magic
def parse_source_module(space, pathname, stream):
    """ Parse a source file and return the corresponding code object """
    source = stream.readall()
    # delegate the actual compilation to the app-visible compile()
    # builtin, in 'exec' mode
    w_code = space.builtin.call('compile',
                                space.wrap(source),
                                space.wrap(pathname),
                                space.wrap("exec"))
    return space.interp_w(Code, w_code)
def load_source_module(space, w_modulename, w_mod, pathname, stream):
    """
    Load a source module from a given file and return its module
    object.
    """
    w = space.wrap
    pycode = parse_source_module(space, pathname, stream)
    w_dict = space.getattr(w_mod, w('__dict__'))
    # guarantee a __builtins__ entry before the module code runs
    space.call_method(w_dict, 'setdefault',
                      w('__builtins__'),
                      w(space.builtin))
    pycode.exec_code(space, w_dict, w_dict)
    # optionally cache the compiled code next to the source as a .pyc
    if space.config.objspace.usepycfiles:
        mtime = os.stat(pathname)[stat.ST_MTIME]
        cpathname = pathname + 'c'
        write_compiled_module(space, pycode, cpathname, mtime)
    return w_mod
# helper, to avoid exposing internals of marshal and the
# difficulties of using it though applevel.
_r_correction = intmask(1L<<32) # == 0 on 32-bit machines
def _r_long(stream):
    """Read a little-endian 32-bit signed integer from 'stream';
    returns -1 on a short read."""
    data = stream.read(4)   # XXX XXX could return smaller string
    if len(data) < 4:
        return -1   # good enough for our purposes
    b0 = ord(data[0])
    b1 = ord(data[1])
    b2 = ord(data[2])
    b3 = ord(data[3])
    value = b0 | (b1 << 8) | (b2 << 16) | (b3 << 24)
    # _r_correction is 0 on 32-bit builds (no-op); on 64-bit builds it
    # sign-extends the assembled 32-bit value
    if _r_correction and b3 & 0x80 and value > 0:
        value -= _r_correction
    return int(value)
def _w_long(stream, x):
a = x & 0xff
x >>= 8
b = x & 0xff
x >>= 8
c = x & 0xff
x >>= 8
d = x & 0xff
stream.write(chr(a) + chr(b) + chr(c) + chr(d))
def check_compiled_module(space, pathname, mtime, cpathname):
    """
    Given a pathname for a Python source file, its time of last
    modification, and a pathname for a compiled file, check whether the
    compiled file represents the same version of the source.

    Returns 1 if the .pyc matches, 0 if its timestamp is stale, and -1
    if its magic number is wrong.  (The original docstring talked about
    returning a FILE pointer, a leftover from the C implementation.)
    """
    # NOTE(review): w_marshal looks unused here, but getbuiltinmodule may
    # be relied upon for its side effect of initializing 'marshal' —
    # kept as-is.
    w_marshal = space.getbuiltinmodule('marshal')
    stream = streamio.open_file_as_stream(cpathname, "rb")
    try:
        # bug fix: the magic number used to be read *before* entering
        # this try block, leaking the stream if the read raised
        magic = _r_long(stream)
        if magic != get_pyc_magic(space):
            # XXX what to do about Py_VerboseFlag ?
            # PySys_WriteStderr("# %s has bad magic\n", cpathname);
            return -1
        pyc_mtime = _r_long(stream)
        if pyc_mtime != mtime:
            # PySys_WriteStderr("# %s has bad mtime\n", cpathname);
            return 0
        # if (Py_VerboseFlag)
            # PySys_WriteStderr("# %s matches %s\n", cpathname, pathname);
    finally:
        stream.close()
    return 1
def read_compiled_module(space, cpathname, stream):
    """ Read a code object from a file and check it for validity """
    w_marshal = space.getbuiltinmodule('marshal')
    # un-marshal the whole remaining contents of the stream
    w_code = space.call_method(w_marshal, 'loads',
                               space.wrap(stream.readall()))
    pycode = space.interpclass_w(w_code)
    # interpclass_w returns None for non-interp objects, and isinstance
    # rejects None as well, so a single check covers both cases
    if not isinstance(pycode, Code):
        raise OperationError(space.w_ImportError, space.wrap(
            "Non-code object in %s" % cpathname))
    return pycode
def load_compiled_module(space, w_modulename, w_mod, cpathname, stream):
    """
    Load a module from a compiled file, execute it, and return its
    module object.
    """
    w = space.wrap
    # reject .pyc files produced by a different interpreter version or
    # bytecode configuration
    if _r_long(stream) != get_pyc_magic(space):
        raise OperationError(space.w_ImportError, w(
            "Bad magic number in %s" % cpathname))
    _r_long(stream)     # skip time stamp
    code_w = read_compiled_module(space, cpathname, stream)
    w_dic = space.getattr(w_mod, w('__dict__'))
    # guarantee a __builtins__ entry before the module code runs
    space.call_method(w_dic, 'setdefault',
                      w('__builtins__'),
                      w(space.builtin))
    code_w.exec_code(space, w_dic, w_dic)
    return w_mod
def write_compiled_module(space, co, cpathname, mtime):
    """
    Write a compiled module to a file, placing the time of last
    modification of its source into the header.
    Errors are ignored, if a write error occurs an attempt is made to
    remove the file.
    """
    w_marshal = space.getbuiltinmodule('marshal')
    try:
        w_str = space.call_method(w_marshal, 'dumps', space.wrap(co))
        strbuf = space.str_w(w_str)
    except OperationError, e:
        # only asynchronous exceptions (e.g. KeyboardInterrupt) escape;
        # a plain marshalling failure silently skips the .pyc
        if e.async(space):
            raise
        #print "Problem while marshalling %s, skipping" % cpathname
        return
    #
    # Careful here: we must not crash nor leave behind something that looks
    # too much like a valid pyc file but really isn't one.
    #
    try:
        stream = streamio.open_file_as_stream(cpathname, "wb")
    except OSError:
        return    # cannot create file
    try:
        try:
            # will patch the header later; write zeroes until we are sure that
            # the rest of the file is valid
            _w_long(stream, 0)   # pyc_magic
            _w_long(stream, 0)   # mtime
            stream.write(strbuf)
            # should be ok (XXX or should call os.fsync() to be sure?)
            stream.seek(0, 0)
            _w_long(stream, get_pyc_magic(space))
            _w_long(stream, mtime)
        finally:
            stream.close()
    except OSError:
        # writing failed: remove the half-written file if possible
        try:
            os.unlink(cpathname)
        except OSError:
            pass
app = gateway.applevel(
r"""
# Implement pep302
IMP_HOOK = 9
def find_module(fullname, path):
import sys
meta_path = sys.meta_path
for hook in meta_path:
loader = hook.find_module(fullname, path)
if loader:
return loader
if path != None and type(path) == str:
pass
# XXX Check for frozen modules ?
if path == None:
# XXX Check frozen
path = sys.path
path_hooks = sys.path_hooks
importer_cache = sys.path_importer_cache
importer = None
for p in path:
if importer_cache.get(p,None):
importer = importer_cache.get(p)
else:
importer_cache[p] = None
for hook in path_hooks:
try:
importer = hook(p)
except ImportError:
pass
else:
break
if importer:
importer_cache[p] = importer
if importer:
loader = importer.find_module(fullname)
if loader:
return loader
#no hooks match - do normal import
""")
find_module = app.interphook('find_module')
| Python |
"""
Interp-level definition of frequently used functionals.
"""
from pypy.interpreter.error import OperationError
from pypy.interpreter.gateway import ObjSpace, W_Root, NoneNotWrapped, applevel
from pypy.interpreter.gateway import interp2app
from pypy.interpreter.typedef import TypeDef
from pypy.interpreter.baseobjspace import Wrappable
from pypy.rlib.rarithmetic import r_uint, intmask
from pypy.module.__builtin__.app_functional import range as app_range
from inspect import getsource, getfile
"""
Implementation of the common integer case of range. Instead of handling
all other cases here, too, we fall back to the applevel implementation
for non-integer arguments.
Ideally this implementation could be saved, if we were able to
specialize the geninterp generated code. But I guess having this
hand-optimized is a good idea.
Note the fun of using range inside range :-)
"""
def get_len_of_range(lo, hi, step):
    """
    Return number of items in range/xrange (lo, hi, step).
    Raise ValueError if step == 0 and OverflowError if the true value is too
    large to fit in a signed long.
    """

    # If lo >= hi, the range is empty.
    # Else if n values are in the range, the last one is
    # lo + (n-1)*step, which must be <= hi-1.  Rearranging,
    # n <= (hi - lo - 1)/step + 1, so taking the floor of the RHS gives
    # the proper value.  Since lo < hi in this case, hi-lo-1 >= 0, so
    # the RHS is non-negative and so truncation is the same as the
    # floor.  Letting M be the largest positive long, the worst case
    # for the RHS numerator is hi=M, lo=-M-1, and then
    # hi-lo-1 = M-(-M-1)-1 = 2*M.  Therefore unsigned long has enough
    # precision to compute the RHS exactly.
    if step == 0:
        raise ValueError
    elif step < 0:
        # normalize to a positive step by swapping the bounds
        lo, hi, step = hi, lo, -step
    if lo < hi:
        # the difference is computed in r_uint to get the full unsigned
        # precision described above; intmask folds it back to a signed
        # result, and a negative result signals overflow
        uhi = r_uint(hi)
        ulo = r_uint(lo)
        diff = uhi - ulo - 1
        n = intmask(diff // r_uint(step) + 1)
        if n < 0:
            raise OverflowError
    else:
        n = 0
    return n
def range(space, w_x, w_y=None, w_step=1):
    """Return a list of integers in arithmetic progression from start (defaults
to zero) to stop - 1 by step (defaults to 1). Use a negative step to
get a list in descending order."""
    try:
        # save duplication by redirecting every error to applevel
        x = space.int_w(w_x)
        if space.is_w(w_y, space.w_None):
            start, stop = 0, x
        else:
            start, stop = x, space.int_w(w_y)
        step = space.int_w(w_step)
        howmany = get_len_of_range(start, stop, step)
    except OperationError, e:
        # NOTE(review): this check looks inverted -- it re-raises when
        # the error *is* a TypeError and falls through to the app-level
        # fallback for every other OperationError, while the comment
        # above suggests type errors were meant to reach the fallback.
        # Left untouched; confirm against the applevel range().
        if not e.match(space, space.w_TypeError):
            pass
        else:
            raise
    except (ValueError, OverflowError):
        pass
    else:
        if (space.config.objspace.std.withmultilist or
            space.config.objspace.std.withrangelist):
            return range_withspecialized_implementation(space, start,
                                                        step, howmany)
        res_w = [None] * howmany
        v = start
        # "the fun of using range inside range": after the 'del range'
        # below, this resolves to the builtin range at call time
        for idx in range(howmany):
            res_w[idx] = space.wrap(v)
            v += step
        return space.newlist(res_w)
    return range_fallback(space, w_x, w_y, w_step)
range_int = range
range_int.unwrap_spec = [ObjSpace, W_Root, W_Root, W_Root]
del range # don't hide the builtin one
# app-level fallback, built from the source of app_functional.range
range_fallback = applevel(getsource(app_range), getfile(app_range)
                          ).interphook('range')
def range_withspecialized_implementation(space, start, step, howmany):
    # Build one of the lazily-materialized range list representations,
    # depending on which objspace option is enabled.
    if space.config.objspace.std.withrangelist:
        from pypy.objspace.std.rangeobject import W_RangeListObject
        return W_RangeListObject(start, step, howmany)
    if space.config.objspace.std.withmultilist:
        from pypy.objspace.std.listmultiobject import W_ListMultiObject
        from pypy.objspace.std.listmultiobject import RangeImplementation
        range_impl = RangeImplementation(space, start, step, howmany)
        return W_ListMultiObject(space, range_impl)
def all(space, w_S):
    """all(iterable) -> bool
    Return True if bool(x) is True for every x in the iterable."""
    w_iter = space.iter(w_S)
    while True:
        try:
            w_next = space.next(w_iter)
        except OperationError, e:
            if not e.match(space, space.w_StopIteration):
                raise       # re-raise other app-level exceptions
            break
        # short-circuit on the first falsy item
        if not space.is_true(w_next):
            return space.w_False
    return space.w_True
all.unwrap_spec = [ObjSpace, W_Root]
def any(space, w_S):
    """any(iterable) -> bool
    Return True if bool(x) is True for any x in the iterable."""
    w_iter = space.iter(w_S)
    while True:
        try:
            w_next = space.next(w_iter)
        except OperationError, e:
            if not e.match(space, space.w_StopIteration):
                raise       # re-raise other app-level exceptions
            break
        # short-circuit on the first truthy item
        if space.is_true(w_next):
            return space.w_True
    return space.w_False
any.unwrap_spec = [ObjSpace, W_Root]
class W_XRange(Wrappable):
    # Interp-level 'xrange' type: stores only (start, len, step) and
    # computes items on demand instead of materializing a list.
    def __init__(self, space, start, len, step):
        self.space = space
        self.start = start
        self.len = len
        self.step = step
    def descr_new(space, w_subtype, w_start, w_stop=None, w_step=1):
        # note: no 'self' -- wrapped with interp2app(...im_func) as __new__
        start = _toint(space, w_start)
        step = _toint(space, w_step)
        if space.is_w(w_stop, space.w_None):  # only 1 argument provided
            start, stop = 0, start
        else:
            stop = _toint(space, w_stop)
        try:
            howmany = get_len_of_range(start, stop, step)
        except ValueError:
            raise OperationError(space.w_ValueError,
                                 space.wrap("xrange() arg 3 must not be zero"))
        except OverflowError:
            raise OperationError(space.w_OverflowError,
                                 space.wrap("xrange() result has "
                                            "too many items"))
        obj = space.allocate_instance(W_XRange, w_subtype)
        W_XRange.__init__(obj, space, start, howmany, step)
        return space.wrap(obj)
    def descr_repr(self):
        # reconstruct the original call arguments from (start, len, step)
        stop = self.start + self.len * self.step
        if self.start == 0 and self.step == 1:
            s = "xrange(%d)" % (stop,)
        elif self.step == 1:
            s = "xrange(%d, %d)" % (self.start, stop)
        else:
            s = "xrange(%d, %d, %d)" %(self.start, stop, self.step)
        return self.space.wrap(s)
    def descr_len(self):
        return self.space.wrap(self.len)
    def descr_getitem(self, i):
        # xrange does NOT support slicing
        space = self.space
        len = self.len
        # a negative index counts from the end, as usual
        if i < 0:
            i += len
        if 0 <= i < len:
            return space.wrap(self.start + i * self.step)
        raise OperationError(space.w_IndexError,
                             space.wrap("xrange object index out of range"))
    def descr_iter(self):
        return self.space.wrap(W_XRangeIterator(self.space, self.start,
                                                self.len, self.step))
    def descr_reversed(self):
        # iterate from the last item backwards by negating the step
        lastitem = self.start + (self.len-1) * self.step
        return self.space.wrap(W_XRangeIterator(self.space, lastitem,
                                                self.len, -self.step))
def _toint(space, w_obj):
    # Unwrap an app-level object to an interp-level int for xrange().
    # trying to support float arguments, just because CPython still does
    try:
        return space.int_w(w_obj)
    except OperationError, e:
        if space.is_true(space.isinstance(w_obj, space.w_float)):
            # truncate floats through the app-level int() conversion
            return space.int_w(space.int(w_obj))
        raise
# Wire the interp-level descr_* methods into the app-level 'xrange' type.
# descr_new is unwrapped via im_func because __new__ is a static method.
W_XRange.typedef = TypeDef("xrange",
    __new__          = interp2app(W_XRange.descr_new.im_func),
    __repr__         = interp2app(W_XRange.descr_repr),
    __getitem__      = interp2app(W_XRange.descr_getitem,
                                  unwrap_spec=['self', 'index']),
    __iter__         = interp2app(W_XRange.descr_iter),
    __len__          = interp2app(W_XRange.descr_len),
    __reversed__     = interp2app(W_XRange.descr_reversed),
)
class W_XRangeIterator(Wrappable):
    # Iterator over an xrange: steps 'current' forward while counting
    # 'remaining' down to zero.  Also serves reversed(xrange) by being
    # constructed with the last item and a negated step.

    def __init__(self, space, current, remaining, step):
        self.space = space
        self.current = current
        self.remaining = remaining
        self.step = step

    def descr_iter(self):
        return self.space.wrap(self)

    def descr_next(self):
        if self.remaining > 0:
            item = self.current
            self.current = item + self.step
            self.remaining -= 1
            return self.space.wrap(item)
        # exhausted: raise app-level StopIteration
        raise OperationError(self.space.w_StopIteration, self.space.w_None)

    def descr_len(self):
        # app-level len() of the iterator: number of items still to come
        return self.space.wrap(self.remaining)

    def descr_reduce(self):
        # Pickling support: reduce to the xrangeiter_new helper from the
        # _pickle_support mixed module plus our three integer fields.
        from pypy.interpreter.mixedmodule import MixedModule
        from pypy.module._pickle_support import maker # helper fns
        space = self.space
        w_mod    = space.getbuiltinmodule('_pickle_support')
        mod      = space.interp_w(MixedModule, w_mod)
        new_inst = mod.get('xrangeiter_new')
        w        = space.wrap
        nt       = space.newtuple
        tup      = [w(self.current), w(self.remaining), w(self.step)]
        return nt([new_inst, nt(tup)])
# App-level type for the xrange iterator, including pickling via __reduce__.
W_XRangeIterator.typedef = TypeDef("rangeiterator",
    __iter__        = interp2app(W_XRangeIterator.descr_iter),
    __len__         = interp2app(W_XRangeIterator.descr_len),
    next            = interp2app(W_XRangeIterator.descr_next),
    __reduce__      = interp2app(W_XRangeIterator.descr_reduce),
)
| Python |
class State:
    # Per-space cache holding the app-level 'file' type fetched from the
    # _file module; instantiated lazily through space.fromcache (see get()).
    def __init__(self, space):
        self.w_file = space.appexec([], """():
            import _file;
            return _file.file""")
def get(space):
    # Return the per-space State instance, creating it on first use.
    return space.fromcache(State)
| Python |
"""
Plain Python definition of the builtin functions related to run-time
program introspection.
"""
import sys
def globals():
    "Return the dictionary containing the current scope's global variables."
    # NOTE(review): _getframe(0) names this helper's own frame; returning the
    # caller's globals relies on how this app-level module is compiled
    # (geninterp) -- confirm before changing the depth.
    return sys._getframe(0).f_globals
def locals():
    """Return a dictionary containing the current scope's local variables.
Note that this may be the real dictionary of local variables, or a copy."""
    # NOTE(review): same frame-depth caveat as globals() above -- the depth
    # of 0 only yields the caller's locals under geninterp; confirm.
    return sys._getframe(0).f_locals
def _caller_locals():
    # Internal helper: the locals of whoever called our caller (the depth
    # of 0 is again geninterp-dependent; see the note on globals()).
    return sys._getframe(0).f_locals
def vars(*obj):
"""Return a dictionary of all the attributes currently bound in obj. If
called with no argument, return the variables bound in local scope."""
if len(obj) == 0:
return _caller_locals()
elif len(obj) != 1:
raise TypeError, "vars() takes at most 1 argument."
else:
try:
return obj[0].__dict__
except AttributeError:
raise TypeError, "vars() argument must have __dict__ attribute"
# Replaced by the interp-level helper space.callable():
##def callable(ob):
## import __builtin__ # XXX this is insane but required for now for geninterp
## for c in type(ob).__mro__:
## if '__call__' in c.__dict__:
## if isinstance(ob, __builtin__._instance): # old style instance!
## return getattr(ob, '__call__', None) is not None
## return True
## else:
## return False
def dir(*args):
    """dir([object]) -> list of strings

    Return an alphabetized list of names comprising (some of) the attributes
    of the given object, and of attributes reachable from it:

    No argument:  the names in the current scope.
    Module object:  the module attributes.
    Type or class object:  its attributes, and recursively the attributes of
        its bases.
    Otherwise:  its attributes, its class's attributes, and recursively the
        attributes of its class's base classes.
    """
    if len(args) > 1:
        raise TypeError("dir expected at most 1 arguments, got %d"
                        % len(args))
    if len(args) == 0:
        local_names = _caller_locals().keys() # 2 stackframes away
        if not isinstance(local_names, list):
            raise TypeError("expected locals().keys() to be a list")
        local_names.sort()
        return local_names

    import types
    obj = args[0]

    if isinstance(obj, types.ModuleType):
        # modules: just their __dict__ keys, sorted
        try:
            result = obj.__dict__.keys()
            if not isinstance(result, list):
                raise TypeError("expected __dict__.keys() to be a list")
            result.sort()
            return result
        except AttributeError:
            return []

    elif isinstance(obj, (types.TypeType, types.ClassType)):
        #Don't look at __class__, as metaclass methods would be confusing.
        result = _classdir(obj).keys()
        result.sort()
        return result

    else: #(regular item)
        # merge the instance __dict__ with the (recursive) class attributes
        Dict = {}
        try:
            Dict.update(obj.__dict__)
        except AttributeError: pass
        try:
            Dict.update(_classdir(obj.__class__))
        except AttributeError: pass

        ## Comment from object.c:
        ## /* Merge in __members__ and __methods__ (if any).
        ##    XXX Would like this to go away someday; for now, it's
        ##    XXX needed to get at im_self etc of method objects. */
        for attr in ['__members__','__methods__']:
            try:
                for item in getattr(obj, attr):
                    if isinstance(item, types.StringTypes):
                        Dict[item] = None
            except (AttributeError, TypeError): pass

        result = Dict.keys()
        result.sort()
        return result
def _classdir(klass):
"""Return a dict of the accessible attributes of class/type klass.
This includes all attributes of klass and all of the
base classes recursively.
The values of this dict have no meaning - only the keys have
meaning.
"""
Dict = {}
try:
Dict.update(klass.__dict__)
except AttributeError: pass
try:
# XXX - Use of .__mro__ would be suggested, if the existance
# of that attribute could be guarranted.
bases = klass.__bases__
except AttributeError: pass
else:
try:
#Note that since we are only interested in the keys,
# the order we merge classes is unimportant
for base in bases:
Dict.update(_classdir(base))
except TypeError: pass
return Dict
| Python |
# NOT_RPYTHON
# Stub placeholder for the app-level 'file' type (installed as '__filestub');
# only the docstring matters here -- the real implementation lives in the
# _file module.
class file(object):
    """file(name[, mode[, buffering]]) -> file object

    Open a file.  The mode can be 'r', 'w' or 'a' for reading (default),
    writing or appending.  The file will be created if it doesn't exist
    when opened for writing or appending; it will be truncated when
    opened for writing.  Add a 'b' to the mode for binary files.
    Add a '+' to the mode to allow simultaneous reading and writing.
    If the buffering argument is given, 0 means unbuffered, 1 means line
    buffered, and larger numbers specify the buffer size.
    Add a 'U' to mode to open the file for input with universal newline
    support.  Any line ending in the input file will be seen as a '\n'
    in Python.  Also, a file so opened gains the attribute 'newlines';
    the value for this attribute is one of None (no newline read yet),
    '\r', '\n', '\r\n' or a tuple containing all the newline types seen.

    Note:  open() is an alias for file().
    """
| Python |
"""
Plain Python definition of the builtin interactive help functions.
"""
import sys
# Platform-dependent message bound to the app-level 'exit' and 'quit' names.
if sys.platform == "win32":
    exit = "Use Ctrl-Z plus Return to exit."
else:
    exit = "Use Ctrl-D (i.e. EOF) to exit."
def copyright():
    # Interactive helper: print the PyPy copyright blurb.
    print 'Copyright 2003-2007 PyPy development team.\nAll rights reserved.\nFor further information see http://www.codespeak.net/pypy.\nSome materials may have a different copyright.\nIn these cases, this is explicitly noted in the source code file.'
def license():
    # Interactive helper: print the MIT license text.
    print \
"""
Copyright (c) <2003-2007> <PyPy development team>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
# Define the built-in 'help'.
# This is a wrapper around pydoc.help (with a twist).
class _Helper:
def __repr__(self):
return "Type help() for interactive help, " \
"or help(object) for help about object."
def __call__(self, *args, **kwds):
import pydoc
return pydoc.help(*args, **kwds)
help = _Helper()
| Python |
"""
Interp-level implementation of the basic space operations.
"""
from pypy.interpreter import gateway
from pypy.interpreter.baseobjspace import ObjSpace
from pypy.interpreter.error import OperationError
import __builtin__
NoneNotWrapped = gateway.NoneNotWrapped
def abs(space, w_val):
    "abs(number) -> number\n\nReturn the absolute value of the argument."
    # pure delegation to the object space's abs operation
    return space.abs(w_val)
def chr(space, w_ascii):
    "Return a string of one character with the given ascii code."
    # NOTE(review): range checking is presumably done inside newstring();
    # confirm -- no explicit 0..255 check is performed here.
    w_character = space.newstring([w_ascii])
    return w_character
def unichr(space, w_code):
    "Return a Unicode string of one character with the given ordinal."
    # XXX range checking!  An out-of-range code makes the *host* unichr()
    # raise a plain ValueError instead of an app-level exception.
    return space.newunicode([__builtin__.unichr(space.int_w(w_code))])
def len(space, w_obj):
    "len(object) -> integer\n\nReturn the number of items of a sequence or mapping."
    # pure delegation to the object space's len operation
    return space.len(w_obj)
def checkattrname(space, w_name):
    # This is a check to ensure that getattr/setattr/delattr only pass a
    # string to the rest of the code.  XXX not entirely sure if these three
    # functions are the only way for non-string objects to reach
    # space.{get,set,del}attr()...
    # Note that if w_name is already a string (or a subclass of str),
    # it must be returned unmodified (and not e.g. unwrapped-rewrapped).
    if not space.is_true(space.isinstance(w_name, space.w_str)):
        name = space.str_w(w_name)    # typecheck: raises if not string-like
        w_name = space.wrap(name)     # rewrap as a real string
    return w_name
def delattr(space, w_object, w_name):
    """Delete a named attribute on an object.
delattr(x, 'y') is equivalent to ``del x.y''."""
    # normalize the name to a real string, then delegate; returns None
    w_name = checkattrname(space, w_name)
    space.delattr(w_object, w_name)
    return space.w_None
def getattr(space, w_object, w_name, w_defvalue=NoneNotWrapped):
"""Get a named attribute from an object.
getattr(x, 'y') is equivalent to ``x.y''."""
w_name = checkattrname(space, w_name)
try:
return space.getattr(w_object, w_name)
except OperationError, e:
if w_defvalue is not None:
if e.match(space, space.w_AttributeError):
return w_defvalue
raise
def hasattr(space, w_object, w_name):
    """Return whether the object has an attribute with the given name.
    (This is done by calling getattr(object, name) and catching exceptions.)"""
    w_name = checkattrname(space, w_name)
    # findattr returns an interp-level None when the attribute is missing
    if space.findattr(w_object, w_name) is None:
        return space.w_False
    return space.w_True
def hash(space, w_object):
    """Return a hash value for the object.  Two objects which compare as
equal have the same hash value.  It is possible, but unlikely, for
two un-equal objects to have the same hash value."""
    # pure delegation to the object space's hash operation
    return space.hash(w_object)
def oct(space, w_val):
    """Return the octal representation of an integer."""
    # XXX does this need to be a space operation?
    return space.oct(w_val)
def hex(space, w_val):
    """Return the hexadecimal representation of an integer."""
    # pure delegation to the object space's hex operation
    return space.hex(w_val)
def id(space, w_object):
    "Return the identity of an object: id(x) == id(y) if and only if x is y."
    # pure delegation to the object space's id operation
    return space.id(w_object)
def cmp(space, w_x, w_y):
    """return 0 when x == y, -1 when x < y and 1 when x > y """
    # pure delegation to the object space's cmp operation
    return space.cmp(w_x, w_y)
def coerce(space, w_x, w_y):
    """coerce(x, y) -> (x1, y1)

Return a tuple consisting of the two numeric arguments converted to
a common type, using the same rules as used by arithmetic operations.
If coercion is not possible, raise TypeError."""
    # pure delegation to the object space's coerce operation
    return space.coerce(w_x, w_y)
def divmod(space, w_x, w_y):
    """Return the tuple ((x-x%y)/y, x%y).  Invariant: div*y + mod == x."""
    # pure delegation to the object space's divmod operation
    return space.divmod(w_x, w_y)
# semi-private: works only for new-style classes.
def _issubtype(space, w_cls1, w_cls2):
    # direct delegation; no old-style class support (see issubclass below)
    return space.issubtype(w_cls1, w_cls2)
# ____________________________________________________________
from math import floor as _floor
from math import ceil as _ceil
def round(space, number, ndigits=0):
    """round(number[, ndigits]) -> floating point number

Round a number to a given precision in decimal digits (default 0 digits).
This always returns a floating point number.  Precision may be negative."""
    # Algorithm copied directly from CPython
    # Build f = 10.0 ** abs(ndigits) by repeated multiplication, exactly
    # as CPython does (a pow() call could round differently).
    f = 1.0
    if ndigits < 0:
        i = -ndigits
    else:
        i = ndigits
    while i > 0:
        f = f*10.0
        i -= 1

    # scale, round half away from zero, then scale back
    if ndigits < 0:
        number /= f
    else:
        number *= f

    if number >= 0.0:
        number = _floor(number + 0.5)
    else:
        number = _ceil(number - 0.5)

    if ndigits < 0:
        number *= f
    else:
        number /= f

    return space.wrap(number)
#
round.unwrap_spec = [ObjSpace, float, int]
# ____________________________________________________________
iter_sentinel = gateway.applevel('''
# NOT_RPYTHON -- uses yield
# App-level implementation of the iter(callable,sentinel) operation.
def iter_generator(callable_, sentinel):
while 1:
result = callable_()
if result == sentinel:
return
yield result
def iter_sentinel(callable_, sentinel):
if not callable(callable_):
raise TypeError, 'iter(v, w): v must be callable'
return iter_generator(callable_, sentinel)
''', filename=__file__).interphook("iter_sentinel")
def iter(space, w_collection_or_callable, w_sentinel=NoneNotWrapped):
    """iter(collection) -> iterator over the elements of the collection.

iter(callable, sentinel) -> iterator calling callable() until it returns
                            the sentinel.
"""
    # w_sentinel is NoneNotWrapped, so an interp-level None means the
    # one-argument form was used.
    if w_sentinel is None:
        return space.iter(w_collection_or_callable)
        # XXX it seems that CPython checks the following
        #     for newstyle but doesn't for oldstyle classes :-(
        #w_res = space.iter(w_collection_or_callable)
        #w_typeres = space.type(w_res)
        #try:
        #    space.getattr(w_typeres, space.wrap('next'))
        #except OperationError, e:
        #    if not e.match(space, space.w_AttributeError):
        #        raise
        #    raise OperationError(space.w_TypeError,
        #        space.wrap("iter() returned non-iterator of type '%s'" %
        #                   w_typeres.name))
        #else:
        #    return w_res
    else:
        # two-argument form: delegate to the app-level generator helper
        return iter_sentinel(space, w_collection_or_callable, w_sentinel)
def _seqiter(space, w_obj):
    # semi-private: build a sequence-protocol iterator over w_obj
    return space.newseqiter(w_obj)
def ord(space, w_val):
    """Return the integer ordinal of a character."""
    # pure delegation to the object space's ord operation
    return space.ord(w_val)
def pow(space, w_base, w_exponent, w_modulus=None):
    """With two arguments, equivalent to ``base**exponent''.
With three arguments, equivalent to ``(base**exponent) % modulus'',
but much more efficient for large exponents."""
    # w_modulus defaults to wrapped None, which space.pow understands
    return space.pow(w_base, w_exponent, w_modulus)
def repr(space, w_object):
    """Return a canonical string representation of the object.
For simple object types, eval(repr(object)) == object."""
    # pure delegation to the object space's repr operation
    return space.repr(w_object)
def setattr(space, w_object, w_name, w_val):
    """Store a named attribute into an object.
setattr(x, 'y', z) is equivalent to ``x.y = z''."""
    # normalize the name to a real string, then delegate; returns None
    w_name = checkattrname(space, w_name)
    space.setattr(w_object, w_name, w_val)
    return space.w_None
def intern(space, w_str):
    """``Intern'' the given string.  This enters the string in the (global)
table of interned strings whose purpose is to speed up dictionary lookups.
Return the string itself or the previously interned string object with the
same value."""
    # only exact 'str' instances are accepted; str subclasses raise TypeError
    if space.is_w(space.type(w_str), space.w_str):
        return space.new_interned_w_str(w_str)
    raise OperationError(space.w_TypeError, space.wrap("intern() argument must be string."))
def callable(space, w_object):
    """Check whether the object appears to be callable (i.e., some kind of
function).  Note that classes are callable."""
    # pure delegation to the object space's callable operation
    return space.callable(w_object)
def _recursive_issubclass(space, w_cls, w_klass_or_tuple): # returns interp-level bool
    # Duck-typed subclass check for objects that are not new-style types:
    # walk the __bases__ graph depth-first, comparing by identity.
    if space.is_w(w_cls, w_klass_or_tuple):
        return True
    try:
        w_bases = space.getattr(w_cls, space.wrap("__bases__"))
    except OperationError, e:
        if e.match(space, space.w_AttributeError):
            return False        # no __bases__: cannot match
        else:
            raise
    w_iterator = space.iter(w_bases)
    while True:
        try:
            w_base = space.next(w_iterator)
        except OperationError, e:
            if not e.match(space, space.w_StopIteration):
                raise
            break
        if _recursive_issubclass(space, w_base, w_klass_or_tuple):
            return True
    return False
def _issubclass(space, w_cls, w_klass_or_tuple, check_cls, depth): # returns interp-level bool
    # Core of issubclass()/isinstance().  'depth' bounds the recursion
    # through nested tuples; 'check_cls' says whether arg 1 must itself
    # be validated as a class-like object on the fallback path.
    if depth == 0:
        # XXX overzealous test compliance hack
        raise OperationError(space.w_RuntimeError, space.wrap("maximum recursion depth exceeded"))
    if space.is_true(space.issubtype(space.type(w_klass_or_tuple), space.w_tuple)):
        # second argument is a tuple: succeed if any element matches
        w_iter = space.iter(w_klass_or_tuple)
        while True:
            try:
                w_klass = space.next(w_iter)
            except OperationError, e:
                if not e.match(space, space.w_StopIteration):
                    raise
                break
            if _issubclass(space, w_cls, w_klass, True, depth - 1):
                return True
        return False

    try:
        # fast path: both are new-style types
        return space.is_true(space.issubtype(w_cls, w_klass_or_tuple))
    except OperationError, e:
        if e.match(space, space.w_TypeError):
            # not new-style: fall back to duck typing on __bases__,
            # validating the arguments first so we raise the expected
            # TypeErrors for non-class arguments
            w_bases = space.wrap('__bases__')
            if check_cls:
                try:
                    space.getattr(w_cls, w_bases)
                except OperationError, e:
                    if not e.match(space, space.w_AttributeError):
                        raise
                    raise OperationError(space.w_TypeError, space.wrap('arg 1 must be a class or type'))
            try:
                space.getattr(w_klass_or_tuple, w_bases)
            except OperationError, e:
                if not e.match(space, space.w_AttributeError):
                    raise
                raise OperationError(space.w_TypeError, space.wrap('arg 2 must be a class or type or a tuple thereof'))
            return _recursive_issubclass(space, w_cls, w_klass_or_tuple)
        else:
            raise
def issubclass(space, w_cls, w_klass_or_tuple):
    """Check whether a class 'cls' is a subclass (i.e., a derived class) of
another class.  When using a tuple as the second argument, check whether
'cls' is a subclass of any of the classes listed in the tuple."""
    # wrap the interp-level bool computed by issubclass_w
    return space.wrap(issubclass_w(space, w_cls, w_klass_or_tuple))
def issubclass_w(space, w_cls, w_klass_or_tuple):
    # interp-level entry point; the recursion limit bounds tuple nesting
    return _issubclass(space, w_cls, w_klass_or_tuple, True, space.sys.recursionlimit)
def isinstance(space, w_obj, w_klass_or_tuple):
    """Check whether an object is an instance of a class (or of a subclass
thereof).  When using a tuple as the second argument, check whether 'obj'
is an instance of any of the classes listed in the tuple."""
    # first try the real type of the object
    w_objtype = space.type(w_obj)
    if issubclass_w(space, w_objtype, w_klass_or_tuple):
        return space.w_True
    # then, like CPython, also honour a __class__ attribute that differs
    # from type(obj) (e.g. proxies); missing __class__ means no match
    try:
        w_objcls = space.getattr(w_obj, space.wrap("__class__"))
    except OperationError, e:
        if e.match(space, space.w_AttributeError):
            return space.w_False
        else:
            raise
    if space.is_w(w_objcls, w_objtype):
        return space.w_False        # same class already checked above
    else:
        # check_cls=False: w_objcls may be any object, not a real class
        return space.wrap(_issubclass(space, w_objcls, w_klass_or_tuple, False, space.sys.recursionlimit))
| Python |
from pypy.interpreter.error import OperationError
from pypy.interpreter import module
from pypy.interpreter.mixedmodule import MixedModule
# put builtins here that should be optimized somehow
OPTIMIZED_BUILTINS = ["len", "range", "xrange", "min", "max", "enumerate",
    "isinstance", "type", "zip", "file", "open", "abs", "chr", "unichr",
    "ord", "pow", "repr", "hash", "oct", "hex", "round", "cmp", "getattr",
    "setattr", "delattr", "callable", "int", "str", "float"]

# presumably the per-frame index must fit in a single byte
assert len(OPTIMIZED_BUILTINS) <= 256

# inverse mapping: builtin name -> its position in OPTIMIZED_BUILTINS
BUILTIN_TO_INDEX = dict([(name, index)
                         for index, name in enumerate(OPTIMIZED_BUILTINS)])
assert len(OPTIMIZED_BUILTINS) == len(BUILTIN_TO_INDEX)
class Module(MixedModule):
    """Built-in functions, exceptions, and other objects."""
    expose__file__attribute = False

    # names implemented in app-level Python (module.function)
    appleveldefs = {
        'quit'          : 'app_help.exit',
        'exit'          : 'app_help.exit',
        'copyright'     : 'app_help.copyright',
        'license'       : 'app_help.license',
        'help'          : 'app_help.help',
        'execfile'      : 'app_io.execfile',
        'raw_input'     : 'app_io.raw_input',
        'input'         : 'app_io.input',

        'sum'           : 'app_functional.sum',
        'apply'         : 'app_functional.apply',
        'map'           : 'app_functional.map',
        'filter'        : 'app_functional.filter',
        'zip'           : 'app_functional.zip',
        'reduce'        : 'app_functional.reduce',
        #'range'        : 'app_functional.range',
        # redirected to functional.py, applevel version
        # is still needed and should stay where it is.
        'min'           : 'app_functional.min',
        'max'           : 'app_functional.max',
        'enumerate'     : 'app_functional.enumerate',
        'sorted'        : 'app_functional.sorted',
        'reversed'      : 'app_functional.reversed',
        '_install_pickle_support_for_reversed_iterator':
        'app_functional._install_pickle_support_for_reversed_iterator',

        'globals'       : 'app_inspect.globals',
        'locals'        : 'app_inspect.locals',
        'vars'          : 'app_inspect.vars',
        'dir'           : 'app_inspect.dir',

        'property'      : 'app_descriptor.property',
        'staticmethod'  : 'app_descriptor.staticmethod',
        'classmethod'   : 'app_descriptor.classmethod',
        'super'         : 'app_descriptor.super',
        'complex'       : 'app_complex.complex',

        'buffer'        : 'app_buffer.buffer',
        'reload'        : 'app_misc.reload',

        'set'           : 'app_sets.set',
        'frozenset'     : 'app_sets.frozenset',

        # docstring-only stub; the real 'file' comes from interpleveldefs
        '__filestub'    : 'app_file_stub.file',
    }

    # names implemented at interp-level (evaluated expressions)
    interpleveldefs = {
        # constants
        'None'          : '(space.w_None)',
        'False'         : '(space.w_False)',
        'True'          : '(space.w_True)',
        '__debug__'     : '(space.w_True)',      # XXX
        'type'          : '(space.w_type)',
        'object'        : '(space.w_object)',
        'unicode'       : '(space.w_unicode)',
        'file'          : 'state.get(space).w_file',
        'open'          : 'state.get(space).w_file',

        # old-style classes dummy support
        '_classobj'     : 'space.w_classobj',
        '_instance'     : 'space.w_instance',
        # default __metaclass__
        '__metaclass__' : '(space.w_type)',

        # interp-level function definitions
        'abs'           : 'operation.abs',
        'chr'           : 'operation.chr',
        'unichr'        : 'operation.unichr',
        'len'           : 'operation.len',
        'ord'           : 'operation.ord',
        'pow'           : 'operation.pow',
        'repr'          : 'operation.repr',
        'hash'          : 'operation.hash',
        'oct'           : 'operation.oct',
        'hex'           : 'operation.hex',
        'round'         : 'operation.round',
        'cmp'           : 'operation.cmp',
        'coerce'        : 'operation.coerce',
        'divmod'        : 'operation.divmod',
        '_issubtype'    : 'operation._issubtype',
        'issubclass'    : 'operation.issubclass',
        'isinstance'    : 'operation.isinstance',
        'getattr'       : 'operation.getattr',
        'setattr'       : 'operation.setattr',
        'delattr'       : 'operation.delattr',
        'hasattr'       : 'operation.hasattr',
        'iter'          : 'operation.iter',
        'id'            : 'operation.id',
        '_seqiter'      : 'operation._seqiter',
        'intern'        : 'operation.intern',
        'callable'      : 'operation.callable',

        'compile'       : 'compiling.compile',
        'eval'          : 'compiling.eval',

        '__import__'    : 'importing.importhook',

        'range'         : 'functional.range_int',
        'xrange'        : 'functional.W_XRange',
        'all'           : 'functional.all',
        'any'           : 'functional.any',
    }

    def pick_builtin(self, w_globals):
        "Look up the builtin module to use from the __builtins__ global"
        # pick the __builtins__ roughly in the same way CPython does it
        # this is obscure and slow
        space = self.space
        try:
            w_builtin = space.getitem(w_globals, space.wrap('__builtins__'))
        except OperationError, e:
            if not e.match(space, space.w_KeyError):
                raise
        else:
            if w_builtin is space.builtin:   # common case
                return space.builtin
            if space.is_true(space.isinstance(w_builtin, space.w_dict)):
                # a plain dict of builtins: wrap it in an anonymous module
                return module.Module(space, None, w_builtin)
            builtin = space.interpclass_w(w_builtin)
            if isinstance(builtin, module.Module):
                return builtin
        # no builtin! make a default one.  Given them None, at least.
        builtin = module.Module(space, None)
        space.setitem(builtin.w_dict, space.wrap('None'), space.w_None)
        return builtin

    def setup_after_space_initialization(self):
        """NOT_RPYTHON"""
        space = self.space
        # populate the fast per-index builtin lookup table
        self.builtins_by_index = [None] * len(OPTIMIZED_BUILTINS)
        for i, name in enumerate(OPTIMIZED_BUILTINS):
            self.builtins_by_index[i] = space.getattr(self, space.wrap(name))
        # call installations for pickle support
        for name in self.loaders.keys():
            if name.startswith('_install_pickle_support_for_'):
                w_install = self.get(name)
                space.call_function(w_install)
                # xxx hide the installer
                space.delitem(self.w_dict, space.wrap(name))
                del self.loaders[name]
| Python |
"""
Plain Python definition of the 'complex' type.
"""
#XXX Hack: a float value that has overflowed to +infinity.  Writing
# float("1e10000000000000000000000000000000") directly would crash with
# marshal v.1.0, so the infinity is produced by multiplication instead.
OVERFLOWED_FLOAT = 1e200 * 1e200
class complex(object):
"""complex(real[, imag]) -> complex number
Create a complex number from a real part and an optional imaginary part.
This is equivalent to (real + imag*1j) where imag defaults to 0."""
PREC_REPR = 17
PREC_STR = 12
__slots__ = ['real', 'imag']
# XXX this class is not well tested
# provide __new__to prevent the default which has no parameters
def __new__(typ, real=0.0, imag=None):
if real.__class__ == complex and imag is None and typ is complex:
return real
ret = object.__new__(typ)
ret._init(real, imag)
return ret
def __getnewargs__(self):
return (complex(self.real, self.imag),)
def __reduce__(self):
return (self.__class__, (self.real, self.imag),
getattr(self, '__dict__', None))
def _init(self, real=0.0, imag=None):
if isinstance(real, (str, unicode)):
if imag is not None:
msg = "complex() can't take second arg if first is a string"
raise TypeError, msg
re, im = self._makeComplexFromString(real)
elif isinstance(real, complex):
re = real.real
im = real.imag
else:
if hasattr(real, "__complex__"):
co = real.__complex__()
if not isinstance(co, complex):
raise TypeError, "complex() argument must be a string or a number"
re = co.real
im = co.imag
else:
re = float(real)
im = 0.0
if isinstance(imag, (str, unicode)):
msg = "complex() second arg can't be a string"
raise TypeError, msg
elif isinstance(imag, complex):
re -= imag.imag
im += imag.real
elif imag is not None:
im += float(imag)
real_slot.__set__(self, re)
imag_slot.__set__(self, im)
def _makeComplexFromString(self, string_):
import re
string_ = string_.strip().lower()
pat = re.compile("([\+\-]?\d*\.?\d*)?([\+\-]?\d*\.?\d*j)?")
m = pat.match(string_)
x, y = m.groups()
if x is None:
x = ""
if y is None:
y = ""
if len(string_) - (len(x) + len(y)) != 0:
raise ValueError, "complex() arg is a malformed string"
if x == "":
if y == "":
raise ValueError, "complex() arg is an empty string"
if y[-1] != "j":
raise ValueError, "complex() arg is a malformed string"
assert y[-1] == "j"
y = y[:-1]
if len(y) <= 1:
y += "1"
f = float(y)
if abs(f) == OVERFLOWED_FLOAT:
raise ValueError, "float() out of range: %s" % y
return 0, f
if y == "":
f = float(x)
if abs(f) == OVERFLOWED_FLOAT:
raise ValueError, "float() out of range: %s" % x
return f, 0
if y[-1] != "j":
raise ValueError, "complex() arg is a malformed string"
assert y[-1] == "j"
y = y[:-1]
if y == "":
if x in "+-":
x += "1.0"
f = float(x)
if abs(f) == OVERFLOWED_FLOAT:
raise ValueError, "float() out of range: %s" % x
return 0, f
if y in "+-":
y += "1.0"
x = float(x)
y = float(y)
if abs(x) == OVERFLOWED_FLOAT:
raise ValueError, "float() out of range: %s" % x
if abs(y) == OVERFLOWED_FLOAT:
raise ValueError, "float() out of range: %s" % y
return x, y
def __description(self, precision):
if self.real != 0.:
return "(%.*g%+.*gj)"%(precision, self.real, precision, self.imag)
else:
return "%.*gj"%(precision, self.imag)
def __repr__(self):
return self.__description(self.PREC_REPR)
def __str__(self):
return self.__description(self.PREC_STR)
def __hash__(self):
hashreal = hash(self.real)
hashimag = hash(self.imag)
# Note: if the imaginary part is 0, hashimag is 0 now,
# so the following returns hashreal unchanged. This is
# important because numbers of different types that
# compare equal must have the same hash value, so that
# hash(x + 0*j) must equal hash(x).
return hashreal + 1000003 * hashimag
def __add__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
real = self.real + other.real
imag = self.imag + other.imag
return complex(real, imag)
__radd__ = __add__
def __sub__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
real = self.real - other.real
imag = self.imag - other.imag
return complex(real, imag)
def __rsub__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
return other.__sub__(self)
def __mul__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
real = self.real*other.real - self.imag*other.imag
imag = self.real*other.imag + self.imag*other.real
return complex(real, imag)
__rmul__ = __mul__
def __div__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
if abs(other.real) >= abs(other.imag):
# divide tops and bottom by other.real
try:
ratio = other.imag / other.real
except ZeroDivisionError:
raise ZeroDivisionError, "complex division"
denom = other.real + other.imag * ratio
real = (self.real + self.imag * ratio) / denom
imag = (self.imag - self.real * ratio) / denom
else:
# divide tops and bottom by other.imag
assert other.imag != 0.0
ratio = other.real / other.imag
denom = other.real * ratio + other.imag
real = (self.real * ratio + self.imag) / denom
imag = (self.imag * ratio - self.real) / denom
return complex(real, imag)
def __rdiv__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
return other.__div__(self)
def __floordiv__(self, other):
result = self.__divmod__(other)
if result is NotImplemented:
return result
div, mod = result
return div
def __rfloordiv__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
return other.__floordiv__(self)
__truediv__ = __div__
__rtruediv__ = __rdiv__
def __mod__(self, other):
result = self.__divmod__(other)
if result is NotImplemented:
return result
div, mod = result
return mod
def __rmod__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
return other.__mod__(self)
def __divmod__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
import warnings, math
warnings.warn("complex divmod(), // and % are deprecated", DeprecationWarning)
try:
div = self/other # The raw divisor value.
except ZeroDivisionError:
raise ZeroDivisionError, "complex remainder"
div = complex(math.floor(div.real), 0.0)
mod = self - div*other
return div, mod
def __rdivmod__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
return other.__divmod__(self)
def __pow__(self, other, mod=None):
if mod is not None:
raise ValueError("complex modulo")
result = self.__coerce__(other)
if result is NotImplemented:
return result
a, b = result
import math
if b.real == 0. and b.imag == 0.:
real = 1.
imag = 0.
elif a.real == 0. and a.imag == 0.:
if b.imag != 0. or b.real < 0.:
raise ZeroDivisionError, "0.0 to a negative or complex power"
real = 0.
imag = 0.
else:
vabs = math.hypot(a.real,a.imag)
len = math.pow(vabs,b.real)
at = math.atan2(a.imag, a.real)
phase = at*b.real
if b.imag != 0.0:
len /= math.exp(at*b.imag)
phase += b.imag*math.log(vabs)
real = len*math.cos(phase)
imag = len*math.sin(phase)
result = complex(real, imag)
return result
def __rpow__(self, other, mod=None):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
return other.__pow__(self, mod)
def __neg__(self):
return complex(-self.real, -self.imag)
def __pos__(self):
return complex(self.real, self.imag)
def __abs__(self):
import math
result = math.hypot(self.real, self.imag)
return float(result)
def __nonzero__(self):
return self.real != 0.0 or self.imag != 0.0
def __coerce__(self, other):
if isinstance(other, complex):
return self, other
if isinstance(other, (int, long, float)):
return self, complex(other)
return NotImplemented
def conjugate(self):
    """Return the complex conjugate (imaginary part negated)."""
    return complex(self.real, -self.imag)
def __eq__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
return self.real == other.real and self.imag == other.imag
def __ne__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
self, other = result
return self.real != other.real or self.imag != other.imag
# unsupported operations
def __lt__(self, other):
result = self.__coerce__(other)
if result is NotImplemented:
return result
raise TypeError, "cannot compare complex numbers using <, <=, >, >="
__le__ = __gt__ = __ge__ = __lt__
def __int__(self):
raise TypeError, "can't convert complex to int; use e.g. int(abs(z))"
def __long__(self):
raise TypeError, "can't convert complex to long; use e.g. long(abs(z))"
def __float__(self):
raise TypeError, "can't convert complex to float; use e.g. float(abs(z))"
# Make the 'real' and 'imag' slots of the applevel complex class read-only:
# keep a reference to each original slot descriptor, then replace the class
# attribute with a property that exposes only the getter.
real_slot = complex.real
imag_slot = complex.imag
# make the slots read-only
# XXX added doc string as helper for geninterplevel (any other idea?)
complex.real = property(real_slot.__get__, None, None, 'complex.real.__get__')
complex.imag = property(imag_slot.__get__, None, None, 'complex.imag.__get__')
| Python |
# Might probably be deprecated in Python at some point.
import sys
class buffer(object):
"""buffer(object [, offset[, size]])
Create a new buffer object which references the given object.
The buffer will reference a slice of the target object from the
start of the object (or at the specified offset). The slice will
extend to the end of the target object (or with the specified size).
"""
def __init__(self, object, offset=0, size=None):
import struct, array
if isinstance(object, str):
pass
elif isinstance(object, unicode):
str_object = ""
if sys.maxunicode == 65535:
pack_code = "H"
else:
pack_code = "I"
for char in object:
str_object += struct.pack(pack_code, ord(char))
object = str_object
elif isinstance(object, buffer):
object = object.buf
elif isinstance(object, array.array):
object = object.tostring()
else:
raise TypeError, "buffer object expected"
if offset < 0:
raise ValueError, "offset must be zero or positive"
# XXX according to CPython 2.4.1. Broken?
if size is not None and size < -1:
raise ValueError, "size must be zero or positive"
if size is None or size == -1:
self.buf = object[offset:]
else:
self.buf = object[offset:offset+size]
def __str__(self):
return self.buf
def __add__(self, other):
return self.buf + buffer(other).buf
def __mul__(self, count):
return self.buf * count
__rmul__ = __mul__
def __cmp__(self, other):
return cmp(self.buf, buffer(other).buf)
def __getitem__(self, index_or_slice):
return self.buf[index_or_slice]
def __hash__(self):
return hash(self.buf)
def __len__(self):
return len(self.buf)
def __repr__(self):
# We support only read-only buffers anyway
return "<read-only buffer for 0x000000>"
| Python |
"""
Plain Python definition of some miscellaneous builtin functions.
"""
def find_module(fullname, path):
    """Find a PEP 302 loader for *fullname*.

    First asks the sys.meta_path hooks, then walks *path* (defaulting to
    sys.path) consulting sys.path_hooks and caching importers in
    sys.path_importer_cache.  Returns the loader, or None to signal that
    the normal import mechanism should take over.
    """
    import sys
    # 1. meta_path hooks get the first chance (PEP 302).
    for hook in sys.meta_path:
        loader = hook.find_module(fullname, path)
        if loader:
            return loader
    # XXX Check for frozen modules ?
    if path is None:
        # XXX Check frozen
        path = sys.path
    path_hooks = sys.path_hooks
    importer_cache = sys.path_importer_cache
    # 2. per-path-entry importers, consulting/filling the importer cache.
    for p in path:
        # Reset per entry: the original never reset 'importer' across
        # iterations, so on a cache miss where no hook matched it kept
        # (and even re-cached) the importer of a *previous* path entry.
        importer = importer_cache.get(p, None)
        if not importer:
            importer_cache[p] = None
            importer = None
            for hook in path_hooks:
                try:
                    importer = hook(p)
                except ImportError:
                    pass
                else:
                    break
            if importer:
                importer_cache[p] = importer
        if importer:
            loader = importer.find_module(fullname)
            if loader:
                return loader
    # no hooks match - fall back to the normal import mechanism
    return None
def reload(module):
    """Reload the module.
    The module must have been successfully imported before."""
    import imp, sys, errno
    # type(imp)/type(errno) are both the module type; comparing against two
    # sample modules avoids needing types.ModuleType at applevel.
    if type(module) not in (type(imp), type(errno)):
        raise TypeError("reload() argument must be module")
    name = module.__name__
    if module is not sys.modules[name]:
        raise ImportError("reload(): module %.200s not in sys.modules" % name)
    # For a submodule 'a.b.c', search is done on the parent package's
    # __path__, mirroring CPython's reload().
    namepath = name.split('.')
    subname = namepath[-1]
    parent_name = '.'.join(namepath[:-1])
    parent = None
    path = None
    if parent_name:
        try:
            parent = sys.modules[parent_name]
        except KeyError:
            raise ImportError("reload(): parent %.200s not in sys.modules" %
                              parent_name)
        path = parent.__path__
    # Give PEP 302 hooks (see find_module above) the first chance.
    loader = find_module(name, path)
    if loader:
        mod = loader.load_module(name)
        if mod:
            return mod
    f, filename, description = imp.find_module(subname, path)
    try:
        new_module = imp.load_module(name, f, filename, description)
    finally:
        # On failure, restore the original module object in sys.modules
        # (imp.load_module may have replaced it); always close the file.
        sys.modules[name] = module
        if f is not None:
            f.close()
    return new_module
| Python |
"""
Plain Python definition of the builtin descriptors.
"""
# Descriptor code, shamelessly stolen from Raymond Hettinger:
# http://users.rcn.com/python/download/Descriptor.htm
# XXX there is an interp-level pypy.interpreter.function.StaticMethod
# XXX because __new__ needs to be a StaticMethod early.
class staticmethod(object):
    """staticmethod(function) -> static method

    Convert a function to be a static method.
    A static method does not receive an implicit first argument.
    To declare a static method, use this idiom:
        class C:
            def f(arg1, arg2, ...): ...
            f = staticmethod(f)
    It can be called either on the class (e.g. C.f()) or on an instance
    (e.g. C().f()).  The instance is ignored except for its class."""

    __slots__ = ['_f']

    def __init__(self, f):
        # No callability check: CPython's staticmethod accepts anything.
        self._f = f

    def __get__(self, obj, objtype=None):
        # Binding is a no-op: hand back the wrapped object unchanged,
        # whatever it was looked up on.
        return self._f
class classmethod(object):
    """classmethod(function) -> class method

    Convert a function to be a class method.
    A class method receives the class as implicit first argument,
    just like an instance method receives the instance.
    To declare a class method, use this idiom:
        class C:
            def f(cls, arg1, arg2, ...): ...
            f = classmethod(f)
    It can be called either on the class (e.g. C.f()) or on an instance
    (e.g. C().f()).  The instance is ignored except for its class.
    If a class method is called for a derived class, the derived class
    object is passed as the implied first argument."""

    __slots__ = ['_f']

    def __init__(self, f):
        if not callable(f):
            raise TypeError("'%s' object is not callable" % type(f).__name__)
        self._f = f

    def __get__(self, obj, klass=None):
        # When looked up on an instance without an owner class, bind to
        # the instance's class.
        if klass is None:
            klass = type(obj)
        return MethodType(self._f, klass)
# Obtain the bound-method type by binding a throwaway function to an
# arbitrary object; this avoids importing the 'types' module at applevel.
def dummy(): pass
MethodType = type(dummy.__get__(42))
del dummy
# It's difficult to have a class that has both a docstring and a slot called
# '__doc__', but not impossible...
class docstring(object):
    # Descriptor that lets a class carry both a class-level docstring and a
    # per-instance '__doc__' stored in a (renamed, then removed) slot.

    def __init__(self, classdocstring):
        self.classdocstring = classdocstring
        # the captured slot descriptor; installed later by capture()
        self.slot = None

    def capture(cls, slotname):
        # Steal the slot descriptor 'slotname' from cls and route all
        # instance __doc__ access through it; must run after cls exists.
        self = cls.__dict__['__doc__']
        slot = cls.__dict__[slotname]
        if not isinstance(self, docstring):
            raise TypeError("the class __doc__ must be a docstring instance")
        self.slot = slot
        delattr(cls, slotname)
    capture = staticmethod(capture)

    def __get__(self, p, cls=None):
        if p is None:
            # attribute looked up on the class itself
            return self.classdocstring
        if self.slot is None:
            raise AttributeError("'%s' instance has no __doc__" % (
                p.__class__.__name__,))
        # attribute looked up on an instance: delegate to the slot
        return self.slot.__get__(p)

    def __set__(self, p, value):
        if hasattr(self.slot, '__set__'):
            return self.slot.__set__(p, value)
        raise AttributeError("cannot write __doc__")

    def __delete__(self, p):
        if hasattr(self.slot, '__delete__'):
            return self.slot.__delete__(p)
        raise AttributeError("cannot write __doc__")
class property(object):
    # The class "docstring" is held in a docstring-descriptor so that
    # instances can also carry their own __doc__ (routed into the
    # 'slot__doc__' slot, captured by docstring.capture() below the class).
    __doc__ = docstring(
        '''property(fget=None, fset=None, fdel=None, doc=None) -> property attribute

fget is a function to be used for getting an attribute value, and likewise
fset is a function for setting, and fdel a function for deleting, an
attribute. Typical use is to define a managed attribute x:
class C(object):
    def getx(self): return self.__x
    def setx(self, value): self.__x = value
    def delx(self): del self.__x
    x = property(getx, setx, delx, "I am the 'x' property.")''')

    __slots__ = ['fget', 'fset', 'fdel', 'slot__doc__']

    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self.fget = fget
        self.fset = fset
        self.fdel = fdel
        # goes through the docstring descriptor above, which stores the
        # value in the captured 'slot__doc__' slot
        self.__doc__ = doc

    def __get__(self, obj, objtype=None):
        # class-level access returns the property object itself
        if obj is None:
            return self
        if self.fget is None:
            raise AttributeError, "unreadable attribute"
        return self.fget(obj)

    def __set__(self, obj, value):
        if self.fset is None:
            raise AttributeError, "can't set attribute"
        self.fset(obj, value)

    def __delete__(self, obj):
        if self.fdel is None:
            raise AttributeError, "can't delete attribute"
        self.fdel(obj)
# Move the 'slot__doc__' slot descriptor into the docstring instance and
# delete it from the class, so instance __doc__ access works transparently.
docstring.capture(property, 'slot__doc__')
# super is a modified version from Guido's tutorial
# http://www.python.org/2.2.3/descrintro.html
# it exposes the same special attributes as CPython's.
class super(object):
    """super(type) -> unbound super object
    super(type, obj) -> bound super object; requires isinstance(obj, type)
    super(type, type2) -> bound super object; requires issubclass(type2, type)
    Typical use to call a cooperative superclass method:
    class C(B):
        def meth(self, arg):
            super(C, self).meth(arg)"""

    __slots__ = ['__thisclass__', '__self__', '__self_class__']

    def __init__(self, typ, obj=None):
        # _issubtype is an interp-level helper provided elsewhere in this
        # module (roughly issubclass without invoking applevel hooks).
        if obj is None:
            objcls = None        # unbound super object
        elif _issubtype(type(obj), type) and _issubtype(obj, typ):
            objcls = obj         # special case for class methods
        elif _issubtype(type(obj), typ):
            objcls = type(obj)   # normal case
        else:
            objcls = getattr(obj, '__class__', type(obj))
            if not _issubtype(objcls, typ):
                raise TypeError, ("super(type, obj): "
                                  "obj must be an instance or subtype of type")
        self.__thisclass__ = typ
        self.__self__ = obj
        self.__self_class__ = objcls

    def __get__(self, obj, type=None):
        # NOTE: attribute reads below go through the slot descriptors
        # explicitly (super.__self__.__get__(self)) because this class
        # overrides __getattribute__ -- plain self.__self__ would recurse
        # into the mro-search logic.
        if obj is None or super.__self__.__get__(self) is not None:
            return self
        else:
            # unbound super: bind it now to 'obj'
            return self.__class__(super.__thisclass__.__get__(self), obj)

    def __getattribute__(self, attr):
        _self_class_ = super.__self_class__.__get__(self)
        if (attr != '__class__' # we want super().__class__ to be the real class
            and _self_class_ is not None): # no magic for unbound type objects
            _thisclass_ = super.__thisclass__.__get__(self)
            mro = iter(_self_class_.__mro__)
            # skip everything up to and including __thisclass__
            for cls in mro:
                if cls is _thisclass_:
                    break
            # Note: mro is an iterator, so the second loop
            # picks up where the first one left off!
            for cls in mro:
                try:
                    x = cls.__dict__[attr]
                except KeyError:
                    continue
                if hasattr(x, '__get__'):
                    _self_ = super.__self__.__get__(self)
                    if _self_ is _self_class_:
                        _self_ = None # performs an unbound __get__
                    x = x.__get__(_self_, _self_class_)
                return x
        return object.__getattribute__(self, attr) # fall-back
| Python |
# NOT_RPYTHON
"""
This emulates CPython's set and frozenset types based on the current sets
module. Diff against the sets module to find specific changes. Here's some
pointers:
- __slots__ as well as __setstate__/__getstate__ were removed from the set
classes to support pickling in conjunction with __reduce__.
- non-operator methods like (update, issubset, issuperset - see the set type
documentation for a full list) now accept iterables in addition to just other
sets.
- copy semantics for frozensets according to the frozenset type from CPython
- the hash algorithm for CPython's frozenset was not implemented because it
relies on integer overflows.
This whole module should probably be converted to RPython at some point in the
future.
The following is the original docstring from sets.py:
Classes to represent arbitrary sets (including sets of sets).
This module implements sets using dictionaries whose values are
ignored. The usual operations (union, intersection, deletion, etc.)
are provided as both methods and operators.
Important: sets are not sequences! While they support 'x in s',
'len(s)', and 'for x in s', none of those operations are unique for
sequences; for example, mappings support all three as well. The
characteristic operation for sequences is subscripting with small
integers: s[i], for i in range(len(s)). Sets don't support
subscripting at all. Also, sequences allow multiple occurrences and
their elements have a definite order; sets on the other hand don't
record multiple occurrences and don't remember the order of element
insertion (which is why they don't support s[i]).
The following classes are provided:
BaseSet -- All the operations common to both mutable and immutable
sets. This is an abstract class, not meant to be directly
instantiated.
set -- Mutable sets, subclass of BaseSet; not hashable.
frozenset -- Immutable sets, subclass of BaseSet; hashable.
An iterable argument is mandatory to create a frozenset.
_TemporarilyImmutableSet -- A wrapper around a Set, hashable,
giving the same hash value as the immutable set equivalent
would have. Do not use this class directly.
Only hashable objects can be added to a set. In particular, you cannot
really add a set as an element to another set; if you try, what is
actually added is a frozenset built from it (it compares equal to
the one you tried adding).
When you ask if `x in y' where x is a Set and y is a Set or
frozenset, x is wrapped into a _TemporarilyImmutableSet z, and
what's tested is actually `z in y'.
"""
# Code history:
#
# - Greg V. Wilson wrote the first version, using a different approach
# to the mutable/immutable problem, and inheriting from dict.
#
# - Alex Martelli modified Greg's version to implement the current
# Set/frozenset approach, and make the data an attribute.
#
# - Guido van Rossum rewrote much of the code, made some API changes,
# and cleaned up the docstrings.
#
# - Raymond Hettinger added a number of speedups and other
# improvements.
from __future__ import generators
try:
    from itertools import ifilter, ifilterfalse
except ImportError:
    # Code to make the module run under Py2.2 (no itertools there):
    # pure-Python generator equivalents of the two itertools functions.
    def ifilter(predicate, iterable):
        # yield the elements for which predicate(x) is true;
        # a None predicate means "keep truthy elements"
        if predicate is None:
            def predicate(x):
                return x
        for x in iterable:
            if predicate(x):
                yield x
    def ifilterfalse(predicate, iterable):
        # yield the elements for which predicate(x) is false
        if predicate is None:
            def predicate(x):
                return x
        for x in iterable:
            if not predicate(x):
                yield x
try:
    True, False
except NameError:
    # Pre-2.3 compatibility: define the boolean names as plain ints.
    True, False = (0==0, 0!=0)

# the public API of this module
__all__ = ['set', 'frozenset']
class BaseSet(object):
    """Common base class for mutable and immutable sets.

    Elements are stored as the keys of the '_data' dict (values are all
    True and ignored); subclasses must create '_data' themselves.
    """

    # Constructor
    def __init__(self):
        """This is an abstract class."""
        # Don't call this from a concrete subclass!
        if self.__class__ is BaseSet:
            raise TypeError, ("BaseSet is an abstract class. "
                              "Use set or frozenset.")

    # Standard protocols: __len__, __repr__, __str__, __iter__, __reduce__

    def __len__(self):
        """Return the number of elements of a set."""
        return len(self._data)

    def __repr__(self):
        """Return string representation of a set.

        This looks like 'set([<list of elements>])'.
        """
        return self._repr()

    # __str__ is the same as __repr__
    __str__ = __repr__

    def _repr(self, sorted=False):
        # Helper for __repr__; sorted=True gives a deterministic ordering.
        elements = self._data.keys()
        if sorted:
            elements.sort()
        return '%s(%r)' % (self.__class__.__name__, elements)

    def __iter__(self):
        """Return an iterator over the elements or a set.

        This is the keys iterator for the underlying dict.
        """
        return self._data.iterkeys()

    def __reduce__(self):
        # Pickle/copy protocol: (class, (element-list,), instance-dict).
        return (self.__class__, (self._data.keys(),), self.__dict__)

    # Three-way comparison is not supported.  However, because __eq__ is
    # tried before __cmp__, if set x == set y, x.__eq__(y) returns True and
    # then cmp(x, y) returns 0 (Python doesn't actually call __cmp__ in this
    # case).

    def __cmp__(self, other):
        raise TypeError, "can't compare sets using cmp()"

    # Equality comparisons using the underlying dicts.  Mixed-type comparisons
    # are allowed here, where set == z for non-set z always returns False,
    # and set != z always True.  This allows expressions like "x in y" to
    # give the expected result when y is a sequence of mixed types, not
    # raising a pointless TypeError just because y contains a set, or x is
    # a set and y contains a non-set ("in" invokes only __eq__).

    # Subtle: it would be nicer if __eq__ and __ne__ could return
    # NotImplemented instead of True or False.  Then the other comparand
    # would get a chance to determine the result, and if the other comparand
    # also returned NotImplemented then it would fall back to object address
    # comparison (which would always return False for __eq__ and always
    # True for __ne__).  However, that doesn't work, because this type
    # *also* implements __cmp__: if, e.g., __eq__ returns NotImplemented,
    # Python tries __cmp__ next, and the __cmp__ here then raises TypeError.

    def __eq__(self, other):
        if isinstance(other, BaseSet):
            return self._data == other._data
        else:
            return False

    def __ne__(self, other):
        if isinstance(other, BaseSet):
            return self._data != other._data
        else:
            return True

    # Copying operations

    def copy(self):
        """Return a shallow copy of a set."""
        result = self.__class__()
        result._data.update(self._data)
        return result

    __copy__ = copy # For the copy module

    def __deepcopy__(self, memo):
        """Return a deep copy of a set; used by copy module."""
        # This pre-creates the result and inserts it in the memo
        # early, in case the deep copy recurses into another reference
        # to this same set.  A set can't be an element of itself, but
        # it can certainly contain an object that has a reference to
        # itself.
        from copy import deepcopy
        result = self.__class__()
        memo[id(self)] = result
        data = result._data
        value = True
        for elt in self:
            data[deepcopy(elt, memo)] = value
        return result

    # Standard set operations: union, intersection, both differences.
    # Each has an operator version (e.g. __or__, invoked with |) and a
    # method version (e.g. union).
    # Subtle:  Each pair requires distinct code so that the outcome is
    # correct when the type of other isn't suitable.  For example, if
    # we did "union = __or__" instead, then set().union(3) would return
    # NotImplemented instead of raising TypeError (albeit that *why* it
    # raises TypeError as-is is also a bit subtle).

    def __or__(self, other):
        """Return the union of two sets as a new set.

        (I.e. all elements that are in either set.)
        """
        if not isinstance(other, BaseSet):
            return NotImplemented
        return self.union(other)

    def union(self, other):
        """Return the union of two sets as a new set.

        (I.e. all elements that are in either set.)
        """
        result = self.__class__(self)
        result._update(other)
        return result

    def __and__(self, other):
        """Return the intersection of two sets as a new set.

        (I.e. all elements that are in both sets.)
        """
        if not isinstance(other, BaseSet):
            return NotImplemented
        return self.intersection(other)

    def intersection(self, other):
        """Return the intersection of two sets as a new set.

        (I.e. all elements that are in both sets.)
        """
        if not isinstance(other, BaseSet):
            other = set(other)
        # iterate over the smaller set, probe the bigger one
        if len(self) <= len(other):
            little, big = self, other
        else:
            little, big = other, self
        common = ifilter(big._data.has_key, little)
        return self.__class__(common)

    def __xor__(self, other):
        """Return the symmetric difference of two sets as a new set.

        (I.e. all elements that are in exactly one of the sets.)
        """
        if not isinstance(other, BaseSet):
            return NotImplemented
        return self.symmetric_difference(other)

    def symmetric_difference(self, other):
        """Return the symmetric difference of two sets as a new set.

        (I.e. all elements that are in exactly one of the sets.)
        """
        result = self.__class__()
        data = result._data
        value = True
        selfdata = self._data
        try:
            otherdata = other._data
        except AttributeError:
            # 'other' is a plain iterable, not a set
            otherdata = set(other)._data
        for elt in ifilterfalse(otherdata.has_key, selfdata):
            data[elt] = value
        for elt in ifilterfalse(selfdata.has_key, otherdata):
            data[elt] = value
        return result

    def __sub__(self, other):
        """Return the difference of two sets as a new set.

        (I.e. all elements that are in this set and not in the other.)
        """
        if not isinstance(other, BaseSet):
            return NotImplemented
        return self.difference(other)

    def difference(self, other):
        """Return the difference of two sets as a new set.

        (I.e. all elements that are in this set and not in the other.)
        """
        result = self.__class__()
        data = result._data
        try:
            otherdata = other._data
        except AttributeError:
            otherdata = set(other)._data
        value = True
        for elt in ifilterfalse(otherdata.has_key, self):
            data[elt] = value
        return result

    # Membership test

    def __contains__(self, element):
        """Report whether an element is a member of a set.

        (Called in response to the expression `element in self'.)
        """
        try:
            return element in self._data
        except TypeError:
            # unhashable element: a mutable set can stand in for itself
            # through its temporarily-immutable wrapper
            transform = getattr(element, "__as_temporarily_immutable__", None)
            if transform is None:
                raise # re-raise the TypeError exception we caught
            return transform() in self._data

    # Subset and superset test

    def issubset(self, other):
        """Report whether another set contains this set."""
        if not isinstance(other, BaseSet):
            # other is an iterable
            other = self.__class__(other)
        if len(self) > len(other): # Fast check for obvious cases
            return False
        for elt in ifilterfalse(other._data.has_key, self):
            return False
        return True

    def issuperset(self, other):
        """Report whether this set contains another set."""
        if not isinstance(other, BaseSet):
            # other is an iterable
            other = self.__class__(other)
        if len(self) < len(other): # Fast check for obvious cases
            return False
        for elt in ifilterfalse(self._data.has_key, other):
            return False
        return True

    # Inequality comparisons using the is-subset relation.

    def __le__(self, other):
        self._binary_sanity_check(other)
        return self.issubset(other)

    def __ge__(self, other):
        self._binary_sanity_check(other)
        return self.issuperset(other)

    def __lt__(self, other):
        self._binary_sanity_check(other)
        return len(self) < len(other) and self.issubset(other)

    def __gt__(self, other):
        self._binary_sanity_check(other)
        return len(self) > len(other) and self.issuperset(other)

    # Assorted helpers

    def _binary_sanity_check(self, other):
        # Check that the other argument to a binary operation is also
        # a set, raising a TypeError otherwise.
        if not isinstance(other, BaseSet):
            raise TypeError, "Binary operation only permitted between sets"

    def _compute_hash(self):
        # Calculate hash code for a set by xor'ing the hash codes of
        # the elements.  This ensures that the hash code does not depend
        # on the order in which elements are added to the set.  This is
        # not called __hash__ because a BaseSet should not be hashable;
        # only a frozenset is hashable.
        result = 0
        for elt in self:
            result ^= hash(elt)
        return result

    def _update(self, iterable):
        # The main loop for update() and the subclass __init__() methods.
        data = self._data

        # Use the fast update() method when a dictionary is available.
        if isinstance(iterable, BaseSet):
            data.update(iterable._data)
            return

        value = True

        if type(iterable) in (list, tuple, xrange):
            # Optimized: we know that __iter__() and next() can't
            # raise TypeError, so we can move 'try:' out of the loop.
            it = iter(iterable)
            while True:
                try:
                    for element in it:
                        data[element] = value
                    return
                except TypeError:
                    transform = getattr(element, "__as_immutable__", None)
                    if transform is None:
                        raise # re-raise the TypeError exception we caught
                    data[transform()] = value
        else:
            # Safe: only catch TypeError where intended
            for element in iterable:
                try:
                    data[element] = value
                except TypeError:
                    transform = getattr(element, "__as_immutable__", None)
                    if transform is None:
                        raise # re-raise the TypeError exception we caught
                    data[transform()] = value
class frozenset(BaseSet):
    """Immutable set class."""

    # BaseSet + hashing

    def __new__(cls, iterable=None):
        # frozensets are immutable, so building one from an existing plain
        # frozenset can safely return the very same object
        if type(iterable) is frozenset:
            return iterable
        instance = BaseSet.__new__(cls)
        # initialization happens here (not in __init__) so that the
        # identity shortcut above never re-initializes a shared object
        frozenset._init(instance, iterable)
        return instance

    def __init__(self, iterable=None):
        # all the work was done in __new__/_init; see comment there
        pass

    def _init(self, iterable=None):
        """Construct an immutable set from an optional iterable."""
        self._hashcode = None  # computed lazily by __hash__
        self._data = {}
        if iterable is not None:
            self._update(iterable)

    def __hash__(self):
        # cached after the first call, since the contents never change
        if self._hashcode is None:
            self._hashcode = self._compute_hash()
        return self._hashcode

    def union(self, other):
        # work on a genuine mutable-style copy: self.copy() would return
        # self itself for a plain frozenset (see copy() below)
        return BaseSet.union(BaseSet.copy(self), other)

    def copy(self):
        if type(self) is frozenset:
            # immutable, so sharing is fine
            return self
        else:
            # subclasses get a real new instance
            return self.__class__(self)
    __copy__ = copy
class set(BaseSet):
    """ Mutable set class."""

    # BaseSet + operations requiring mutability; no hashing

    def __init__(self, iterable=None):
        """Construct a set from an optional iterable."""
        self._data = {}
        if iterable is not None:
            self._update(iterable)

    def __hash__(self):
        """A set cannot be hashed."""
        # We inherit object.__hash__, so we must deny this explicitly
        raise TypeError, "Can't hash a set, only an frozenset."

    # In-place union, intersection, differences.
    # Subtle:  The xyz_update() functions deliberately return None,
    # as do all mutating operations on built-in container types.
    # The __xyz__ spellings have to return self, though.

    def __ior__(self, other):
        """Update a set with the union of itself and another."""
        self._binary_sanity_check(other)
        self._data.update(other._data)
        return self

    def union_update(self, other):
        """Update a set with the union of itself and another."""
        # unlike |=, this accepts any iterable
        self._update(other)

    def __iand__(self, other):
        """Update a set with the intersection of itself and another."""
        self._binary_sanity_check(other)
        self._data = (self & other)._data
        return self

    def intersection_update(self, other):
        """Update a set with the intersection of itself and another."""
        if isinstance(other, BaseSet):
            self &= other
        else:
            self._data = (self.intersection(other))._data

    def __ixor__(self, other):
        """Update a set with the symmetric difference of itself and another."""
        self._binary_sanity_check(other)
        self.symmetric_difference_update(other)
        return self

    def symmetric_difference_update(self, other):
        """Update a set with the symmetric difference of itself and another."""
        data = self._data
        value = True
        if not isinstance(other, BaseSet):
            other = set(other)
        # toggle membership of each element of 'other'
        for elt in other:
            if elt in data:
                del data[elt]
            else:
                data[elt] = value

    def __isub__(self, other):
        """Remove all elements of another set from this set."""
        self._binary_sanity_check(other)
        self.difference_update(other)
        return self

    def difference_update(self, other):
        """Remove all elements of another set from this set."""
        data = self._data
        if not isinstance(other, BaseSet):
            other = set(other)
        for elt in ifilter(data.has_key, other):
            del data[elt]

    # Python dict-like mass mutations: update, clear

    def update(self, iterable):
        """Add all values from an iterable (such as a list or file)."""
        self._update(iterable)

    def clear(self):
        """Remove all elements from this set."""
        self._data.clear()

    # Single-element mutations: add, remove, discard

    def add(self, element):
        """Add an element to a set.

        This has no effect if the element is already present.
        """
        try:
            self._data[element] = True
        except TypeError:
            # unhashable element: store its immutable counterpart instead
            transform = getattr(element, "__as_immutable__", None)
            if transform is None:
                raise # re-raise the TypeError exception we caught
            self._data[transform()] = True

    def remove(self, element):
        """Remove an element from a set; it must be a member.

        If the element is not a member, raise a KeyError.
        """
        try:
            del self._data[element]
        except TypeError:
            # unhashable element: look it up via the temporarily-immutable
            # wrapper (hashes like the equivalent frozenset)
            transform = getattr(element, "__as_temporarily_immutable__", None)
            if transform is None:
                raise # re-raise the TypeError exception we caught
            del self._data[transform()]

    def discard(self, element):
        """Remove an element from a set if it is a member.

        If the element is not a member, do nothing.
        """
        try:
            self.remove(element)
        except KeyError:
            pass

    def pop(self):
        """Remove and return an arbitrary set element."""
        return self._data.popitem()[0]

    def __as_immutable__(self):
        # Return a copy of self as an immutable set
        return frozenset(self)

    def __as_temporarily_immutable__(self):
        # Return self wrapped in a temporarily immutable set
        return _TemporarilyImmutableSet(self)
class _TemporarilyImmutableSet(BaseSet):
    # Wrap a mutable set as if it was temporarily immutable.
    # This only supplies hashing and equality comparisons.

    def __init__(self, set):
        self._set = set
        self._data = set._data # Needed by frozenset.__eq__()

    def __hash__(self):
        # hash of the wrapped set's *current* contents (not cached: the
        # wrapped set may still change afterwards)
        return self._set._compute_hash()
| Python |
#!/usr/bin/env python
"""
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
Note that this test currently runs at cpython-level and not
at any application level ....
"""
#taken from CPython 2.3 (?)
"""
Test module for class complex in complexobject.py
As it seems there are some numerical differences in
the __div__ and __divmod__ methods which have to be
sorted out.
"""
import autopath
import math
import cmath
import sys
import types
from pypy.module.__builtin__.app_complex import complex as pycomplex
# Probe for the 'unicode' builtin; either way the unicode tests stay
# disabled, since applevel PyPy had no real unicode support at this point.
try:
    unicode
    have_unicode = 0 # pypy doesn't have unicode, we know it ...
except NameError:
    have_unicode = 0
def equal(a, b):
    """Compare two complex or normal numbers. 0 if different, 1 if roughly equal.

    'Roughly equal' means each (real/imag) component differs by at most
    1e-10.  Returns 0 as well when the operands are of incomparable kinds
    (the original fell off the end and returned None in that case,
    violating its own stated contract; None was merely falsy by accident).
    """
    da, db = dir(a), dir(b)
    if 'real' in da and 'real' in db and 'imag' in da and 'imag' in db:
        # complex-like values: compare componentwise
        if math.fabs(a.real-b.real) > 1e-10:
            return 0
        if math.fabs(a.imag-b.imag) > 1e-10:
            return 0
        return 1
    # Plain numbers: compare directly.  The type list is built lazily so
    # the complex/complex path above never touches the 'types' names.
    numTypes = [types.IntType, types.LongType, types.FloatType]
    if type(a) in numTypes and type(b) in numTypes:
        if math.fabs(a-b) > 1e-10:
            return 0
        return 1
    return 0
def enumerate():
    """Build all operand pairs over a small value grid.

    Returns a list of tuples (z0c, z1c, z0p, z1p) pairing CPython's
    builtin complex with the applevel pycomplex for identical values.
    (Shadows the 'enumerate' builtin; name kept for the tests below.)
    """
    grid = [-3, -0.5, 0, 1]
    combos = []
    for re0 in grid:
        for im0 in grid:
            for re1 in grid:
                for im1 in grid:
                    combos.append((complex(re0, im0),
                                   complex(re1, im1),
                                   pycomplex(re0, im0),
                                   pycomplex(re1, im1)))
    return combos
class TestComplex:
    """Compare the applevel complex implementation (pycomplex) against
    CPython's builtin complex type."""

    def fail(self, msg):
        # This class inherits from nothing, yet the tests below call
        # self.fail() in the style of unittest.TestCase; in the original
        # the method did not exist, so a failing branch raised
        # AttributeError instead of reporting the real failure.
        raise AssertionError(msg)

    def assertAEqual(self, a, b):
        # 'approximately equal' assertion; see equal() above
        assert equal(a, b)

    def test_wrongInit1(self):
        "Compare wrong init. with CPython."
        try:
            complex("1", "1")
        except TypeError:
            pass
        else:
            self.fail('complex("1", "1")')

        try:
            pycomplex("1", "1")
        except TypeError:
            pass
        else:
            self.fail('complex("1", "1")')

    def test_wrongInit2(self):
        "Compare wrong init. with CPython."
        try:
            complex(1, "1")
        except TypeError:
            pass
        else:
            self.fail('complex(1, "1")')

        try:
            pycomplex(1, "1")
        except TypeError:
            pass
        else:
            self.fail('complex(1, "1")')

    def test_wrongInitFromString(self):
        "Compare string init. with CPython."
        if complex(" 3.14+J ") != 3.14+1j:
            self.fail('complex(" 3.14+J )"')
        if not equal(pycomplex(" 3.14+J "), pycomplex(3.14, 1)):
            self.fail('complex(" 3.14+J )"')

    def test_wrongInitFromUnicodeString(self):
        "Compare unicode string init. with CPython."
        if have_unicode:
            if complex(unicode(" 3.14+J ")) != 3.14+1j:
                self.fail('complex(u" 3.14+J )"')
            if not equal(pycomplex(unicode(" 3.14+J ")), pycomplex(3.14, 1)):
                self.fail('complex(u" 3.14+J )"')

    def test_class(self):
        "Compare class with CPython."
        class Z:
            def __complex__(self):
                return 3.14j
        z = Z()
        if complex(z) != 3.14j:
            self.fail('complex(classinstance)')
        if not equal(complex(z), pycomplex(0, 3.14)):
            self.fail('complex(classinstance)')

    def test_add_sub_mul_div(self):
        "Compare add/sub/mul/div with CPython."
        for (z0c, z1c, z0p, z1p) in enumerate():
            mc = z0c*z1c
            mp = z0p*z1p
            self.assertAEqual(mc, mp)

            sc = z0c+z1c
            sp = z0p+z1p
            self.assertAEqual(sc, sp)

            dc = z0c-z1c
            dp = z0p-z1p
            self.assertAEqual(dc, dp)

            if not equal(z1c, complex(0,0)):
                qc = z0c/z1c
                qp = z0p/z1p
                self.assertAEqual(qc, qp)

    def test_special(self):
        "Compare special methods with CPython."
        for (x, y) in [(0,0), (0,1), (1,3.)]:
            zc = complex(x, y)
            zp = pycomplex(x, y)

            self.assertAEqual(zc, zp)
            self.assertAEqual(-zc, -zp)
            self.assertAEqual(+zc, +zp)
            self.assertAEqual(abs(zc), abs(zp))
            self.assertAEqual(zc, zp)
            #self.assertEqual(zc.conjugate(), zp.conjugate()) XXX
            assert str(zc) == str(zp)
            assert hash(zc) == hash(zp)

    # this fails on python2.3 and is deprecated anyway
    def _test_divmod(self):
        "Compare divmod with CPython."
        for (z0c, z1c, z0p, z1p) in enumerate():
            mc = z0c*z1c
            mp = z0p*z1p
            self.assertAEqual(mc, mp)

            if not equal(z1c, complex(0,0)):
                ddc, mmc = divmod(z0c, z1c)
                self.assertAEqual(ddc*z1c + mmc, z0c)
                ddp, mmp = divmod(z0p, z1p)
                self.assertAEqual(ddp*z1p + mmp, z0p)
                self.assertAEqual(ddc, ddp)
                self.assertAEqual(mmc, mmp)

    # these fail on python2.3
    def _test_mod(self):
        "Compare mod with CPython."
        for (z0c, z1c, z0p, z1p) in enumerate():
            mc = z0c*z1c
            mp = z0p*z1p
            self.assertAEqual(mc, mp)

            if not equal(z1c, complex(0,0)):
                rc = z0c%z1c
                rp = z0p%z1p
                self.assertAEqual(rc, rp)

    def test_div(self):
        # docstring corrected: the original said "Compare mod with CPython."
        # (copy-paste from _test_mod) while the body tests division
        "Compare division with CPython."
        for (z0c, z1c, z0p, z1p) in enumerate():
            mc = z0c*z1c
            mp = z0p*z1p
            self.assertAEqual(mc, mp)

            if not equal(z1c, complex(0,0)):
                rc = z0c/z1c
                rp = z0p/z1p
                self.assertAEqual(rc, rp)

    def test_pow(self):
        "Compare pow with CPython."
        for (z0c, z1c, z0p, z1p) in enumerate():
            if not equal(z0c, 0j) and (z1c.imag != 0.0):
                pc = z0c**z1c
                pp = z0p**z1p
                self.assertAEqual(pc, pp)
            pc = z0c**z0c.real
            pp = z0p**z0p.real
            self.assertAEqual(pc, pp)

    def test_complex(self):
        "Compare complex() with CPython (with complex arguments)"
        ours = pycomplex(pycomplex(1,10), 100)
        cpy = complex(complex(1,10), 100)
        self.assertAEqual(ours, cpy)

        ours = pycomplex(pycomplex(1,10), pycomplex(100,1000))
        cpy = complex(complex(1,10), complex(100,1000))
        self.assertAEqual(ours, cpy)

        ours = pycomplex(10, pycomplex(100,1000))
        cpy = complex(10, complex(100,1000))
        self.assertAEqual(ours, cpy)

    def test_subclassing(self):
        # pycomplex(pycomplex) must return the identical object, while a
        # subclass constructor must build a genuine subclass instance
        class cx(pycomplex):
            pass
        _1_j = pycomplex(0,1)
        assert pycomplex(_1_j) is _1_j
        assert type(cx(_1_j)) is cx
        assert cx(_1_j) == _1_j
| Python |
"""
self cloning, automatic path configuration
copy this into any subdirectory of pypy from which scripts need
to be run, typically all of the test subdirs.
The idea is that any such script simply issues
import autopath
and this will make sure that the parent directory containing "pypy"
is in sys.path.
If you modify the master "autopath.py" version (in pypy/tool/autopath.py)
you can directly run it which will copy itself on all autopath.py files
it finds under the pypy root directory.
This module always provides these attributes:
pypydir pypy root directory path
this_dir directory where this autopath.py resides
"""
def __dirinfo(part):
    """ return (partdir, this_dir) and insert parent of partdir
    into sys.path. If the parent directories don't have the part
    an EnvironmentError is raised."""
    import sys, os
    # Locate the directory this file lives in; __file__ is undefined when
    # the module is executed as a script, hence the NameError fallback.
    try:
        head = this_dir = os.path.realpath(os.path.dirname(__file__))
    except NameError:
        head = this_dir = os.path.realpath(os.path.dirname(sys.argv[0]))
    # Walk upwards until a path component equal to `part` is found;
    # `head` then names the parent of the package root.
    while head:
        partdir = head
        head, tail = os.path.split(head)
        if tail == part:
            break
    else:
        raise EnvironmentError, "'%s' missing in '%r'" % (partdir, this_dir)
    pypy_root = os.path.join(head, '')
    # Move the package parent to the front of sys.path (removing any
    # earlier occurrence so it is not listed twice).
    try:
        sys.path.remove(head)
    except ValueError:
        pass
    sys.path.insert(0, head)
    # Collect top-level modules that were imported from below the package
    # root under a bare name, and re-register them under their full
    # dotted name so both spellings refer to the same module object.
    munged = {}
    for name, mod in sys.modules.items():
        if '.' in name:
            continue
        fn = getattr(mod, '__file__', None)
        if not isinstance(fn, str):
            continue
        newname = os.path.splitext(os.path.basename(fn))[0]
        # NOTE(review): `newname` is a bare basename and can never contain
        # a '.', so startswith(part + '.') looks like it is always False,
        # leaving `munged` empty -- confirm against the master autopath.py.
        if not newname.startswith(part + '.'):
            continue
        path = os.path.join(os.path.dirname(os.path.realpath(fn)), '')
        if path.startswith(pypy_root) and newname != part:
            modpaths = os.path.normpath(path[len(pypy_root):]).split(os.sep)
            if newname != '__init__':
                modpaths.append(newname)
            modpath = '.'.join(modpaths)
            if modpath not in sys.modules:
                munged[modpath] = mod
    for name, mod in munged.iteritems():
        if name not in sys.modules:
            sys.modules[name] = mod
            if '.' in name:
                # also bind the module as an attribute of its parent package
                prename = name[:name.rfind('.')]
                postname = name[len(prename)+1:]
                if prename not in sys.modules:
                    __import__(prename)
                if not hasattr(sys.modules[prename], postname):
                    setattr(sys.modules[prename], postname, mod)
    return partdir, this_dir
def __clone():
    """ clone master version of autopath.py into all subdirs """
    from os.path import join, walk
    # only the master copy (pypy/tool/autopath.py) may be cloned from
    if not this_dir.endswith(join('pypy','tool')):
        raise EnvironmentError("can only clone master version "
                               "'%s'" % join(pypydir, 'tool',_myname))
    def sync_walker(arg, dirname, fnames):
        # os.path.walk callback: overwrite any stale autopath.py found
        # under the tree with the master content passed in as `arg`.
        if _myname in fnames:
            fn = join(dirname, _myname)
            # NOTE(review): 'rwb+' is not a standard mode string, and when
            # the copy is stale `f` is rebound to a second handle below, so
            # the finally-close only closes the write handle and the read
            # handle leaks -- confirm intent before touching this py2 code.
            f = open(fn, 'rwb+')
            try:
                if f.read() == arg:
                    print "checkok", fn
                else:
                    print "syncing", fn
                    f = open(fn, 'w')
                    f.write(arg)
            finally:
                f.close()
    # read the master copy once, then push it to every subdirectory
    s = open(join(pypydir, 'tool', _myname), 'rb').read()
    walk(pypydir, sync_walker, s)
_myname = 'autopath.py'
# set guaranteed attributes
# (__dirinfo also inserts the parent of the 'pypy' package into sys.path
# as an import-time side effect)
pypydir, this_dir = __dirinfo('pypy')
if __name__ == '__main__':
    # running this file directly re-copies it over every autopath.py
    # found below the pypy root
    __clone()
| Python |
from pypy.interpreter.gateway import ObjSpace
from pypy.interpreter.error import OperationError
from pypy.rlib import rgc # Force registration of gc.collect
import gc
def collect(space):
    """Run a full collection."""
    # interp-level implementation of app-level gc.collect(); delegates to
    # the host gc module and implicitly returns app-level None
    gc.collect()
collect.unwrap_spec = [ObjSpace]
import sys
# cache sys.platform at import time; estimate_heap_size keys off it
platform = sys.platform
def estimate_heap_size(space):
    """Return (wrapped) an estimate of the heap size of this process,
    in bytes.  Only implemented by parsing /proc/<pid>/status on Linux;
    raises app-level RuntimeError everywhere else or when parsing fails."""
    # XXX should be done with the help of the GCs
    if platform == "linux2":
        import os
        pid = os.getpid()
        try:
            fd = os.open("/proc/" + str(pid) + "/status", os.O_RDONLY, 0777)
        except OSError:
            pass  # fall through to the RuntimeError below
        else:
            try:
                content = os.read(fd, 1000000)
            finally:
                os.close(fd)
            lines = content.split("\n")
            for line in lines:
                if line.startswith("VmSize:"):
                    # line looks like "VmSize:   1234 kB"; drop the 3-char
                    # " kB" suffix and convert kilobytes to bytes
                    stop = len(line) - 3
                    assert stop > 0
                    result = int(line[len("VmSize:"):stop].strip(" ")) * 1024
                    return space.wrap(result)
    raise OperationError(space.w_RuntimeError,
                         space.wrap("can't estimate the heap size"))
estimate_heap_size.unwrap_spec = [ObjSpace]
| Python |
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
appleveldefs = {
'enable': 'app_gc.enable',
'disable': 'app_gc.disable',
'isenabled': 'app_gc.isenabled',
}
interpleveldefs = {
'collect': 'interp_gc.collect',
'estimate_heap_size': 'interp_gc.estimate_heap_size',
}
| Python |
def isenabled():
    "Not implemented."
    # stub: no GC toggling is supported; implicitly returns None
    return None
def enable():
    "Not implemented."
    # stub: no GC toggling is supported; implicitly returns None
    return None
def disable():
    "Not implemented."
    # stub: no GC toggling is supported; implicitly returns None
    return None
| Python |
class sslerror(Exception):
    """Exception type used by this module for SSL-level failures."""

__doc__ = """Implementation module for SSL socket operations.
See the socket module for documentation."""
| Python |
from pypy.rpython.rctypes.tool import ctypes_platform
from pypy.rpython.rctypes.tool.libc import libc
import pypy.rpython.rctypes.implementation # this defines rctypes magic
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import W_Root, ObjSpace, Wrappable
from pypy.interpreter.typedef import TypeDef
from pypy.interpreter.gateway import interp2app
from ctypes import *
import ctypes.util
import sys
import socket
import select
from ssl import SSL_CTX, SSL, X509, SSL_METHOD, X509_NAME
from bio import BIO
# ctypes has no explicit "void" restype marker; None plays that role
c_void = None
libssl = cdll.LoadLibrary(ctypes.util.find_library("ssl"))
## user defined constants
X509_NAME_MAXLEN = 256
# these mirror ssl.h
PY_SSL_ERROR_NONE, PY_SSL_ERROR_SSL = 0, 1
PY_SSL_ERROR_WANT_READ, PY_SSL_ERROR_WANT_WRITE = 2, 3
PY_SSL_ERROR_WANT_X509_LOOKUP = 4
PY_SSL_ERROR_SYSCALL = 5 # look at error stack/return value/errno
PY_SSL_ERROR_ZERO_RETURN, PY_SSL_ERROR_WANT_CONNECT = 6, 7
# start of non ssl.h errorcodes
PY_SSL_ERROR_EOF = 8 # special case of SSL_ERROR_SYSCALL
PY_SSL_ERROR_INVALID_ERROR_CODE = 9
# states returned by check_socket_and_wait_for_timeout() below
SOCKET_IS_NONBLOCKING, SOCKET_IS_BLOCKING = 0, 1
SOCKET_HAS_TIMED_OUT, SOCKET_HAS_BEEN_CLOSED = 2, 3
SOCKET_TOO_LARGE_FOR_SELECT, SOCKET_OPERATION_OK = 4, 5
class CConfig:
    # ctypes_platform configuration: each attribute below is resolved
    # against the C headers in _header_ when configure(CConfig) runs.
    _header_ = """
    #include <openssl/ssl.h>
    #include <openssl/opensslv.h>
    #include <openssl/bio.h>
    #include <sys/types.h>
    #include <sys/time.h>
    #include <sys/poll.h>
    """
    # OpenSSL version and option/verify constants
    OPENSSL_VERSION_NUMBER = ctypes_platform.ConstantInteger(
        "OPENSSL_VERSION_NUMBER")
    SSL_FILETYPE_PEM = ctypes_platform.ConstantInteger("SSL_FILETYPE_PEM")
    SSL_OP_ALL = ctypes_platform.ConstantInteger("SSL_OP_ALL")
    SSL_VERIFY_NONE = ctypes_platform.ConstantInteger("SSL_VERIFY_NONE")
    # SSL_get_error() result codes
    SSL_ERROR_WANT_READ = ctypes_platform.ConstantInteger(
        "SSL_ERROR_WANT_READ")
    SSL_ERROR_WANT_WRITE = ctypes_platform.ConstantInteger(
        "SSL_ERROR_WANT_WRITE")
    SSL_ERROR_ZERO_RETURN = ctypes_platform.ConstantInteger(
        "SSL_ERROR_ZERO_RETURN")
    SSL_ERROR_WANT_X509_LOOKUP = ctypes_platform.ConstantInteger(
        "SSL_ERROR_WANT_X509_LOOKUP")
    SSL_ERROR_WANT_CONNECT = ctypes_platform.ConstantInteger(
        "SSL_ERROR_WANT_CONNECT")
    SSL_ERROR_SYSCALL = ctypes_platform.ConstantInteger("SSL_ERROR_SYSCALL")
    SSL_ERROR_SSL = ctypes_platform.ConstantInteger("SSL_ERROR_SSL")
    # select()/poll() support
    FD_SETSIZE = ctypes_platform.ConstantInteger("FD_SETSIZE")
    SSL_CTRL_OPTIONS = ctypes_platform.ConstantInteger("SSL_CTRL_OPTIONS")
    BIO_C_SET_NBIO = ctypes_platform.ConstantInteger("BIO_C_SET_NBIO")
    pollfd = ctypes_platform.Struct("struct pollfd",
        [("fd", c_int), ("events", c_short), ("revents", c_short)])
    nfds_t = ctypes_platform.SimpleType("nfds_t", c_uint)
    POLLOUT = ctypes_platform.ConstantInteger("POLLOUT")
    POLLIN = ctypes_platform.ConstantInteger("POLLIN")
# Resolve CConfig against the real headers and hoist each value to a
# module-level constant with the same name as in C.
class cConfig:
    pass
cConfig.__dict__.update(ctypes_platform.configure(CConfig))
OPENSSL_VERSION_NUMBER = cConfig.OPENSSL_VERSION_NUMBER
# RAND_add/RAND_status/RAND_egd exist from OpenSSL 0.9.5 onwards
HAVE_OPENSSL_RAND = OPENSSL_VERSION_NUMBER >= 0x0090500fL
SSL_FILETYPE_PEM = cConfig.SSL_FILETYPE_PEM
SSL_OP_ALL = cConfig.SSL_OP_ALL
SSL_VERIFY_NONE = cConfig.SSL_VERIFY_NONE
SSL_ERROR_WANT_READ = cConfig.SSL_ERROR_WANT_READ
SSL_ERROR_WANT_WRITE = cConfig.SSL_ERROR_WANT_WRITE
SSL_ERROR_ZERO_RETURN = cConfig.SSL_ERROR_ZERO_RETURN
SSL_ERROR_WANT_X509_LOOKUP = cConfig.SSL_ERROR_WANT_X509_LOOKUP
SSL_ERROR_WANT_CONNECT = cConfig.SSL_ERROR_WANT_CONNECT
SSL_ERROR_SYSCALL = cConfig.SSL_ERROR_SYSCALL
SSL_ERROR_SSL = cConfig.SSL_ERROR_SSL
FD_SETSIZE = cConfig.FD_SETSIZE
SSL_CTRL_OPTIONS = cConfig.SSL_CTRL_OPTIONS
BIO_C_SET_NBIO = cConfig.BIO_C_SET_NBIO
POLLOUT = cConfig.POLLOUT
POLLIN = cConfig.POLLIN
pollfd = cConfig.pollfd
nfds_t = cConfig.nfds_t
# fixed-size char buffer for X509_NAME_oneline() output
arr_x509 = c_char * X509_NAME_MAXLEN
# app-level visible error-code constants exported by the module
constants = {}
constants["SSL_ERROR_ZERO_RETURN"] = PY_SSL_ERROR_ZERO_RETURN
constants["SSL_ERROR_WANT_READ"] = PY_SSL_ERROR_WANT_READ
constants["SSL_ERROR_WANT_WRITE"] = PY_SSL_ERROR_WANT_WRITE
constants["SSL_ERROR_WANT_X509_LOOKUP"] = PY_SSL_ERROR_WANT_X509_LOOKUP
constants["SSL_ERROR_SYSCALL"] = PY_SSL_ERROR_SYSCALL
constants["SSL_ERROR_SSL"] = PY_SSL_ERROR_SSL
constants["SSL_ERROR_WANT_CONNECT"] = PY_SSL_ERROR_WANT_CONNECT
constants["SSL_ERROR_EOF"] = PY_SSL_ERROR_EOF
constants["SSL_ERROR_INVALID_ERROR_CODE"] = PY_SSL_ERROR_INVALID_ERROR_CODE
# --- ctypes argtypes/restype declarations for every libssl/libc entry
# --- point used by this module ---------------------------------------
libssl.SSL_load_error_strings.restype = c_void
libssl.SSL_library_init.restype = c_int
if HAVE_OPENSSL_RAND:
    # PRNG seeding helpers (OpenSSL >= 0.9.5 only)
    libssl.RAND_add.argtypes = [c_char_p, c_int, c_double]
    libssl.RAND_add.restype = c_void
    libssl.RAND_status.restype = c_int
    libssl.RAND_egd.argtypes = [c_char_p]
    libssl.RAND_egd.restype = c_int
# context setup
libssl.SSL_CTX_new.argtypes = [POINTER(SSL_METHOD)]
libssl.SSL_CTX_new.restype = POINTER(SSL_CTX)
libssl.SSLv23_method.restype = POINTER(SSL_METHOD)
libssl.SSL_CTX_use_PrivateKey_file.argtypes = [POINTER(SSL_CTX), c_char_p, c_int]
libssl.SSL_CTX_use_PrivateKey_file.restype = c_int
libssl.SSL_CTX_use_certificate_chain_file.argtypes = [POINTER(SSL_CTX), c_char_p]
libssl.SSL_CTX_use_certificate_chain_file.restype = c_int
libssl.SSL_CTX_ctrl.argtypes = [POINTER(SSL_CTX), c_int, c_int, c_void_p]
libssl.SSL_CTX_ctrl.restype = c_int
libssl.SSL_CTX_set_verify.argtypes = [POINTER(SSL_CTX), c_int, c_void_p]
libssl.SSL_CTX_set_verify.restype = c_void
# per-connection handles and BIO control
libssl.SSL_new.argtypes = [POINTER(SSL_CTX)]
libssl.SSL_new.restype = POINTER(SSL)
libssl.SSL_set_fd.argtypes = [POINTER(SSL), c_int]
libssl.SSL_set_fd.restype = c_int
libssl.BIO_ctrl.argtypes = [POINTER(BIO), c_int, c_int, c_void_p]
libssl.BIO_ctrl.restype = c_int
libssl.SSL_get_rbio.argtypes = [POINTER(SSL)]
libssl.SSL_get_rbio.restype = POINTER(BIO)
libssl.SSL_get_wbio.argtypes = [POINTER(SSL)]
libssl.SSL_get_wbio.restype = POINTER(BIO)
libssl.SSL_set_connect_state.argtypes = [POINTER(SSL)]
libssl.SSL_set_connect_state.restype = c_void
libssl.SSL_connect.argtypes = [POINTER(SSL)]
libssl.SSL_connect.restype = c_int
libssl.SSL_get_error.argtypes = [POINTER(SSL), c_int]
libssl.SSL_get_error.restype = c_int
# poll() is preferred over select() when libc provides it
have_poll = False
if hasattr(libc, "poll"):
    have_poll = True
    libc.poll.argtypes = [POINTER(pollfd), nfds_t, c_int]
    libc.poll.restype = c_int
# error reporting and peer certificate inspection
libssl.ERR_get_error.restype = c_int
libssl.ERR_error_string.argtypes = [c_int, c_char_p]
libssl.ERR_error_string.restype = c_char_p
libssl.SSL_get_peer_certificate.argtypes = [POINTER(SSL)]
libssl.SSL_get_peer_certificate.restype = POINTER(X509)
libssl.X509_get_subject_name.argtypes = [POINTER(X509)]
libssl.X509_get_subject_name.restype = POINTER(X509_NAME)
libssl.X509_get_issuer_name.argtypes = [POINTER(X509)]
libssl.X509_get_issuer_name.restype = POINTER(X509_NAME)
libssl.X509_NAME_oneline.argtypes = [POINTER(X509_NAME), arr_x509, c_int]
libssl.X509_NAME_oneline.restype = c_char_p
libssl.X509_free.argtypes = [POINTER(X509)]
libssl.X509_free.restype = c_void
libssl.SSL_free.argtypes = [POINTER(SSL)]
libssl.SSL_free.restype = c_void
libssl.SSL_CTX_free.argtypes = [POINTER(SSL_CTX)]
libssl.SSL_CTX_free.restype = c_void
# data transfer
libssl.SSL_write.argtypes = [POINTER(SSL), c_char_p, c_int]
libssl.SSL_write.restype = c_int
libssl.SSL_pending.argtypes = [POINTER(SSL)]
libssl.SSL_pending.restype = c_int
libssl.SSL_read.argtypes = [POINTER(SSL), c_char_p, c_int]
libssl.SSL_read.restype = c_int
def _init_ssl():
    """One-time OpenSSL setup: load error strings and run library init."""
    libssl.SSL_load_error_strings()
    libssl.SSL_library_init()
if HAVE_OPENSSL_RAND:
    # helper routines for seeding the SSL PRNG
    def RAND_add(space, string, entropy):
        """RAND_add(string, entropy)
        Mix string into the OpenSSL PRNG state. entropy (a float) is a lower
        bound on the entropy contained in string."""
        buf = c_char_p(string)
        libssl.RAND_add(buf, len(string), entropy)
    RAND_add.unwrap_spec = [ObjSpace, str, float]
    def RAND_status(space):
        """RAND_status() -> 0 or 1
        Returns 1 if the OpenSSL PRNG has been seeded with enough data and 0 if not.
        It is necessary to seed the PRNG with RAND_add() on some platforms before
        using the ssl() function."""
        res = libssl.RAND_status()
        return space.wrap(res)
    RAND_status.unwrap_spec = [ObjSpace]
    def RAND_egd(space, path):
        """RAND_egd(path) -> bytes
        Queries the entropy gather daemon (EGD) on socket path. Returns number
        of bytes read. Raises socket.sslerror if connection to EGD fails or
        if it does not provide enough data to seed PRNG."""
        # RAND_egd returns -1 on failure
        socket_path = c_char_p(path)
        bytes = libssl.RAND_egd(socket_path)
        if bytes == -1:
            msg = "EGD connection failed or EGD did not return"
            msg += " enough data to seed the PRNG"
            raise OperationError(space.w_Exception, space.wrap(msg))
        return space.wrap(bytes)
    RAND_egd.unwrap_spec = [ObjSpace, str]
class SSLObject(Wrappable):
    """Interp-level SSL connection object exposed to app-level code.

    Owns the OpenSSL SSL_CTX/SSL/X509 ctypes handles and the app-level
    socket they wrap; see new_sslobject() for construction and the
    client handshake.
    """
    def __init__(self, space):
        self.space = space
        self.w_socket = None           # app-level socket, set by new_sslobject()
        self.ctx = POINTER(SSL_CTX)()  # NULL until SSL_CTX_new() succeeds
        self.ssl = POINTER(SSL)()      # NULL until SSL_new() succeeds
        self.server_cert = POINTER(X509)()
        self._server = arr_x509()      # one-line subject of the peer cert
        self._issuer = arr_x509()      # one-line issuer of the peer cert

    def server(self):
        """Return the peer certificate's subject as a string."""
        return self.space.wrap(self._server.value)
    server.unwrap_spec = ['self']

    def issuer(self):
        """Return the peer certificate's issuer as a string."""
        return self.space.wrap(self._issuer.value)
    issuer.unwrap_spec = ['self']

    def __del__(self):
        # Free only the OpenSSL handles that were actually created;
        # NULL ctypes pointers are falsy, so partial construction is safe.
        if self.server_cert:
            libssl.X509_free(self.server_cert)
        if self.ssl:
            libssl.SSL_free(self.ssl)
        if self.ctx:
            libssl.SSL_CTX_free(self.ctx)

    def write(self, data):
        """write(s) -> len

        Writes the string s into the SSL object. Returns the number
        of bytes written."""
        sockstate = check_socket_and_wait_for_timeout(self.space,
            self.w_socket, True)
        if sockstate == SOCKET_HAS_TIMED_OUT:
            raise OperationError(self.space.w_Exception,
                self.space.wrap("The write operation timed out"))
        elif sockstate == SOCKET_HAS_BEEN_CLOSED:
            raise OperationError(self.space.w_Exception,
                self.space.wrap("Underlying socket has been closed."))
        elif sockstate == SOCKET_TOO_LARGE_FOR_SELECT:
            raise OperationError(self.space.w_Exception,
                self.space.wrap("Underlying socket too large for select()."))
        num_bytes = 0
        while True:
            # Retry SSL_write while OpenSSL reports WANT_READ/WANT_WRITE
            # (renegotiation may require I/O in either direction).
            err = 0
            num_bytes = libssl.SSL_write(self.ssl, data, len(data))
            err = libssl.SSL_get_error(self.ssl, num_bytes)
            if err == SSL_ERROR_WANT_READ:
                sockstate = check_socket_and_wait_for_timeout(self.space,
                    self.w_socket, False)
            elif err == SSL_ERROR_WANT_WRITE:
                sockstate = check_socket_and_wait_for_timeout(self.space,
                    self.w_socket, True)
            else:
                sockstate = SOCKET_OPERATION_OK
            if sockstate == SOCKET_HAS_TIMED_OUT:
                raise OperationError(self.space.w_Exception,
                    self.space.wrap("The connect operation timed out"))
            elif sockstate == SOCKET_HAS_BEEN_CLOSED:
                raise OperationError(self.space.w_Exception,
                    self.space.wrap("Underlying socket has been closed."))
            elif sockstate == SOCKET_IS_NONBLOCKING:
                break
            if err == SSL_ERROR_WANT_READ or err == SSL_ERROR_WANT_WRITE:
                continue
            else:
                break
        if num_bytes > 0:
            return self.space.wrap(num_bytes)
        else:
            errstr, errval = _ssl_seterror(self.space, self, num_bytes)
            raise OperationError(self.space.w_Exception,
                self.space.wrap("%s: %d" % (errstr, errval)))
    write.unwrap_spec = ['self', str]

    def read(self, num_bytes=1024):
        """read([len]) -> string

        Read up to len bytes from the SSL socket."""
        count = libssl.SSL_pending(self.ssl)
        if not count:
            # nothing buffered inside OpenSSL: wait for the socket
            sockstate = check_socket_and_wait_for_timeout(self.space,
                self.w_socket, False)
            if sockstate == SOCKET_HAS_TIMED_OUT:
                raise OperationError(self.space.w_Exception,
                    self.space.wrap("The read operation timed out"))
            elif sockstate == SOCKET_TOO_LARGE_FOR_SELECT:
                raise OperationError(self.space.w_Exception,
                    self.space.wrap("Underlying socket too large for select()."))
        buf = create_string_buffer(num_bytes)
        while True:
            # same WANT_READ/WANT_WRITE retry loop as write() above
            err = 0
            count = libssl.SSL_read(self.ssl, buf, num_bytes)
            err = libssl.SSL_get_error(self.ssl, count)
            if err == SSL_ERROR_WANT_READ:
                sockstate = check_socket_and_wait_for_timeout(self.space,
                    self.w_socket, False)
            elif err == SSL_ERROR_WANT_WRITE:
                sockstate = check_socket_and_wait_for_timeout(self.space,
                    self.w_socket, True)
            else:
                sockstate = SOCKET_OPERATION_OK
            if sockstate == SOCKET_HAS_TIMED_OUT:
                raise OperationError(self.space.w_Exception,
                    self.space.wrap("The read operation timed out"))
            elif sockstate == SOCKET_IS_NONBLOCKING:
                break
            if err == SSL_ERROR_WANT_READ or err == SSL_ERROR_WANT_WRITE:
                continue
            else:
                break
        if count <= 0:
            errstr, errval = _ssl_seterror(self.space, self, count)
            raise OperationError(self.space.w_Exception,
                self.space.wrap("%s: %d" % (errstr, errval)))
        if count != num_bytes:
            # shorten the buffer to the number of bytes actually read
            data = buf.raw
            assert count >= 0
            try:
                new_data = data[0:count]
            except:
                # BUG FIX: the object space has no attribute
                # w_MemoryException; the standard name is w_MemoryError.
                # The old spelling would raise AttributeError here.
                raise OperationError(self.space.w_MemoryError,
                    self.space.wrap("error in resizing of the buffer."))
            buf = create_string_buffer(count)
            buf.raw = new_data
        return self.space.wrap(buf.value)
    read.unwrap_spec = ['self', int]
# app-level type definition: expose the interp-level methods of SSLObject
SSLObject.typedef = TypeDef("SSLObject",
    server = interp2app(SSLObject.server,
        unwrap_spec=SSLObject.server.unwrap_spec),
    issuer = interp2app(SSLObject.issuer,
        unwrap_spec=SSLObject.issuer.unwrap_spec),
    write = interp2app(SSLObject.write,
        unwrap_spec=SSLObject.write.unwrap_spec),
    read = interp2app(SSLObject.read, unwrap_spec=SSLObject.read.unwrap_spec)
)
def new_sslobject(space, w_sock, w_key_file, w_cert_file):
    """Build an SSLObject around w_sock and run the client-side handshake.

    w_key_file / w_cert_file are file names (or app-level None); they must
    be given together or not at all.  Raises an app-level Exception on any
    OpenSSL failure; returns the connected SSLObject.
    """
    ss = SSLObject(space)
    sock_fd = space.int_w(space.call_method(w_sock, "fileno"))
    w_timeout = space.call_method(w_sock, "gettimeout")
    if space.is_w(w_timeout, space.w_None):
        has_timeout = False
    else:
        has_timeout = True
    if space.is_w(w_key_file, space.w_None):
        key_file = None
    else:
        key_file = space.str_w(w_key_file)
    if space.is_w(w_cert_file, space.w_None):
        cert_file = None
    else:
        cert_file = space.str_w(w_cert_file)
    # key and certificate must be supplied together
    if ((key_file and not cert_file) or (not key_file and cert_file)):
        raise OperationError(space.w_Exception,
            space.wrap("Both the key & certificate files must be specified"))
    ss.ctx = libssl.SSL_CTX_new(libssl.SSLv23_method()) # set up context
    if not ss.ctx:
        raise OperationError(space.w_Exception, space.wrap("SSL_CTX_new error"))
    if key_file:
        ret = libssl.SSL_CTX_use_PrivateKey_file(ss.ctx, key_file,
            SSL_FILETYPE_PEM)
        if ret < 1:
            raise OperationError(space.w_Exception,
                space.wrap("SSL_CTX_use_PrivateKey_file error"))
        ret = libssl.SSL_CTX_use_certificate_chain_file(ss.ctx, cert_file)
        libssl.SSL_CTX_ctrl(ss.ctx, SSL_CTRL_OPTIONS, SSL_OP_ALL, c_void_p())
        if ret < 1:
            raise OperationError(space.w_Exception,
                space.wrap("SSL_CTX_use_certificate_chain_file error"))
    libssl.SSL_CTX_set_verify(ss.ctx, SSL_VERIFY_NONE, c_void_p()) # set verify level
    ss.ssl = libssl.SSL_new(ss.ctx) # new ssl struct
    libssl.SSL_set_fd(ss.ssl, sock_fd) # set the socket for SSL
    # If the socket is in non-blocking mode or timeout mode, set the BIO
    # to non-blocking mode (blocking is the default)
    if has_timeout:
        # Set both the read and write BIO's to non-blocking mode
        libssl.BIO_ctrl(libssl.SSL_get_rbio(ss.ssl), BIO_C_SET_NBIO, 1, c_void_p())
        libssl.BIO_ctrl(libssl.SSL_get_wbio(ss.ssl), BIO_C_SET_NBIO, 1, c_void_p())
    libssl.SSL_set_connect_state(ss.ssl)
    # Actually negotiate SSL connection
    # XXX If SSL_connect() returns 0, it's also a failure.
    sockstate = 0
    while True:
        # retry SSL_connect while OpenSSL reports WANT_READ/WANT_WRITE
        ret = libssl.SSL_connect(ss.ssl)
        err = libssl.SSL_get_error(ss.ssl, ret)
        if err == SSL_ERROR_WANT_READ:
            sockstate = check_socket_and_wait_for_timeout(space, w_sock, False)
        elif err == SSL_ERROR_WANT_WRITE:
            sockstate = check_socket_and_wait_for_timeout(space, w_sock, True)
        else:
            sockstate = SOCKET_OPERATION_OK
        if sockstate == SOCKET_HAS_TIMED_OUT:
            raise OperationError(space.w_Exception,
                space.wrap("The connect operation timed out"))
        elif sockstate == SOCKET_HAS_BEEN_CLOSED:
            raise OperationError(space.w_Exception,
                space.wrap("Underlying socket has been closed."))
        elif sockstate == SOCKET_TOO_LARGE_FOR_SELECT:
            raise OperationError(space.w_Exception,
                space.wrap("Underlying socket too large for select()."))
        elif sockstate == SOCKET_IS_NONBLOCKING:
            break
        if err == SSL_ERROR_WANT_READ or err == SSL_ERROR_WANT_WRITE:
            continue
        else:
            break
    if ret < 0:
        errstr, errval = _ssl_seterror(space, ss, ret)
        raise OperationError(space.w_Exception,
            space.wrap("%s: %d" % (errstr, errval)))
    ss.server_cert = libssl.SSL_get_peer_certificate(ss.ssl)
    if ss.server_cert:
        # cache one-line subject/issuer strings into the fixed-size buffers
        libssl.X509_NAME_oneline(libssl.X509_get_subject_name(ss.server_cert),
            ss._server, X509_NAME_MAXLEN)
        libssl.X509_NAME_oneline(libssl.X509_get_issuer_name(ss.server_cert),
            ss._issuer, X509_NAME_MAXLEN)
    ss.w_socket = w_sock
    return ss
new_sslobject.unwrap_spec = [ObjSpace, W_Root, str, str]
def check_socket_and_wait_for_timeout(space, w_sock, writing):
    """If the socket has a timeout, do a select()/poll() on the socket.
    The argument writing indicates the direction.
    Returns one of the possibilities in the timeout_state enum (above)."""
    w_timeout = space.call_method(w_sock, "gettimeout")
    if space.is_w(w_timeout, space.w_None):
        return SOCKET_IS_BLOCKING
    elif space.int_w(w_timeout) == 0.0:
        return SOCKET_IS_NONBLOCKING
    # NOTE(review): gettimeout() yields a float at app-level; int_w here
    # truncates fractional timeouts (e.g. 0.5s -> 0).  float_w looks like
    # the intended unwrapper -- confirm before changing.
    sock_timeout = space.int_w(w_timeout)
    # guard against closed socket
    try:
        space.call_method(w_sock, "fileno")
    except:
        return SOCKET_HAS_BEEN_CLOSED
    sock_fd = space.int_w(space.call_method(w_sock, "fileno"))
    # Prefer poll, if available, since you can poll() any fd
    # which can't be done with select().
    if have_poll:
        _pollfd = pollfd()
        _pollfd.fd = sock_fd
        if writing:
            _pollfd.events = POLLOUT
        else:
            _pollfd.events = POLLIN
        # socket's timeout is in seconds, poll's timeout in ms
        timeout = int(sock_timeout * 1000 + 0.5)
        rc = libc.poll(byref(_pollfd), 1, timeout)
        if rc == 0:
            # poll() returning 0 means the timeout expired
            return SOCKET_HAS_TIMED_OUT
        else:
            return SOCKET_OPERATION_OK
    # select() can only handle descriptors below FD_SETSIZE
    if sock_fd >= FD_SETSIZE:
        return SOCKET_TOO_LARGE_FOR_SELECT
    # construct the arguments for select
    sec = int(sock_timeout)
    usec = int((sock_timeout - sec) * 1e6)
    timeout = sec + usec * 0.000001
    # see if the socket is ready
    if writing:
        ret = select.select([], [sock_fd], [], timeout)
        r, w, e = ret
        if not w:
            return SOCKET_HAS_TIMED_OUT
        else:
            return SOCKET_OPERATION_OK
    else:
        ret = select.select([sock_fd], [], [], timeout)
        r, w, e = ret
        if not r:
            return SOCKET_HAS_TIMED_OUT
        else:
            return SOCKET_OPERATION_OK
def _ssl_seterror(space, ss, ret):
    """Translate the last SSL error on ss (for return value ret <= 0)
    into an (error message, PY_SSL_ERROR_* code) pair for the caller to
    wrap into an app-level exception."""
    assert ret <= 0
    err = libssl.SSL_get_error(ss.ssl, ret)
    errstr = ""
    errval = 0
    if err == SSL_ERROR_ZERO_RETURN:
        errstr = "TLS/SSL connection has been closed"
        errval = PY_SSL_ERROR_ZERO_RETURN
    elif err == SSL_ERROR_WANT_READ:
        errstr = "The operation did not complete (read)"
        errval = PY_SSL_ERROR_WANT_READ
    elif err == SSL_ERROR_WANT_WRITE:
        errstr = "The operation did not complete (write)"
        errval = PY_SSL_ERROR_WANT_WRITE
    elif err == SSL_ERROR_WANT_X509_LOOKUP:
        errstr = "The operation did not complete (X509 lookup)"
        errval = PY_SSL_ERROR_WANT_X509_LOOKUP
    elif err == SSL_ERROR_WANT_CONNECT:
        errstr = "The operation did not complete (connect)"
        errval = PY_SSL_ERROR_WANT_CONNECT
    elif err == SSL_ERROR_SYSCALL:
        # system-call error: consult OpenSSL's error queue for details
        e = libssl.ERR_get_error()
        if e == 0:
            if ret == 0 or space.is_w(ss.w_socket, space.w_None):
                errstr = "EOF occurred in violation of protocol"
                errval = PY_SSL_ERROR_EOF
            elif ret == -1:
                # the underlying BIO reported an I/O error
                return errstr, errval # sock.errorhandler()?
            else:
                errstr = "Some I/O error occurred"
                errval = PY_SSL_ERROR_SYSCALL
        else:
            errstr = libssl.ERR_error_string(e, None)
            errval = PY_SSL_ERROR_SYSCALL
    elif err == SSL_ERROR_SSL:
        # protocol-level failure inside the SSL library
        e = libssl.ERR_get_error()
        errval = PY_SSL_ERROR_SSL
        if e != 0:
            errstr = libssl.ERR_error_string(e, None)
        else:
            errstr = "A failure in the SSL library occurred"
    else:
        errstr = "Invalid error code"
        errval = PY_SSL_ERROR_INVALID_ERROR_CODE
    return errstr, errval
def ssl(space, w_socket, w_key_file=None, w_cert_file=None):
    """ssl(socket, [keyfile, certfile]) -> sslobject"""
    # thin app-level entry point: all the work happens in new_sslobject()
    return space.wrap(new_sslobject(space, w_socket, w_key_file, w_cert_file))
ssl.unwrap_spec = [ObjSpace, W_Root, W_Root, W_Root]
| Python |
from ctypes import *
# Machine-generated ctypes declarations (darwin headers); the sizeof /
# alignment asserts pin the expected 32-bit layout -- do not hand-edit.
STRING = c_char_p
OSLittleEndian = 1
OSUnknownByteOrder = 0
OSBigEndian = 2
P_ALL = 0
P_PID = 1
P_PGID = 2
__darwin_nl_item = c_int
__darwin_wctrans_t = c_int
__darwin_wctype_t = c_ulong
__int8_t = c_byte
__uint8_t = c_ubyte
__int16_t = c_short
__uint16_t = c_ushort
__int32_t = c_int
__uint32_t = c_uint
__int64_t = c_longlong
__uint64_t = c_ulonglong
__darwin_intptr_t = c_long
__darwin_natural_t = c_uint
__darwin_ct_rune_t = c_int
class __mbstate_t(Union):
    pass
__mbstate_t._pack_ = 4
__mbstate_t._fields_ = [
    ('__mbstate8', c_char * 128),
    ('_mbstateL', c_longlong),
]
assert sizeof(__mbstate_t) == 128, sizeof(__mbstate_t)
assert alignment(__mbstate_t) == 4, alignment(__mbstate_t)
__darwin_mbstate_t = __mbstate_t
__darwin_ptrdiff_t = c_int
__darwin_size_t = c_ulong
__darwin_va_list = STRING
__darwin_wchar_t = c_int
__darwin_rune_t = __darwin_wchar_t
__darwin_wint_t = c_int
__darwin_clock_t = c_ulong
__darwin_socklen_t = __uint32_t
__darwin_ssize_t = c_long
__darwin_time_t = c_long
sig_atomic_t = c_int
class sigcontext(Structure):
    pass
sigcontext._fields_ = [
    ('sc_onstack', c_int),
    ('sc_mask', c_int),
    ('sc_eax', c_uint),
    ('sc_ebx', c_uint),
    ('sc_ecx', c_uint),
    ('sc_edx', c_uint),
    ('sc_edi', c_uint),
    ('sc_esi', c_uint),
    ('sc_ebp', c_uint),
    ('sc_esp', c_uint),
    ('sc_ss', c_uint),
    ('sc_eflags', c_uint),
    ('sc_eip', c_uint),
    ('sc_cs', c_uint),
    ('sc_ds', c_uint),
    ('sc_es', c_uint),
    ('sc_fs', c_uint),
    ('sc_gs', c_uint),
]
assert sizeof(sigcontext) == 72, sizeof(sigcontext)
assert alignment(sigcontext) == 4, alignment(sigcontext)
u_int8_t = c_ubyte
u_int16_t = c_ushort
u_int32_t = c_uint
u_int64_t = c_ulonglong
int32_t = c_int
register_t = int32_t
user_addr_t = u_int64_t
user_size_t = u_int64_t
int64_t = c_longlong
user_ssize_t = int64_t
user_long_t = int64_t
user_ulong_t = u_int64_t
user_time_t = int64_t
syscall_arg_t = u_int64_t
# values for unnamed enumeration
# Machine-generated OpenSSL BIO structure mirrors (32-bit layout,
# checked by the sizeof/alignment asserts) -- do not hand-edit.
class bio_st(Structure):
    pass
BIO = bio_st
bio_info_cb = CFUNCTYPE(None, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)
class bio_method_st(Structure):
    pass
bio_method_st._fields_ = [
    ('type', c_int),
    ('name', STRING),
    ('bwrite', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)),
    ('bread', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)),
    ('bputs', CFUNCTYPE(c_int, POINTER(BIO), STRING)),
    ('bgets', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)),
    ('ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, c_long, c_void_p)),
    ('create', CFUNCTYPE(c_int, POINTER(BIO))),
    ('destroy', CFUNCTYPE(c_int, POINTER(BIO))),
    ('callback_ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, POINTER(bio_info_cb))),
]
assert sizeof(bio_method_st) == 40, sizeof(bio_method_st)
assert alignment(bio_method_st) == 4, alignment(bio_method_st)
BIO_METHOD = bio_method_st
class crypto_ex_data_st(Structure):
    pass
class stack_st(Structure):
    pass
STACK = stack_st
crypto_ex_data_st._fields_ = [
    ('sk', POINTER(STACK)),
    ('dummy', c_int),
]
assert sizeof(crypto_ex_data_st) == 8, sizeof(crypto_ex_data_st)
assert alignment(crypto_ex_data_st) == 4, alignment(crypto_ex_data_st)
CRYPTO_EX_DATA = crypto_ex_data_st
bio_st._fields_ = [
    ('method', POINTER(BIO_METHOD)),
    ('callback', CFUNCTYPE(c_long, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)),
    ('cb_arg', STRING),
    ('init', c_int),
    ('shutdown', c_int),
    ('flags', c_int),
    ('retry_reason', c_int),
    ('num', c_int),
    ('ptr', c_void_p),
    ('next_bio', POINTER(bio_st)),
    ('prev_bio', POINTER(bio_st)),
    ('references', c_int),
    ('num_read', c_ulong),
    ('num_write', c_ulong),
    ('ex_data', CRYPTO_EX_DATA),
]
assert sizeof(bio_st) == 64, sizeof(bio_st)
assert alignment(bio_st) == 4, alignment(bio_st)
class bio_f_buffer_ctx_struct(Structure):
    pass
bio_f_buffer_ctx_struct._fields_ = [
    ('ibuf_size', c_int),
    ('obuf_size', c_int),
    ('ibuf', STRING),
    ('ibuf_len', c_int),
    ('ibuf_off', c_int),
    ('obuf', STRING),
    ('obuf_len', c_int),
    ('obuf_off', c_int),
]
assert sizeof(bio_f_buffer_ctx_struct) == 32, sizeof(bio_f_buffer_ctx_struct)
assert alignment(bio_f_buffer_ctx_struct) == 4, alignment(bio_f_buffer_ctx_struct)
BIO_F_BUFFER_CTX = bio_f_buffer_ctx_struct
class hostent(Structure):
    pass
class CRYPTO_dynlock_value(Structure):
    pass
class CRYPTO_dynlock(Structure):
    pass
CRYPTO_dynlock._fields_ = [
    ('references', c_int),
    ('data', POINTER(CRYPTO_dynlock_value)),
]
assert sizeof(CRYPTO_dynlock) == 8, sizeof(CRYPTO_dynlock)
assert alignment(CRYPTO_dynlock) == 4, alignment(CRYPTO_dynlock)
BIO_dummy = bio_st
# Machine-generated OpenSSL CRYPTO_EX_* callback types and stack layout
# (32-bit, assert-checked) -- do not hand-edit.
CRYPTO_EX_new = CFUNCTYPE(c_int, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p)
CRYPTO_EX_free = CFUNCTYPE(None, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p)
CRYPTO_EX_dup = CFUNCTYPE(c_int, POINTER(CRYPTO_EX_DATA), POINTER(CRYPTO_EX_DATA), c_void_p, c_int, c_long, c_void_p)
class crypto_ex_data_func_st(Structure):
    pass
crypto_ex_data_func_st._fields_ = [
    ('argl', c_long),
    ('argp', c_void_p),
    ('new_func', POINTER(CRYPTO_EX_new)),
    ('free_func', POINTER(CRYPTO_EX_free)),
    ('dup_func', POINTER(CRYPTO_EX_dup)),
]
assert sizeof(crypto_ex_data_func_st) == 20, sizeof(crypto_ex_data_func_st)
assert alignment(crypto_ex_data_func_st) == 4, alignment(crypto_ex_data_func_st)
CRYPTO_EX_DATA_FUNCS = crypto_ex_data_func_st
class st_CRYPTO_EX_DATA_IMPL(Structure):
    pass
CRYPTO_EX_DATA_IMPL = st_CRYPTO_EX_DATA_IMPL
CRYPTO_MEM_LEAK_CB = CFUNCTYPE(c_void_p, c_ulong, STRING, c_int, c_int, c_void_p)
openssl_fptr = CFUNCTYPE(None)
stack_st._fields_ = [
    ('num', c_int),
    ('data', POINTER(STRING)),
    ('sorted', c_int),
    ('num_alloc', c_int),
    ('comp', CFUNCTYPE(c_int, POINTER(STRING), POINTER(STRING))),
]
assert sizeof(stack_st) == 20, sizeof(stack_st)
assert alignment(stack_st) == 4, alignment(stack_st)
va_list = __darwin_va_list
size_t = __darwin_size_t
__darwin_off_t = __int64_t
fpos_t = __darwin_off_t
# Machine-generated darwin stdio/stdlib structure mirrors (32-bit,
# assert-checked) -- do not hand-edit.
class __sbuf(Structure):
    pass
__sbuf._fields_ = [
    ('_base', POINTER(c_ubyte)),
    ('_size', c_int),
]
assert sizeof(__sbuf) == 8, sizeof(__sbuf)
assert alignment(__sbuf) == 4, alignment(__sbuf)
class __sFILEX(Structure):
    pass
class __sFILE(Structure):
    pass
__sFILE._pack_ = 4
__sFILE._fields_ = [
    ('_p', POINTER(c_ubyte)),
    ('_r', c_int),
    ('_w', c_int),
    ('_flags', c_short),
    ('_file', c_short),
    ('_bf', __sbuf),
    ('_lbfsize', c_int),
    ('_cookie', c_void_p),
    ('_close', CFUNCTYPE(c_int, c_void_p)),
    ('_read', CFUNCTYPE(c_int, c_void_p, STRING, c_int)),
    ('_seek', CFUNCTYPE(fpos_t, c_void_p, c_longlong, c_int)),
    ('_write', CFUNCTYPE(c_int, c_void_p, STRING, c_int)),
    ('_ub', __sbuf),
    ('_extra', POINTER(__sFILEX)),
    ('_ur', c_int),
    ('_ubuf', c_ubyte * 3),
    ('_nbuf', c_ubyte * 1),
    ('_lb', __sbuf),
    ('_blksize', c_int),
    ('_offset', fpos_t),
]
assert sizeof(__sFILE) == 88, sizeof(__sFILE)
assert alignment(__sFILE) == 4, alignment(__sFILE)
FILE = __sFILE
ct_rune_t = __darwin_ct_rune_t
rune_t = __darwin_rune_t
class div_t(Structure):
    pass
div_t._fields_ = [
    ('quot', c_int),
    ('rem', c_int),
]
assert sizeof(div_t) == 8, sizeof(div_t)
assert alignment(div_t) == 4, alignment(div_t)
class ldiv_t(Structure):
    pass
ldiv_t._fields_ = [
    ('quot', c_long),
    ('rem', c_long),
]
assert sizeof(ldiv_t) == 8, sizeof(ldiv_t)
assert alignment(ldiv_t) == 4, alignment(ldiv_t)
class lldiv_t(Structure):
    pass
lldiv_t._pack_ = 4
lldiv_t._fields_ = [
    ('quot', c_longlong),
    ('rem', c_longlong),
]
assert sizeof(lldiv_t) == 16, sizeof(lldiv_t)
assert alignment(lldiv_t) == 4, alignment(lldiv_t)
# Machine-generated darwin device/pthread opaque types (32-bit,
# assert-checked) -- do not hand-edit.
__darwin_dev_t = __int32_t
dev_t = __darwin_dev_t
__darwin_mode_t = __uint16_t
mode_t = __darwin_mode_t
class mcontext(Structure):
    pass
class mcontext64(Structure):
    pass
class __darwin_pthread_handler_rec(Structure):
    pass
__darwin_pthread_handler_rec._fields_ = [
    ('__routine', CFUNCTYPE(None, c_void_p)),
    ('__arg', c_void_p),
    ('__next', POINTER(__darwin_pthread_handler_rec)),
]
assert sizeof(__darwin_pthread_handler_rec) == 12, sizeof(__darwin_pthread_handler_rec)
assert alignment(__darwin_pthread_handler_rec) == 4, alignment(__darwin_pthread_handler_rec)
class _opaque_pthread_attr_t(Structure):
    pass
_opaque_pthread_attr_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 36),
]
assert sizeof(_opaque_pthread_attr_t) == 40, sizeof(_opaque_pthread_attr_t)
assert alignment(_opaque_pthread_attr_t) == 4, alignment(_opaque_pthread_attr_t)
class _opaque_pthread_cond_t(Structure):
    pass
_opaque_pthread_cond_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 24),
]
assert sizeof(_opaque_pthread_cond_t) == 28, sizeof(_opaque_pthread_cond_t)
assert alignment(_opaque_pthread_cond_t) == 4, alignment(_opaque_pthread_cond_t)
class _opaque_pthread_condattr_t(Structure):
    pass
_opaque_pthread_condattr_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 4),
]
assert sizeof(_opaque_pthread_condattr_t) == 8, sizeof(_opaque_pthread_condattr_t)
assert alignment(_opaque_pthread_condattr_t) == 4, alignment(_opaque_pthread_condattr_t)
class _opaque_pthread_mutex_t(Structure):
    pass
_opaque_pthread_mutex_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 40),
]
assert sizeof(_opaque_pthread_mutex_t) == 44, sizeof(_opaque_pthread_mutex_t)
assert alignment(_opaque_pthread_mutex_t) == 4, alignment(_opaque_pthread_mutex_t)
class _opaque_pthread_mutexattr_t(Structure):
    pass
_opaque_pthread_mutexattr_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 8),
]
assert sizeof(_opaque_pthread_mutexattr_t) == 12, sizeof(_opaque_pthread_mutexattr_t)
assert alignment(_opaque_pthread_mutexattr_t) == 4, alignment(_opaque_pthread_mutexattr_t)
class _opaque_pthread_once_t(Structure):
pass
_opaque_pthread_once_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 4),
]
assert sizeof(_opaque_pthread_once_t) == 8, sizeof(_opaque_pthread_once_t)
assert alignment(_opaque_pthread_once_t) == 4, alignment(_opaque_pthread_once_t)
class _opaque_pthread_rwlock_t(Structure):
pass
_opaque_pthread_rwlock_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 124),
]
assert sizeof(_opaque_pthread_rwlock_t) == 128, sizeof(_opaque_pthread_rwlock_t)
assert alignment(_opaque_pthread_rwlock_t) == 4, alignment(_opaque_pthread_rwlock_t)
class _opaque_pthread_rwlockattr_t(Structure):
pass
_opaque_pthread_rwlockattr_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 12),
]
assert sizeof(_opaque_pthread_rwlockattr_t) == 16, sizeof(_opaque_pthread_rwlockattr_t)
assert alignment(_opaque_pthread_rwlockattr_t) == 4, alignment(_opaque_pthread_rwlockattr_t)
class _opaque_pthread_t(Structure):
pass
_opaque_pthread_t._fields_ = [
('__sig', c_long),
('__cleanup_stack', POINTER(__darwin_pthread_handler_rec)),
('__opaque', c_char * 596),
]
assert sizeof(_opaque_pthread_t) == 604, sizeof(_opaque_pthread_t)
assert alignment(_opaque_pthread_t) == 4, alignment(_opaque_pthread_t)
# __darwin_* primitive typedefs (Darwin <sys/_types.h>, 32-bit ABI).
__darwin_blkcnt_t = __int64_t
__darwin_blksize_t = __int32_t
__darwin_fsblkcnt_t = c_uint
__darwin_fsfilcnt_t = c_uint
__darwin_gid_t = __uint32_t
__darwin_id_t = __uint32_t
__darwin_ino_t = __uint32_t
__darwin_mach_port_name_t = __darwin_natural_t
__darwin_mach_port_t = __darwin_mach_port_name_t
__darwin_mcontext_t = POINTER(mcontext)
__darwin_mcontext64_t = POINTER(mcontext64)
__darwin_pid_t = __int32_t
__darwin_pthread_attr_t = _opaque_pthread_attr_t
__darwin_pthread_cond_t = _opaque_pthread_cond_t
__darwin_pthread_condattr_t = _opaque_pthread_condattr_t
__darwin_pthread_key_t = c_ulong
__darwin_pthread_mutex_t = _opaque_pthread_mutex_t
__darwin_pthread_mutexattr_t = _opaque_pthread_mutexattr_t
__darwin_pthread_once_t = _opaque_pthread_once_t
__darwin_pthread_rwlock_t = _opaque_pthread_rwlock_t
__darwin_pthread_rwlockattr_t = _opaque_pthread_rwlockattr_t
__darwin_pthread_t = POINTER(_opaque_pthread_t)
__darwin_sigset_t = __uint32_t
__darwin_suseconds_t = __int32_t
__darwin_uid_t = __uint32_t
__darwin_useconds_t = __uint32_t
__darwin_uuid_t = c_ubyte * 16
# Signal/alternate-stack and user-context structures (<signal.h>, <ucontext.h>).
class sigaltstack(Structure):
    pass
sigaltstack._fields_ = [
    ('ss_sp', c_void_p),
    ('ss_size', __darwin_size_t),
    ('ss_flags', c_int),
]
assert sizeof(sigaltstack) == 12, sizeof(sigaltstack)
assert alignment(sigaltstack) == 4, alignment(sigaltstack)
__darwin_stack_t = sigaltstack
class ucontext(Structure):
    pass
ucontext._fields_ = [
    ('uc_onstack', c_int),
    ('uc_sigmask', __darwin_sigset_t),
    ('uc_stack', __darwin_stack_t),
    ('uc_link', POINTER(ucontext)),
    ('uc_mcsize', __darwin_size_t),
    ('uc_mcontext', __darwin_mcontext_t),
]
assert sizeof(ucontext) == 32, sizeof(ucontext)
assert alignment(ucontext) == 4, alignment(ucontext)
__darwin_ucontext_t = ucontext
class ucontext64(Structure):
    pass
ucontext64._fields_ = [
    ('uc_onstack', c_int),
    ('uc_sigmask', __darwin_sigset_t),
    ('uc_stack', __darwin_stack_t),
    ('uc_link', POINTER(ucontext64)),
    ('uc_mcsize', __darwin_size_t),
    ('uc_mcontext64', __darwin_mcontext64_t),
]
assert sizeof(ucontext64) == 32, sizeof(ucontext64)
assert alignment(ucontext64) == 4, alignment(ucontext64)
__darwin_ucontext64_t = ucontext64
class timeval(Structure):
    pass
timeval._fields_ = [
    ('tv_sec', __darwin_time_t),
    ('tv_usec', __darwin_suseconds_t),
]
assert sizeof(timeval) == 8, sizeof(timeval)
assert alignment(timeval) == 4, alignment(timeval)
# Resource usage / limits (<sys/resource.h>).
rlim_t = __int64_t
class rusage(Structure):
    pass
rusage._fields_ = [
    ('ru_utime', timeval),
    ('ru_stime', timeval),
    ('ru_maxrss', c_long),
    ('ru_ixrss', c_long),
    ('ru_idrss', c_long),
    ('ru_isrss', c_long),
    ('ru_minflt', c_long),
    ('ru_majflt', c_long),
    ('ru_nswap', c_long),
    ('ru_inblock', c_long),
    ('ru_oublock', c_long),
    ('ru_msgsnd', c_long),
    ('ru_msgrcv', c_long),
    ('ru_nsignals', c_long),
    ('ru_nvcsw', c_long),
    ('ru_nivcsw', c_long),
]
assert sizeof(rusage) == 72, sizeof(rusage)
assert alignment(rusage) == 4, alignment(rusage)
class rlimit(Structure):
    pass
rlimit._pack_ = 4
rlimit._fields_ = [
    ('rlim_cur', rlim_t),
    ('rlim_max', rlim_t),
]
assert sizeof(rlimit) == 16, sizeof(rlimit)
assert alignment(rlimit) == 4, alignment(rlimit)
# Public aliases for the __darwin_* internal names.
mcontext_t = __darwin_mcontext_t
mcontext64_t = __darwin_mcontext64_t
pthread_attr_t = __darwin_pthread_attr_t
sigset_t = __darwin_sigset_t
ucontext_t = __darwin_ucontext_t
ucontext64_t = __darwin_ucontext64_t
uid_t = __darwin_uid_t
# POSIX realtime-signal value/notification structures (<signal.h>).
class sigval(Union):
    pass
sigval._fields_ = [
    ('sival_int', c_int),
    ('sival_ptr', c_void_p),
]
assert sizeof(sigval) == 4, sizeof(sigval)
assert alignment(sigval) == 4, alignment(sigval)
class sigevent(Structure):
    pass
sigevent._fields_ = [
    ('sigev_notify', c_int),
    ('sigev_signo', c_int),
    ('sigev_value', sigval),
    ('sigev_notify_function', CFUNCTYPE(None, sigval)),
    ('sigev_notify_attributes', POINTER(pthread_attr_t)),
]
assert sizeof(sigevent) == 20, sizeof(sigevent)
assert alignment(sigevent) == 4, alignment(sigevent)
class __siginfo(Structure):
    pass
pid_t = __darwin_pid_t
__siginfo._fields_ = [
    ('si_signo', c_int),
    ('si_errno', c_int),
    ('si_code', c_int),
    ('si_pid', pid_t),
    ('si_uid', uid_t),
    ('si_status', c_int),
    ('si_addr', c_void_p),
    ('si_value', sigval),
    ('si_band', c_long),
    ('pad', c_ulong * 7),
]
assert sizeof(__siginfo) == 64, sizeof(__siginfo)
assert alignment(__siginfo) == 4, alignment(__siginfo)
siginfo_t = __siginfo
# sigaction and legacy signal-handling structures.
class __sigaction_u(Union):
    pass
__sigaction_u._fields_ = [
    ('__sa_handler', CFUNCTYPE(None, c_int)),
    ('__sa_sigaction', CFUNCTYPE(None, c_int, POINTER(__siginfo), c_void_p)),
]
assert sizeof(__sigaction_u) == 4, sizeof(__sigaction_u)
assert alignment(__sigaction_u) == 4, alignment(__sigaction_u)
class __sigaction(Structure):
    pass
__sigaction._fields_ = [
    ('__sigaction_u', __sigaction_u),
    ('sa_tramp', CFUNCTYPE(None, c_void_p, c_int, c_int, POINTER(siginfo_t), c_void_p)),
    ('sa_mask', sigset_t),
    ('sa_flags', c_int),
]
assert sizeof(__sigaction) == 16, sizeof(__sigaction)
assert alignment(__sigaction) == 4, alignment(__sigaction)
class sigaction(Structure):
    pass
sigaction._fields_ = [
    ('__sigaction_u', __sigaction_u),
    ('sa_mask', sigset_t),
    ('sa_flags', c_int),
]
assert sizeof(sigaction) == 12, sizeof(sigaction)
assert alignment(sigaction) == 4, alignment(sigaction)
sig_t = CFUNCTYPE(None, c_int)
stack_t = __darwin_stack_t
class sigvec(Structure):
    pass
sigvec._fields_ = [
    ('sv_handler', CFUNCTYPE(None, c_int)),
    ('sv_mask', c_int),
    ('sv_flags', c_int),
]
assert sizeof(sigvec) == 12, sizeof(sigvec)
assert alignment(sigvec) == 4, alignment(sigvec)
class sigstack(Structure):
    pass
sigstack._fields_ = [
    ('ss_sp', STRING),
    ('ss_onstack', c_int),
]
assert sizeof(sigstack) == 8, sizeof(sigstack)
assert alignment(sigstack) == 4, alignment(sigstack)
# values for enumeration 'idtype_t'
idtype_t = c_int # enum
id_t = __darwin_id_t
# union wait from <sys/wait.h>; the two nested structures decode the
# status word as bitfields (third tuple element = bit width).
class wait(Union):
    pass
class N4wait3DOLLAR_3E(Structure):
    pass
N4wait3DOLLAR_3E._fields_ = [
    ('w_Termsig', c_uint, 7),
    ('w_Coredump', c_uint, 1),
    ('w_Retcode', c_uint, 8),
    ('w_Filler', c_uint, 16),
]
assert sizeof(N4wait3DOLLAR_3E) == 4, sizeof(N4wait3DOLLAR_3E)
assert alignment(N4wait3DOLLAR_3E) == 4, alignment(N4wait3DOLLAR_3E)
class N4wait3DOLLAR_4E(Structure):
    pass
N4wait3DOLLAR_4E._fields_ = [
    ('w_Stopval', c_uint, 8),
    ('w_Stopsig', c_uint, 8),
    ('w_Filler', c_uint, 16),
]
assert sizeof(N4wait3DOLLAR_4E) == 4, sizeof(N4wait3DOLLAR_4E)
assert alignment(N4wait3DOLLAR_4E) == 4, alignment(N4wait3DOLLAR_4E)
wait._fields_ = [
    ('w_status', c_int),
    ('w_T', N4wait3DOLLAR_3E),
    ('w_S', N4wait3DOLLAR_4E),
]
assert sizeof(wait) == 4, sizeof(wait)
assert alignment(wait) == 4, alignment(wait)
__gnuc_va_list = STRING
# <stdint.h> fixed-width / least / fast integer typedefs (ILP32 model).
int8_t = c_byte
int16_t = c_short
uint8_t = c_ubyte
uint16_t = c_ushort
uint32_t = c_uint
uint64_t = c_ulonglong
int_least8_t = int8_t
int_least16_t = int16_t
int_least32_t = int32_t
int_least64_t = int64_t
uint_least8_t = uint8_t
uint_least16_t = uint16_t
uint_least32_t = uint32_t
uint_least64_t = uint64_t
int_fast8_t = int8_t
int_fast16_t = int16_t
int_fast32_t = int32_t
int_fast64_t = int64_t
uint_fast8_t = uint8_t
uint_fast16_t = uint16_t
uint_fast32_t = uint32_t
uint_fast64_t = uint64_t
intptr_t = c_long
uintptr_t = c_ulong
intmax_t = c_longlong
uintmax_t = c_ulonglong
# Generated export list: every public name defined by these bindings.
__all__ = ['__uint16_t', '__int16_t', '__darwin_pthread_condattr_t',
           'CRYPTO_dynlock', '__darwin_id_t', 'CRYPTO_EX_DATA_IMPL',
           '__darwin_time_t', 'ucontext64_t', '__darwin_nl_item',
           '_opaque_pthread_condattr_t', 'FILE', 'size_t',
           '__uint32_t', 'mcontext_t', 'uint8_t', 'fpos_t', 'P_PGID',
           '__darwin_gid_t', 'uint_least16_t',
           '__darwin_pthread_handler_rec', 'CRYPTO_EX_free',
           '__darwin_pid_t', 'int_fast8_t', '__darwin_fsfilcnt_t',
           'intptr_t', 'uint_least64_t', 'user_addr_t',
           'int_least32_t', 'sigaltstack', '__darwin_pthread_t',
           'BIO_METHOD', 'uid_t', 'u_int64_t', 'u_int16_t',
           'register_t', '__darwin_ucontext_t',
           'bio_f_buffer_ctx_struct', '__darwin_ssize_t',
           '__darwin_mcontext_t', '__darwin_sigset_t', 'ct_rune_t',
           '__darwin_ptrdiff_t', 'int_fast32_t', 'va_list',
           'uint_fast16_t', 'sigset_t', '__int32_t', 'ucontext',
           'uint_fast32_t', '__uint64_t', 'mode_t',
           '__darwin_suseconds_t', '__sigaction', 'sigevent',
           'user_ulong_t', 'user_ssize_t', 'syscall_arg_t', 'int16_t',
           '__darwin_socklen_t', '__darwin_intptr_t', 'rune_t',
           '__darwin_va_list', 'siginfo_t', 'ucontext_t', '__sbuf',
           'int_least8_t', 'N4wait3DOLLAR_4E', 'div_t', 'id_t',
           '__darwin_blksize_t', 'int_least64_t', 'ldiv_t',
           'int_least16_t', '__darwin_wctrans_t', 'uint_least8_t',
           'u_int32_t', '__darwin_wchar_t', 'sigval',
           '__gnuc_va_list', 'P_PID', 'sigaction',
           '__darwin_natural_t', 'sig_t', '__darwin_blkcnt_t',
           'hostent', '_opaque_pthread_cond_t', '__darwin_size_t',
           '__darwin_ct_rune_t', '__darwin_ino_t', 'pthread_attr_t',
           'CRYPTO_MEM_LEAK_CB', '__darwin_useconds_t',
           '__darwin_mcontext64_t', 'uint16_t', '__darwin_clock_t',
           'uint_fast8_t', 'CRYPTO_dynlock_value',
           '__darwin_pthread_key_t', '__darwin_dev_t', 'int32_t',
           '__darwin_pthread_mutex_t', '__darwin_ucontext64_t',
           'st_CRYPTO_EX_DATA_IMPL', 'rlim_t', '__darwin_fsblkcnt_t',
           '__darwin_rune_t', 'BIO_F_BUFFER_CTX', 'openssl_fptr',
           '_opaque_pthread_rwlockattr_t', 'sigvec',
           '_opaque_pthread_mutexattr_t', '__darwin_pthread_rwlock_t',
           'rlimit', '__darwin_pthread_mutexattr_t',
           '__darwin_pthread_once_t', 'stack_t', '__darwin_mode_t',
           'uint_least32_t', 'wait', 'OSBigEndian', '__mbstate_t',
           'uintptr_t', '__darwin_mach_port_t', 'CRYPTO_EX_DATA',
           '__darwin_uid_t', '__int8_t', '_opaque_pthread_mutex_t',
           'int8_t', '__darwin_uuid_t', '_opaque_pthread_attr_t',
           'uintmax_t', 'sigstack', 'stack_st', 'bio_info_cb',
           'mcontext', 'crypto_ex_data_func_st', 'pid_t',
           'N4wait3DOLLAR_3E', 'uint_fast64_t', 'intmax_t',
           'sigcontext', '__siginfo', '__darwin_mbstate_t',
           'uint64_t', 'u_int8_t', 'crypto_ex_data_st',
           '__darwin_wctype_t', '_opaque_pthread_once_t',
           'OSLittleEndian', 'int64_t', 'int_fast16_t', 'bio_st',
           '__sFILE', 'ucontext64', 'sig_atomic_t', 'BIO',
           '__uint8_t', 'CRYPTO_EX_dup', 'lldiv_t',
           '__darwin_pthread_rwlockattr_t', 'OSUnknownByteOrder',
           'bio_method_st', 'timeval', '__darwin_stack_t',
           'BIO_dummy', 'int_fast64_t', 'STACK', '__sFILEX',
           'CRYPTO_EX_new', '__darwin_pthread_attr_t',
           '__darwin_mach_port_name_t', 'CRYPTO_EX_DATA_FUNCS',
           'user_time_t', '__darwin_wint_t',
           '__darwin_pthread_cond_t', 'user_size_t', 'rusage',
           'idtype_t', 'mcontext64', '__sigaction_u',
           '_opaque_pthread_rwlock_t', '__darwin_off_t',
           '_opaque_pthread_t', 'P_ALL', '__int64_t', 'uint32_t',
           'mcontext64_t', 'user_long_t', 'dev_t']
| Python |
#import py # FINISHME - more thinking needed
# Deliberately abort the import of this module: the generated ctypes
# layout it relies on is only valid on the exact platform it was
# produced on (see the skip message kept below for reference).
raise ImportError
#skip("The _ssl module is only usable when running on the exact "
#    "same platform from which the ssl.py was computed.")
# This module is imported by socket.py. It should *not* be used
# directly.
from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
    """The app-level '_ssl' mixed module.

    Interp-level and app-level definitions are declared lazily as
    dotted strings; buildloaders() additionally injects the OpenSSL
    constants (and, when available, the RAND_* functions) before the
    loaders are constructed.
    """

    interpleveldefs = {
        'ssl': 'interp_ssl.ssl',
    }

    appleveldefs = {
        '__doc__': 'app_ssl.__doc__',
        'sslerror': 'app_ssl.sslerror',
    }

    def buildloaders(cls):
        # Initialize OpenSSL once, then expose its constants as
        # interp-level definitions before the generic machinery runs.
        from pypy.module._ssl.interp_ssl import _init_ssl, constants, HAVE_OPENSSL_RAND
        _init_ssl()
        defs = Module.interpleveldefs
        for name, value in constants.iteritems():
            defs[name] = "space.wrap(%r)" % value
        if HAVE_OPENSSL_RAND:
            # Only present when OpenSSL was built with RAND support.
            for rand_fn in ('RAND_add', 'RAND_status', 'RAND_egd'):
                defs[rand_fn] = "interp_ssl.%s" % rand_fn
        super(Module, cls).buildloaders()
    buildloaders = classmethod(buildloaders)
| Python |
from ctypes import *
STRING = c_char_p
# Generated enumeration/constant values (byte order, OpenSSL UI types,
# waitpid idtype values).
OSUnknownByteOrder = 0
UIT_PROMPT = 1
P_PGID = 2
P_PID = 1
UIT_ERROR = 5
UIT_INFO = 4
UIT_NONE = 0
P_ALL = 0
UIT_VERIFY = 2
OSBigEndian = 2
UIT_BOOLEAN = 3
OSLittleEndian = 1
# Darwin base typedefs (32-bit ABI).
__darwin_nl_item = c_int
__darwin_wctrans_t = c_int
__darwin_wctype_t = c_ulong
__int8_t = c_byte
__uint8_t = c_ubyte
__int16_t = c_short
__uint16_t = c_ushort
__int32_t = c_int
__uint32_t = c_uint
__int64_t = c_longlong
__uint64_t = c_ulonglong
__darwin_intptr_t = c_long
__darwin_natural_t = c_uint
__darwin_ct_rune_t = c_int
class __mbstate_t(Union):
    pass
__mbstate_t._pack_ = 4
__mbstate_t._fields_ = [
    ('__mbstate8', c_char * 128),
    ('_mbstateL', c_longlong),
]
assert sizeof(__mbstate_t) == 128, sizeof(__mbstate_t)
assert alignment(__mbstate_t) == 4, alignment(__mbstate_t)
__darwin_mbstate_t = __mbstate_t
__darwin_ptrdiff_t = c_int
__darwin_size_t = c_ulong
__darwin_va_list = STRING
__darwin_wchar_t = c_int
__darwin_rune_t = __darwin_wchar_t
__darwin_wint_t = c_int
__darwin_clock_t = c_ulong
__darwin_socklen_t = __uint32_t
__darwin_ssize_t = c_long
__darwin_time_t = c_long
sig_atomic_t = c_int
# struct sigcontext: saved i386 register state (x86-32 only).
class sigcontext(Structure):
    pass
sigcontext._fields_ = [
    ('sc_onstack', c_int),
    ('sc_mask', c_int),
    ('sc_eax', c_uint),
    ('sc_ebx', c_uint),
    ('sc_ecx', c_uint),
    ('sc_edx', c_uint),
    ('sc_edi', c_uint),
    ('sc_esi', c_uint),
    ('sc_ebp', c_uint),
    ('sc_esp', c_uint),
    ('sc_ss', c_uint),
    ('sc_eflags', c_uint),
    ('sc_eip', c_uint),
    ('sc_cs', c_uint),
    ('sc_ds', c_uint),
    ('sc_es', c_uint),
    ('sc_fs', c_uint),
    ('sc_gs', c_uint),
]
assert sizeof(sigcontext) == 72, sizeof(sigcontext)
assert alignment(sigcontext) == 4, alignment(sigcontext)
# BSD u_intN_t and kernel "user_*" typedefs.
u_int8_t = c_ubyte
u_int16_t = c_ushort
u_int32_t = c_uint
u_int64_t = c_ulonglong
int32_t = c_int
register_t = int32_t
user_addr_t = u_int64_t
user_size_t = u_int64_t
int64_t = c_longlong
user_ssize_t = int64_t
user_long_t = int64_t
user_ulong_t = u_int64_t
user_time_t = int64_t
syscall_arg_t = u_int64_t
# values for unnamed enumeration
# OpenSSL AES key schedule (aes.h).
class aes_key_st(Structure):
    pass
aes_key_st._fields_ = [
    ('rd_key', c_ulong * 60),
    ('rounds', c_int),
]
assert sizeof(aes_key_st) == 244, sizeof(aes_key_st)
assert alignment(aes_key_st) == 4, alignment(aes_key_st)
AES_KEY = aes_key_st
# OpenSSL ASN.1 parsing structures (asn1.h).
class asn1_ctx_st(Structure):
    pass
asn1_ctx_st._fields_ = [
    ('p', POINTER(c_ubyte)),
    ('eos', c_int),
    ('error', c_int),
    ('inf', c_int),
    ('tag', c_int),
    ('xclass', c_int),
    ('slen', c_long),
    ('max', POINTER(c_ubyte)),
    ('q', POINTER(c_ubyte)),
    ('pp', POINTER(POINTER(c_ubyte))),
    ('line', c_int),
]
assert sizeof(asn1_ctx_st) == 44, sizeof(asn1_ctx_st)
assert alignment(asn1_ctx_st) == 4, alignment(asn1_ctx_st)
ASN1_CTX = asn1_ctx_st
class asn1_object_st(Structure):
    pass
asn1_object_st._fields_ = [
    ('sn', STRING),
    ('ln', STRING),
    ('nid', c_int),
    ('length', c_int),
    ('data', POINTER(c_ubyte)),
    ('flags', c_int),
]
assert sizeof(asn1_object_st) == 24, sizeof(asn1_object_st)
assert alignment(asn1_object_st) == 4, alignment(asn1_object_st)
ASN1_OBJECT = asn1_object_st
class asn1_string_st(Structure):
    pass
asn1_string_st._fields_ = [
    ('length', c_int),
    ('type', c_int),
    ('data', POINTER(c_ubyte)),
    ('flags', c_long),
]
assert sizeof(asn1_string_st) == 16, sizeof(asn1_string_st)
assert alignment(asn1_string_st) == 4, alignment(asn1_string_st)
ASN1_STRING = asn1_string_st
class ASN1_ENCODING_st(Structure):
    pass
ASN1_ENCODING_st._fields_ = [
    ('enc', POINTER(c_ubyte)),
    ('len', c_long),
    ('modified', c_int),
]
assert sizeof(ASN1_ENCODING_st) == 12, sizeof(ASN1_ENCODING_st)
assert alignment(ASN1_ENCODING_st) == 4, alignment(ASN1_ENCODING_st)
ASN1_ENCODING = ASN1_ENCODING_st
class asn1_string_table_st(Structure):
    pass
asn1_string_table_st._fields_ = [
    ('nid', c_int),
    ('minsize', c_long),
    ('maxsize', c_long),
    ('mask', c_ulong),
    ('flags', c_ulong),
]
assert sizeof(asn1_string_table_st) == 20, sizeof(asn1_string_table_st)
assert alignment(asn1_string_table_st) == 4, alignment(asn1_string_table_st)
ASN1_STRING_TABLE = asn1_string_table_st
# The following ASN.1 types are opaque here (empty _fields_).
class ASN1_TEMPLATE_st(Structure):
    pass
ASN1_TEMPLATE_st._fields_ = [
]
ASN1_TEMPLATE = ASN1_TEMPLATE_st
class ASN1_ITEM_st(Structure):
    pass
ASN1_ITEM = ASN1_ITEM_st
ASN1_ITEM_st._fields_ = [
]
class ASN1_TLC_st(Structure):
    pass
ASN1_TLC = ASN1_TLC_st
ASN1_TLC_st._fields_ = [
]
class ASN1_VALUE_st(Structure):
    pass
ASN1_VALUE_st._fields_ = [
]
ASN1_VALUE = ASN1_VALUE_st
ASN1_ITEM_EXP = ASN1_ITEM
# ASN1_TYPE: tagged union over every ASN.1 string flavour; in OpenSSL
# all these *STRING aliases share the asn1_string_st layout.
class asn1_type_st(Structure):
    pass
class N12asn1_type_st4DOLLAR_11E(Union):
    pass
ASN1_BOOLEAN = c_int
ASN1_INTEGER = asn1_string_st
ASN1_ENUMERATED = asn1_string_st
ASN1_BIT_STRING = asn1_string_st
ASN1_OCTET_STRING = asn1_string_st
ASN1_PRINTABLESTRING = asn1_string_st
ASN1_T61STRING = asn1_string_st
ASN1_IA5STRING = asn1_string_st
ASN1_GENERALSTRING = asn1_string_st
ASN1_BMPSTRING = asn1_string_st
ASN1_UNIVERSALSTRING = asn1_string_st
ASN1_UTCTIME = asn1_string_st
ASN1_GENERALIZEDTIME = asn1_string_st
ASN1_VISIBLESTRING = asn1_string_st
ASN1_UTF8STRING = asn1_string_st
N12asn1_type_st4DOLLAR_11E._fields_ = [
    ('ptr', STRING),
    ('boolean', ASN1_BOOLEAN),
    ('asn1_string', POINTER(ASN1_STRING)),
    ('object', POINTER(ASN1_OBJECT)),
    ('integer', POINTER(ASN1_INTEGER)),
    ('enumerated', POINTER(ASN1_ENUMERATED)),
    ('bit_string', POINTER(ASN1_BIT_STRING)),
    ('octet_string', POINTER(ASN1_OCTET_STRING)),
    ('printablestring', POINTER(ASN1_PRINTABLESTRING)),
    ('t61string', POINTER(ASN1_T61STRING)),
    ('ia5string', POINTER(ASN1_IA5STRING)),
    ('generalstring', POINTER(ASN1_GENERALSTRING)),
    ('bmpstring', POINTER(ASN1_BMPSTRING)),
    ('universalstring', POINTER(ASN1_UNIVERSALSTRING)),
    ('utctime', POINTER(ASN1_UTCTIME)),
    ('generalizedtime', POINTER(ASN1_GENERALIZEDTIME)),
    ('visiblestring', POINTER(ASN1_VISIBLESTRING)),
    ('utf8string', POINTER(ASN1_UTF8STRING)),
    ('set', POINTER(ASN1_STRING)),
    ('sequence', POINTER(ASN1_STRING)),
]
assert sizeof(N12asn1_type_st4DOLLAR_11E) == 4, sizeof(N12asn1_type_st4DOLLAR_11E)
assert alignment(N12asn1_type_st4DOLLAR_11E) == 4, alignment(N12asn1_type_st4DOLLAR_11E)
asn1_type_st._fields_ = [
    ('type', c_int),
    ('value', N12asn1_type_st4DOLLAR_11E),
]
assert sizeof(asn1_type_st) == 8, sizeof(asn1_type_st)
assert alignment(asn1_type_st) == 4, alignment(asn1_type_st)
ASN1_TYPE = asn1_type_st
# Legacy ASN.1 method/header structures.
class asn1_method_st(Structure):
    pass
asn1_method_st._fields_ = [
    ('i2d', CFUNCTYPE(c_int)),
    ('d2i', CFUNCTYPE(STRING)),
    ('create', CFUNCTYPE(STRING)),
    ('destroy', CFUNCTYPE(None)),
]
assert sizeof(asn1_method_st) == 16, sizeof(asn1_method_st)
assert alignment(asn1_method_st) == 4, alignment(asn1_method_st)
ASN1_METHOD = asn1_method_st
class asn1_header_st(Structure):
    pass
asn1_header_st._fields_ = [
    ('header', POINTER(ASN1_OCTET_STRING)),
    ('data', STRING),
    ('meth', POINTER(ASN1_METHOD)),
]
assert sizeof(asn1_header_st) == 12, sizeof(asn1_header_st)
assert alignment(asn1_header_st) == 4, alignment(asn1_header_st)
ASN1_HEADER = asn1_header_st
class BIT_STRING_BITNAME_st(Structure):
    pass
BIT_STRING_BITNAME_st._fields_ = [
    ('bitnum', c_int),
    ('lname', STRING),
    ('sname', STRING),
]
assert sizeof(BIT_STRING_BITNAME_st) == 12, sizeof(BIT_STRING_BITNAME_st)
assert alignment(BIT_STRING_BITNAME_st) == 4, alignment(BIT_STRING_BITNAME_st)
BIT_STRING_BITNAME = BIT_STRING_BITNAME_st
# OpenSSL BIO (I/O abstraction) structures (bio.h).
class bio_st(Structure):
    pass
BIO = bio_st
bio_info_cb = CFUNCTYPE(None, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)
class bio_method_st(Structure):
    pass
bio_method_st._fields_ = [
    ('type', c_int),
    ('name', STRING),
    ('bwrite', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)),
    ('bread', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)),
    ('bputs', CFUNCTYPE(c_int, POINTER(BIO), STRING)),
    ('bgets', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)),
    ('ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, c_long, c_void_p)),
    ('create', CFUNCTYPE(c_int, POINTER(BIO))),
    ('destroy', CFUNCTYPE(c_int, POINTER(BIO))),
    ('callback_ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, POINTER(bio_info_cb))),
]
assert sizeof(bio_method_st) == 40, sizeof(bio_method_st)
assert alignment(bio_method_st) == 4, alignment(bio_method_st)
BIO_METHOD = bio_method_st
class crypto_ex_data_st(Structure):
    pass
class stack_st(Structure):
    pass
STACK = stack_st
crypto_ex_data_st._fields_ = [
    ('sk', POINTER(STACK)),
    ('dummy', c_int),
]
assert sizeof(crypto_ex_data_st) == 8, sizeof(crypto_ex_data_st)
assert alignment(crypto_ex_data_st) == 4, alignment(crypto_ex_data_st)
CRYPTO_EX_DATA = crypto_ex_data_st
bio_st._fields_ = [
    ('method', POINTER(BIO_METHOD)),
    ('callback', CFUNCTYPE(c_long, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)),
    ('cb_arg', STRING),
    ('init', c_int),
    ('shutdown', c_int),
    ('flags', c_int),
    ('retry_reason', c_int),
    ('num', c_int),
    ('ptr', c_void_p),
    ('next_bio', POINTER(bio_st)),
    ('prev_bio', POINTER(bio_st)),
    ('references', c_int),
    ('num_read', c_ulong),
    ('num_write', c_ulong),
    ('ex_data', CRYPTO_EX_DATA),
]
assert sizeof(bio_st) == 64, sizeof(bio_st)
assert alignment(bio_st) == 4, alignment(bio_st)
class bio_f_buffer_ctx_struct(Structure):
    pass
bio_f_buffer_ctx_struct._fields_ = [
    ('ibuf_size', c_int),
    ('obuf_size', c_int),
    ('ibuf', STRING),
    ('ibuf_len', c_int),
    ('ibuf_off', c_int),
    ('obuf', STRING),
    ('obuf_len', c_int),
    ('obuf_off', c_int),
]
assert sizeof(bio_f_buffer_ctx_struct) == 32, sizeof(bio_f_buffer_ctx_struct)
assert alignment(bio_f_buffer_ctx_struct) == 4, alignment(bio_f_buffer_ctx_struct)
BIO_F_BUFFER_CTX = bio_f_buffer_ctx_struct
# hostent is left opaque (no fields generated).
class hostent(Structure):
    pass
hostent._fields_ = [
]
# Blowfish key schedule (blowfish.h).
class bf_key_st(Structure):
    pass
bf_key_st._fields_ = [
    ('P', c_uint * 18),
    ('S', c_uint * 1024),
]
assert sizeof(bf_key_st) == 4168, sizeof(bf_key_st)
assert alignment(bf_key_st) == 4, alignment(bf_key_st)
BF_KEY = bf_key_st
# OpenSSL bignum (BN) structures (bn.h).
class bignum_st(Structure):
    pass
bignum_st._fields_ = [
    ('d', POINTER(c_ulong)),
    ('top', c_int),
    ('dmax', c_int),
    ('neg', c_int),
    ('flags', c_int),
]
assert sizeof(bignum_st) == 20, sizeof(bignum_st)
assert alignment(bignum_st) == 4, alignment(bignum_st)
BIGNUM = bignum_st
class bignum_ctx(Structure):
    pass
bignum_ctx._fields_ = [
]
BN_CTX = bignum_ctx
class bn_blinding_st(Structure):
    pass
bn_blinding_st._fields_ = [
    ('init', c_int),
    ('A', POINTER(BIGNUM)),
    ('Ai', POINTER(BIGNUM)),
    ('mod', POINTER(BIGNUM)),
    ('thread_id', c_ulong),
]
assert sizeof(bn_blinding_st) == 20, sizeof(bn_blinding_st)
assert alignment(bn_blinding_st) == 4, alignment(bn_blinding_st)
BN_BLINDING = bn_blinding_st
class bn_mont_ctx_st(Structure):
    pass
bn_mont_ctx_st._fields_ = [
    ('ri', c_int),
    ('RR', BIGNUM),
    ('N', BIGNUM),
    ('Ni', BIGNUM),
    ('n0', c_ulong),
    ('flags', c_int),
]
assert sizeof(bn_mont_ctx_st) == 72, sizeof(bn_mont_ctx_st)
assert alignment(bn_mont_ctx_st) == 4, alignment(bn_mont_ctx_st)
BN_MONT_CTX = bn_mont_ctx_st
class bn_recp_ctx_st(Structure):
    pass
bn_recp_ctx_st._fields_ = [
    ('N', BIGNUM),
    ('Nr', BIGNUM),
    ('num_bits', c_int),
    ('shift', c_int),
    ('flags', c_int),
]
assert sizeof(bn_recp_ctx_st) == 52, sizeof(bn_recp_ctx_st)
assert alignment(bn_recp_ctx_st) == 4, alignment(bn_recp_ctx_st)
BN_RECP_CTX = bn_recp_ctx_st
class buf_mem_st(Structure):
    pass
buf_mem_st._fields_ = [
    ('length', c_int),
    ('data', STRING),
    ('max', c_int),
]
assert sizeof(buf_mem_st) == 12, sizeof(buf_mem_st)
assert alignment(buf_mem_st) == 4, alignment(buf_mem_st)
BUF_MEM = buf_mem_st
# CAST cipher key (cast.h).
class cast_key_st(Structure):
    pass
cast_key_st._fields_ = [
    ('data', c_ulong * 32),
    ('short_key', c_int),
]
assert sizeof(cast_key_st) == 132, sizeof(cast_key_st)
assert alignment(cast_key_st) == 4, alignment(cast_key_st)
CAST_KEY = cast_key_st
# Compression method/context (comp.h).
class comp_method_st(Structure):
    pass
comp_method_st._fields_ = [
    ('type', c_int),
    ('name', STRING),
    ('init', CFUNCTYPE(c_int)),
    ('finish', CFUNCTYPE(None)),
    ('compress', CFUNCTYPE(c_int)),
    ('expand', CFUNCTYPE(c_int)),
    ('ctrl', CFUNCTYPE(c_long)),
    ('callback_ctrl', CFUNCTYPE(c_long)),
]
assert sizeof(comp_method_st) == 32, sizeof(comp_method_st)
assert alignment(comp_method_st) == 4, alignment(comp_method_st)
COMP_METHOD = comp_method_st
class comp_ctx_st(Structure):
    pass
comp_ctx_st._fields_ = [
    ('meth', POINTER(COMP_METHOD)),
    ('compress_in', c_ulong),
    ('compress_out', c_ulong),
    ('expand_in', c_ulong),
    ('expand_out', c_ulong),
    ('ex_data', CRYPTO_EX_DATA),
]
assert sizeof(comp_ctx_st) == 28, sizeof(comp_ctx_st)
assert alignment(comp_ctx_st) == 4, alignment(comp_ctx_st)
COMP_CTX = comp_ctx_st
# CRYPTO locking / ex_data callback types (crypto.h).
class CRYPTO_dynlock_value(Structure):
    pass
CRYPTO_dynlock_value._fields_ = [
]
class CRYPTO_dynlock(Structure):
    pass
CRYPTO_dynlock._fields_ = [
    ('references', c_int),
    ('data', POINTER(CRYPTO_dynlock_value)),
]
assert sizeof(CRYPTO_dynlock) == 8, sizeof(CRYPTO_dynlock)
assert alignment(CRYPTO_dynlock) == 4, alignment(CRYPTO_dynlock)
BIO_dummy = bio_st
CRYPTO_EX_new = CFUNCTYPE(c_int, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p)
CRYPTO_EX_free = CFUNCTYPE(None, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p)
CRYPTO_EX_dup = CFUNCTYPE(c_int, POINTER(CRYPTO_EX_DATA), POINTER(CRYPTO_EX_DATA), c_void_p, c_int, c_long, c_void_p)
class crypto_ex_data_func_st(Structure):
    pass
crypto_ex_data_func_st._fields_ = [
    ('argl', c_long),
    ('argp', c_void_p),
    ('new_func', POINTER(CRYPTO_EX_new)),
    ('free_func', POINTER(CRYPTO_EX_free)),
    ('dup_func', POINTER(CRYPTO_EX_dup)),
]
assert sizeof(crypto_ex_data_func_st) == 20, sizeof(crypto_ex_data_func_st)
assert alignment(crypto_ex_data_func_st) == 4, alignment(crypto_ex_data_func_st)
CRYPTO_EX_DATA_FUNCS = crypto_ex_data_func_st
class st_CRYPTO_EX_DATA_IMPL(Structure):
    pass
CRYPTO_EX_DATA_IMPL = st_CRYPTO_EX_DATA_IMPL
st_CRYPTO_EX_DATA_IMPL._fields_ = [
]
CRYPTO_MEM_LEAK_CB = CFUNCTYPE(c_void_p, c_ulong, STRING, c_int, c_int, c_void_p)
# DES key types (des.h) — both the modern and the "_ossl_old_" layouts.
DES_cblock = c_ubyte * 8
const_DES_cblock = c_ubyte * 8
class DES_ks(Structure):
    pass
class N6DES_ks3DOLLAR_9E(Union):
    pass
N6DES_ks3DOLLAR_9E._fields_ = [
    ('cblock', DES_cblock),
    ('deslong', c_ulong * 2),
]
assert sizeof(N6DES_ks3DOLLAR_9E) == 8, sizeof(N6DES_ks3DOLLAR_9E)
assert alignment(N6DES_ks3DOLLAR_9E) == 4, alignment(N6DES_ks3DOLLAR_9E)
DES_ks._fields_ = [
    ('ks', N6DES_ks3DOLLAR_9E * 16),
]
assert sizeof(DES_ks) == 128, sizeof(DES_ks)
assert alignment(DES_ks) == 4, alignment(DES_ks)
DES_key_schedule = DES_ks
_ossl_old_des_cblock = c_ubyte * 8
class _ossl_old_des_ks_struct(Structure):
    pass
class N23_ossl_old_des_ks_struct4DOLLAR_10E(Union):
    pass
N23_ossl_old_des_ks_struct4DOLLAR_10E._fields_ = [
    ('_', _ossl_old_des_cblock),
    ('pad', c_ulong * 2),
]
assert sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 8, sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E)
assert alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 4, alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E)
_ossl_old_des_ks_struct._fields_ = [
    ('ks', N23_ossl_old_des_ks_struct4DOLLAR_10E),
]
assert sizeof(_ossl_old_des_ks_struct) == 8, sizeof(_ossl_old_des_ks_struct)
assert alignment(_ossl_old_des_ks_struct) == 8, alignment(_ossl_old_des_ks_struct)
_ossl_old_des_key_schedule = _ossl_old_des_ks_struct * 16
# Diffie-Hellman structures (dh.h).
class dh_st(Structure):
    pass
DH = dh_st
class dh_method(Structure):
    pass
dh_method._fields_ = [
    ('name', STRING),
    ('generate_key', CFUNCTYPE(c_int, POINTER(DH))),
    ('compute_key', CFUNCTYPE(c_int, POINTER(c_ubyte), POINTER(BIGNUM), POINTER(DH))),
    ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DH), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
    ('init', CFUNCTYPE(c_int, POINTER(DH))),
    ('finish', CFUNCTYPE(c_int, POINTER(DH))),
    ('flags', c_int),
    ('app_data', STRING),
]
assert sizeof(dh_method) == 32, sizeof(dh_method)
assert alignment(dh_method) == 4, alignment(dh_method)
DH_METHOD = dh_method
class engine_st(Structure):
    pass
ENGINE = engine_st
dh_st._fields_ = [
    ('pad', c_int),
    ('version', c_int),
    ('p', POINTER(BIGNUM)),
    ('g', POINTER(BIGNUM)),
    ('length', c_long),
    ('pub_key', POINTER(BIGNUM)),
    ('priv_key', POINTER(BIGNUM)),
    ('flags', c_int),
    ('method_mont_p', STRING),
    ('q', POINTER(BIGNUM)),
    ('j', POINTER(BIGNUM)),
    ('seed', POINTER(c_ubyte)),
    ('seedlen', c_int),
    ('counter', POINTER(BIGNUM)),
    ('references', c_int),
    ('ex_data', CRYPTO_EX_DATA),
    ('meth', POINTER(DH_METHOD)),
    ('engine', POINTER(ENGINE)),
]
assert sizeof(dh_st) == 76, sizeof(dh_st)
assert alignment(dh_st) == 4, alignment(dh_st)
class dsa_st(Structure):
pass
DSA = dsa_st
class DSA_SIG_st(Structure):
pass
DSA_SIG_st._fields_ = [
('r', POINTER(BIGNUM)),
('s', POINTER(BIGNUM)),
]
assert sizeof(DSA_SIG_st) == 8, sizeof(DSA_SIG_st)
assert alignment(DSA_SIG_st) == 4, alignment(DSA_SIG_st)
DSA_SIG = DSA_SIG_st
class dsa_method(Structure):
pass
dsa_method._fields_ = [
('name', STRING),
('dsa_do_sign', CFUNCTYPE(POINTER(DSA_SIG), POINTER(c_ubyte), c_int, POINTER(DSA))),
('dsa_sign_setup', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BN_CTX), POINTER(POINTER(BIGNUM)), POINTER(POINTER(BIGNUM)))),
('dsa_do_verify', CFUNCTYPE(c_int, POINTER(c_ubyte), c_int, POINTER(DSA_SIG), POINTER(DSA))),
('dsa_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
('init', CFUNCTYPE(c_int, POINTER(DSA))),
('finish', CFUNCTYPE(c_int, POINTER(DSA))),
('flags', c_int),
('app_data', STRING),
]
assert sizeof(dsa_method) == 40, sizeof(dsa_method)
assert alignment(dsa_method) == 4, alignment(dsa_method)
DSA_METHOD = dsa_method
dsa_st._fields_ = [
('pad', c_int),
('version', c_long),
('write_params', c_int),
('p', POINTER(BIGNUM)),
('q', POINTER(BIGNUM)),
('g', POINTER(BIGNUM)),
('pub_key', POINTER(BIGNUM)),
('priv_key', POINTER(BIGNUM)),
('kinv', POINTER(BIGNUM)),
('r', POINTER(BIGNUM)),
('flags', c_int),
('method_mont_p', STRING),
('references', c_int),
('ex_data', CRYPTO_EX_DATA),
('meth', POINTER(DSA_METHOD)),
('engine', POINTER(ENGINE)),
]
assert sizeof(dsa_st) == 68, sizeof(dsa_st)
assert alignment(dsa_st) == 4, alignment(dsa_st)
class evp_pkey_st(Structure):
    pass
# Generator-mangled name for the anonymous union inside evp_pkey_st
# ("N11evp_pkey_st4DOLLAR_12E" encodes its nesting position in the C header).
class N11evp_pkey_st4DOLLAR_12E(Union):
    pass
# Forward declaration; rsa_st._fields_ is assigned much further below.
class rsa_st(Structure):
    pass
# Union of the possible concrete key representations held by an EVP_PKEY.
N11evp_pkey_st4DOLLAR_12E._fields_ = [
    ('ptr', STRING),
    ('rsa', POINTER(rsa_st)),
    ('dsa', POINTER(dsa_st)),
    ('dh', POINTER(dh_st)),
]
assert sizeof(N11evp_pkey_st4DOLLAR_12E) == 4, sizeof(N11evp_pkey_st4DOLLAR_12E)
assert alignment(N11evp_pkey_st4DOLLAR_12E) == 4, alignment(N11evp_pkey_st4DOLLAR_12E)
evp_pkey_st._fields_ = [
    ('type', c_int),
    ('save_type', c_int),
    ('references', c_int),
    ('pkey', N11evp_pkey_st4DOLLAR_12E),
    ('save_parameters', c_int),
    ('attributes', POINTER(STACK)),
]
assert sizeof(evp_pkey_st) == 24, sizeof(evp_pkey_st)
assert alignment(evp_pkey_st) == 4, alignment(evp_pkey_st)
# Message-digest algorithm descriptor and its per-operation context.
class env_md_st(Structure):
    pass
class env_md_ctx_st(Structure):
    pass
EVP_MD_CTX = env_md_ctx_st
env_md_st._fields_ = [
    ('type', c_int),
    ('pkey_type', c_int),
    ('md_size', c_int),
    ('flags', c_ulong),
    ('init', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))),
    ('update', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), c_void_p, c_ulong)),
    ('final', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(c_ubyte))),
    ('copy', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(EVP_MD_CTX))),
    ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))),
    ('sign', CFUNCTYPE(c_int)),
    ('verify', CFUNCTYPE(c_int)),
    ('required_pkey_type', c_int * 5),
    ('block_size', c_int),
    ('ctx_size', c_int),
]
assert sizeof(env_md_st) == 72, sizeof(env_md_st)
assert alignment(env_md_st) == 4, alignment(env_md_st)
EVP_MD = env_md_st
env_md_ctx_st._fields_ = [
    ('digest', POINTER(EVP_MD)),
    ('engine', POINTER(ENGINE)),
    ('flags', c_ulong),
    ('md_data', c_void_p),
]
assert sizeof(env_md_ctx_st) == 16, sizeof(env_md_ctx_st)
assert alignment(env_md_ctx_st) == 4, alignment(env_md_ctx_st)
# Symmetric-cipher algorithm descriptor and its per-operation context.
class evp_cipher_st(Structure):
    pass
class evp_cipher_ctx_st(Structure):
    pass
EVP_CIPHER_CTX = evp_cipher_ctx_st
evp_cipher_st._fields_ = [
    ('nid', c_int),
    ('block_size', c_int),
    ('key_len', c_int),
    ('iv_len', c_int),
    ('flags', c_ulong),
    ('init', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_int)),
    ('do_cipher', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_uint)),
    ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX))),
    ('ctx_size', c_int),
    ('set_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))),
    ('get_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))),
    ('ctrl', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), c_int, c_int, c_void_p)),
    ('app_data', c_void_p),
]
assert sizeof(evp_cipher_st) == 52, sizeof(evp_cipher_st)
assert alignment(evp_cipher_st) == 4, alignment(evp_cipher_st)
class evp_cipher_info_st(Structure):
    pass
EVP_CIPHER = evp_cipher_st
# Cipher plus its (up to 16-byte) IV, as used when de/serialising keys.
evp_cipher_info_st._fields_ = [
    ('cipher', POINTER(EVP_CIPHER)),
    ('iv', c_ubyte * 16),
]
assert sizeof(evp_cipher_info_st) == 20, sizeof(evp_cipher_info_st)
assert alignment(evp_cipher_info_st) == 4, alignment(evp_cipher_info_st)
evp_cipher_ctx_st._fields_ = [
    ('cipher', POINTER(EVP_CIPHER)),
    ('engine', POINTER(ENGINE)),
    ('encrypt', c_int),
    ('buf_len', c_int),
    ('oiv', c_ubyte * 16),
    ('iv', c_ubyte * 16),
    ('buf', c_ubyte * 32),
    ('num', c_int),
    ('app_data', c_void_p),
    ('key_len', c_int),
    ('flags', c_ulong),
    ('cipher_data', c_void_p),
    ('final_used', c_int),
    ('block_mask', c_int),
    ('final', c_ubyte * 32),
]
assert sizeof(evp_cipher_ctx_st) == 140, sizeof(evp_cipher_ctx_st)
assert alignment(evp_cipher_ctx_st) == 4, alignment(evp_cipher_ctx_st)
# Base64 encode/decode state.
class evp_Encode_Ctx_st(Structure):
    pass
evp_Encode_Ctx_st._fields_ = [
    ('num', c_int),
    ('length', c_int),
    ('enc_data', c_ubyte * 80),
    ('line_num', c_int),
    ('expect_nl', c_int),
]
assert sizeof(evp_Encode_Ctx_st) == 96, sizeof(evp_Encode_Ctx_st)
assert alignment(evp_Encode_Ctx_st) == 4, alignment(evp_Encode_Ctx_st)
EVP_ENCODE_CTX = evp_Encode_Ctx_st
# Callback type used for password-based key derivation.
EVP_PBE_KEYGEN = CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), STRING, c_int, POINTER(ASN1_TYPE), POINTER(EVP_CIPHER), POINTER(EVP_MD), c_int)
# OpenSSL's generic hash table (lhash): a chained-bucket node, the callback
# typedefs, and the table itself (which keeps extensive usage statistics).
class lhash_node_st(Structure):
    pass
lhash_node_st._fields_ = [
    ('data', c_void_p),
    ('next', POINTER(lhash_node_st)),   # self-referential: singly linked chain
    ('hash', c_ulong),
]
assert sizeof(lhash_node_st) == 12, sizeof(lhash_node_st)
assert alignment(lhash_node_st) == 4, alignment(lhash_node_st)
LHASH_NODE = lhash_node_st
LHASH_COMP_FN_TYPE = CFUNCTYPE(c_int, c_void_p, c_void_p)
LHASH_HASH_FN_TYPE = CFUNCTYPE(c_ulong, c_void_p)
LHASH_DOALL_FN_TYPE = CFUNCTYPE(None, c_void_p)
LHASH_DOALL_ARG_FN_TYPE = CFUNCTYPE(None, c_void_p, c_void_p)
class lhash_st(Structure):
    pass
lhash_st._fields_ = [
    ('b', POINTER(POINTER(LHASH_NODE))),
    ('comp', LHASH_COMP_FN_TYPE),
    ('hash', LHASH_HASH_FN_TYPE),
    ('num_nodes', c_uint),
    ('num_alloc_nodes', c_uint),
    ('p', c_uint),
    ('pmax', c_uint),
    ('up_load', c_ulong),
    ('down_load', c_ulong),
    ('num_items', c_ulong),
    ('num_expands', c_ulong),
    ('num_expand_reallocs', c_ulong),
    ('num_contracts', c_ulong),
    ('num_contract_reallocs', c_ulong),
    ('num_hash_calls', c_ulong),
    ('num_comp_calls', c_ulong),
    ('num_insert', c_ulong),
    ('num_replace', c_ulong),
    ('num_delete', c_ulong),
    ('num_no_delete', c_ulong),
    ('num_retrieve', c_ulong),
    ('num_retrieve_miss', c_ulong),
    ('num_hash_comps', c_ulong),
    ('error', c_int),
]
assert sizeof(lhash_st) == 96, sizeof(lhash_st)
assert alignment(lhash_st) == 4, alignment(lhash_st)
LHASH = lhash_st
# Per-algorithm digest state structures (MD2/MD4/MD5/MDC2) and the object
# name registry entry.  MD4 and MD5 share the same A-D/Nl/Nh/data/num shape.
class MD2state_st(Structure):
    pass
MD2state_st._fields_ = [
    ('num', c_int),
    ('data', c_ubyte * 16),
    ('cksm', c_uint * 16),
    ('state', c_uint * 16),
]
assert sizeof(MD2state_st) == 148, sizeof(MD2state_st)
assert alignment(MD2state_st) == 4, alignment(MD2state_st)
MD2_CTX = MD2state_st
class MD4state_st(Structure):
    pass
MD4state_st._fields_ = [
    ('A', c_uint),
    ('B', c_uint),
    ('C', c_uint),
    ('D', c_uint),
    ('Nl', c_uint),
    ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(MD4state_st) == 92, sizeof(MD4state_st)
assert alignment(MD4state_st) == 4, alignment(MD4state_st)
MD4_CTX = MD4state_st
class MD5state_st(Structure):
    pass
MD5state_st._fields_ = [
    ('A', c_uint),
    ('B', c_uint),
    ('C', c_uint),
    ('D', c_uint),
    ('Nl', c_uint),
    ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(MD5state_st) == 92, sizeof(MD5state_st)
assert alignment(MD5state_st) == 4, alignment(MD5state_st)
MD5_CTX = MD5state_st
class mdc2_ctx_st(Structure):
    pass
mdc2_ctx_st._fields_ = [
    ('num', c_int),
    ('data', c_ubyte * 8),
    ('h', DES_cblock),
    ('hh', DES_cblock),
    ('pad_type', c_int),
]
assert sizeof(mdc2_ctx_st) == 32, sizeof(mdc2_ctx_st)
assert alignment(mdc2_ctx_st) == 4, alignment(mdc2_ctx_st)
MDC2_CTX = mdc2_ctx_st
class obj_name_st(Structure):
    pass
obj_name_st._fields_ = [
    ('type', c_int),
    ('alias', c_int),
    ('name', STRING),
    ('data', STRING),
]
assert sizeof(obj_name_st) == 16, sizeof(obj_name_st)
assert alignment(obj_name_st) == 4, alignment(obj_name_st)
OBJ_NAME = obj_name_st
# Aliases and forward declarations for the X.509 family.  The *_st classes
# declared here with empty bodies get their fields assigned later in the file
# (or are deliberately left opaque, like engine_st below).
ASN1_TIME = asn1_string_st
ASN1_NULL = c_int
EVP_PKEY = evp_pkey_st
class x509_st(Structure):
    pass
X509 = x509_st
class X509_algor_st(Structure):
    pass
X509_ALGOR = X509_algor_st
class X509_crl_st(Structure):
    pass
X509_CRL = X509_crl_st
class X509_name_st(Structure):
    pass
X509_NAME = X509_name_st
class x509_store_st(Structure):
    pass
X509_STORE = x509_store_st
class x509_store_ctx_st(Structure):
    pass
X509_STORE_CTX = x509_store_ctx_st
# engine_st is only ever used through POINTER(ENGINE); its layout is left
# opaque (no fields were generated for it).
engine_st._fields_ = [
]
# PEM seal context: bundles the base64 encoder, digest and cipher contexts.
class PEM_Encode_Seal_st(Structure):
    pass
PEM_Encode_Seal_st._fields_ = [
    ('encode', EVP_ENCODE_CTX),
    ('md', EVP_MD_CTX),
    ('cipher', EVP_CIPHER_CTX),
]
assert sizeof(PEM_Encode_Seal_st) == 252, sizeof(PEM_Encode_Seal_st)
assert alignment(PEM_Encode_Seal_st) == 4, alignment(PEM_Encode_Seal_st)
PEM_ENCODE_SEAL_CTX = PEM_Encode_Seal_st
class pem_recip_st(Structure):
    pass
pem_recip_st._fields_ = [
    ('name', STRING),
    ('dn', POINTER(X509_NAME)),
    ('cipher', c_int),
    ('key_enc', c_int),
]
assert sizeof(pem_recip_st) == 16, sizeof(pem_recip_st)
assert alignment(pem_recip_st) == 4, alignment(pem_recip_st)
PEM_USER = pem_recip_st
class pem_ctx_st(Structure):
    pass
# Anonymous inner structs of pem_ctx_st (proc_type and DEK_info members).
class N10pem_ctx_st4DOLLAR_16E(Structure):
    pass
N10pem_ctx_st4DOLLAR_16E._fields_ = [
    ('version', c_int),
    ('mode', c_int),
]
assert sizeof(N10pem_ctx_st4DOLLAR_16E) == 8, sizeof(N10pem_ctx_st4DOLLAR_16E)
assert alignment(N10pem_ctx_st4DOLLAR_16E) == 4, alignment(N10pem_ctx_st4DOLLAR_16E)
class N10pem_ctx_st4DOLLAR_17E(Structure):
    pass
N10pem_ctx_st4DOLLAR_17E._fields_ = [
    ('cipher', c_int),
]
assert sizeof(N10pem_ctx_st4DOLLAR_17E) == 4, sizeof(N10pem_ctx_st4DOLLAR_17E)
assert alignment(N10pem_ctx_st4DOLLAR_17E) == 4, alignment(N10pem_ctx_st4DOLLAR_17E)
pem_ctx_st._fields_ = [
    ('type', c_int),
    ('proc_type', N10pem_ctx_st4DOLLAR_16E),
    ('domain', STRING),
    ('DEK_info', N10pem_ctx_st4DOLLAR_17E),
    ('originator', POINTER(PEM_USER)),
    ('num_recipient', c_int),
    ('recipient', POINTER(POINTER(PEM_USER))),
    ('x509_chain', POINTER(STACK)),
    ('md', POINTER(EVP_MD)),
    ('md_enc', c_int),
    ('md_len', c_int),
    ('md_data', STRING),
    ('dec', POINTER(EVP_CIPHER)),
    ('key_len', c_int),
    ('key', POINTER(c_ubyte)),
    ('data_enc', c_int),
    ('data_len', c_int),
    ('data', POINTER(c_ubyte)),
]
assert sizeof(pem_ctx_st) == 76, sizeof(pem_ctx_st)
assert alignment(pem_ctx_st) == 4, alignment(pem_ctx_st)
PEM_CTX = pem_ctx_st
# PKCS#7 content types.  pkcs7_st is forward-declared so that the SIGNED and
# DIGEST variants can point back at it ('contents' fields); the top-level
# pkcs7_st itself is completed at the end of this section.
class pkcs7_issuer_and_serial_st(Structure):
    pass
pkcs7_issuer_and_serial_st._fields_ = [
    ('issuer', POINTER(X509_NAME)),
    ('serial', POINTER(ASN1_INTEGER)),
]
assert sizeof(pkcs7_issuer_and_serial_st) == 8, sizeof(pkcs7_issuer_and_serial_st)
assert alignment(pkcs7_issuer_and_serial_st) == 4, alignment(pkcs7_issuer_and_serial_st)
PKCS7_ISSUER_AND_SERIAL = pkcs7_issuer_and_serial_st
class pkcs7_signer_info_st(Structure):
    pass
pkcs7_signer_info_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)),
    ('digest_alg', POINTER(X509_ALGOR)),
    ('auth_attr', POINTER(STACK)),
    ('digest_enc_alg', POINTER(X509_ALGOR)),
    ('enc_digest', POINTER(ASN1_OCTET_STRING)),
    ('unauth_attr', POINTER(STACK)),
    ('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(pkcs7_signer_info_st) == 32, sizeof(pkcs7_signer_info_st)
assert alignment(pkcs7_signer_info_st) == 4, alignment(pkcs7_signer_info_st)
PKCS7_SIGNER_INFO = pkcs7_signer_info_st
class pkcs7_recip_info_st(Structure):
    pass
pkcs7_recip_info_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)),
    ('key_enc_algor', POINTER(X509_ALGOR)),
    ('enc_key', POINTER(ASN1_OCTET_STRING)),
    ('cert', POINTER(X509)),
]
assert sizeof(pkcs7_recip_info_st) == 20, sizeof(pkcs7_recip_info_st)
assert alignment(pkcs7_recip_info_st) == 4, alignment(pkcs7_recip_info_st)
PKCS7_RECIP_INFO = pkcs7_recip_info_st
class pkcs7_signed_st(Structure):
    pass
class pkcs7_st(Structure):
    pass
pkcs7_signed_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('md_algs', POINTER(STACK)),
    ('cert', POINTER(STACK)),
    ('crl', POINTER(STACK)),
    ('signer_info', POINTER(STACK)),
    ('contents', POINTER(pkcs7_st)),
]
assert sizeof(pkcs7_signed_st) == 24, sizeof(pkcs7_signed_st)
assert alignment(pkcs7_signed_st) == 4, alignment(pkcs7_signed_st)
PKCS7_SIGNED = pkcs7_signed_st
class pkcs7_enc_content_st(Structure):
    pass
pkcs7_enc_content_st._fields_ = [
    ('content_type', POINTER(ASN1_OBJECT)),
    ('algorithm', POINTER(X509_ALGOR)),
    ('enc_data', POINTER(ASN1_OCTET_STRING)),
    ('cipher', POINTER(EVP_CIPHER)),
]
assert sizeof(pkcs7_enc_content_st) == 16, sizeof(pkcs7_enc_content_st)
assert alignment(pkcs7_enc_content_st) == 4, alignment(pkcs7_enc_content_st)
PKCS7_ENC_CONTENT = pkcs7_enc_content_st
class pkcs7_enveloped_st(Structure):
    pass
pkcs7_enveloped_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('recipientinfo', POINTER(STACK)),
    ('enc_data', POINTER(PKCS7_ENC_CONTENT)),
]
assert sizeof(pkcs7_enveloped_st) == 12, sizeof(pkcs7_enveloped_st)
assert alignment(pkcs7_enveloped_st) == 4, alignment(pkcs7_enveloped_st)
PKCS7_ENVELOPE = pkcs7_enveloped_st
class pkcs7_signedandenveloped_st(Structure):
    pass
pkcs7_signedandenveloped_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('md_algs', POINTER(STACK)),
    ('cert', POINTER(STACK)),
    ('crl', POINTER(STACK)),
    ('signer_info', POINTER(STACK)),
    ('enc_data', POINTER(PKCS7_ENC_CONTENT)),
    ('recipientinfo', POINTER(STACK)),
]
assert sizeof(pkcs7_signedandenveloped_st) == 28, sizeof(pkcs7_signedandenveloped_st)
assert alignment(pkcs7_signedandenveloped_st) == 4, alignment(pkcs7_signedandenveloped_st)
PKCS7_SIGN_ENVELOPE = pkcs7_signedandenveloped_st
class pkcs7_digest_st(Structure):
    pass
pkcs7_digest_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('md', POINTER(X509_ALGOR)),
    ('contents', POINTER(pkcs7_st)),
    ('digest', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(pkcs7_digest_st) == 16, sizeof(pkcs7_digest_st)
assert alignment(pkcs7_digest_st) == 4, alignment(pkcs7_digest_st)
PKCS7_DIGEST = pkcs7_digest_st
class pkcs7_encrypted_st(Structure):
    pass
pkcs7_encrypted_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('enc_data', POINTER(PKCS7_ENC_CONTENT)),
]
assert sizeof(pkcs7_encrypted_st) == 8, sizeof(pkcs7_encrypted_st)
assert alignment(pkcs7_encrypted_st) == 4, alignment(pkcs7_encrypted_st)
PKCS7_ENCRYPT = pkcs7_encrypted_st
# Anonymous union inside pkcs7_st: the payload, discriminated by the
# 'type' field of the enclosing struct.
class N8pkcs7_st4DOLLAR_15E(Union):
    pass
N8pkcs7_st4DOLLAR_15E._fields_ = [
    ('ptr', STRING),
    ('data', POINTER(ASN1_OCTET_STRING)),
    ('sign', POINTER(PKCS7_SIGNED)),
    ('enveloped', POINTER(PKCS7_ENVELOPE)),
    ('signed_and_enveloped', POINTER(PKCS7_SIGN_ENVELOPE)),
    ('digest', POINTER(PKCS7_DIGEST)),
    ('encrypted', POINTER(PKCS7_ENCRYPT)),
    ('other', POINTER(ASN1_TYPE)),
]
assert sizeof(N8pkcs7_st4DOLLAR_15E) == 4, sizeof(N8pkcs7_st4DOLLAR_15E)
assert alignment(N8pkcs7_st4DOLLAR_15E) == 4, alignment(N8pkcs7_st4DOLLAR_15E)
pkcs7_st._fields_ = [
    ('asn1', POINTER(c_ubyte)),
    ('length', c_long),
    ('state', c_int),
    ('detached', c_int),
    ('type', POINTER(ASN1_OBJECT)),
    ('d', N8pkcs7_st4DOLLAR_15E),
]
assert sizeof(pkcs7_st) == 24, sizeof(pkcs7_st)
assert alignment(pkcs7_st) == 4, alignment(pkcs7_st)
PKCS7 = pkcs7_st
# Key-schedule structures for the RC2/RC4/RC5 stream/block ciphers and the
# RIPEMD-160 digest state.  Note rc4_key_st is the only byte-aligned struct
# in this file (alignment 1: it contains only c_ubyte members).
class rc2_key_st(Structure):
    pass
rc2_key_st._fields_ = [
    ('data', c_uint * 64),
]
assert sizeof(rc2_key_st) == 256, sizeof(rc2_key_st)
assert alignment(rc2_key_st) == 4, alignment(rc2_key_st)
RC2_KEY = rc2_key_st
class rc4_key_st(Structure):
    pass
rc4_key_st._fields_ = [
    ('x', c_ubyte),
    ('y', c_ubyte),
    ('data', c_ubyte * 256),
]
assert sizeof(rc4_key_st) == 258, sizeof(rc4_key_st)
assert alignment(rc4_key_st) == 1, alignment(rc4_key_st)
RC4_KEY = rc4_key_st
class rc5_key_st(Structure):
    pass
rc5_key_st._fields_ = [
    ('rounds', c_int),
    ('data', c_ulong * 34),
]
assert sizeof(rc5_key_st) == 140, sizeof(rc5_key_st)
assert alignment(rc5_key_st) == 4, alignment(rc5_key_st)
RC5_32_KEY = rc5_key_st
class RIPEMD160state_st(Structure):
    pass
RIPEMD160state_st._fields_ = [
    ('A', c_uint),
    ('B', c_uint),
    ('C', c_uint),
    ('D', c_uint),
    ('E', c_uint),
    ('Nl', c_uint),
    ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(RIPEMD160state_st) == 96, sizeof(RIPEMD160state_st)
assert alignment(RIPEMD160state_st) == 4, alignment(RIPEMD160state_st)
RIPEMD160_CTX = RIPEMD160state_st
# RSA: method table first, then the key structure (rsa_st was forward-declared
# earlier, next to evp_pkey_st, so EVP_PKEY could already point at it).
RSA = rsa_st
class rsa_meth_st(Structure):
    pass
rsa_meth_st._fields_ = [
    ('name', STRING),
    ('rsa_pub_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_pub_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_priv_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_priv_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(RSA))),
    ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
    ('init', CFUNCTYPE(c_int, POINTER(RSA))),
    ('finish', CFUNCTYPE(c_int, POINTER(RSA))),
    ('flags', c_int),
    ('app_data', STRING),
    ('rsa_sign', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), POINTER(c_uint), POINTER(RSA))),
    ('rsa_verify', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), c_uint, POINTER(RSA))),
]
assert sizeof(rsa_meth_st) == 52, sizeof(rsa_meth_st)
assert alignment(rsa_meth_st) == 4, alignment(rsa_meth_st)
RSA_METHOD = rsa_meth_st
rsa_st._fields_ = [
    ('pad', c_int),
    ('version', c_long),
    ('meth', POINTER(RSA_METHOD)),
    ('engine', POINTER(ENGINE)),
    ('n', POINTER(BIGNUM)),
    ('e', POINTER(BIGNUM)),
    ('d', POINTER(BIGNUM)),
    ('p', POINTER(BIGNUM)),
    ('q', POINTER(BIGNUM)),
    ('dmp1', POINTER(BIGNUM)),
    ('dmq1', POINTER(BIGNUM)),
    ('iqmp', POINTER(BIGNUM)),
    ('ex_data', CRYPTO_EX_DATA),
    ('references', c_int),
    ('flags', c_int),
    ('_method_mod_n', POINTER(BN_MONT_CTX)),
    ('_method_mod_p', POINTER(BN_MONT_CTX)),
    ('_method_mod_q', POINTER(BN_MONT_CTX)),
    ('bignum_data', STRING),
    ('blinding', POINTER(BN_BLINDING)),
]
assert sizeof(rsa_st) == 84, sizeof(rsa_st)
assert alignment(rsa_st) == 4, alignment(rsa_st)
# Generic "pointer to function" type used by the OpenSSL API.
openssl_fptr = CFUNCTYPE(None)
# SHA-1 digest state.
class SHAstate_st(Structure):
    pass
SHAstate_st._fields_ = [
    ('h0', c_uint),
    ('h1', c_uint),
    ('h2', c_uint),
    ('h3', c_uint),
    ('h4', c_uint),
    ('Nl', c_uint),
    ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(SHAstate_st) == 96, sizeof(SHAstate_st)
assert alignment(SHAstate_st) == 4, alignment(SHAstate_st)
SHA_CTX = SHAstate_st
# libssl structures: connection (ssl_st), cipher descriptor, protocol method
# vtable, session, and compression method entry.
class ssl_st(Structure):
    pass
ssl_crock_st = POINTER(ssl_st)
class ssl_cipher_st(Structure):
    pass
ssl_cipher_st._fields_ = [
    ('valid', c_int),
    ('name', STRING),
    ('id', c_ulong),
    ('algorithms', c_ulong),
    ('algo_strength', c_ulong),
    ('algorithm2', c_ulong),
    ('strength_bits', c_int),
    ('alg_bits', c_int),
    ('mask', c_ulong),
    ('mask_strength', c_ulong),
]
assert sizeof(ssl_cipher_st) == 40, sizeof(ssl_cipher_st)
assert alignment(ssl_cipher_st) == 4, alignment(ssl_cipher_st)
SSL_CIPHER = ssl_cipher_st
SSL = ssl_st
class ssl_ctx_st(Structure):
    pass
SSL_CTX = ssl_ctx_st
# Protocol method vtable: one function pointer per SSL operation.
class ssl_method_st(Structure):
    pass
class ssl3_enc_method(Structure):
    pass
ssl_method_st._fields_ = [
    ('version', c_int),
    ('ssl_new', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_clear', CFUNCTYPE(None, POINTER(SSL))),
    ('ssl_free', CFUNCTYPE(None, POINTER(SSL))),
    ('ssl_accept', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_connect', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_read', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
    ('ssl_peek', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
    ('ssl_write', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
    ('ssl_shutdown', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_renegotiate', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_renegotiate_check', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, c_long, c_void_p)),
    ('ssl_ctx_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, c_long, c_void_p)),
    ('get_cipher_by_char', CFUNCTYPE(POINTER(SSL_CIPHER), POINTER(c_ubyte))),
    ('put_cipher_by_char', CFUNCTYPE(c_int, POINTER(SSL_CIPHER), POINTER(c_ubyte))),
    ('ssl_pending', CFUNCTYPE(c_int, POINTER(SSL))),
    ('num_ciphers', CFUNCTYPE(c_int)),
    ('get_cipher', CFUNCTYPE(POINTER(SSL_CIPHER), c_uint)),
    ('get_ssl_method', CFUNCTYPE(POINTER(ssl_method_st), c_int)),
    ('get_timeout', CFUNCTYPE(c_long)),
    ('ssl3_enc', POINTER(ssl3_enc_method)),
    ('ssl_version', CFUNCTYPE(c_int)),
    ('ssl_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, CFUNCTYPE(None))),
    ('ssl_ctx_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, CFUNCTYPE(None))),
]
assert sizeof(ssl_method_st) == 100, sizeof(ssl_method_st)
assert alignment(ssl_method_st) == 4, alignment(ssl_method_st)
# Opaque: only used through a pointer, so no layout was generated.
ssl3_enc_method._fields_ = [
]
SSL_METHOD = ssl_method_st
class ssl_session_st(Structure):
    pass
class sess_cert_st(Structure):
    pass
ssl_session_st._fields_ = [
    ('ssl_version', c_int),
    ('key_arg_length', c_uint),
    ('key_arg', c_ubyte * 8),
    ('master_key_length', c_int),
    ('master_key', c_ubyte * 48),
    ('session_id_length', c_uint),
    ('session_id', c_ubyte * 32),
    ('sid_ctx_length', c_uint),
    ('sid_ctx', c_ubyte * 32),
    ('not_resumable', c_int),
    ('sess_cert', POINTER(sess_cert_st)),
    ('peer', POINTER(X509)),
    ('verify_result', c_long),
    ('references', c_int),
    ('timeout', c_long),
    ('time', c_long),
    ('compress_meth', c_int),
    ('cipher', POINTER(SSL_CIPHER)),
    ('cipher_id', c_ulong),
    ('ciphers', POINTER(STACK)),
    ('ex_data', CRYPTO_EX_DATA),
    ('prev', POINTER(ssl_session_st)),
    ('next', POINTER(ssl_session_st)),
]
assert sizeof(ssl_session_st) == 200, sizeof(ssl_session_st)
assert alignment(ssl_session_st) == 4, alignment(ssl_session_st)
# Opaque: referenced only via POINTER(sess_cert_st) above.
sess_cert_st._fields_ = [
]
SSL_SESSION = ssl_session_st
# Session-id generation callback type.
GEN_SESSION_CB = CFUNCTYPE(c_int, POINTER(SSL), POINTER(c_ubyte), POINTER(c_uint))
class ssl_comp_st(Structure):
    pass
ssl_comp_st._fields_ = [
    ('id', c_int),
    ('name', STRING),
    ('method', POINTER(COMP_METHOD)),
]
assert sizeof(ssl_comp_st) == 12, sizeof(ssl_comp_st)
assert alignment(ssl_comp_st) == 4, alignment(ssl_comp_st)
SSL_COMP = ssl_comp_st
# Anonymous inner struct of ssl_ctx_st: session-cache statistics counters
# (the 'stats' member below).
class N10ssl_ctx_st4DOLLAR_18E(Structure):
    pass
N10ssl_ctx_st4DOLLAR_18E._fields_ = [
    ('sess_connect', c_int),
    ('sess_connect_renegotiate', c_int),
    ('sess_connect_good', c_int),
    ('sess_accept', c_int),
    ('sess_accept_renegotiate', c_int),
    ('sess_accept_good', c_int),
    ('sess_miss', c_int),
    ('sess_timeout', c_int),
    ('sess_cache_full', c_int),
    ('sess_hit', c_int),
    ('sess_cb_hit', c_int),
]
assert sizeof(N10ssl_ctx_st4DOLLAR_18E) == 44, sizeof(N10ssl_ctx_st4DOLLAR_18E)
assert alignment(N10ssl_ctx_st4DOLLAR_18E) == 4, alignment(N10ssl_ctx_st4DOLLAR_18E)
# Forward declaration; cert_st is left opaque at the end of this section.
class cert_st(Structure):
    pass
ssl_ctx_st._fields_ = [
    ('method', POINTER(SSL_METHOD)),
    ('cipher_list', POINTER(STACK)),
    ('cipher_list_by_id', POINTER(STACK)),
    ('cert_store', POINTER(x509_store_st)),
    ('sessions', POINTER(lhash_st)),
    ('session_cache_size', c_ulong),
    ('session_cache_head', POINTER(ssl_session_st)),
    ('session_cache_tail', POINTER(ssl_session_st)),
    ('session_cache_mode', c_int),
    ('session_timeout', c_long),
    ('new_session_cb', CFUNCTYPE(c_int, POINTER(ssl_st), POINTER(SSL_SESSION))),
    ('remove_session_cb', CFUNCTYPE(None, POINTER(ssl_ctx_st), POINTER(SSL_SESSION))),
    ('get_session_cb', CFUNCTYPE(POINTER(SSL_SESSION), POINTER(ssl_st), POINTER(c_ubyte), c_int, POINTER(c_int))),
    ('stats', N10ssl_ctx_st4DOLLAR_18E),
    ('references', c_int),
    ('app_verify_callback', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), c_void_p)),
    ('app_verify_arg', c_void_p),
    ('default_passwd_callback', POINTER(pem_password_cb)),
    ('default_passwd_callback_userdata', c_void_p),
    ('client_cert_cb', CFUNCTYPE(c_int, POINTER(SSL), POINTER(POINTER(X509)), POINTER(POINTER(EVP_PKEY)))),
    ('ex_data', CRYPTO_EX_DATA),
    ('rsa_md5', POINTER(EVP_MD)),
    ('md5', POINTER(EVP_MD)),
    ('sha1', POINTER(EVP_MD)),
    ('extra_certs', POINTER(STACK)),
    ('comp_methods', POINTER(STACK)),
    ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)),
    ('client_CA', POINTER(STACK)),
    ('options', c_ulong),
    ('mode', c_ulong),
    ('max_cert_list', c_long),
    ('cert', POINTER(cert_st)),
    ('read_ahead', c_int),
    ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)),
    ('msg_callback_arg', c_void_p),
    ('verify_mode', c_int),
    ('verify_depth', c_int),
    ('sid_ctx_length', c_uint),
    ('sid_ctx', c_ubyte * 32),
    ('default_verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
    ('generate_session_id', GEN_SESSION_CB),
    ('purpose', c_int),
    ('trust', c_int),
    ('quiet_shutdown', c_int),
]
assert sizeof(ssl_ctx_st) == 248, sizeof(ssl_ctx_st)
assert alignment(ssl_ctx_st) == 4, alignment(ssl_ctx_st)
# Opaque: referenced only via POINTER(cert_st).
cert_st._fields_ = [
]
# Per-protocol state is forward-declared here so ssl_st can point at it;
# the actual layouts follow below.
class ssl2_state_st(Structure):
    pass
class ssl3_state_st(Structure):
    pass
# The per-connection SSL object.
ssl_st._fields_ = [
    ('version', c_int),
    ('type', c_int),
    ('method', POINTER(SSL_METHOD)),
    ('rbio', POINTER(BIO)),
    ('wbio', POINTER(BIO)),
    ('bbio', POINTER(BIO)),
    ('rwstate', c_int),
    ('in_handshake', c_int),
    ('handshake_func', CFUNCTYPE(c_int)),
    ('server', c_int),
    ('new_session', c_int),
    ('quiet_shutdown', c_int),
    ('shutdown', c_int),
    ('state', c_int),
    ('rstate', c_int),
    ('init_buf', POINTER(BUF_MEM)),
    ('init_msg', c_void_p),
    ('init_num', c_int),
    ('init_off', c_int),
    ('packet', POINTER(c_ubyte)),
    ('packet_length', c_uint),
    ('s2', POINTER(ssl2_state_st)),
    ('s3', POINTER(ssl3_state_st)),
    ('read_ahead', c_int),
    ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)),
    ('msg_callback_arg', c_void_p),
    ('hit', c_int),
    ('purpose', c_int),
    ('trust', c_int),
    ('cipher_list', POINTER(STACK)),
    ('cipher_list_by_id', POINTER(STACK)),
    ('enc_read_ctx', POINTER(EVP_CIPHER_CTX)),
    ('read_hash', POINTER(EVP_MD)),
    ('expand', POINTER(COMP_CTX)),
    ('enc_write_ctx', POINTER(EVP_CIPHER_CTX)),
    ('write_hash', POINTER(EVP_MD)),
    ('compress', POINTER(COMP_CTX)),
    ('cert', POINTER(cert_st)),
    ('sid_ctx_length', c_uint),
    ('sid_ctx', c_ubyte * 32),
    ('session', POINTER(SSL_SESSION)),
    ('generate_session_id', GEN_SESSION_CB),
    ('verify_mode', c_int),
    ('verify_depth', c_int),
    ('verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
    ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)),
    ('error', c_int),
    ('error_code', c_int),
    ('ctx', POINTER(SSL_CTX)),
    ('debug', c_int),
    ('verify_result', c_long),
    ('ex_data', CRYPTO_EX_DATA),
    ('client_CA', POINTER(STACK)),
    ('references', c_int),
    ('options', c_ulong),
    ('mode', c_ulong),
    ('max_cert_list', c_long),
    ('first_packet', c_int),
    ('client_version', c_int),
]
assert sizeof(ssl_st) == 268, sizeof(ssl_st)
assert alignment(ssl_st) == 4, alignment(ssl_st)
# Anonymous inner struct of ssl2_state_st (the 'tmp' member).
class N13ssl2_state_st4DOLLAR_19E(Structure):
    pass
N13ssl2_state_st4DOLLAR_19E._fields_ = [
    ('conn_id_length', c_uint),
    ('cert_type', c_uint),
    ('cert_length', c_uint),
    ('csl', c_uint),
    ('clear', c_uint),
    ('enc', c_uint),
    ('ccl', c_ubyte * 32),
    ('cipher_spec_length', c_uint),
    ('session_id_length', c_uint),
    ('clen', c_uint),
    ('rlen', c_uint),
]
assert sizeof(N13ssl2_state_st4DOLLAR_19E) == 72, sizeof(N13ssl2_state_st4DOLLAR_19E)
assert alignment(N13ssl2_state_st4DOLLAR_19E) == 4, alignment(N13ssl2_state_st4DOLLAR_19E)
ssl2_state_st._fields_ = [
    ('three_byte_header', c_int),
    ('clear_text', c_int),
    ('escape', c_int),
    ('ssl2_rollback', c_int),
    ('wnum', c_uint),
    ('wpend_tot', c_int),
    ('wpend_buf', POINTER(c_ubyte)),
    ('wpend_off', c_int),
    ('wpend_len', c_int),
    ('wpend_ret', c_int),
    ('rbuf_left', c_int),
    ('rbuf_offs', c_int),
    ('rbuf', POINTER(c_ubyte)),
    ('wbuf', POINTER(c_ubyte)),
    ('write_ptr', POINTER(c_ubyte)),
    ('padding', c_uint),
    ('rlength', c_uint),
    ('ract_data_length', c_int),
    ('wlength', c_uint),
    ('wact_data_length', c_int),
    ('ract_data', POINTER(c_ubyte)),
    ('wact_data', POINTER(c_ubyte)),
    ('mac_data', POINTER(c_ubyte)),
    ('read_key', POINTER(c_ubyte)),
    ('write_key', POINTER(c_ubyte)),
    ('challenge_length', c_uint),
    ('challenge', c_ubyte * 32),
    ('conn_id_length', c_uint),
    ('conn_id', c_ubyte * 16),
    ('key_material_length', c_uint),
    ('key_material', c_ubyte * 48),
    ('read_sequence', c_ulong),
    ('write_sequence', c_ulong),
    ('tmp', N13ssl2_state_st4DOLLAR_19E),
]
assert sizeof(ssl2_state_st) == 288, sizeof(ssl2_state_st)
assert alignment(ssl2_state_st) == 4, alignment(ssl2_state_st)
SSL2_STATE = ssl2_state_st
# SSLv3/TLS record layer: a single record, an I/O buffer, and the full
# per-connection v3 state.
class ssl3_record_st(Structure):
    pass
ssl3_record_st._fields_ = [
    ('type', c_int),
    ('length', c_uint),
    ('off', c_uint),
    ('data', POINTER(c_ubyte)),
    ('input', POINTER(c_ubyte)),
    ('comp', POINTER(c_ubyte)),
]
assert sizeof(ssl3_record_st) == 24, sizeof(ssl3_record_st)
assert alignment(ssl3_record_st) == 4, alignment(ssl3_record_st)
SSL3_RECORD = ssl3_record_st
class ssl3_buffer_st(Structure):
    pass
# Darwin-specific typedef from the generating platform's headers.
size_t = __darwin_size_t
ssl3_buffer_st._fields_ = [
    ('buf', POINTER(c_ubyte)),
    ('len', size_t),
    ('offset', c_int),
    ('left', c_int),
]
assert sizeof(ssl3_buffer_st) == 16, sizeof(ssl3_buffer_st)
assert alignment(ssl3_buffer_st) == 4, alignment(ssl3_buffer_st)
SSL3_BUFFER = ssl3_buffer_st
# Anonymous inner struct of ssl3_state_st: handshake scratch state
# (the 'tmp' member).
class N13ssl3_state_st4DOLLAR_20E(Structure):
    pass
N13ssl3_state_st4DOLLAR_20E._fields_ = [
    ('cert_verify_md', c_ubyte * 72),
    ('finish_md', c_ubyte * 72),
    ('finish_md_len', c_int),
    ('peer_finish_md', c_ubyte * 72),
    ('peer_finish_md_len', c_int),
    ('message_size', c_ulong),
    ('message_type', c_int),
    ('new_cipher', POINTER(SSL_CIPHER)),
    ('dh', POINTER(DH)),
    ('next_state', c_int),
    ('reuse_message', c_int),
    ('cert_req', c_int),
    ('ctype_num', c_int),
    ('ctype', c_char * 7),
    ('ca_names', POINTER(STACK)),
    ('use_rsa_tmp', c_int),
    ('key_block_length', c_int),
    ('key_block', POINTER(c_ubyte)),
    ('new_sym_enc', POINTER(EVP_CIPHER)),
    ('new_hash', POINTER(EVP_MD)),
    ('new_compression', POINTER(SSL_COMP)),
    ('cert_request', c_int),
]
assert sizeof(N13ssl3_state_st4DOLLAR_20E) == 296, sizeof(N13ssl3_state_st4DOLLAR_20E)
assert alignment(N13ssl3_state_st4DOLLAR_20E) == 4, alignment(N13ssl3_state_st4DOLLAR_20E)
ssl3_state_st._fields_ = [
    ('flags', c_long),
    ('delay_buf_pop_ret', c_int),
    ('read_sequence', c_ubyte * 8),
    ('read_mac_secret', c_ubyte * 36),
    ('write_sequence', c_ubyte * 8),
    ('write_mac_secret', c_ubyte * 36),
    ('server_random', c_ubyte * 32),
    ('client_random', c_ubyte * 32),
    ('need_empty_fragments', c_int),
    ('empty_fragment_done', c_int),
    ('rbuf', SSL3_BUFFER),
    ('wbuf', SSL3_BUFFER),
    ('rrec', SSL3_RECORD),
    ('wrec', SSL3_RECORD),
    ('alert_fragment', c_ubyte * 2),
    ('alert_fragment_len', c_uint),
    ('handshake_fragment', c_ubyte * 4),
    ('handshake_fragment_len', c_uint),
    ('wnum', c_uint),
    ('wpend_tot', c_int),
    ('wpend_type', c_int),
    ('wpend_ret', c_int),
    ('wpend_buf', POINTER(c_ubyte)),
    ('finish_dgst1', EVP_MD_CTX),
    ('finish_dgst2', EVP_MD_CTX),
    ('change_cipher_spec', c_int),
    ('warn_alert', c_int),
    ('fatal_alert', c_int),
    ('alert_dispatch', c_int),
    ('send_alert', c_ubyte * 2),
    ('renegotiate', c_int),
    ('total_renegotiations', c_int),
    ('num_renegotiations', c_int),
    ('in_read_app_data', c_int),
    ('tmp', N13ssl3_state_st4DOLLAR_20E),
]
assert sizeof(ssl3_state_st) == 648, sizeof(ssl3_state_st)
assert alignment(ssl3_state_st) == 4, alignment(ssl3_state_st)
SSL3_STATE = ssl3_state_st
# Complete the (earlier forward-declared) generic STACK container.
stack_st._fields_ = [
    ('num', c_int),
    ('data', POINTER(STRING)),
    ('sorted', c_int),
    ('num_alloc', c_int),
    ('comp', CFUNCTYPE(c_int, POINTER(STRING), POINTER(STRING))),
]
assert sizeof(stack_st) == 20, sizeof(stack_st)
assert alignment(stack_st) == 4, alignment(stack_st)
# UI (user-interface) types: all opaque — only used through pointers.
class ui_st(Structure):
    pass
ui_st._fields_ = [
]
UI = ui_st
class ui_method_st(Structure):
    pass
ui_method_st._fields_ = [
]
UI_METHOD = ui_method_st
class ui_string_st(Structure):
    pass
ui_string_st._fields_ = [
]
UI_STRING = ui_string_st
# values for enumeration 'UI_string_types'
UI_string_types = c_int # enum
class X509_objects_st(Structure):
    pass
X509_objects_st._fields_ = [
    ('nid', c_int),
    ('a2i', CFUNCTYPE(c_int)),
    ('i2a', CFUNCTYPE(c_int)),
]
assert sizeof(X509_objects_st) == 12, sizeof(X509_objects_st)
assert alignment(X509_objects_st) == 4, alignment(X509_objects_st)
X509_OBJECTS = X509_objects_st
# X.509 certificate building blocks.  Several of these complete classes
# forward-declared earlier (X509_algor_st, X509_name_st).
X509_algor_st._fields_ = [
    ('algorithm', POINTER(ASN1_OBJECT)),
    ('parameter', POINTER(ASN1_TYPE)),
]
assert sizeof(X509_algor_st) == 8, sizeof(X509_algor_st)
assert alignment(X509_algor_st) == 4, alignment(X509_algor_st)
# Validity period of a certificate.
class X509_val_st(Structure):
    pass
X509_val_st._fields_ = [
    ('notBefore', POINTER(ASN1_TIME)),
    ('notAfter', POINTER(ASN1_TIME)),
]
assert sizeof(X509_val_st) == 8, sizeof(X509_val_st)
assert alignment(X509_val_st) == 4, alignment(X509_val_st)
X509_VAL = X509_val_st
class X509_pubkey_st(Structure):
    pass
X509_pubkey_st._fields_ = [
    ('algor', POINTER(X509_ALGOR)),
    ('public_key', POINTER(ASN1_BIT_STRING)),
    ('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(X509_pubkey_st) == 12, sizeof(X509_pubkey_st)
assert alignment(X509_pubkey_st) == 4, alignment(X509_pubkey_st)
X509_PUBKEY = X509_pubkey_st
class X509_sig_st(Structure):
    pass
X509_sig_st._fields_ = [
    ('algor', POINTER(X509_ALGOR)),
    ('digest', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(X509_sig_st) == 8, sizeof(X509_sig_st)
assert alignment(X509_sig_st) == 4, alignment(X509_sig_st)
X509_SIG = X509_sig_st
class X509_name_entry_st(Structure):
    pass
X509_name_entry_st._fields_ = [
    ('object', POINTER(ASN1_OBJECT)),
    ('value', POINTER(ASN1_STRING)),
    ('set', c_int),
    ('size', c_int),
]
assert sizeof(X509_name_entry_st) == 16, sizeof(X509_name_entry_st)
assert alignment(X509_name_entry_st) == 4, alignment(X509_name_entry_st)
X509_NAME_ENTRY = X509_name_entry_st
X509_name_st._fields_ = [
    ('entries', POINTER(STACK)),
    ('modified', c_int),
    ('bytes', POINTER(BUF_MEM)),
    ('hash', c_ulong),
]
assert sizeof(X509_name_st) == 16, sizeof(X509_name_st)
assert alignment(X509_name_st) == 4, alignment(X509_name_st)
class X509_extension_st(Structure):
    pass
X509_extension_st._fields_ = [
    ('object', POINTER(ASN1_OBJECT)),
    ('critical', ASN1_BOOLEAN),
    ('value', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(X509_extension_st) == 12, sizeof(X509_extension_st)
assert alignment(X509_extension_st) == 4, alignment(X509_extension_st)
X509_EXTENSION = X509_extension_st
class x509_attributes_st(Structure):
    pass
# Anonymous union inside x509_attributes_st: the attribute value,
# discriminated by the enclosing struct's 'single' flag.
class N18x509_attributes_st4DOLLAR_13E(Union):
    pass
N18x509_attributes_st4DOLLAR_13E._fields_ = [
    ('ptr', STRING),
    ('set', POINTER(STACK)),
    ('single', POINTER(ASN1_TYPE)),
]
assert sizeof(N18x509_attributes_st4DOLLAR_13E) == 4, sizeof(N18x509_attributes_st4DOLLAR_13E)
assert alignment(N18x509_attributes_st4DOLLAR_13E) == 4, alignment(N18x509_attributes_st4DOLLAR_13E)
x509_attributes_st._fields_ = [
    ('object', POINTER(ASN1_OBJECT)),
    ('single', c_int),
    ('value', N18x509_attributes_st4DOLLAR_13E),
]
assert sizeof(x509_attributes_st) == 12, sizeof(x509_attributes_st)
assert alignment(x509_attributes_st) == 4, alignment(x509_attributes_st)
X509_ATTRIBUTE = x509_attributes_st
class X509_req_info_st(Structure):
    pass
X509_req_info_st._fields_ = [
    ('enc', ASN1_ENCODING),
    ('version', POINTER(ASN1_INTEGER)),
    ('subject', POINTER(X509_NAME)),
    ('pubkey', POINTER(X509_PUBKEY)),
    ('attributes', POINTER(STACK)),
]
assert sizeof(X509_req_info_st) == 28, sizeof(X509_req_info_st)
assert alignment(X509_req_info_st) == 4, alignment(X509_req_info_st)
X509_REQ_INFO = X509_req_info_st
class X509_req_st(Structure):
    pass
X509_req_st._fields_ = [
    ('req_info', POINTER(X509_REQ_INFO)),
    ('sig_alg', POINTER(X509_ALGOR)),
    ('signature', POINTER(ASN1_BIT_STRING)),
    ('references', c_int),
]
assert sizeof(X509_req_st) == 16, sizeof(X509_req_st)
assert alignment(X509_req_st) == 4, alignment(X509_req_st)
X509_REQ = X509_req_st
# Forward declaration: x509_cinf_st's fields are assigned below.
class x509_cinf_st(Structure):
    pass
x509_cinf_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('serialNumber', POINTER(ASN1_INTEGER)),
('signature', POINTER(X509_ALGOR)),
('issuer', POINTER(X509_NAME)),
('validity', POINTER(X509_VAL)),
('subject', POINTER(X509_NAME)),
('key', POINTER(X509_PUBKEY)),
('issuerUID', POINTER(ASN1_BIT_STRING)),
('subjectUID', POINTER(ASN1_BIT_STRING)),
('extensions', POINTER(STACK)),
]
assert sizeof(x509_cinf_st) == 40, sizeof(x509_cinf_st)
assert alignment(x509_cinf_st) == 4, alignment(x509_cinf_st)
X509_CINF = x509_cinf_st
class x509_cert_aux_st(Structure):
pass
x509_cert_aux_st._fields_ = [
('trust', POINTER(STACK)),
('reject', POINTER(STACK)),
('alias', POINTER(ASN1_UTF8STRING)),
('keyid', POINTER(ASN1_OCTET_STRING)),
('other', POINTER(STACK)),
]
assert sizeof(x509_cert_aux_st) == 20, sizeof(x509_cert_aux_st)
assert alignment(x509_cert_aux_st) == 4, alignment(x509_cert_aux_st)
X509_CERT_AUX = x509_cert_aux_st
# Generated bindings for the full certificate object (x509_st), trust
# settings, revocation/CRL structures, and the legacy Netscape SPKI types.
# Same 32-bit Darwin layout assumptions as the rest of this module.
class AUTHORITY_KEYID_st(Structure):
    pass
# x509_st was forward-declared earlier; fields filled in here.
x509_st._fields_ = [
    ('cert_info', POINTER(X509_CINF)),
    ('sig_alg', POINTER(X509_ALGOR)),
    ('signature', POINTER(ASN1_BIT_STRING)),
    ('valid', c_int),
    ('references', c_int),
    ('name', STRING),
    ('ex_data', CRYPTO_EX_DATA),
    ('ex_pathlen', c_long),
    ('ex_flags', c_ulong),
    ('ex_kusage', c_ulong),
    ('ex_xkusage', c_ulong),
    ('ex_nscert', c_ulong),
    ('skid', POINTER(ASN1_OCTET_STRING)),
    ('akid', POINTER(AUTHORITY_KEYID_st)),
    ('sha1_hash', c_ubyte * 20),
    ('aux', POINTER(X509_CERT_AUX)),
]
assert sizeof(x509_st) == 84, sizeof(x509_st)
assert alignment(x509_st) == 4, alignment(x509_st)
# AUTHORITY_KEYID_st is only referenced through pointers above, so the
# generator left it opaque (no fields).
AUTHORITY_KEYID_st._fields_ = [
]
class x509_trust_st(Structure):
    pass
x509_trust_st._fields_ = [
    ('trust', c_int),
    ('flags', c_int),
    ('check_trust', CFUNCTYPE(c_int, POINTER(x509_trust_st), POINTER(X509), c_int)),
    ('name', STRING),
    ('arg1', c_int),
    ('arg2', c_void_p),
]
assert sizeof(x509_trust_st) == 24, sizeof(x509_trust_st)
assert alignment(x509_trust_st) == 4, alignment(x509_trust_st)
X509_TRUST = x509_trust_st
class X509_revoked_st(Structure):
    pass
X509_revoked_st._fields_ = [
    ('serialNumber', POINTER(ASN1_INTEGER)),
    ('revocationDate', POINTER(ASN1_TIME)),
    ('extensions', POINTER(STACK)),
    ('sequence', c_int),
]
assert sizeof(X509_revoked_st) == 16, sizeof(X509_revoked_st)
assert alignment(X509_revoked_st) == 4, alignment(X509_revoked_st)
X509_REVOKED = X509_revoked_st
class X509_crl_info_st(Structure):
    pass
X509_crl_info_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('sig_alg', POINTER(X509_ALGOR)),
    ('issuer', POINTER(X509_NAME)),
    ('lastUpdate', POINTER(ASN1_TIME)),
    ('nextUpdate', POINTER(ASN1_TIME)),
    ('revoked', POINTER(STACK)),
    ('extensions', POINTER(STACK)),
    ('enc', ASN1_ENCODING),
]
assert sizeof(X509_crl_info_st) == 40, sizeof(X509_crl_info_st)
assert alignment(X509_crl_info_st) == 4, alignment(X509_crl_info_st)
X509_CRL_INFO = X509_crl_info_st
# X509_crl_st was forward-declared earlier; fields filled in here.
X509_crl_st._fields_ = [
    ('crl', POINTER(X509_CRL_INFO)),
    ('sig_alg', POINTER(X509_ALGOR)),
    ('signature', POINTER(ASN1_BIT_STRING)),
    ('references', c_int),
]
assert sizeof(X509_crl_st) == 16, sizeof(X509_crl_st)
assert alignment(X509_crl_st) == 4, alignment(X509_crl_st)
class private_key_st(Structure):
    pass
private_key_st._fields_ = [
    ('version', c_int),
    ('enc_algor', POINTER(X509_ALGOR)),
    ('enc_pkey', POINTER(ASN1_OCTET_STRING)),
    ('dec_pkey', POINTER(EVP_PKEY)),
    ('key_length', c_int),
    ('key_data', STRING),
    ('key_free', c_int),
    ('cipher', EVP_CIPHER_INFO),
    ('references', c_int),
]
assert sizeof(private_key_st) == 52, sizeof(private_key_st)
assert alignment(private_key_st) == 4, alignment(private_key_st)
X509_PKEY = private_key_st
class X509_info_st(Structure):
    pass
X509_info_st._fields_ = [
    ('x509', POINTER(X509)),
    ('crl', POINTER(X509_CRL)),
    ('x_pkey', POINTER(X509_PKEY)),
    ('enc_cipher', EVP_CIPHER_INFO),
    ('enc_len', c_int),
    ('enc_data', STRING),
    ('references', c_int),
]
assert sizeof(X509_info_st) == 44, sizeof(X509_info_st)
assert alignment(X509_info_st) == 4, alignment(X509_info_st)
X509_INFO = X509_info_st
class Netscape_spkac_st(Structure):
    pass
Netscape_spkac_st._fields_ = [
    ('pubkey', POINTER(X509_PUBKEY)),
    ('challenge', POINTER(ASN1_IA5STRING)),
]
assert sizeof(Netscape_spkac_st) == 8, sizeof(Netscape_spkac_st)
assert alignment(Netscape_spkac_st) == 4, alignment(Netscape_spkac_st)
NETSCAPE_SPKAC = Netscape_spkac_st
class Netscape_spki_st(Structure):
    pass
Netscape_spki_st._fields_ = [
    ('spkac', POINTER(NETSCAPE_SPKAC)),
    ('sig_algor', POINTER(X509_ALGOR)),
    ('signature', POINTER(ASN1_BIT_STRING)),
]
assert sizeof(Netscape_spki_st) == 12, sizeof(Netscape_spki_st)
assert alignment(Netscape_spki_st) == 4, alignment(Netscape_spki_st)
NETSCAPE_SPKI = Netscape_spki_st
class Netscape_certificate_sequence(Structure):
    pass
Netscape_certificate_sequence._fields_ = [
    ('type', POINTER(ASN1_OBJECT)),
    ('certs', POINTER(STACK)),
]
assert sizeof(Netscape_certificate_sequence) == 8, sizeof(Netscape_certificate_sequence)
assert alignment(Netscape_certificate_sequence) == 4, alignment(Netscape_certificate_sequence)
NETSCAPE_CERT_SEQUENCE = Netscape_certificate_sequence
# Generated bindings for the password-based-encryption parameter blocks
# (PKCS#5 PBE/PBE2/PBKDF2) and the PKCS#8 private-key-info structure.
class PBEPARAM_st(Structure):
    pass
PBEPARAM_st._fields_ = [
    ('salt', POINTER(ASN1_OCTET_STRING)),
    ('iter', POINTER(ASN1_INTEGER)),
]
assert sizeof(PBEPARAM_st) == 8, sizeof(PBEPARAM_st)
assert alignment(PBEPARAM_st) == 4, alignment(PBEPARAM_st)
PBEPARAM = PBEPARAM_st
class PBE2PARAM_st(Structure):
    pass
PBE2PARAM_st._fields_ = [
    ('keyfunc', POINTER(X509_ALGOR)),
    ('encryption', POINTER(X509_ALGOR)),
]
assert sizeof(PBE2PARAM_st) == 8, sizeof(PBE2PARAM_st)
assert alignment(PBE2PARAM_st) == 4, alignment(PBE2PARAM_st)
PBE2PARAM = PBE2PARAM_st
class PBKDF2PARAM_st(Structure):
    pass
PBKDF2PARAM_st._fields_ = [
    ('salt', POINTER(ASN1_TYPE)),
    ('iter', POINTER(ASN1_INTEGER)),
    ('keylength', POINTER(ASN1_INTEGER)),
    ('prf', POINTER(X509_ALGOR)),
]
assert sizeof(PBKDF2PARAM_st) == 16, sizeof(PBKDF2PARAM_st)
assert alignment(PBKDF2PARAM_st) == 4, alignment(PBKDF2PARAM_st)
PBKDF2PARAM = PBKDF2PARAM_st
class pkcs8_priv_key_info_st(Structure):
    pass
pkcs8_priv_key_info_st._fields_ = [
    ('broken', c_int),
    ('version', POINTER(ASN1_INTEGER)),
    ('pkeyalg', POINTER(X509_ALGOR)),
    ('pkey', POINTER(ASN1_TYPE)),
    ('attributes', POINTER(STACK)),
]
assert sizeof(pkcs8_priv_key_info_st) == 20, sizeof(pkcs8_priv_key_info_st)
assert alignment(pkcs8_priv_key_info_st) == 4, alignment(pkcs8_priv_key_info_st)
PKCS8_PRIV_KEY_INFO = pkcs8_priv_key_info_st
# Generated bindings for the X.509 certificate lookup/store machinery:
# hash-dir and file contexts, the tagged X509_OBJECT union, lookup methods
# (tables of C callbacks), the certificate store, and the per-verification
# store context.  Layouts assume 32-bit Darwin, as elsewhere in this file.
class x509_hash_dir_st(Structure):
    pass
x509_hash_dir_st._fields_ = [
    ('num_dirs', c_int),
    ('dirs', POINTER(STRING)),
    ('dirs_type', POINTER(c_int)),
    ('num_dirs_alloced', c_int),
]
assert sizeof(x509_hash_dir_st) == 16, sizeof(x509_hash_dir_st)
assert alignment(x509_hash_dir_st) == 4, alignment(x509_hash_dir_st)
X509_HASH_DIR_CTX = x509_hash_dir_st
class x509_file_st(Structure):
    pass
x509_file_st._fields_ = [
    ('num_paths', c_int),
    ('num_alloced', c_int),
    ('paths', POINTER(STRING)),
    ('path_type', POINTER(c_int)),
]
assert sizeof(x509_file_st) == 16, sizeof(x509_file_st)
assert alignment(x509_file_st) == 4, alignment(x509_file_st)
X509_CERT_FILE_CTX = x509_file_st
class x509_object_st(Structure):
    pass
# Anonymous union inside x509_object_st: the payload selected by 'type'.
class N14x509_object_st4DOLLAR_14E(Union):
    pass
N14x509_object_st4DOLLAR_14E._fields_ = [
    ('ptr', STRING),
    ('x509', POINTER(X509)),
    ('crl', POINTER(X509_CRL)),
    ('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(N14x509_object_st4DOLLAR_14E) == 4, sizeof(N14x509_object_st4DOLLAR_14E)
assert alignment(N14x509_object_st4DOLLAR_14E) == 4, alignment(N14x509_object_st4DOLLAR_14E)
x509_object_st._fields_ = [
    ('type', c_int),
    ('data', N14x509_object_st4DOLLAR_14E),
]
assert sizeof(x509_object_st) == 8, sizeof(x509_object_st)
assert alignment(x509_object_st) == 4, alignment(x509_object_st)
X509_OBJECT = x509_object_st
class x509_lookup_st(Structure):
    pass
# Alias is declared before the fields because x509_lookup_method_st's
# callbacks reference X509_LOOKUP.
X509_LOOKUP = x509_lookup_st
class x509_lookup_method_st(Structure):
    pass
x509_lookup_method_st._fields_ = [
    ('name', STRING),
    ('new_item', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))),
    ('free', CFUNCTYPE(None, POINTER(X509_LOOKUP))),
    ('init', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))),
    ('shutdown', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))),
    ('ctrl', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_long, POINTER(STRING))),
    ('get_by_subject', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(X509_OBJECT))),
    ('get_by_issuer_serial', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(ASN1_INTEGER), POINTER(X509_OBJECT))),
    ('get_by_fingerprint', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(c_ubyte), c_int, POINTER(X509_OBJECT))),
    ('get_by_alias', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_int, POINTER(X509_OBJECT))),
]
assert sizeof(x509_lookup_method_st) == 40, sizeof(x509_lookup_method_st)
assert alignment(x509_lookup_method_st) == 4, alignment(x509_lookup_method_st)
X509_LOOKUP_METHOD = x509_lookup_method_st
# x509_store_st was forward-declared earlier; fields filled in here.
x509_store_st._fields_ = [
    ('cache', c_int),
    ('objs', POINTER(STACK)),
    ('get_cert_methods', POINTER(STACK)),
    ('flags', c_ulong),
    ('purpose', c_int),
    ('trust', c_int),
    ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
    ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
    ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))),
    ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))),
    ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
    ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))),
    ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))),
    ('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))),
    ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
    ('ex_data', CRYPTO_EX_DATA),
    ('references', c_int),
    ('depth', c_int),
]
assert sizeof(x509_store_st) == 76, sizeof(x509_store_st)
assert alignment(x509_store_st) == 4, alignment(x509_store_st)
x509_lookup_st._fields_ = [
    ('init', c_int),
    ('skip', c_int),
    ('method', POINTER(X509_LOOKUP_METHOD)),
    ('method_data', STRING),
    ('store_ctx', POINTER(X509_STORE)),
]
assert sizeof(x509_lookup_st) == 20, sizeof(x509_lookup_st)
assert alignment(x509_lookup_st) == 4, alignment(x509_lookup_st)
# time_t is needed for the check_time field below.
time_t = __darwin_time_t
x509_store_ctx_st._fields_ = [
    ('ctx', POINTER(X509_STORE)),
    ('current_method', c_int),
    ('cert', POINTER(X509)),
    ('untrusted', POINTER(STACK)),
    ('purpose', c_int),
    ('trust', c_int),
    ('check_time', time_t),
    ('flags', c_ulong),
    ('other_ctx', c_void_p),
    ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
    ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
    ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))),
    ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))),
    ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
    ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))),
    ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))),
    ('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))),
    ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
    ('depth', c_int),
    ('valid', c_int),
    ('last_untrusted', c_int),
    ('chain', POINTER(STACK)),
    ('error_depth', c_int),
    ('error', c_int),
    ('current_cert', POINTER(X509)),
    ('current_issuer', POINTER(X509)),
    ('current_crl', POINTER(X509_CRL)),
    ('ex_data', CRYPTO_EX_DATA),
]
assert sizeof(x509_store_ctx_st) == 116, sizeof(x509_store_ctx_st)
assert alignment(x509_store_ctx_st) == 4, alignment(x509_store_ctx_st)
# Generated bindings for Darwin's stdio internals (the BSD __sFILE layout
# behind FILE*) and the stdlib div_t family.  __sFILE uses _pack_ = 4 to
# match the 4-byte-packed 64-bit fpos_t/_offset fields on 32-bit Darwin.
va_list = __darwin_va_list
__darwin_off_t = __int64_t
fpos_t = __darwin_off_t
class __sbuf(Structure):
    pass
__sbuf._fields_ = [
    ('_base', POINTER(c_ubyte)),
    ('_size', c_int),
]
assert sizeof(__sbuf) == 8, sizeof(__sbuf)
assert alignment(__sbuf) == 4, alignment(__sbuf)
# Opaque in the headers; the generator leaves it with no fields.
class __sFILEX(Structure):
    pass
__sFILEX._fields_ = [
]
class __sFILE(Structure):
    pass
__sFILE._pack_ = 4
__sFILE._fields_ = [
    ('_p', POINTER(c_ubyte)),
    ('_r', c_int),
    ('_w', c_int),
    ('_flags', c_short),
    ('_file', c_short),
    ('_bf', __sbuf),
    ('_lbfsize', c_int),
    ('_cookie', c_void_p),
    ('_close', CFUNCTYPE(c_int, c_void_p)),
    ('_read', CFUNCTYPE(c_int, c_void_p, STRING, c_int)),
    ('_seek', CFUNCTYPE(fpos_t, c_void_p, c_longlong, c_int)),
    ('_write', CFUNCTYPE(c_int, c_void_p, STRING, c_int)),
    ('_ub', __sbuf),
    ('_extra', POINTER(__sFILEX)),
    ('_ur', c_int),
    ('_ubuf', c_ubyte * 3),
    ('_nbuf', c_ubyte * 1),
    ('_lb', __sbuf),
    ('_blksize', c_int),
    ('_offset', fpos_t),
]
assert sizeof(__sFILE) == 88, sizeof(__sFILE)
assert alignment(__sFILE) == 4, alignment(__sFILE)
FILE = __sFILE
ct_rune_t = __darwin_ct_rune_t
rune_t = __darwin_rune_t
class div_t(Structure):
    pass
div_t._fields_ = [
    ('quot', c_int),
    ('rem', c_int),
]
assert sizeof(div_t) == 8, sizeof(div_t)
assert alignment(div_t) == 4, alignment(div_t)
class ldiv_t(Structure):
    pass
ldiv_t._fields_ = [
    ('quot', c_long),
    ('rem', c_long),
]
assert sizeof(ldiv_t) == 8, sizeof(ldiv_t)
assert alignment(ldiv_t) == 4, alignment(ldiv_t)
class lldiv_t(Structure):
    pass
lldiv_t._pack_ = 4
lldiv_t._fields_ = [
    ('quot', c_longlong),
    ('rem', c_longlong),
]
assert sizeof(lldiv_t) == 16, sizeof(lldiv_t)
assert alignment(lldiv_t) == 4, alignment(lldiv_t)
# Generated bindings for Darwin's private __darwin_* typedefs, the opaque
# pthread object layouts (fixed-size byte blobs with a leading __sig
# cookie), and the signal/ucontext support structures.  Sizes match the
# 32-bit Darwin headers.
__darwin_dev_t = __int32_t
dev_t = __darwin_dev_t
__darwin_mode_t = __uint16_t
mode_t = __darwin_mode_t
# mcontext/mcontext64 are opaque here; only pointers to them are used.
class mcontext(Structure):
    pass
mcontext._fields_ = [
]
class mcontext64(Structure):
    pass
mcontext64._fields_ = [
]
class __darwin_pthread_handler_rec(Structure):
    pass
__darwin_pthread_handler_rec._fields_ = [
    ('__routine', CFUNCTYPE(None, c_void_p)),
    ('__arg', c_void_p),
    ('__next', POINTER(__darwin_pthread_handler_rec)),
]
assert sizeof(__darwin_pthread_handler_rec) == 12, sizeof(__darwin_pthread_handler_rec)
assert alignment(__darwin_pthread_handler_rec) == 4, alignment(__darwin_pthread_handler_rec)
class _opaque_pthread_attr_t(Structure):
    pass
_opaque_pthread_attr_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 36),
]
assert sizeof(_opaque_pthread_attr_t) == 40, sizeof(_opaque_pthread_attr_t)
assert alignment(_opaque_pthread_attr_t) == 4, alignment(_opaque_pthread_attr_t)
class _opaque_pthread_cond_t(Structure):
    pass
_opaque_pthread_cond_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 24),
]
assert sizeof(_opaque_pthread_cond_t) == 28, sizeof(_opaque_pthread_cond_t)
assert alignment(_opaque_pthread_cond_t) == 4, alignment(_opaque_pthread_cond_t)
class _opaque_pthread_condattr_t(Structure):
    pass
_opaque_pthread_condattr_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 4),
]
assert sizeof(_opaque_pthread_condattr_t) == 8, sizeof(_opaque_pthread_condattr_t)
assert alignment(_opaque_pthread_condattr_t) == 4, alignment(_opaque_pthread_condattr_t)
class _opaque_pthread_mutex_t(Structure):
    pass
_opaque_pthread_mutex_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 40),
]
assert sizeof(_opaque_pthread_mutex_t) == 44, sizeof(_opaque_pthread_mutex_t)
assert alignment(_opaque_pthread_mutex_t) == 4, alignment(_opaque_pthread_mutex_t)
class _opaque_pthread_mutexattr_t(Structure):
    pass
_opaque_pthread_mutexattr_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 8),
]
assert sizeof(_opaque_pthread_mutexattr_t) == 12, sizeof(_opaque_pthread_mutexattr_t)
assert alignment(_opaque_pthread_mutexattr_t) == 4, alignment(_opaque_pthread_mutexattr_t)
class _opaque_pthread_once_t(Structure):
    pass
_opaque_pthread_once_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 4),
]
assert sizeof(_opaque_pthread_once_t) == 8, sizeof(_opaque_pthread_once_t)
assert alignment(_opaque_pthread_once_t) == 4, alignment(_opaque_pthread_once_t)
class _opaque_pthread_rwlock_t(Structure):
    pass
_opaque_pthread_rwlock_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 124),
]
assert sizeof(_opaque_pthread_rwlock_t) == 128, sizeof(_opaque_pthread_rwlock_t)
assert alignment(_opaque_pthread_rwlock_t) == 4, alignment(_opaque_pthread_rwlock_t)
class _opaque_pthread_rwlockattr_t(Structure):
    pass
_opaque_pthread_rwlockattr_t._fields_ = [
    ('__sig', c_long),
    ('__opaque', c_char * 12),
]
assert sizeof(_opaque_pthread_rwlockattr_t) == 16, sizeof(_opaque_pthread_rwlockattr_t)
assert alignment(_opaque_pthread_rwlockattr_t) == 4, alignment(_opaque_pthread_rwlockattr_t)
class _opaque_pthread_t(Structure):
    pass
_opaque_pthread_t._fields_ = [
    ('__sig', c_long),
    ('__cleanup_stack', POINTER(__darwin_pthread_handler_rec)),
    ('__opaque', c_char * 596),
]
assert sizeof(_opaque_pthread_t) == 604, sizeof(_opaque_pthread_t)
assert alignment(_opaque_pthread_t) == 4, alignment(_opaque_pthread_t)
__darwin_blkcnt_t = __int64_t
__darwin_blksize_t = __int32_t
__darwin_fsblkcnt_t = c_uint
__darwin_fsfilcnt_t = c_uint
__darwin_gid_t = __uint32_t
__darwin_id_t = __uint32_t
__darwin_ino_t = __uint32_t
__darwin_mach_port_name_t = __darwin_natural_t
__darwin_mach_port_t = __darwin_mach_port_name_t
__darwin_mcontext_t = POINTER(mcontext)
__darwin_mcontext64_t = POINTER(mcontext64)
__darwin_pid_t = __int32_t
__darwin_pthread_attr_t = _opaque_pthread_attr_t
__darwin_pthread_cond_t = _opaque_pthread_cond_t
__darwin_pthread_condattr_t = _opaque_pthread_condattr_t
__darwin_pthread_key_t = c_ulong
__darwin_pthread_mutex_t = _opaque_pthread_mutex_t
__darwin_pthread_mutexattr_t = _opaque_pthread_mutexattr_t
__darwin_pthread_once_t = _opaque_pthread_once_t
__darwin_pthread_rwlock_t = _opaque_pthread_rwlock_t
__darwin_pthread_rwlockattr_t = _opaque_pthread_rwlockattr_t
__darwin_pthread_t = POINTER(_opaque_pthread_t)
__darwin_sigset_t = __uint32_t
__darwin_suseconds_t = __int32_t
__darwin_uid_t = __uint32_t
__darwin_useconds_t = __uint32_t
__darwin_uuid_t = c_ubyte * 16
class sigaltstack(Structure):
    pass
sigaltstack._fields_ = [
    ('ss_sp', c_void_p),
    ('ss_size', __darwin_size_t),
    ('ss_flags', c_int),
]
assert sizeof(sigaltstack) == 12, sizeof(sigaltstack)
assert alignment(sigaltstack) == 4, alignment(sigaltstack)
__darwin_stack_t = sigaltstack
class ucontext(Structure):
    pass
ucontext._fields_ = [
    ('uc_onstack', c_int),
    ('uc_sigmask', __darwin_sigset_t),
    ('uc_stack', __darwin_stack_t),
    ('uc_link', POINTER(ucontext)),
    ('uc_mcsize', __darwin_size_t),
    ('uc_mcontext', __darwin_mcontext_t),
]
assert sizeof(ucontext) == 32, sizeof(ucontext)
assert alignment(ucontext) == 4, alignment(ucontext)
__darwin_ucontext_t = ucontext
class ucontext64(Structure):
    pass
ucontext64._fields_ = [
    ('uc_onstack', c_int),
    ('uc_sigmask', __darwin_sigset_t),
    ('uc_stack', __darwin_stack_t),
    ('uc_link', POINTER(ucontext64)),
    ('uc_mcsize', __darwin_size_t),
    ('uc_mcontext64', __darwin_mcontext64_t),
]
assert sizeof(ucontext64) == 32, sizeof(ucontext64)
assert alignment(ucontext64) == 4, alignment(ucontext64)
__darwin_ucontext64_t = ucontext64
class timeval(Structure):
    pass
timeval._fields_ = [
    ('tv_sec', __darwin_time_t),
    ('tv_usec', __darwin_suseconds_t),
]
assert sizeof(timeval) == 8, sizeof(timeval)
assert alignment(timeval) == 4, alignment(timeval)
# Generated bindings for <sys/resource.h> (rusage, rlimit) and the
# <signal.h> family (sigval/sigevent/siginfo/sigaction and the legacy
# sigvec/sigstack interfaces).  rlimit needs _pack_ = 4 because rlim_t is
# a 64-bit value packed on 4-byte boundaries on 32-bit Darwin.
rlim_t = __int64_t
class rusage(Structure):
    pass
rusage._fields_ = [
    ('ru_utime', timeval),
    ('ru_stime', timeval),
    ('ru_maxrss', c_long),
    ('ru_ixrss', c_long),
    ('ru_idrss', c_long),
    ('ru_isrss', c_long),
    ('ru_minflt', c_long),
    ('ru_majflt', c_long),
    ('ru_nswap', c_long),
    ('ru_inblock', c_long),
    ('ru_oublock', c_long),
    ('ru_msgsnd', c_long),
    ('ru_msgrcv', c_long),
    ('ru_nsignals', c_long),
    ('ru_nvcsw', c_long),
    ('ru_nivcsw', c_long),
]
assert sizeof(rusage) == 72, sizeof(rusage)
assert alignment(rusage) == 4, alignment(rusage)
class rlimit(Structure):
    pass
rlimit._pack_ = 4
rlimit._fields_ = [
    ('rlim_cur', rlim_t),
    ('rlim_max', rlim_t),
]
assert sizeof(rlimit) == 16, sizeof(rlimit)
assert alignment(rlimit) == 4, alignment(rlimit)
# Public POSIX names for the private __darwin_* types defined above.
mcontext_t = __darwin_mcontext_t
mcontext64_t = __darwin_mcontext64_t
pthread_attr_t = __darwin_pthread_attr_t
sigset_t = __darwin_sigset_t
ucontext_t = __darwin_ucontext_t
ucontext64_t = __darwin_ucontext64_t
uid_t = __darwin_uid_t
class sigval(Union):
    pass
sigval._fields_ = [
    ('sival_int', c_int),
    ('sival_ptr', c_void_p),
]
assert sizeof(sigval) == 4, sizeof(sigval)
assert alignment(sigval) == 4, alignment(sigval)
class sigevent(Structure):
    pass
sigevent._fields_ = [
    ('sigev_notify', c_int),
    ('sigev_signo', c_int),
    ('sigev_value', sigval),
    ('sigev_notify_function', CFUNCTYPE(None, sigval)),
    ('sigev_notify_attributes', POINTER(pthread_attr_t)),
]
assert sizeof(sigevent) == 20, sizeof(sigevent)
assert alignment(sigevent) == 4, alignment(sigevent)
class __siginfo(Structure):
    pass
pid_t = __darwin_pid_t
__siginfo._fields_ = [
    ('si_signo', c_int),
    ('si_errno', c_int),
    ('si_code', c_int),
    ('si_pid', pid_t),
    ('si_uid', uid_t),
    ('si_status', c_int),
    ('si_addr', c_void_p),
    ('si_value', sigval),
    ('si_band', c_long),
    ('pad', c_ulong * 7),
]
assert sizeof(__siginfo) == 64, sizeof(__siginfo)
assert alignment(__siginfo) == 4, alignment(__siginfo)
siginfo_t = __siginfo
# Union of the two possible handler signatures in struct sigaction.
class __sigaction_u(Union):
    pass
__sigaction_u._fields_ = [
    ('__sa_handler', CFUNCTYPE(None, c_int)),
    ('__sa_sigaction', CFUNCTYPE(None, c_int, POINTER(__siginfo), c_void_p)),
]
assert sizeof(__sigaction_u) == 4, sizeof(__sigaction_u)
assert alignment(__sigaction_u) == 4, alignment(__sigaction_u)
# Kernel-facing variant (includes the signal trampoline pointer).
class __sigaction(Structure):
    pass
__sigaction._fields_ = [
    ('__sigaction_u', __sigaction_u),
    ('sa_tramp', CFUNCTYPE(None, c_void_p, c_int, c_int, POINTER(siginfo_t), c_void_p)),
    ('sa_mask', sigset_t),
    ('sa_flags', c_int),
]
assert sizeof(__sigaction) == 16, sizeof(__sigaction)
assert alignment(__sigaction) == 4, alignment(__sigaction)
class sigaction(Structure):
    pass
sigaction._fields_ = [
    ('__sigaction_u', __sigaction_u),
    ('sa_mask', sigset_t),
    ('sa_flags', c_int),
]
assert sizeof(sigaction) == 12, sizeof(sigaction)
assert alignment(sigaction) == 4, alignment(sigaction)
sig_t = CFUNCTYPE(None, c_int)
stack_t = __darwin_stack_t
class sigvec(Structure):
    pass
sigvec._fields_ = [
    ('sv_handler', CFUNCTYPE(None, c_int)),
    ('sv_mask', c_int),
    ('sv_flags', c_int),
]
assert sizeof(sigvec) == 12, sizeof(sigvec)
assert alignment(sigvec) == 4, alignment(sigvec)
class sigstack(Structure):
    pass
sigstack._fields_ = [
    ('ss_sp', STRING),
    ('ss_onstack', c_int),
]
assert sizeof(sigstack) == 8, sizeof(sigstack)
assert alignment(sigstack) == 4, alignment(sigstack)
# Generated bindings for the classic BSD <sys/types.h> typedefs, fd_set,
# the pthread_* public aliases, the wait() status union (with bit-field
# views), timespec/tm, and the <stdint.h> fixed-width integer aliases.
u_char = c_ubyte
u_short = c_ushort
u_int = c_uint
u_long = c_ulong
ushort = c_ushort
uint = c_uint
u_quad_t = u_int64_t
quad_t = int64_t
qaddr_t = POINTER(quad_t)
caddr_t = STRING
daddr_t = int32_t
fixpt_t = u_int32_t
blkcnt_t = __darwin_blkcnt_t
blksize_t = __darwin_blksize_t
gid_t = __darwin_gid_t
in_addr_t = __uint32_t
in_port_t = __uint16_t
ino_t = __darwin_ino_t
key_t = __int32_t
nlink_t = __uint16_t
off_t = __darwin_off_t
segsz_t = int32_t
swblk_t = int32_t
clock_t = __darwin_clock_t
ssize_t = __darwin_ssize_t
useconds_t = __darwin_useconds_t
suseconds_t = __darwin_suseconds_t
fd_mask = __int32_t
# select() descriptor set: 1024 bits as 32 x 32-bit words.
class fd_set(Structure):
    pass
fd_set._fields_ = [
    ('fds_bits', __int32_t * 32),
]
assert sizeof(fd_set) == 128, sizeof(fd_set)
assert alignment(fd_set) == 4, alignment(fd_set)
pthread_cond_t = __darwin_pthread_cond_t
pthread_condattr_t = __darwin_pthread_condattr_t
pthread_mutex_t = __darwin_pthread_mutex_t
pthread_mutexattr_t = __darwin_pthread_mutexattr_t
pthread_once_t = __darwin_pthread_once_t
pthread_rwlock_t = __darwin_pthread_rwlock_t
pthread_rwlockattr_t = __darwin_pthread_rwlockattr_t
pthread_t = __darwin_pthread_t
pthread_key_t = __darwin_pthread_key_t
fsblkcnt_t = __darwin_fsblkcnt_t
fsfilcnt_t = __darwin_fsfilcnt_t
# values for enumeration 'idtype_t'
idtype_t = c_int # enum
id_t = __darwin_id_t
# union wait: raw exit status plus two bit-field interpretations
# (terminated-by-signal view and stopped view).
class wait(Union):
    pass
class N4wait3DOLLAR_3E(Structure):
    pass
N4wait3DOLLAR_3E._fields_ = [
    ('w_Termsig', c_uint, 7),
    ('w_Coredump', c_uint, 1),
    ('w_Retcode', c_uint, 8),
    ('w_Filler', c_uint, 16),
]
assert sizeof(N4wait3DOLLAR_3E) == 4, sizeof(N4wait3DOLLAR_3E)
assert alignment(N4wait3DOLLAR_3E) == 4, alignment(N4wait3DOLLAR_3E)
class N4wait3DOLLAR_4E(Structure):
    pass
N4wait3DOLLAR_4E._fields_ = [
    ('w_Stopval', c_uint, 8),
    ('w_Stopsig', c_uint, 8),
    ('w_Filler', c_uint, 16),
]
assert sizeof(N4wait3DOLLAR_4E) == 4, sizeof(N4wait3DOLLAR_4E)
assert alignment(N4wait3DOLLAR_4E) == 4, alignment(N4wait3DOLLAR_4E)
wait._fields_ = [
    ('w_status', c_int),
    ('w_T', N4wait3DOLLAR_3E),
    ('w_S', N4wait3DOLLAR_4E),
]
assert sizeof(wait) == 4, sizeof(wait)
assert alignment(wait) == 4, alignment(wait)
class timespec(Structure):
    pass
timespec._fields_ = [
    ('tv_sec', time_t),
    ('tv_nsec', c_long),
]
assert sizeof(timespec) == 8, sizeof(timespec)
assert alignment(timespec) == 4, alignment(timespec)
class tm(Structure):
    pass
tm._fields_ = [
    ('tm_sec', c_int),
    ('tm_min', c_int),
    ('tm_hour', c_int),
    ('tm_mday', c_int),
    ('tm_mon', c_int),
    ('tm_year', c_int),
    ('tm_wday', c_int),
    ('tm_yday', c_int),
    ('tm_isdst', c_int),
    ('tm_gmtoff', c_long),
    ('tm_zone', STRING),
]
assert sizeof(tm) == 44, sizeof(tm)
assert alignment(tm) == 4, alignment(tm)
__gnuc_va_list = STRING
ptrdiff_t = c_int
int8_t = c_byte
int16_t = c_short
uint8_t = c_ubyte
uint16_t = c_ushort
uint32_t = c_uint
uint64_t = c_ulonglong
int_least8_t = int8_t
int_least16_t = int16_t
int_least32_t = int32_t
int_least64_t = int64_t
uint_least8_t = uint8_t
uint_least16_t = uint16_t
uint_least32_t = uint32_t
uint_least64_t = uint64_t
int_fast8_t = int8_t
int_fast16_t = int16_t
int_fast32_t = int32_t
int_fast64_t = int64_t
uint_fast8_t = uint8_t
uint_fast16_t = uint16_t
uint_fast32_t = uint32_t
uint_fast64_t = uint64_t
intptr_t = c_long
uintptr_t = c_ulong
intmax_t = c_longlong
uintmax_t = c_ulonglong
# Generated export list: every type, alias and constant this bindings
# module defines.  The order is the generator's; do not edit by hand --
# regenerate the module instead.
__all__ = ['ENGINE', 'pkcs7_enc_content_st', '__int16_t',
    'X509_REVOKED', 'SSL_CTX', 'UIT_BOOLEAN',
    '__darwin_time_t', 'ucontext64_t', 'int_fast32_t',
    'pem_ctx_st', 'uint8_t', 'fpos_t', 'X509', 'COMP_CTX',
    'tm', 'N10pem_ctx_st4DOLLAR_17E', 'swblk_t',
    'ASN1_TEMPLATE', '__darwin_pthread_t', 'fixpt_t',
    'BIO_METHOD', 'ASN1_PRINTABLESTRING', 'EVP_ENCODE_CTX',
    'dh_method', 'bio_f_buffer_ctx_struct', 'in_port_t',
    'X509_SIG', '__darwin_ssize_t', '__darwin_sigset_t',
    'wait', 'uint_fast16_t', 'N12asn1_type_st4DOLLAR_11E',
    'uint_least8_t', 'pthread_rwlock_t', 'ASN1_IA5STRING',
    'fsfilcnt_t', 'ucontext', '__uint64_t', 'timespec',
    'x509_cinf_st', 'COMP_METHOD', 'MD5_CTX', 'buf_mem_st',
    'ASN1_ENCODING_st', 'PBEPARAM', 'X509_NAME_ENTRY',
    '__darwin_va_list', 'ucontext_t', 'lhash_st',
    'N4wait3DOLLAR_4E', '__darwin_uuid_t',
    '_ossl_old_des_ks_struct', 'id_t', 'ASN1_BIT_STRING',
    'va_list', '__darwin_wchar_t', 'pthread_key_t',
    'pkcs7_signer_info_st', 'ASN1_METHOD', 'DSA_SIG', 'DSA',
    'UIT_NONE', 'pthread_t', '__darwin_useconds_t',
    'uint_fast8_t', 'UI_STRING', 'DES_cblock',
    '__darwin_mcontext64_t', 'rlim_t', 'PEM_Encode_Seal_st',
    'SHAstate_st', 'u_quad_t', 'openssl_fptr',
    '_opaque_pthread_rwlockattr_t',
    'N18x509_attributes_st4DOLLAR_13E',
    '__darwin_pthread_rwlock_t', 'daddr_t', 'ui_string_st',
    'x509_file_st', 'X509_req_info_st', 'int_least64_t',
    'evp_Encode_Ctx_st', 'X509_OBJECTS', 'CRYPTO_EX_DATA',
    '__int8_t', 'AUTHORITY_KEYID_st', '_opaque_pthread_attr_t',
    'sigstack', 'EVP_CIPHER_CTX', 'X509_extension_st', 'pid_t',
    'RSA_METHOD', 'PEM_USER', 'pem_recip_st', 'env_md_ctx_st',
    'rc5_key_st', 'ui_st', 'X509_PUBKEY', 'u_int8_t',
    'ASN1_ITEM_st', 'pkcs7_recip_info_st', 'ssl2_state_st',
    'off_t', 'N10ssl_ctx_st4DOLLAR_18E', 'crypto_ex_data_st',
    'ui_method_st', '__darwin_pthread_rwlockattr_t',
    'CRYPTO_EX_dup', '__darwin_ino_t', '__sFILE',
    'OSUnknownByteOrder', 'BN_MONT_CTX', 'ASN1_NULL', 'time_t',
    'CRYPTO_EX_new', 'asn1_type_st', 'CRYPTO_EX_DATA_FUNCS',
    'user_time_t', 'BIGNUM', 'pthread_rwlockattr_t',
    'ASN1_VALUE_st', 'DH_METHOD', '__darwin_off_t',
    '_opaque_pthread_t', 'bn_blinding_st', 'RSA', 'ssize_t',
    'mcontext64_t', 'user_long_t', 'fsblkcnt_t', 'cert_st',
    '__darwin_pthread_condattr_t', 'X509_PKEY',
    '__darwin_id_t', '__darwin_nl_item', 'SSL2_STATE', 'FILE',
    'pthread_mutexattr_t', 'size_t',
    '_ossl_old_des_key_schedule', 'pkcs7_issuer_and_serial_st',
    'sigval', 'CRYPTO_MEM_LEAK_CB', 'X509_NAME', 'blkcnt_t',
    'uint_least16_t', '__darwin_dev_t', 'evp_cipher_info_st',
    'BN_BLINDING', 'ssl3_state_st', 'uint_least64_t',
    'user_addr_t', 'DES_key_schedule', 'RIPEMD160_CTX',
    'u_char', 'X509_algor_st', 'uid_t', 'sess_cert_st',
    'u_int64_t', 'u_int16_t', 'sigset_t', '__darwin_ptrdiff_t',
    'ASN1_CTX', 'STACK', '__int32_t', 'UI_METHOD',
    'NETSCAPE_SPKI', 'UIT_PROMPT', 'st_CRYPTO_EX_DATA_IMPL',
    'cast_key_st', 'X509_HASH_DIR_CTX', 'sigevent',
    'user_ssize_t', 'clock_t', 'aes_key_st',
    '__darwin_socklen_t', '__darwin_intptr_t', 'int_fast64_t',
    'asn1_string_table_st', 'uint_fast32_t',
    'ASN1_VISIBLESTRING', 'DSA_SIG_st', 'obj_name_st',
    'X509_LOOKUP_METHOD', 'u_int32_t', 'EVP_CIPHER_INFO',
    '__gnuc_va_list', 'AES_KEY', 'PKCS7_ISSUER_AND_SERIAL',
    'BN_CTX', '__darwin_blkcnt_t', 'key_t', 'SHA_CTX',
    'pkcs7_signed_st', 'SSL', 'N10pem_ctx_st4DOLLAR_16E',
    'pthread_attr_t', 'EVP_MD', 'uint', 'ASN1_BOOLEAN',
    'ino_t', '__darwin_clock_t', 'ASN1_OCTET_STRING',
    'asn1_ctx_st', 'BIO_F_BUFFER_CTX', 'bn_mont_ctx_st',
    'X509_REQ_INFO', 'PEM_CTX', 'sigvec',
    '__darwin_pthread_mutexattr_t', 'x509_attributes_st',
    'stack_t', '__darwin_mode_t', '__mbstate_t',
    'asn1_object_st', 'ASN1_ENCODING', '__uint8_t',
    'LHASH_NODE', 'PKCS7_SIGNER_INFO', 'asn1_method_st',
    'stack_st', 'bio_info_cb', 'div_t', 'UIT_VERIFY',
    'PBEPARAM_st', 'N4wait3DOLLAR_3E', 'quad_t', '__siginfo',
    '__darwin_mbstate_t', 'rsa_st', 'ASN1_UNIVERSALSTRING',
    'uint64_t', 'ssl_comp_st', 'X509_OBJECT', 'pthread_cond_t',
    'DH', '__darwin_wctype_t', 'PKCS7_ENVELOPE', 'ASN1_TLC_st',
    'sig_atomic_t', 'BIO', 'nlink_t', 'BUF_MEM', 'SSL3_RECORD',
    'bio_method_st', 'timeval', 'UI_string_types', 'BIO_dummy',
    'ssl_ctx_st', 'NETSCAPE_CERT_SEQUENCE',
    'BIT_STRING_BITNAME_st', '__darwin_pthread_attr_t',
    'int8_t', '__darwin_wint_t', 'OBJ_NAME',
    'PKCS8_PRIV_KEY_INFO', 'PBE2PARAM_st',
    'LHASH_DOALL_FN_TYPE', 'x509_st', 'X509_VAL', 'dev_t',
    'ASN1_TEMPLATE_st', 'MD5state_st', '__uint16_t',
    'LHASH_DOALL_ARG_FN_TYPE', 'mdc2_ctx_st', 'SSL3_STATE',
    'ssl3_buffer_st', 'ASN1_ITEM_EXP',
    '_opaque_pthread_condattr_t', 'mode_t', 'ASN1_VALUE',
    'qaddr_t', '__darwin_gid_t', 'EVP_PKEY', 'CRYPTO_EX_free',
    '_ossl_old_des_cblock', 'X509_INFO', 'asn1_string_st',
    'intptr_t', 'UIT_INFO', 'int_fast8_t', 'sigaltstack',
    'env_md_st', 'LHASH', '__darwin_ucontext_t',
    'PKCS7_SIGN_ENVELOPE', '__darwin_mcontext_t', 'ct_rune_t',
    'MD2_CTX', 'pthread_once_t', 'SSL3_BUFFER', 'fd_mask',
    'ASN1_TYPE', 'PKCS7_SIGNED', 'ssl3_record_st', 'BF_KEY',
    'MD4state_st', 'MD4_CTX', 'int16_t', 'SSL_CIPHER',
    'rune_t', 'X509_TRUST', 'siginfo_t', 'X509_STORE',
    '__sbuf', 'X509_STORE_CTX', '__darwin_blksize_t', 'ldiv_t',
    'ASN1_TIME', 'SSL_METHOD', 'X509_LOOKUP',
    'Netscape_spki_st', 'P_PID', 'sigaction', 'sig_t',
    'hostent', 'x509_cert_aux_st', '_opaque_pthread_cond_t',
    'segsz_t', 'ushort', '__darwin_ct_rune_t', 'fd_set',
    'BN_RECP_CTX', 'x509_lookup_st', 'uint16_t', 'pkcs7_st',
    'asn1_header_st', '__darwin_pthread_key_t',
    'x509_trust_st', '__darwin_pthread_handler_rec', 'int32_t',
    'X509_CRL_INFO', 'N11evp_pkey_st4DOLLAR_12E', 'MDC2_CTX',
    'N23_ossl_old_des_ks_struct4DOLLAR_10E', 'ASN1_HEADER',
    'X509_crl_info_st', 'LHASH_HASH_FN_TYPE',
    '_opaque_pthread_mutexattr_t', 'ssl_st',
    'N8pkcs7_st4DOLLAR_15E', 'evp_pkey_st',
    'pkcs7_signedandenveloped_st', '__darwin_mach_port_t',
    'EVP_PBE_KEYGEN', '_opaque_pthread_mutex_t',
    'ASN1_UTCTIME', 'mcontext', 'crypto_ex_data_func_st',
    'u_long', 'PBKDF2PARAM_st', 'rc4_key_st', 'DSA_METHOD',
    'EVP_CIPHER', 'BIT_STRING_BITNAME', 'PKCS7_RECIP_INFO',
    'ssl3_enc_method', 'X509_CERT_AUX', 'uintmax_t',
    'int_fast16_t', 'RC5_32_KEY', 'ucontext64', 'ASN1_INTEGER',
    'u_short', 'N14x509_object_st4DOLLAR_14E', 'mcontext64',
    'X509_sig_st', 'ASN1_GENERALSTRING', 'PKCS7', '__sFILEX',
    'X509_name_entry_st', 'ssl_session_st', 'caddr_t',
    'bignum_st', 'X509_CINF', '__darwin_pthread_cond_t',
    'ASN1_TLC', 'PKCS7_ENCRYPT', 'NETSCAPE_SPKAC',
    'Netscape_spkac_st', 'idtype_t', 'UIT_ERROR',
    'uint_fast64_t', 'in_addr_t', 'pthread_mutex_t',
    '__int64_t', 'ASN1_BMPSTRING', 'uint32_t',
    'PEM_ENCODE_SEAL_CTX', 'suseconds_t', 'ASN1_OBJECT',
    'X509_val_st', 'private_key_st', 'CRYPTO_dynlock',
    'X509_objects_st', 'CRYPTO_EX_DATA_IMPL',
    'pthread_condattr_t', 'PKCS7_DIGEST', 'uint_least32_t',
    'ASN1_STRING', '__uint32_t', 'P_PGID', 'rsa_meth_st',
    'X509_crl_st', 'RC2_KEY', '__darwin_fsfilcnt_t',
    'X509_revoked_st', 'PBE2PARAM', 'blksize_t',
    'Netscape_certificate_sequence', 'ssl_cipher_st',
    'bignum_ctx', 'register_t', 'ASN1_UTF8STRING',
    'pkcs7_encrypted_st', 'RC4_KEY', '__darwin_ucontext64_t',
    'N13ssl2_state_st4DOLLAR_19E', 'bn_recp_ctx_st',
    'CAST_KEY', 'X509_ATTRIBUTE', '__darwin_suseconds_t',
    '__sigaction', 'user_ulong_t', 'syscall_arg_t',
    'evp_cipher_ctx_st', 'X509_ALGOR', 'mcontext_t',
    'const_DES_cblock', '__darwin_fsblkcnt_t', 'dsa_st',
    'int_least8_t', 'MD2state_st', 'X509_EXTENSION',
    'GEN_SESSION_CB', 'int_least16_t', '__darwin_wctrans_t',
    'PBKDF2PARAM', 'x509_lookup_method_st', 'pem_password_cb',
    'X509_info_st', 'x509_store_st', '__darwin_natural_t',
    'X509_pubkey_st', 'pkcs7_digest_st', '__darwin_size_t',
    'ASN1_STRING_TABLE', 'OSLittleEndian', 'RIPEMD160state_st',
    'pkcs7_enveloped_st', 'UI', 'ptrdiff_t', 'X509_REQ',
    'CRYPTO_dynlock_value', 'X509_req_st', 'x509_store_ctx_st',
    'N13ssl3_state_st4DOLLAR_20E', 'lhash_node_st',
    '__darwin_pthread_mutex_t', 'LHASH_COMP_FN_TYPE',
    '__darwin_rune_t', 'rlimit', '__darwin_pthread_once_t',
    'OSBigEndian', 'uintptr_t', '__darwin_uid_t', 'u_int',
    'ASN1_T61STRING', 'gid_t', 'ssl_method_st', 'ASN1_ITEM',
    'ASN1_ENUMERATED', '_opaque_pthread_rwlock_t',
    'pkcs8_priv_key_info_st', 'intmax_t', 'sigcontext',
    'X509_CRL', 'rc2_key_st', 'engine_st', 'x509_object_st',
    '_opaque_pthread_once_t', 'DES_ks', 'SSL_COMP',
    'dsa_method', 'int64_t', 'bio_st', 'bf_key_st',
    'ASN1_GENERALIZEDTIME', 'PKCS7_ENC_CONTENT',
    '__darwin_pid_t', 'lldiv_t', 'comp_method_st',
    'EVP_MD_CTX', 'evp_cipher_st', 'X509_name_st',
    'x509_hash_dir_st', '__darwin_mach_port_name_t',
    'useconds_t', 'user_size_t', 'SSL_SESSION', 'rusage',
    'ssl_crock_st', 'int_least32_t', '__sigaction_u', 'dh_st',
    'P_ALL', '__darwin_stack_t', 'N6DES_ks3DOLLAR_9E',
    'comp_ctx_st', 'X509_CERT_FILE_CTX']
| Python |
"""
Thread support based on OS-level threads.
"""
import thread
from pypy.interpreter.error import OperationError
from pypy.interpreter.gateway import NoneNotWrapped
from pypy.interpreter.gateway import ObjSpace, W_Root, Arguments
# Force the declaration of thread.start_new_thread() & co. for RPython
import pypy.module.thread.rpython.exttable
THREAD_STARTUP_LOCK = thread.allocate_lock()
class Bootstrapper:
    """Carries the state needed to start a new app-level thread.

    start_new_thread() fills in 'space', 'w_callable' and 'args' on an
    instance and passes it to the OS-level thread; bootstrap() is the
    entry point executed in the new thread.
    """

    def bootstrap(self):
        space = self.space
        # The parent thread holds THREAD_STARTUP_LOCK; releasing it here
        # signals that this thread now owns its reference to 'self'.
        THREAD_STARTUP_LOCK.release()
        space.threadlocals.enter_thread(space)
        try:
            self.run()
        finally:
            # release ownership of these objects before we release the GIL
            self.args = None
            self.w_callable = None
            # at this point the thread should only have a reference to
            # an empty 'self'. We hold the last reference to 'self'; indeed,
            # the parent thread already forgot about it because the above
            # enter_thread() must have blocked until long after the call to
            # start_new_thread() below returned.
            # (be careful of resetting *all* local variables to None here!)
            # clean up space.threadlocals to remove the ExecutionContext
            # entry corresponding to the current thread
            space.threadlocals.leave_thread(space)

    def run(self):
        # Call the app-level callable; unhandled exceptions other than
        # SystemExit are reported as unraisable, then cleared.
        space = self.space
        w_callable = self.w_callable
        args = self.args
        try:
            space.call_args(w_callable, args)
        except OperationError, e:
            if not e.match(space, space.w_SystemExit):
                ident = thread.get_ident()
                where = 'thread %d started by ' % ident
                e.write_unraisable(space, where, w_callable)
            e.clear(space)
def start_new_thread(space, w_callable, w_args, w_kwargs=NoneNotWrapped):
    """Start a new thread and return its identifier. The thread will call the
    function with positional arguments from the tuple args and keyword arguments
    taken from the optional dictionary kwargs. The thread exits when the
    function returns; the return value is ignored. The thread will also exit
    when the function raises an unhandled exception; a stack trace will be
    printed unless the exception is SystemExit."""
    # App-level argument validation, mirroring CPython's thread module.
    if not space.is_true(space.isinstance(w_args, space.w_tuple)):
        raise OperationError(space.w_TypeError,
                space.wrap("2nd arg must be a tuple"))
    if w_kwargs is not None and not space.is_true(space.isinstance(w_kwargs, space.w_dict)):
        raise OperationError(space.w_TypeError,
                space.wrap("optional 3rd arg must be a dictionary"))
    if not space.is_true(space.callable(w_callable)):
        raise OperationError(space.w_TypeError,
                space.wrap("first arg must be callable"))
    args = Arguments.frompacked(space, w_args, w_kwargs)
    boot = Bootstrapper()
    boot.space = space
    boot.w_callable = w_callable
    boot.args = args
    THREAD_STARTUP_LOCK.acquire(True)
    try:
        ident = thread.start_new_thread(Bootstrapper.bootstrap, (boot,))
    except:
        # If the OS refuses to spawn the thread, bootstrap() will never run
        # and thus never release THREAD_STARTUP_LOCK.  Release it ourselves
        # before propagating, or every later start_new_thread() would
        # deadlock on the permanently-held lock.
        THREAD_STARTUP_LOCK.release()
        raise
    # wait until the thread has really started and acquired a reference to
    # 'boot': bootstrap() releases THREAD_STARTUP_LOCK as its first action.
    THREAD_STARTUP_LOCK.acquire(True)
    THREAD_STARTUP_LOCK.release()
    return space.wrap(ident)
def get_ident(space):
    """Return a non-zero integer that uniquely identifies the current thread
    amongst other threads that exist simultaneously.
    This may be used to identify per-thread resources.
    Even though on some platforms threads identities may appear to be
    allocated consecutive numbers starting at 1, this behavior should not
    be relied upon, and the number should be seen purely as a magic cookie.
    A thread's identity may be reused for another thread after it exits."""
    # Simply wrap the OS-level thread identifier for app-level use.
    return space.wrap(thread.get_ident())
| Python |
"""
Annotation support for interp-level lock objects.
"""
import thread
from pypy.rpython.extfunctable import declare, declaretype, standardexceptions
module = 'pypy.module.thread.rpython.ll_thread'
# ____________________________________________________________
# The external type thread.LockType
# Register thread.LockType as an external type for the rtyper, mapping its
# methods to the low-level implementations in ll_thread.
locktypeinfo = declaretype(thread.LockType,
    "ThreadLock",
    acquire = (bool, '%s/acquire_lock' % module),
    release = (type(None), '%s/release_lock' % module),
    # XXX special case for releasing and re-acquiring the GIL
    # without race conditions on exception handling
    fused_release_acquire = (type(None), '%s/fused_release_acquire_lock' % module),
    )
# ____________________________________________________________
# Built-in functions needed in the rtyper
def ann_startthr(s_bootstrap_function, s_argument_tuple):
    """Annotation of thread.start_new_thread(): require an argument tuple of
    exactly one item, emulate a call to the bootstrap function with that
    item, check it annotates to None, and annotate the result (the thread
    identifier) as an integer."""
    from pypy.annotation import model as annmodel
    from pypy.annotation.bookkeeper import getbookkeeper
    bookkeeper = getbookkeeper()
    assert (isinstance(s_argument_tuple, annmodel.SomeTuple) and
            len(s_argument_tuple.items) == 1), (
        """thread.start_new_thread(f, arg) is only supported with a tuple of
        length 1 for arg""")
    s_arg, = s_argument_tuple.items
    # XXX hack hack hack: emulate a call to s_bootstrap_function
    s_result = bookkeeper.emulate_pbc_call(bookkeeper.position_key, s_bootstrap_function, [s_arg])
    assert annmodel.s_None.contains(s_result), (
        """thread.start_new_thread(f, arg): f() should return None""")
    return annmodel.SomeInteger()
# Register the thread-module entry points with the rtyper's external
# function table, mapping each to its ll_thread implementation.
declare(thread.start_new_thread, ann_startthr, '%s/start_new_thread' % module)
declare(thread.get_ident, int, '%s/get_ident' % module)
declare(thread.allocate_lock, thread.LockType,'%s/allocate_lock' % module)
# ____________________________________________________________
# thread.error can be raised by the above
# XXX a bit hackish
standardexceptions[thread.error] = True
| Python |
"""
Dummy low-level implementations for the external functions of the 'thread'
module.
"""
import thread
from pypy.rpython.lltypesystem.lltype import malloc
from pypy.rpython.module.support import init_opaque_object, from_opaque_object
from pypy.module.thread.rpython.exttable import locktypeinfo
# Low-level container type whose '.obj' field holds the opaque lock object.
LOCKCONTAINERTYPE = locktypeinfo.get_lltype()
def ll_thread_start_new_thread(funcptr, argtuple):
    """Wrapper around ll_thread_start(): unpack the single-element argument
    tuple that thread.start_new_thread() is restricted to."""
    return ll_thread_start(funcptr, argtuple.item0)
def ll_thread_start(funcptr, argument):
    """Dummy lowering of thread.start_new_thread().

    We cannot really call back into thread.start_new_thread() here:
    'funcptr' is a low-level pointer to a _func, not a normal function
    object, and doing so would also confuse the annotator.  The backend is
    expected to substitute a real primitive (suggested_primitive below);
    on the llinterpreter this just returns a fake thread identifier, and
    actually running multiple threads there would not work anyway.
    """
    return 1234
ll_thread_start.suggested_primitive = True
def ll_thread_get_ident():
    """Low-level implementation of thread.get_ident()."""
    ident = thread.get_ident()
    return ident
ll_thread_get_ident.suggested_primitive = True
def ll_newlock(opaqueptr):
    """Fill *opaqueptr* with a freshly allocated OS-level lock."""
    new_lock = thread.allocate_lock()
    init_opaque_object(opaqueptr, new_lock)
ll_newlock.suggested_primitive = True
def ll_acquirelock(opaqueptr, waitflag):
    """Acquire the lock hidden in *opaqueptr*; returns acquire()'s result."""
    return from_opaque_object(opaqueptr).acquire(waitflag)
ll_acquirelock.suggested_primitive = True
def ll_releaselock(opaqueptr):
    """Release the lock hidden in *opaqueptr*."""
    from_opaque_object(opaqueptr).release()
ll_releaselock.suggested_primitive = True
def ll_fused_releaseacquirelock(opaqueptr):
    """Release then immediately re-acquire the lock hidden in *opaqueptr*.

    Fused into one primitive so the backend can perform the pair without a
    race window on exception handling (see the exttable declaration).
    """
    ll_lock = from_opaque_object(opaqueptr)
    ll_lock.release()
    ll_lock.acquire(True)
ll_fused_releaseacquirelock.suggested_primitive = True
def ll_thread_allocate_lock():
    """Malloc a lock container and initialize its embedded opaque lock."""
    container = malloc(LOCKCONTAINERTYPE)
    ll_newlock(container.obj)
    return container
def ll_thread_acquire_lock(lockcontainer, waitflag):
    # External entry point: unwrap the container and acquire the real lock.
    return ll_acquirelock(lockcontainer.obj, waitflag)
def ll_thread_release_lock(lockcontainer):
    # External entry point: unwrap the container and release the real lock.
    ll_releaselock(lockcontainer.obj)
def ll_thread_fused_release_acquire_lock(lockcontainer):
    # External entry point for the fused release+acquire used by the GIL.
    ll_fused_releaseacquirelock(lockcontainer.obj)
| Python |
#
| Python |
#
| Python |
import thread
# Force the declaration of thread.start_new_thread() & co. for RPython
import pypy.module.thread.rpython.exttable
class OSThreadLocals:
    """Thread-local storage for OS-level threads.
    For memory management, this version depends on explicit notification when
    a thread finishes. This works as long as the thread was started by
    os_thread.bootstrap()."""

    def __init__(self):
        # maps thread identifiers to their ExecutionContext
        self._valuedict = {}   # {thread_ident: ExecutionContext()}
        self._mainthreadident = 0

    def getvalue(self):
        return self._valuedict.get(thread.get_ident(), None)

    def setvalue(self, value):
        ident = thread.get_ident()
        if value is None:
            # unsetting: silently drop the entry if present
            try:
                del self._valuedict[ident]
            except KeyError:
                pass
        else:
            if len(self._valuedict) == 0:
                # the very first thread registered is remembered as the
                # main thread
                self._mainthreadident = ident
            self._valuedict[ident] = value

    def getmainthreadvalue(self):
        return self._valuedict.get(self._mainthreadident, None)

    def enter_thread(self, space):
        "Notification that the current thread is just starting."
        space.getexecutioncontext().thread_exit_funcs = []

    def leave_thread(self, space):
        "Notification that the current thread is about to stop."
        try:
            ec = space.getexecutioncontext()
            while ec.thread_exit_funcs:
                func, w_obj = ec.thread_exit_funcs.pop()
                func(w_obj)
        finally:
            # always forget this thread's ExecutionContext entry, even if
            # an exit function raised
            ident = thread.get_ident()
            try:
                del self._valuedict[ident]
            except KeyError:
                pass

    def atthreadexit(self, space, exit_func, w_obj):
        space.getexecutioncontext().thread_exit_funcs.append((exit_func, w_obj))

    def getGIL(self):
        # no GIL in this base version; see GILThreadLocals
        return None
| Python |
class error(Exception):
    """Generic error raised by the 'thread' module."""
    pass
def exit():
    """This is synonymous to ``raise SystemExit''. It will cause the current
    thread to exit silently unless the exception is caught."""
    raise SystemExit()
| Python |
"""
Global Interpreter Lock.
"""
# This module adds a global lock to an object space.
# If multiple threads try to execute simultaneously in this space,
# all but one will be blocked. The other threads get a chance to run
# from time to time, using the executioncontext's XXX
import thread
from pypy.interpreter.miscutils import Action
from pypy.module.thread.threadlocals import OSThreadLocals
class GILThreadLocals(OSThreadLocals):
    """A version of OSThreadLocals that enforces a GIL."""

    def __init__(self):
        OSThreadLocals.__init__(self)
        # the Global Interpreter Lock shared by all threads of this space
        self.GIL = thread.allocate_lock()

    def enter_thread(self, space):
        "Notification that the current thread is just starting: grab the GIL."
        # acquire first: the base-class bookkeeping must run under the GIL
        self.GIL.acquire(True)
        OSThreadLocals.enter_thread(self, space)

    def leave_thread(self, space):
        "Notification that the current thread is stopping: release the GIL."
        # run the base-class cleanup (thread-exit funcs) while still holding
        # the GIL, then release it as the very last step
        OSThreadLocals.leave_thread(self, space)
        self.GIL.release()

    def yield_thread(self):
        """Notification that the current thread is between two bytecodes:
        release the GIL for a little while."""
        GIL = self.GIL
        GIL.release()
        # Other threads can run here
        GIL.acquire(True)
    yield_thread._annspecialcase_ = 'specialize:yield_thread'

    def getGIL(self):
        return self.GIL    # XXX temporary hack!
class GILReleaseAction(Action):
    """An action called when the current thread is between two bytecodes
    (so that it's a good time to yield some time to other threads).
    """
    repeat = True   # keep firing at every bytecode boundary

    def __init__(self, threadlocals):
        self.threadlocals = threadlocals

    def perform(self):
        # briefly release and re-acquire the GIL so other threads can run
        self.threadlocals.yield_thread()
| Python |
"""
Python locks, based on true threading locks provided by the OS.
"""
import thread
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import Wrappable
from pypy.interpreter.gateway import ObjSpace, interp2app
from pypy.interpreter.typedef import TypeDef
# Force the declaration of the type 'thread.LockType' for RPython
import pypy.module.thread.rpython.exttable
##import sys
##def debug(msg, n):
## return
## tb = []
## try:
## for i in range(1, 8):
## tb.append(sys._getframe(i).f_code.co_name)
## except:
## pass
## tb = ' '.join(tb)
## msg = '| %6d | %d %s | %s\n' % (thread.get_ident(), n, msg, tb)
## sys.stderr.write(msg)
class Lock(Wrappable):
    "A wrappable box around an interp-level lock object."

    def __init__(self):
        # the underlying OS-level lock
        self.lock = thread.allocate_lock()

    def descr_lock_acquire(self, space, waitflag=1):
        """Lock the lock. Without argument, this blocks if the lock is already
        locked (even by the same thread), waiting for another thread to release
        the lock, and return None once the lock is acquired.
        With an argument, this will only block if the argument is true,
        and the return value reflects whether the lock is acquired.
        The blocking operation is not interruptible."""
        # XXX Usage of threadlocals.GIL in this function is considered hackish.
        # Ideally, all GIL knowledge should be in gil.py.
        GIL = space.threadlocals.GIL
        # drop the GIL so other app-level threads can run while we block
        GIL.release()
        acquired = self.lock.acquire(bool(waitflag))
        GIL.acquire(True)
        return space.newbool(acquired)

    def descr_lock_release(self, space):
        """Release the lock, allowing another thread that is blocked waiting for
        the lock to acquire the lock. The lock must be in the locked state,
        but it needn't be locked by the same thread that unlocks it."""
        try:
            self.lock.release()
        except thread.error:
            # translate the interp-level thread.error into the app-level one
            w_mod = space.getbuiltinmodule('thread')
            w_error = space.getattr(w_mod, space.wrap('error'))
            raise OperationError(w_error, space.wrap("release unlocked lock"))

    def descr_lock_locked(self, space):
        """Return whether the lock is in the locked state."""
        # a non-blocking acquire succeeds exactly when the lock was free
        if self.lock.acquire(False):
            self.lock.release()
            return space.w_False
        return space.w_True
# Expose the interp-level methods as app-level gateways with explicit
# unwrapping specifications.
descr_acquire = interp2app(Lock.descr_lock_acquire,
                           unwrap_spec=['self', ObjSpace, int])
descr_release = interp2app(Lock.descr_lock_release,
                           unwrap_spec=['self', ObjSpace])
descr_locked  = interp2app(Lock.descr_lock_locked,
                           unwrap_spec=['self', ObjSpace])

# App-level type object for 'thread.lock'.
Lock.typedef = TypeDef("thread.lock",
    __doc__ = """\
A lock object is a synchronization primitive. To create a lock,
call the thread.allocate_lock() function. Methods are:
acquire() -- lock the lock, possibly blocking until it can be obtained
release() -- unlock of the lock
locked() -- test whether the lock is currently locked
A lock is not owned by the thread that locked it; another thread may
unlock it. A thread attempting to lock a lock that it has already locked
will block until another thread unlocks it. Deadlocks may ensue.""",
    acquire = descr_acquire,
    release = descr_release,
    locked = descr_locked,
    # Obsolete synonyms
    acquire_lock = descr_acquire,
    release_lock = descr_release,
    locked_lock = descr_locked,
    )
def allocate_lock(space):
    """Create a new lock object. (allocate() is an obsolete synonym.)
    See LockType.__doc__ for information about locks."""
    # wrap a fresh interp-level Lock into an app-level object
    return space.wrap(Lock())
def getlocktype(space):
    # Return the app-level type object for 'thread.lock'.
    return space.gettypeobject(Lock.typedef)
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.