code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
"Constraint Solver in Python."
from propagation import Repository, Solver
#from distributors import DefaultDistributor
import fd
#import fi
__all__ = ['Repository', 'Solver', 'fd']
| Python |
"""Internal Python object serialization
This module contains functions that can read and write Python values in a binary format. The format is specific to Python, but independent of machine architecture issues (e.g., you can write a Python value to a file on a PC, transport the file to a Sun, and read it back there). Details of the format may change between Python versions.
"""
import types
from _codecs import utf_8_decode, utf_8_encode
try:
import new
except ImportError:
new = None
# One-character type codes of the marshal binary stream.  These must
# stay in sync with CPython's marshal format (Python/marshal.c) so the
# produced data is interchangeable with the built-in module.
TYPE_NULL = '0'
TYPE_NONE = 'N'
TYPE_FALSE = 'F'
TYPE_TRUE = 'T'
TYPE_STOPITER = 'S'
TYPE_ELLIPSIS = '.'
TYPE_INT = 'i'
TYPE_INT64 = 'I'
TYPE_FLOAT = 'f'
TYPE_COMPLEX = 'x'
TYPE_LONG = 'l'
TYPE_STRING = 's'
TYPE_INTERNED = 't'
TYPE_STRINGREF= 'R'
TYPE_TUPLE = '('
TYPE_LIST = '['
TYPE_DICT = '{'
TYPE_CODE = 'c'
TYPE_UNICODE = 'u'
TYPE_UNKNOWN = '?'
TYPE_SET = '<'
TYPE_FROZENSET= '>'
class _Marshaller:
    # Serializes Python values to marshal's binary format through a
    # write callback.  `dispatch` maps a concrete type to the method
    # that emits it; the table is filled in as methods are defined.
    dispatch = {}
    def __init__(self, writefunc):
        # writefunc: callable accepting one str chunk (e.g. file.write).
        self._write = writefunc
    def dump(self, x):
        # Serialize one value.  On a direct type miss, scan the MRO so
        # subclasses of registered types are still marshallable.
        try:
            self.dispatch[type(x)](self, x)
        except KeyError:
            for tp in type(x).mro():
                func = self.dispatch.get(tp)
                if func:
                    break
            else:
                raise ValueError, "unmarshallable object"
            func(self, x)
    def w_long64(self, x):
        # Emit x as two little-endian 32-bit halves, low half first.
        self.w_long(x)
        self.w_long(x>>32)
    def w_long(self, x):
        # Emit the low 32 bits of x, little-endian.
        a = chr(x & 0xff)
        x >>= 8
        b = chr(x & 0xff)
        x >>= 8
        c = chr(x & 0xff)
        x >>= 8
        d = chr(x & 0xff)
        self._write(a + b + c + d)
    def w_short(self, x):
        # Emit the low 16 bits of x, little-endian.
        self._write(chr((x) & 0xff))
        self._write(chr((x>> 8) & 0xff))
    def dump_none(self, x):
        self._write(TYPE_NONE)
    dispatch[types.NoneType] = dump_none
    def dump_bool(self, x):
        if x:
            self._write(TYPE_TRUE)
        else:
            self._write(TYPE_FALSE)
    dispatch[bool] = dump_bool
    def dump_stopiter(self, x):
        # Only the StopIteration type object itself is marshallable.
        if x is not StopIteration:
            raise ValueError, "unmarshallable object"
        self._write(TYPE_STOPITER)
    dispatch[type(StopIteration)] = dump_stopiter
    def dump_ellipsis(self, x):
        self._write(TYPE_ELLIPSIS)
    try:
        dispatch[types.EllipsisType] = dump_ellipsis
    except NameError:
        pass
    def dump_int(self, x):
        # Values that do not fit in 32 signed bits go out as INT64.
        y = x>>31
        if y and y != -1:
            self._write(TYPE_INT64)
            self.w_long64(x)
        else:
            self._write(TYPE_INT)
            self.w_long(x)
    dispatch[types.IntType] = dump_int
    def dump_long(self, x):
        # Longs: signed digit count, then 15-bit digits, least
        # significant digit first.
        self._write(TYPE_LONG)
        sign = 1
        if x < 0:
            sign = -1
            x = -x
        digits = []
        while x:
            digits.append(x & 0x7FFF)
            x = x>>15
        self.w_long(len(digits) * sign)
        for d in digits:
            self.w_short(d)
    dispatch[types.LongType] = dump_long
    def dump_float(self, x):
        # Floats are stored as their repr, one length byte in front.
        write = self._write
        write(TYPE_FLOAT)
        s = `x`
        write(chr(len(s)))
        write(s)
    dispatch[types.FloatType] = dump_float
    def dump_complex(self, x):
        # Real then imaginary part, each as a length-prefixed repr.
        write = self._write
        write(TYPE_COMPLEX)
        s = `x.real`
        write(chr(len(s)))
        write(s)
        s = `x.imag`
        write(chr(len(s)))
        write(s)
    try:
        dispatch[types.ComplexType] = dump_complex
    except NameError:
        pass
    def dump_string(self, x):
        # XXX we can't check for interned strings, yet,
        # so we (for now) never create TYPE_INTERNED or TYPE_STRINGREF
        self._write(TYPE_STRING)
        self.w_long(len(x))
        self._write(x)
    dispatch[types.StringType] = dump_string
    def dump_unicode(self, x):
        # Length-prefixed UTF-8; the length is the encoded byte count.
        self._write(TYPE_UNICODE)
        #s = x.encode('utf8')
        s, len_s = utf_8_encode(x)
        self.w_long(len_s)
        self._write(s)
    dispatch[types.UnicodeType] = dump_unicode
    def dump_tuple(self, x):
        self._write(TYPE_TUPLE)
        self.w_long(len(x))
        for item in x:
            self.dump(item)
    dispatch[types.TupleType] = dump_tuple
    def dump_list(self, x):
        self._write(TYPE_LIST)
        self.w_long(len(x))
        for item in x:
            self.dump(item)
    dispatch[types.ListType] = dump_list
    def dump_dict(self, x):
        # Key/value pairs, terminated by a NULL type code (no count).
        self._write(TYPE_DICT)
        for key, value in x.items():
            self.dump(key)
            self.dump(value)
        self._write(TYPE_NULL)
    dispatch[types.DictionaryType] = dump_dict
    def dump_code(self, x):
        # Field order must mirror _Unmarshaller.load_code exactly.
        self._write(TYPE_CODE)
        self.w_long(x.co_argcount)
        self.w_long(x.co_nlocals)
        self.w_long(x.co_stacksize)
        self.w_long(x.co_flags)
        self.dump(x.co_code)
        self.dump(x.co_consts)
        self.dump(x.co_names)
        self.dump(x.co_varnames)
        self.dump(x.co_freevars)
        self.dump(x.co_cellvars)
        self.dump(x.co_filename)
        self.dump(x.co_name)
        self.w_long(x.co_firstlineno)
        self.dump(x.co_lnotab)
    try:
        dispatch[types.CodeType] = dump_code
    except NameError:
        pass
    def dump_set(self, x):
        self._write(TYPE_SET)
        self.w_long(len(x))
        for each in x:
            self.dump(each)
    try:
        dispatch[set] = dump_set
    except NameError:
        pass
    def dump_frozenset(self, x):
        self._write(TYPE_FROZENSET)
        self.w_long(len(x))
        for each in x:
            self.dump(each)
    try:
        dispatch[frozenset] = dump_frozenset
    except NameError:
        pass
class _NULL:
    # Sentinel returned for TYPE_NULL; marks the end of a marshalled
    # dict (see load_dict).  Used as a class, never instantiated.
    pass
class _StringBuffer:
    """Minimal file-like wrapper exposing read(n) over a string."""
    def __init__(self, value):
        # Keep the whole string plus a cursor into it.
        self.bufstr = value
        self.bufpos = 0
    def read(self, n):
        """Return the next n characters (fewer at end of buffer)."""
        start = self.bufpos
        end = start + n
        self.bufpos = end
        return self.bufstr[start:end]
class _Unmarshaller:
    # Decodes marshal-format data via a read callback.  `dispatch` maps
    # a one-character type code to the bound method that decodes it.
    dispatch = {}
    def __init__(self, readfunc):
        # readfunc: callable taking a byte count (e.g. file.read).
        self._read = readfunc
        self._stringtable = []  # interned strings, for TYPE_STRINGREF
    def load(self):
        # Decode and return the next value; EOFError at end of input.
        c = self._read(1)
        if not c:
            raise EOFError
        try:
            return self.dispatch[c](self)
        except KeyError:
            raise ValueError, "bad marshal code: %c (%d)" % (c, ord(c))
    def r_short(self):
        # Signed little-endian 16-bit integer.
        lo = ord(self._read(1))
        hi = ord(self._read(1))
        x = lo | (hi<<8)
        if x & 0x8000:
            x = x - 0x10000
        return x
    def r_long(self):
        # Signed little-endian 32-bit integer.
        s = self._read(4)
        a = ord(s[0])
        b = ord(s[1])
        c = ord(s[2])
        d = ord(s[3])
        x = a | (b<<8) | (c<<16) | (d<<24)
        if d & 0x80 and x > 0:
            # sign bit set: reinterpret as negative, keep a plain int
            x = -((1L<<32) - x)
            return int(x)
        else:
            return x
    def r_long64(self):
        # Signed little-endian 64-bit integer.
        a = ord(self._read(1))
        b = ord(self._read(1))
        c = ord(self._read(1))
        d = ord(self._read(1))
        e = long(ord(self._read(1)))
        f = long(ord(self._read(1)))
        g = long(ord(self._read(1)))
        h = long(ord(self._read(1)))
        x = a | (b<<8) | (c<<16) | (d<<24)
        x = x | (e<<32) | (f<<40) | (g<<48) | (h<<56)
        if h & 0x80 and x > 0:
            x = -((1L<<64) - x)
        return x
    def load_null(self):
        return _NULL
    dispatch[TYPE_NULL] = load_null
    def load_none(self):
        return None
    dispatch[TYPE_NONE] = load_none
    def load_true(self):
        return True
    dispatch[TYPE_TRUE] = load_true
    def load_false(self):
        return False
    dispatch[TYPE_FALSE] = load_false
    def load_stopiter(self):
        return StopIteration
    dispatch[TYPE_STOPITER] = load_stopiter
    def load_ellipsis(self):
        return Ellipsis
    dispatch[TYPE_ELLIPSIS] = load_ellipsis
    # ints reuse the raw readers directly as dispatch entries
    dispatch[TYPE_INT] = r_long
    dispatch[TYPE_INT64] = r_long64
    def load_long(self):
        # Signed digit count, then 15-bit digits, least significant
        # first (mirror of _Marshaller.dump_long).
        size = self.r_long()
        sign = 1
        if size < 0:
            sign = -1
            size = -size
        x = 0L
        for i in range(size):
            d = self.r_short()
            x = x | (d<<(i*15L))
        return x * sign
    dispatch[TYPE_LONG] = load_long
    def load_float(self):
        # One length byte, then the repr of the float.
        n = ord(self._read(1))
        s = self._read(n)
        return float(s)
    dispatch[TYPE_FLOAT] = load_float
    def load_complex(self):
        # Real then imaginary part, each length-prefixed.
        n = ord(self._read(1))
        s = self._read(n)
        real = float(s)
        n = ord(self._read(1))
        s = self._read(n)
        imag = float(s)
        return complex(real, imag)
    dispatch[TYPE_COMPLEX] = load_complex
    def load_string(self):
        n = self.r_long()
        return self._read(n)
    dispatch[TYPE_STRING] = load_string
    def load_interned(self):
        # Interned strings are recorded for later TYPE_STRINGREF use.
        n = self.r_long()
        ret = intern(self._read(n))
        self._stringtable.append(ret)
        return ret
    dispatch[TYPE_INTERNED] = load_interned
    def load_stringref(self):
        # Back-reference into the interned-string table.
        n = self.r_long()
        return self._stringtable[n]
    dispatch[TYPE_STRINGREF] = load_stringref
    def load_unicode(self):
        # Length-prefixed UTF-8 data.
        n = self.r_long()
        s = self._read(n)
        #ret = s.decode('utf8')
        ret, len_ret = utf_8_decode(s)
        return ret
    dispatch[TYPE_UNICODE] = load_unicode
    def load_tuple(self):
        return tuple(self.load_list())
    dispatch[TYPE_TUPLE] = load_tuple
    def load_list(self):
        n = self.r_long()
        list = [self.load() for i in range(n)]
        return list
    dispatch[TYPE_LIST] = load_list
    def load_dict(self):
        # Key/value pairs until the _NULL sentinel.
        d = {}
        while 1:
            key = self.load()
            if key is _NULL:
                break
            value = self.load()
            d[key] = value
        return d
    dispatch[TYPE_DICT] = load_dict
    def load_code(self):
        # Field order must mirror _Marshaller.dump_code exactly.
        argcount = self.r_long()
        nlocals = self.r_long()
        stacksize = self.r_long()
        flags = self.r_long()
        code = self.load()
        consts = self.load()
        names = self.load()
        varnames = self.load()
        freevars = self.load()
        cellvars = self.load()
        filename = self.load()
        name = self.load()
        firstlineno = self.r_long()
        lnotab = self.load()
        if not new:
            raise RuntimeError, "can't unmarshal code objects; no 'new' module"
        return new.code(argcount, nlocals, stacksize, flags, code, consts,
                        names, varnames, filename, name, firstlineno, lnotab,
                        freevars, cellvars)
    dispatch[TYPE_CODE] = load_code
    def load_set(self):
        n = self.r_long()
        args = [self.load() for i in range(n)]
        return set(args)
    dispatch[TYPE_SET] = load_set
    def load_frozenset(self):
        n = self.r_long()
        args = [self.load() for i in range(n)]
        return frozenset(args)
    dispatch[TYPE_FROZENSET] = load_frozenset
# ________________________________________________________________
# Unbound helpers used by _FastUnmarshaller below.  Each takes the
# unmarshaller instance explicitly and works directly on its
# bufstr/bufpos attributes (avoids bound-method overhead).
def _read(self, n):
    # Return the next n characters and advance the cursor.
    pos = self.bufpos
    newpos = pos + n
    ret = self.bufstr[pos : newpos]
    self.bufpos = newpos
    return ret
def _read1(self):
    # Return a single character and advance the cursor.
    ret = self.bufstr[self.bufpos]
    self.bufpos += 1
    return ret
def _r_short(self):
    # Signed little-endian 16-bit integer.
    lo = ord(_read1(self))
    hi = ord(_read1(self))
    x = lo | (hi<<8)
    if x & 0x8000:
        x = x - 0x10000
    return x
def _r_long(self):
    # inlined this most common case
    p = self.bufpos
    s = self.bufstr
    a = ord(s[p])
    b = ord(s[p+1])
    c = ord(s[p+2])
    d = ord(s[p+3])
    self.bufpos += 4
    x = a | (b<<8) | (c<<16) | (d<<24)
    if d & 0x80 and x > 0:
        # sign bit set: reinterpret as negative, keep a plain int
        x = -((1L<<32) - x)
        return int(x)
    else:
        return x
def _r_long64(self):
    # Signed little-endian 64-bit integer.
    a = ord(_read1(self))
    b = ord(_read1(self))
    c = ord(_read1(self))
    d = ord(_read1(self))
    e = long(ord(_read1(self)))
    f = long(ord(_read1(self)))
    g = long(ord(_read1(self)))
    h = long(ord(_read1(self)))
    x = a | (b<<8) | (c<<16) | (d<<24)
    x = x | (e<<32) | (f<<40) | (g<<48) | (h<<56)
    if h & 0x80 and x > 0:
        x = -((1L<<64) - x)
    return x
# Placeholder only: rebound to _FastUnmarshaller.dispatch right after
# that class body, and kept at module level for fast lookup in load().
_load_dispatch = {}
class _FastUnmarshaller:
    # Variant of _Unmarshaller specialized for in-memory strings: it
    # indexes bufstr directly through the module-level helpers above
    # instead of going through a read callback.
    dispatch = {}
    def __init__(self, buffer):
        self.bufstr = buffer
        self.bufpos = 0
        self._stringtable = []  # interned strings, for TYPE_STRINGREF
    def load(self):
        # make flow space happy
        c = '?'
        try:
            c = self.bufstr[self.bufpos]
            self.bufpos += 1
            return _load_dispatch[c](self)
        except KeyError:
            raise ValueError, "bad marshal code: %c (%d)" % (c, ord(c))
        except IndexError:
            raise EOFError
    def load_null(self):
        return _NULL
    dispatch[TYPE_NULL] = load_null
    def load_none(self):
        return None
    dispatch[TYPE_NONE] = load_none
    def load_true(self):
        return True
    dispatch[TYPE_TRUE] = load_true
    def load_false(self):
        return False
    dispatch[TYPE_FALSE] = load_false
    def load_stopiter(self):
        return StopIteration
    dispatch[TYPE_STOPITER] = load_stopiter
    def load_ellipsis(self):
        return Ellipsis
    dispatch[TYPE_ELLIPSIS] = load_ellipsis
    def load_int(self):
        return _r_long(self)
    dispatch[TYPE_INT] = load_int
    def load_int64(self):
        return _r_long64(self)
    dispatch[TYPE_INT64] = load_int64
    def load_long(self):
        # Signed digit count, then 15-bit digits, least significant
        # first (mirror of _Marshaller.dump_long).
        size = _r_long(self)
        sign = 1
        if size < 0:
            sign = -1
            size = -size
        x = 0L
        for i in range(size):
            d = _r_short(self)
            x = x | (d<<(i*15L))
        return x * sign
    dispatch[TYPE_LONG] = load_long
    def load_float(self):
        # One length byte, then the repr of the float.
        n = ord(_read1(self))
        s = _read(self, n)
        return float(s)
    dispatch[TYPE_FLOAT] = load_float
    def load_complex(self):
        # Real then imaginary part, each length-prefixed.
        n = ord(_read1(self))
        s = _read(self, n)
        real = float(s)
        n = ord(_read1(self))
        s = _read(self, n)
        imag = float(s)
        return complex(real, imag)
    dispatch[TYPE_COMPLEX] = load_complex
    def load_string(self):
        n = _r_long(self)
        return _read(self, n)
    dispatch[TYPE_STRING] = load_string
    def load_interned(self):
        # Interned strings are recorded for later TYPE_STRINGREF use.
        n = _r_long(self)
        ret = intern(_read(self, n))
        self._stringtable.append(ret)
        return ret
    dispatch[TYPE_INTERNED] = load_interned
    def load_stringref(self):
        # Back-reference into the interned-string table.
        n = _r_long(self)
        return self._stringtable[n]
    dispatch[TYPE_STRINGREF] = load_stringref
    def load_unicode(self):
        # Length-prefixed UTF-8 data.
        n = _r_long(self)
        s = _read(self, n)
        ret = s.decode('utf8')
        return ret
    dispatch[TYPE_UNICODE] = load_unicode
    def load_tuple(self):
        return tuple(self.load_list())
    dispatch[TYPE_TUPLE] = load_tuple
    def load_list(self):
        n = _r_long(self)
        list = []
        for i in range(n):
            list.append(self.load())
        return list
    dispatch[TYPE_LIST] = load_list
    def load_dict(self):
        # Key/value pairs until the _NULL sentinel.
        d = {}
        while 1:
            key = self.load()
            if key is _NULL:
                break
            value = self.load()
            d[key] = value
        return d
    dispatch[TYPE_DICT] = load_dict
    def load_code(self):
        # Field order must mirror _Marshaller.dump_code exactly.
        argcount = _r_long(self)
        nlocals = _r_long(self)
        stacksize = _r_long(self)
        flags = _r_long(self)
        code = self.load()
        consts = self.load()
        names = self.load()
        varnames = self.load()
        freevars = self.load()
        cellvars = self.load()
        filename = self.load()
        name = self.load()
        firstlineno = _r_long(self)
        lnotab = self.load()
        if not new:
            raise RuntimeError, "can't unmarshal code objects; no 'new' module"
        return new.code(argcount, nlocals, stacksize, flags, code, consts,
                        names, varnames, filename, name, firstlineno, lnotab,
                        freevars, cellvars)
    dispatch[TYPE_CODE] = load_code
    def load_set(self):
        n = _r_long(self)
        args = [self.load() for i in range(n)]
        return set(args)
    dispatch[TYPE_SET] = load_set
    def load_frozenset(self):
        n = _r_long(self)
        args = [self.load() for i in range(n)]
        return frozenset(args)
    dispatch[TYPE_FROZENSET] = load_frozenset
# Publish the fast table at module level (see load() above).
_load_dispatch = _FastUnmarshaller.dispatch
# _________________________________________________________________
#
# compatibility
try:
    set
except NameError:
    # Python < 2.4 has no set/frozenset: install stand-ins that refuse
    # to unmarshal set data instead of failing with a NameError.
    def set(x):
        raise ValueError("cannot unmarshal set objects on Python < 2.4")
    frozenset = set
# _________________________________________________________________
#
# user interface
def dump(x, f):
    """Marshal x and write the binary data to the file-like object f."""
    _Marshaller(f.write).dump(x)
def load(f):
    """Read one marshalled value from the file-like object f."""
    return _Unmarshaller(f.read).load()
def dumps(x):
    """Return the marshalled representation of x as a string."""
    chunks = []
    _Marshaller(chunks.append).dump(x)
    return ''.join(chunks)
def loads(s):
    """Unmarshal one value from the string s (fast in-memory path)."""
    return _FastUnmarshaller(s).load()
| Python |
"""Python's standard exception class hierarchy.
Before Python 1.5, the standard exceptions were all simple string objects.
In Python 1.5, the standard exceptions were converted to classes organized
into a relatively flat hierarchy. String-based standard exceptions were
optional, or used as a fallback if some problem occurred while importing
the exception module. With Python 1.6, optional string-based standard
exceptions were removed (along with the -X command line flag).
The class exceptions were implemented in such a way as to be almost
completely backward compatible. Some tricky uses of IOError could
potentially have broken, but by Python 1.6, all of these should have
been fixed. As of Python 1.6, the class-based standard exceptions are
now implemented in C, and are guaranteed to exist in the Python
interpreter.
Here is a rundown of the class hierarchy. The classes found here are
inserted into both the exceptions module and the `built-in' module. It is
recommended that user defined class based exceptions be derived from the
`Exception' class, although this is currently not enforced.
Exception
|
+-- SystemExit
+-- StopIteration
+-- StandardError
| |
| +-- KeyboardInterrupt
| +-- ImportError
| +-- EnvironmentError
| | |
| | +-- IOError
| | +-- OSError
| | |
| | +-- WindowsError
| | +-- VMSError
| |
| +-- EOFError
| +-- RuntimeError
| | |
| | +-- NotImplementedError
| |
| +-- NameError
| | |
| | +-- UnboundLocalError
| |
| +-- AttributeError
| +-- SyntaxError
| | |
| | +-- IndentationError
| | |
| | +-- TabError
| |
| +-- TypeError
| +-- AssertionError
| +-- LookupError
| | |
| | +-- IndexError
| | +-- KeyError
| |
| +-- ArithmeticError
| | |
| | +-- OverflowError
| | +-- ZeroDivisionError
| | +-- FloatingPointError
| |
| +-- ValueError
| | |
| | +-- UnicodeError
| | |
| | +-- UnicodeEncodeError
| | +-- UnicodeDecodeError
| | +-- UnicodeTranslateError
| |
| +-- ReferenceError
| +-- SystemError
| +-- MemoryError
|
+---Warning
|
+-- UserWarning
+-- DeprecationWarning
+-- PendingDeprecationWarning
+-- SyntaxWarning
+-- OverflowWarning
+-- RuntimeWarning
+-- FutureWarning"""
class Exception:
    """Common base class for all exceptions."""
    def __init__(self, *args):
        # All constructor arguments are kept verbatim in .args.
        self.args = args
    def __getitem__(self, idx):
        # Old-style convenience: exc[i] is shorthand for exc.args[i].
        return self.args[idx]
    def __str__(self):
        # No args -> empty string; one arg -> that arg; otherwise the
        # whole args tuple.
        count = len(self.args)
        if count == 0:
            return ''
        if count == 1:
            return str(self.args[0])
        return str(self.args)
# Root of the StandardError branch plus a few simple members of the
# hierarchy (see the module docstring for the full tree).
class StandardError(Exception):
    """Base class for all standard Python exceptions."""
class ValueError(StandardError):
    """Inappropriate argument value (of correct type)."""
class ImportError(StandardError):
    """Import can't find module, or can't find name in module."""
class RuntimeError(StandardError):
    """Unspecified run-time error."""
class UnicodeError(ValueError):
    """Unicode related error."""
class UnicodeTranslateError(UnicodeError):
    """Unicode translation error."""
    def __init__(self, *args):
        # Required signature: (object, start, end, reason), with the
        # types checked one by one below.
        argc = len(args)
        self.args = args # modified: always assign args, no error check
        if argc == 4:
            if type(args[0]) == unicode:
                self.object = args[0]
            else:
                raise TypeError('argument 0 must be unicode, not %s'%type(args[0]))
            if type(args[1]) == int:
                self.start = args[1]
            else:
                raise TypeError('argument 1 must be int, not %s'%type(args[1]))
            if type(args[2]) == int:
                self.end = args[2]
            else:
                raise TypeError('argument 2 must be int, not %s'%type(args[2]))
            if type(args[3]) == str:
                self.reason = args[3]
            else:
                raise TypeError('argument 3 must be str, not %s'%type(args[3]))
        else:
            raise TypeError('function takes exactly 4 arguments (%d given)'%argc)
    def __str__(self):
        # A single-character failure names the character, escaped as
        # \x, \u or \U depending on its magnitude; ranges do not.
        if self.end == self.start + 1:
            badchar = ord(self.object[self.start])
            if badchar <= 0xff:
                return "can't translate character u'\\x%02x' in position %d: %s" % (badchar, self.start, self.reason)
            if badchar <= 0xffff:
                return "can't translate character u'\\u%04x' in position %d: %s"%(badchar, self.start, self.reason)
            return "can't translate character u'\\U%08x' in position %d: %s"%(badchar, self.start, self.reason)
        return "can't translate characters in position %d-%d: %s" % (self.start, self.end - 1, self.reason)
# Parent of IndexError and KeyError.
class LookupError(StandardError):
    """Base class for lookup errors."""
class KeyError(LookupError):
    """Mapping key not found."""
    def __str__(self):
        # A single argument is rendered with repr() rather than str()
        # so that e.g. KeyError('') still shows its quotes.
        count = len(self.args)
        if count == 1:
            return repr(self.args[0])
        if count == 0:
            return ''
        return str(self.args)
# Iteration-protocol sentinel and the root of the Warning branch.
class StopIteration(Exception):
    """Signal the end from iterator.next()."""
class Warning(Exception):
    """Base class for warning categories."""
class PendingDeprecationWarning(Warning):
    """Base class for warnings about features which will be deprecated in the future."""
class EnvironmentError(StandardError):
    """Base class for I/O related errors."""
    def __init__(self, *args):
        # Only the 2- and 3-argument forms populate errno/strerror;
        # with 3 arguments the filename is split off and .args keeps
        # just (errno, strerror), matching CPython.
        argc = len(args)
        self.args = args
        self.errno = None
        self.strerror = None
        self.filename = None
        if 2 <= argc <= 3:
            self.errno = args[0]
            self.strerror = args[1]
            if argc == 3:
                self.filename = args[2]
                self.args = (args[0], args[1])
    def __str__(self):
        # Prefer the richest form available: with filename, then with
        # errno/strerror, then the generic rendering.
        if self.filename is not None:
            return "[Errno %s] %s: %s" % (self.errno,
                                          self.strerror,
                                          self.filename)
        if self.errno and self.strerror:
            return "[Errno %s] %s" % (self.errno, self.strerror)
        return StandardError.__str__(self)
# Simple leaf/branch classes; behavior comes entirely from the bases.
class OSError(EnvironmentError):
    """OS system call failed."""
class DeprecationWarning(Warning):
    """Base class for warnings about deprecated features."""
class ArithmeticError(StandardError):
    """Base class for arithmetic errors."""
class FloatingPointError(ArithmeticError):
    """Floating point operation failed."""
class ReferenceError(StandardError):
    """Weak ref proxy used after referent went away."""
class NameError(StandardError):
    """Name not found globally."""
class OverflowWarning(Warning):
    """Base class for warnings about numeric overflow. Won't exist in Python 2.5."""
class IOError(EnvironmentError):
    """I/O operation failed."""
class SyntaxError(StandardError):
    """Invalid syntax."""
    # Class-level defaults so attribute access is safe even when the
    # constructor received no location information.
    filename = None
    lineno = None
    msg = ''
    offset = None
    print_file_and_line = None
    text = None
    def __init__(self, *args):
        # Signature: (msg[, (filename, lineno, offset, text)]); each
        # slot of the location 4-tuple may be None.
        argc = len(args)
        self.args = args
        if argc >= 1:
            self.msg = args[0]
        if argc == 2:
            if args[1][0] is None or type(args[1][0]) == str:
                self.filename = args[1][0]
            else:
                raise TypeError('argument 1 must be str, not %s'%type(args[1][0]))
            if args[1][1] is None or type(args[1][1]) == int:
                self.lineno = args[1][1]
            else:
                # bug fix: the message used to claim "must be str"
                # although the check above requires an int
                raise TypeError('argument 2 must be int, not %s'%type(args[1][1]))
            if args[1][2] is None or type(args[1][2]) == int:
                self.offset = args[1][2]
            else:
                # bug fix: same copy-paste error ("str" -> "int")
                raise TypeError('argument 3 must be int, not %s'%type(args[1][2]))
            if args[1][3] is None or type(args[1][3]) == str:
                self.text = args[1][3]
            else:
                raise TypeError('argument 4 must be str, not %s'%type(args[1][3]))
    def __str__(self):
        # A non-string msg is returned unchanged (CPython does the same).
        if type(self.msg) is not str:
            return self.msg
        buffer = self.msg
        have_filename = type(self.filename) is str
        have_lineno = type(self.lineno) is int
        if have_filename or have_lineno:
            import os
            fname = os.path.basename(self.filename or "???")
            if have_filename and have_lineno:
                buffer = "%s (%s, line %ld)" % (self.msg, fname, self.lineno)
            elif have_filename:
                buffer = "%s (%s)" % (self.msg, fname)
            elif have_lineno:
                buffer = "%s (line %ld)" % (self.msg, self.lineno)
        return buffer
class FutureWarning(Warning):
    """Base class for warnings about constructs that will change semantically in the future."""
class SystemExit(Exception):
    """Request to exit from the interpreter."""
    def __init__(self, *args):
        # .code is None (no args), the sole argument, or the whole
        # argument tuple.
        argc = len(args)
        if argc == 0:
            self.code = None
        self.args = args
        if argc == 1:
            self.code = args[0]
        if argc >= 2:
            # NOTE(review): `args` is the *args tuple, so this test is
            # always true and the else branch looks unreachable —
            # confirm the original intent.
            if type(args) == tuple:
                self.code = args
            else:
                raise TypeError('argument 0 must be tuple, not %s'%type(args))
# More simple members of the hierarchy; no behavior of their own.
class EOFError(StandardError):
    """Read beyond end of file."""
class IndentationError(SyntaxError):
    """Improper indentation."""
class TabError(IndentationError):
    """Improper mixture of spaces and tabs."""
class ZeroDivisionError(ArithmeticError):
    """Second argument to a division or modulo operation was zero."""
class SystemError(StandardError):
    """Internal error in the Python interpreter.
    Please report this to the Python maintainer, along with the traceback,
    the Python version, and the hardware/OS platform and version."""
class AssertionError(StandardError):
    """Assertion failed."""
class UnicodeDecodeError(UnicodeError):
    """Unicode decoding error."""
    def __init__(self, *args):
        # Required signature: (encoding, object, start, end, reason).
        argc = len(args)
        self.args = args  # modified: always assign args, no error check
        if argc != 5:
            raise TypeError('function takes exactly 5 arguments (%d given)'%argc)
        # Validate and store each field in order, failing fast on the
        # first argument of the wrong type.
        if type(args[0]) != str:
            raise TypeError('argument 0 must be str, not %s'%type(args[0]))
        self.encoding = args[0]
        if type(args[1]) != str:
            raise TypeError('argument 1 must be str, not %s'%type(args[1]))
        self.object = args[1]
        if type(args[2]) != int:
            raise TypeError('argument 2 must be int, not %s'%type(args[2]))
        self.start = args[2]
        if type(args[3]) != int:
            raise TypeError('argument 3 must be int, not %s'%type(args[3]))
        self.end = args[3]
        if type(args[4]) != str:
            raise TypeError('argument 4 must be str, not %s'%type(args[4]))
        self.reason = args[4]
    def __str__(self):
        # Single-byte failures name the offending byte; ranges do not.
        if self.end == self.start + 1:
            return "%r codec can't decode byte 0x%02x in position %d: %s"%(self.encoding,
                ord(self.object[self.start]), self.start, self.reason)
        return "%r codec can't decode bytes in position %d-%d: %s" % (
            self.encoding, self.start, self.end - 1, self.reason)
# Remaining simple members of the hierarchy.
class TypeError(StandardError):
    """Inappropriate argument type."""
class IndexError(LookupError):
    """Sequence index out of range."""
class RuntimeWarning(Warning):
    """Base class for warnings about dubious runtime behavior."""
class KeyboardInterrupt(StandardError):
    """Program interrupted by user."""
class UserWarning(Warning):
    """Base class for warnings generated by user code."""
class SyntaxWarning(Warning):
    """Base class for warnings about dubious syntax."""
class MemoryError(StandardError):
    """Out of memory."""
class UnboundLocalError(NameError):
    """Local name referenced but not bound to a value."""
class NotImplementedError(RuntimeError):
    """Method or function hasn't been implemented yet."""
class AttributeError(StandardError):
    """Attribute not found."""
class OverflowError(ArithmeticError):
    """Result too large to be represented."""
class UnicodeEncodeError(UnicodeError):
    """Unicode encoding error."""
    def __init__(self, *args):
        # Required signature: (encoding, object, start, end, reason);
        # object must be a unicode string here (cf. UnicodeDecodeError).
        argc = len(args)
        self.args = args # modified: always assign args, no error check
        if argc == 5:
            if type(args[0]) == str:
                self.encoding = args[0]
            else:
                raise TypeError('argument 0 must be str, not %s'%type(args[0]))
            if type(args[1]) == unicode:
                self.object = args[1]
            else:
                raise TypeError('argument 1 must be unicode, not %s'%type(args[1]))
            if type(args[2]) == int:
                self.start = args[2]
            else:
                raise TypeError('argument 2 must be int, not %s'%type(args[2]))
            if type(args[3]) == int:
                self.end = args[3]
            else:
                raise TypeError('argument 3 must be int, not %s'%type(args[3]))
            if type(args[4]) == str:
                self.reason = args[4]
            else:
                raise TypeError('argument 4 must be str, not %s'%type(args[4]))
        else:
            raise TypeError('function takes exactly 5 arguments (%d given)'%argc)
    def __str__(self):
        # A single-character failure names the character, escaped as
        # \x, \u or \U depending on its magnitude; ranges do not.
        if self.end == self.start + 1:
            badchar = ord(self.object[self.start])
            if badchar <= 0xff:
                return "%r codec can't encode character u'\\x%02x' in position %d: %s"%(self.encoding,
                    badchar, self.start, self.reason)
            if badchar <= 0xffff:
                return "%r codec can't encode character u'\\u%04x' in position %d: %s"%(self.encoding,
                    badchar, self.start, self.reason)
            return "%r codec can't encode character u'\\U%08x' in position %d: %s"%(self.encoding,
                badchar, self.start, self.reason)
        return "%r codec can't encode characters in position %d-%d: %s" % (
            self.encoding, self.start, self.end - 1, self.reason)
#-- Logic object space specific stuff
#XXX conditionalize me on '-o logic'
# Small private hierarchy used by PyPy's logic object space; all of
# these are plain markers with no behavior of their own.
class LogicError(Exception): pass
class UnificationError(LogicError): pass
class RebindingError(UnificationError): pass
class FutureBindingError(LogicError): pass
class AllBlockedError(LogicError): pass
# constraints
class ConsistencyError(LogicError): pass
| Python |
import sys, operator
# producing nicer code objects by exec
from pypy.tool.sourcetools import NiceCompile
_compile = NiceCompile(globals())
def _coerce(left, right):
    # Wrap the builtin coerce(): return the coerced (left, right) pair,
    # or None when the operands share no common numeric type.
    # (Python 2 only — `coerce` does not exist on Python 3.)
    try:
        return coerce(left, right)
    except TypeError:
        return None
# Raw object accessors, captured once so classobj/instance can bypass
# their own overridden __setattr__/__getattribute__.
obj_setattr = object.__setattr__
obj_getattribute = object.__getattribute__
# sys.maxint as a long, used to fold negative id() values positive.
HMASK = long(sys.maxint)
def uid(o):
    # Return a non-negative stand-in for id(o), suitable for the
    # "at 0x%x" part of a repr (id() can be negative on some platforms).
    v = id(o)
    if v < 0:
        v += HMASK
        v += HMASK
        v += 2
    return v
# we use slots that we remove from type __dict__ for special attributes
#
# for classobj they are __bases__ and __name__ (classobj_bases_slot, classobj_name_slot)
# for instance it's __class__ (instance_class_slot)
# ____________________________________________________________
# classobj def
def type_err(arg, expected, v):
    """Build (not raise) the standard TypeError for a bad argument."""
    message = "argument %s must be %s, not %s" % (arg, expected, type(v).__name__)
    return TypeError(message)
def set_name(cls, name):
    # Validating setter for classobj.__name__ (must be a plain str).
    if not isinstance(name, str):
        raise TypeError, "__name__ must be a string object"
    classobj_name_slot.__set__(cls, name)
def set_bases(cls, bases):
    # Validating setter for classobj.__bases__: a tuple whose items are
    # all old-style classes.
    if not isinstance(bases, tuple):
        raise TypeError, "__bases__ must be a tuple object"
    for b in bases:
        if not isinstance(b, classobj):
            raise TypeError, "__bases__ items must be classes"
    classobj_bases_slot.__set__(cls, bases)
def set_dict(cls, dic):
    # Validating setter for classobj.__dict__ (wholesale replacement).
    if not isinstance(dic, dict):
        raise TypeError, "__dict__ must be a dictionary object"
    obj_setattr(cls, '__dict__', dic)
def retrieve(obj, attr):
    # Fetch attr from obj's own __dict__ only (no class lookup),
    # translating the KeyError into AttributeError.
    dic = obj_getattribute(obj, '__dict__')
    try:
        return dic[attr]
    except KeyError:
        raise AttributeError, attr
def lookup(cls, attr):
    # returns (value, class it was found in)
    # Depth-first, left-to-right search of cls and its bases —
    # classic old-style MRO.  Returns (None, None) when absent.
    try:
        v = retrieve(cls, attr)
        return v, cls
    except AttributeError:
        for b in classobj_bases_slot.__get__(cls):
            v, found = lookup(b, attr)
            if found:
                return v, found
        return None, None
def get_class_module(cls):
    # Best-effort __module__ of a classobj for repr/str purposes;
    # returns "?" when missing or not a plain string.
    try:
        mod = retrieve(cls, "__module__")
    except AttributeError:
        mod = None
    if not isinstance(mod, str):
        return "?"
    return mod
def mro_lookup(v, name):
    """Search type(v).__mro__ for name; return the raw class-dict entry.

    Returns None when the name is absent, and also when v's type has no
    __mro__ at all.
    """
    try:
        classes = type(v).__mro__
    except AttributeError:
        return None
    for klass in classes:
        dic = klass.__dict__
        if name in dic:
            return dic[name]
    return None
def _seqiter(obj):
    """Fallback iterator for objects exposing only __getitem__.

    Yields obj[0], obj[1], ... until IndexError ends the sequence.
    """
    getitem = obj.__getitem__
    index = 0
    while True:
        try:
            yield getitem(index)
        except IndexError:
            return
        index += 1
# let geninterplevel retrieve the PyPy builtin instead
_seqiter.geninterplevel_name = lambda gen: "space.builtin.get('_seqiter')"
# Sentinel placed in this module's globals so classobj.__new__ can
# recognize (and skip over) frames belonging to this implementation.
OLD_STYLE_CLASSES_IMPL = object()
class classobj(object):
    # Reimplementation of Python 2 old-style classes on top of
    # new-style objects.  _name/_bases are slot descriptors that are
    # captured and then deleted after the class body, so they remain
    # reachable only via classobj_name_slot / classobj_bases_slot.
    __slots__ = ('_name', '_bases', '__dict__')
    def __new__(subtype, name, bases, dic):
        # Validate the classic (name, bases, dict) triple.
        if not isinstance(name, str):
            raise type_err('name', 'string', name)
        if bases is None:
            bases = ()
        if not isinstance(bases, tuple):
            raise type_err('bases', 'tuple', bases)
        if not isinstance(dic, dict):
            raise type_err('dict', 'dict', dic)
        # Ensure __doc__ exists (None by default).
        try:
            dic['__doc__']
        except KeyError:
            dic['__doc__'] = None
        # Derive __module__ from the first caller frame that does not
        # belong to this implementation (recognized via the
        # OLD_STYLE_CLASSES_IMPL sentinel in its globals).
        try:
            dic['__module__']
        except KeyError:
            try:
                i = 0
                while 1:
                    g = sys._getframe(i).f_globals
                    if not g.get('OLD_STYLE_CLASSES_IMPL',None) is OLD_STYLE_CLASSES_IMPL:
                        break
                    i += 1
            except ValueError:
                pass
            else:
                modname = g.get('__name__', None)
                if modname is not None:
                    dic['__module__'] = modname
        for b in bases:
            if not isinstance(b, classobj):
                # A new-style base is present: defer class creation to
                # its metaclass instead.
                if callable(type(b)):
                    return type(b)(name, bases, dic)
                raise TypeError,"base must be class"
        new_class = object.__new__(classobj)
        obj_setattr(new_class, '__dict__', dic)
        classobj_name_slot.__set__(new_class, name)
        classobj_bases_slot.__set__(new_class, bases)
        return new_class
    def __setattr__(self, attr, value):
        # The three special attributes go through validating setters.
        if attr == '__name__':
            set_name(self, value)
        elif attr == '__bases__':
            set_bases(self, value)
        elif attr == '__dict__':
            set_dict(self, value)
        else:
            obj_setattr(self, attr, value)
    def __delattr__(self, attr):
        # Deleting a special attribute routes through __setattr__ so
        # the setters raise the appropriate TypeError.
        if attr in ('__name__', '__bases__', '__dict__'):
            classobj.__setattr__(self, attr, None)
        else:
            object.__delattr__(self, attr)
    def __getattribute__(self, attr):
        if attr == '__dict__':
            return obj_getattribute(self, '__dict__')
        if attr == '__name__':
            return classobj_name_slot.__get__(self)
        if attr == '__bases__':
            return classobj_bases_slot.__get__(self)
        v, found = lookup(self, attr)
        if not found:
            raise AttributeError, "class %s has no attribute %s" % (self.__name__, attr)
        # Bind descriptors (e.g. functions) against the class.
        descr_get = mro_lookup(v, '__get__')
        if descr_get is None:
            return v
        return descr_get(v, None, self)
    def __repr__(self):
        mod = get_class_module(self)
        return "<class %s.%s at 0x%x>" % (mod, self.__name__, uid(self))
    def __str__(self):
        # "module.name", or just the bare name when no module is known.
        mod = get_class_module(self)
        if mod == "?":
            return self.__name__
        else:
            return "%s.%s" % (mod, self.__name__)
    def __call__(self, *args, **kwds):
        # Instantiate: allocate an instance, then run __init__ if any;
        # without __init__, extra arguments are rejected.
        inst = object.__new__(instance)
        dic = inst.__dict__
        instance_class_slot.__set__(inst, self)
        init = instance_getattr1(inst,'__init__', False)
        if init:
            ret = init(*args, **kwds)
            if ret is not None:
                raise TypeError("__init__() should return None")
        elif args or kwds:
            raise TypeError("this constructor takes no arguments")
        return inst
# capture _name, _bases slots for usage and then hide them!
# Deleting them from the class keeps user code away from the raw
# slots; the saved descriptors stay usable by this module.
classobj_name_slot = classobj._name
classobj_bases_slot = classobj._bases
del classobj._name, classobj._bases
# ____________________________________________________________
# instance def
def instance_getattr1(inst, name, exc=True):
    # Core attribute lookup for old-style instances: the instance
    # __dict__ first, then the class hierarchy (binding descriptors).
    # With exc=False a missing attribute yields None instead of
    # raising AttributeError.
    if name == "__dict__":
        return obj_getattribute(inst, name)
    elif name == "__class__":
        return instance_class_slot.__get__(inst)
    try:
        return retrieve(inst, name)
    except AttributeError:
        cls = instance_class_slot.__get__(inst)
        v, found = lookup(cls, name)
        if not found:
            if exc:
                raise AttributeError, "%s instance has no attribute %s" % (cls.__name__, name)
            else:
                return None
        # Bind descriptors (e.g. functions) against instance and class.
        descr_get = mro_lookup(v, '__get__')
        if descr_get is None:
            return v
        return descr_get(v, inst, cls)
class instance(object):
    """Pure-Python emulation of the old-style class *instance* type.
    The owning classobj is kept in the hidden '_class' slot (captured as
    instance_class_slot after the class body runs) and all attribute
    access is routed through instance_getattr1() to reproduce classic
    class semantics.  Many special methods are generated below with
    _compile/exec so the method name is baked into each template."""
    __slots__ = ('_class', '__dict__')
    def __getattribute__(self, name):
        try:
            return instance_getattr1(self, name)
        except AttributeError:
            # classic-class fallback: give a user-defined __getattr__ a shot
            getattr = instance_getattr1(self, '__getattr__', exc=False)
            if getattr is not None:
                return getattr(name)
            raise
    def __new__(typ, klass, dic=None):
        # typ is not used at all
        if not isinstance(klass,classobj):
            raise TypeError("instance() first arg must be class")
        if dic is None:
            dic = {}
        elif not isinstance(dic, dict):
            raise TypeError("instance() second arg must be dictionary or None")
        inst = object.__new__(instance)
        instance_class_slot.__set__(inst, klass)
        obj_setattr(inst, '__dict__', dic)
        return inst
    def __del__(self):
        func = instance_getattr1(self, '__del__', exc=False)
        if func is not None:
            func()
    def __setattr__(self, name, value):
        # __dict__ and __class__ are stored directly (with type checks);
        # everything else defers to a user-defined __setattr__ if present.
        if name == '__dict__':
            if not isinstance(value, dict):
                raise TypeError("__dict__ must be set to a dictionary")
            obj_setattr(self, '__dict__', value)
        elif name == '__class__':
            if not isinstance(value, classobj):
                raise TypeError("__class__ must be set to a class")
            instance_class_slot.__set__(self, value)
        else:
            setattr = instance_getattr1(self, '__setattr__', exc=False)
            if setattr is not None:
                setattr(name, value)
            else:
                self.__dict__[name] = value
    def __delattr__(self, name):
        # abuse __setattr__ to get the complaints :-)
        # this is as funny as in CPython
        if name in ('__dict__', '__class__'):
            instance.__setattr__(self, name, None)
        else:
            delattr = instance_getattr1(self, '__delattr__', exc=False)
            if delattr is not None:
                delattr(name)
            else:
                try:
                    del self.__dict__[name]
                except KeyError, ex:
                    raise AttributeError("%s instance has no attribute '%s'" % (
                        self.__class__.__name__,name) )
    def __repr__(self):
        try:
            func = instance_getattr1(self, '__repr__')
        except AttributeError:
            klass = self.__class__
            mod = get_class_module(klass)
            return "<%s.%s instance at 0x%x>" % (mod, klass.__name__, uid(self))
        return func()
    def __str__(self):
        try:
            func = instance_getattr1(self, '__str__')
        except AttributeError:
            return instance.__repr__(self)
        return func()
    def __hash__(self):
        # like CPython: defining __eq__ or __cmp__ without __hash__ makes
        # the instance unhashable
        _eq = instance_getattr1(self, "__eq__", False)
        _cmp = instance_getattr1(self, "__cmp__", False)
        _hash = instance_getattr1(self, "__hash__", False)
        if (_eq or _cmp) and not _hash:
            raise TypeError("unhashable instance")
        if _hash:
            ret = _hash()
            if not isinstance(ret, int):
                raise TypeError("__hash__() should return an int")
            return ret
        else:
            return object.__hash__(self)
    def __len__(self):
        ret = instance_getattr1(self,'__len__')()
        if isinstance(ret, int):
            if ret < 0:
                raise ValueError("__len__() should return >= 0")
            return ret
        else:
            raise TypeError("__len__() should return an int")
    def __getitem__(self, key):
        return instance_getattr1(self, '__getitem__')(key)
    def __setitem__(self, key, value):
        instance_getattr1(self, '__setitem__')(key, value)
    def __delitem__(self, key):
        instance_getattr1(self, '__delitem__')(key)
    def __getslice__(self, i, j):
        func = instance_getattr1(self, '__getslice__', False)
        if func:
            return func(i, j)
        else:
            return self[i:j:]
    def __setslice__(self, i, j, sequence):
        func = instance_getattr1(self, '__setslice__', False)
        if func:
            func(i, j, sequence)
        else:
            self[i:j:] = sequence
    def __delslice__(self, i, j):
        func = instance_getattr1(self, '__delslice__', False)
        if func:
            func(i, j)
        else:
            del self[i:j:]
    def __contains__(self, obj):
        func = instance_getattr1(self, '__contains__', False)
        if func:
            return bool(func(obj))
        # now do it ourselves
        for x in self:
            if x == obj:
                return True
        return False
    # unary operators
    for op in "neg pos abs invert int long float oct hex".split():
        exec _compile("""
def __%(op)s__(self):
    return instance_getattr1(self, '__%(op)s__')()
""", {"op": op})
    del op
    def __index__(self):
        func = instance_getattr1(self, '__index__', False)
        if func:
            return func()
        else:
            raise TypeError('object cannot be interpreted as an index')
    # coerce
    def __coerce__(self, other):
        func = instance_getattr1(self, '__coerce__', False)
        if func:
            return func(other)
        return NotImplemented
    # binary operators: each pair __op__/__rop__ first tries numeric
    # coercion and falls back to the user-defined method
    for op in "or and xor lshift rshift add sub mul div mod divmod floordiv truediv".split():
        opref = op
        if op in ['and', 'or']:
            opref = op + '_'
        if op not in ['divmod']:
            opref = 'operator.' + opref
        exec _compile("""
def __%(op)s__(self, other):
    coerced = _coerce(self, other)
    if coerced is None or coerced[0] is self:
        func = instance_getattr1(self, '__%(op)s__', False)
        if func:
            return func(other)
        return NotImplemented
    else:
        return %(opref)s(coerced[0], coerced[1])
def __r%(op)s__(self, other):
    coerced = _coerce(self, other)
    if coerced is None or coerced[0] is self:
        func = instance_getattr1(self, '__r%(op)s__', False)
        if func:
            return func(other)
        return NotImplemented
    else:
        return %(opref)s(coerced[1], coerced[0])
""", {"op": op, "opref": opref})
    del op, opref
    # inplace operators
    for op in 'mod and pow truediv lshift xor rshift floordiv div sub mul add or'.split():
        exec _compile("""
def __i%(op)s__(self, other):
    func = instance_getattr1(self, '__i%(op)s__', False)
    if func:
        return func(other)
    return NotImplemented
""", {"op": op})
    del op
    def __pow__(self, other, modulo=None):
        if modulo is None:
            coerced = _coerce(self, other)
            if coerced is None or coerced[0] is self:
                func = instance_getattr1(self, '__pow__', False)
                if func:
                    return func(other)
                return NotImplemented
            else:
                return operator.pow(coerced[0], coerced[1], None)
        else:
            # CPython also doesn't try coercion in this case
            func = instance_getattr1(self, '__pow__', False)
            if func:
                return func(other, modulo)
            return NotImplemented
    def __rpow__(self, other, modulo=None):
        if modulo is None:
            coerced = _coerce(self, other)
            if coerced is None or coerced[0] is self:
                func = instance_getattr1(self, '__rpow__', False)
                if func:
                    return func(other)
                return NotImplemented
            else:
                return operator.pow(coerced[1], coerced[0], None)
        else:
            # CPython also doesn't try coercion in this case
            func = instance_getattr1(self, '__rpow__', False)
            if func:
                return func(other, modulo)
            return NotImplemented
    def __nonzero__(self):
        # truth value: __nonzero__, then __len__, then default True
        func = instance_getattr1(self, '__nonzero__', False)
        if not func:
            func = instance_getattr1(self, '__len__', False)
            if not func: # default to true
                return True
        ret = func()
        if isinstance(ret, int):
            if ret < 0:
                raise ValueError("__nonzero__() should return >= 0")
            return ret > 0
        else:
            raise TypeError("__nonzero__() should return an int")
    def __call__(self, *args, **kwds):
        func = instance_getattr1(self, '__call__', False)
        if not func:
            raise AttributeError, "%s instance has no __call__ method" % (self.__class__.__name__)
        return func(*args, **kwds)
    # rich comparison operations
    for op in 'eq ne gt lt ge le'.split():
        exec _compile("""
def __%(op)s__(self, other):
    try:
        return instance_getattr1(self, '__%(op)s__')(other)
    except AttributeError:
        return NotImplemented
""", {"op": op})
    del op
    def __iter__(self):
        func = instance_getattr1(self, '__iter__', False)
        if func:
            ret = func()
            if not mro_lookup(ret, 'next'):
                raise TypeError, ("__iter__ returned non-iterator of type %s"
                                  % type(ret).__name__)
            return ret
        func = instance_getattr1(self, '__getitem__', False)
        if not func:
            raise TypeError, "iteration over non-sequence"
        # moved sequiter away from here:
        # flow space cannot handle nested functions.
        return _seqiter(self)
    def next(self):
        func = instance_getattr1(self, 'next', False)
        if not func:
            raise TypeError, "instance has no next() method"
        return func()
    def __cmp__(self, other): # do all the work here like CPython
        coerced = _coerce(self, other)
        if coerced is None:
            v = self
            w = other
        else:
            v = coerced[0]
            w = coerced[1]
        if not isinstance(v, instance) and not isinstance(w, instance):
            return cmp(v,w)
        if isinstance(v, instance):
            func = instance_getattr1(v, '__cmp__', False)
            if func:
                res = func(w)
                if isinstance(res, int):
                    if res > 0:
                        return 1
                    if res < 0:
                        return -1
                    return 0
                raise TypeError,"__cmp__ must return int"
        if isinstance(w, instance):
            func = instance_getattr1(w, '__cmp__', False)
            if func:
                res = func(v)
                if isinstance(res, int):
                    if res > 0:
                        return 1
                    if res < 0:
                        return -1
                    return 0
                raise TypeError,"__cmp__ must return int"
        return NotImplemented
# Capture the instance._class slot descriptor for internal use, then hide
# it from user code by deleting it from the class.
instance_class_slot = instance._class
del instance._class
def purify():
    """Delete the bootstrap helper attributes and globals once more; used
    in the geninterplevel case, where module-level side effects are lost."""
    # hide the helper slots on both emulated types
    del classobj._name, classobj._bases, classobj.__slots__
    del instance._class, instance.__slots__
    # and drop the template-compilation helpers from the namespace
    del _compile, NiceCompile
| Python |
# NOT_RPYTHON
"""
A pure Python reimplementation of the _sre module from CPython 2.4
Copyright 2005 Nik Haldimann, licensed under the MIT license
This code is based on material licensed under CNRI's Python 1.6 license and
copyrighted by: Copyright (c) 1997-2001 by Secret Labs AB
"""
# Identifying as _sre from Python 2.3 or 2.4: MAGIC must agree with the
# sre_compile.MAGIC of the hosting interpreter version.
import sys
if sys.version_info[:2] >= (2, 4):
    MAGIC = 20031017
else:
    MAGIC = 20030419
import array, operator
from sre_constants import ATCODES, OPCODES, CHCODES, MAXREPEAT
from sre_constants import SRE_INFO_PREFIX, SRE_INFO_LITERAL
from sre_constants import SRE_FLAG_UNICODE, SRE_FLAG_LOCALE
# In _sre.c this is bytesize of the code word type of the C implementation.
# There it's 2 for normal Python builds and more for wide unicode builds (large
# enough to hold a 32-bit UCS-4 encoded character). Since here in pure Python
# we only see re bytecodes as Python longs, we shouldn't have to care about the
# codesize. But sre_compile will compile some stuff differently depending on the
# codesize (e.g., charsets).
if sys.maxunicode == 65535:
    CODESIZE = 2
else:
    CODESIZE = 4
copyright = "_sre.py 2.4b Copyright 2005 by Nik Haldimann"
def getcodesize():
    """Return CODESIZE, the width in bytes of one re bytecode word."""
    return CODESIZE
def compile(pattern, flags, code, groups=0, groupindex={}, indexgroup=[None]):
    """Wrap an already-compiled opcode sequence in a SRE_Pattern object.
    The actual pattern-to-opcode translation happens in sre_compile."""
    compiled = SRE_Pattern(pattern, flags, code, groups, groupindex, indexgroup)
    return compiled
def getlower(char_ord, flags):
    """Return the lowercased ordinal for char_ord, honouring the pattern
    flags; ordinals outside the foldable range are returned unchanged."""
    foldable = (char_ord < 128) or (flags & SRE_FLAG_UNICODE) \
        or (flags & SRE_FLAG_LOCALE and char_ord < 256)
    if not foldable:
        return char_ord
    return ord(unichr(char_ord).lower())
class SRE_Pattern(object):
    """A compiled pattern: holds the opcode list produced by sre_compile
    and drives a fresh _State through it for each top-level operation."""
    def __init__(self, pattern, flags, code, groups=0, groupindex={}, indexgroup=[None]):
        self.pattern = pattern
        self.flags = flags
        self.groups = groups
        self.groupindex = groupindex # Maps group names to group indices
        self._indexgroup = indexgroup # Maps indices to group names
        self._code = code
    def match(self, string, pos=0, endpos=sys.maxint):
        """If zero or more characters at the beginning of string match this
        regular expression, return a corresponding MatchObject instance. Return
        None if the string does not match the pattern."""
        state = _State(string, pos, endpos, self.flags)
        if state.match(self._code):
            return SRE_Match(self, state)
        else:
            return None
    def search(self, string, pos=0, endpos=sys.maxint):
        """Scan through string looking for a location where this regular
        expression produces a match, and return a corresponding MatchObject
        instance. Return None if no position in the string matches the
        pattern."""
        state = _State(string, pos, endpos, self.flags)
        if state.search(self._code):
            return SRE_Match(self, state)
        else:
            return None
    def findall(self, string, pos=0, endpos=sys.maxint):
        """Return a list of all non-overlapping matches of pattern in string."""
        matchlist = []
        state = _State(string, pos, endpos, self.flags)
        while state.start <= state.end:
            state.reset()
            state.string_position = state.start
            if not state.search(self._code):
                break
            match = SRE_Match(self, state)
            if self.groups == 0 or self.groups == 1:
                item = match.group(self.groups)
            else:
                item = match.groups("")
            matchlist.append(item)
            # advance by one after a zero-width match to avoid looping forever
            if state.string_position == state.start:
                state.start += 1
            else:
                state.start = state.string_position
        return matchlist
    def _subx(self, template, string, count=0, subn=False):
        # Shared implementation of sub() and subn(); count == 0 means
        # "replace all occurrences".
        filter = template
        if not callable(template) and "\\" in template:
            # handle non-literal strings ; hand it over to the template compiler
            import sre
            filter = sre._subx(self, template)
        state = _State(string, 0, sys.maxint, self.flags)
        sublist = []
        n = last_pos = 0
        while not count or n < count:
            state.reset()
            state.string_position = state.start
            if not state.search(self._code):
                break
            if last_pos < state.start:
                sublist.append(string[last_pos:state.start])
            if not (last_pos == state.start and
                    last_pos == state.string_position and n > 0):
                # the above ignores empty matches on latest position
                if callable(filter):
                    sublist.append(filter(SRE_Match(self, state)))
                else:
                    sublist.append(filter)
                last_pos = state.string_position
                n += 1
            # advance by one after a zero-width match to avoid looping forever
            if state.string_position == state.start:
                state.start += 1
            else:
                state.start = state.string_position
        if last_pos < state.end:
            sublist.append(string[last_pos:state.end])
        item = "".join(sublist)
        if subn:
            return item, n
        else:
            return item
    def sub(self, repl, string, count=0):
        """Return the string obtained by replacing the leftmost non-overlapping
        occurrences of pattern in string by the replacement repl."""
        return self._subx(repl, string, count, False)
    def subn(self, repl, string, count=0):
        """Return the tuple (new_string, number_of_subs_made) found by replacing
        the leftmost non-overlapping occurrences of pattern with the replacement
        repl."""
        return self._subx(repl, string, count, True)
    def split(self, string, maxsplit=0):
        """Split string by the occurrences of pattern."""
        splitlist = []
        state = _State(string, 0, sys.maxint, self.flags)
        n = 0
        last = state.start
        while not maxsplit or n < maxsplit:
            state.reset()
            state.string_position = state.start
            if not state.search(self._code):
                break
            if state.start == state.string_position: # zero-width match
                if last == state.end: # or end of string
                    break
                state.start += 1
                continue
            splitlist.append(string[last:state.start])
            # add groups (if any)
            if self.groups:
                match = SRE_Match(self, state)
                splitlist.extend(list(match.groups(None)))
            n += 1
            last = state.start = state.string_position
        splitlist.append(string[last:state.end])
        return splitlist
    def finditer(self, string, pos=0, endpos=sys.maxint):
        """Return a list of all non-overlapping matches of pattern in string."""
        scanner = self.scanner(string, pos, endpos)
        return iter(scanner.search, None)
    def scanner(self, string, start=0, end=sys.maxint):
        return SRE_Scanner(self, string, start, end)
    def __copy__(self):
        raise TypeError, "cannot copy this pattern object"
    def __deepcopy__(self):
        raise TypeError, "cannot copy this pattern object"
class SRE_Scanner(object):
    """Undocumented scanner interface of sre: repeatedly applies the
    pattern to the remainder of the subject string."""
    def __init__(self, pattern, string, start, end):
        self.pattern = pattern
        self._state = _State(string, start, end, pattern.flags)
    def _match_search(self, matcher):
        # Run one match/search step from the current start position and
        # advance past the consumed text (or by one on failure/empty match).
        st = self._state
        st.reset()
        st.string_position = st.start
        if matcher(self.pattern._code):
            result = SRE_Match(self.pattern, st)
        else:
            result = None
        if result is not None and st.string_position != st.start:
            st.start = st.string_position
        else:
            st.start += 1
        return result
    def match(self):
        return self._match_search(self._state.match)
    def search(self):
        return self._match_search(self._state.search)
class SRE_Match(object):
    """The result of a successful match: snapshots the final engine state
    (group spans, last matched group) so it stays valid after the state
    object is reused."""
    def __init__(self, pattern, state):
        self.re = pattern
        self.string = state.string
        self.pos = state.pos
        self.endpos = state.end
        self.lastindex = state.lastindex
        if self.lastindex < 0:
            self.lastindex = None
        self.regs = self._create_regs(state)
        if pattern._indexgroup and 0 <= self.lastindex < len(pattern._indexgroup):
            # The above upper-bound check should not be necessary, as the re
            # compiler is supposed to always provide an _indexgroup list long
            # enough. But the re.Scanner class seems to screw up something
            # there, test_scanner in test_re won't work without upper-bound
            # checking. XXX investigate this and report bug to CPython.
            self.lastgroup = pattern._indexgroup[self.lastindex]
        else:
            self.lastgroup = None
    def _create_regs(self, state):
        """Creates a tuple of index pairs representing matched groups."""
        regs = [(state.start, state.string_position)]
        for group in range(self.re.groups):
            mark_index = 2 * group
            if mark_index + 1 < len(state.marks) \
                    and state.marks[mark_index] is not None \
                    and state.marks[mark_index + 1] is not None:
                regs.append((state.marks[mark_index], state.marks[mark_index + 1]))
            else:
                # group did not participate in the match
                regs.append((-1, -1))
        return tuple(regs)
    def _get_index(self, group):
        # Accept either a numeric group index or a group name; raise
        # IndexError (as CPython does) for anything unknown.
        if isinstance(group, int):
            if group >= 0 and group <= self.re.groups:
                return group
        else:
            if group in self.re.groupindex:
                return self.re.groupindex[group]
        raise IndexError("no such group")
    def _get_slice(self, group, default):
        group_indices = self.regs[group]
        if group_indices[0] >= 0:
            return self.string[group_indices[0]:group_indices[1]]
        else:
            return default
    def start(self, group=0):
        """Returns the indices of the start of the substring matched by group;
        group defaults to zero (meaning the whole matched substring). Returns -1
        if group exists but did not contribute to the match."""
        return self.regs[self._get_index(group)][0]
    def end(self, group=0):
        """Returns the indices of the end of the substring matched by group;
        group defaults to zero (meaning the whole matched substring). Returns -1
        if group exists but did not contribute to the match."""
        return self.regs[self._get_index(group)][1]
    def span(self, group=0):
        """Returns the 2-tuple (m.start(group), m.end(group))."""
        return self.start(group), self.end(group)
    def expand(self, template):
        """Return the string obtained by doing backslash substitution and
        resolving group references on template."""
        import sre
        return sre._expand(self.re, self, template)
    def groups(self, default=None):
        """Returns a tuple containing all the subgroups of the match. The
        default argument is used for groups that did not participate in the
        match (defaults to None)."""
        groups = []
        for indices in self.regs[1:]:
            if indices[0] >= 0:
                groups.append(self.string[indices[0]:indices[1]])
            else:
                groups.append(default)
        return tuple(groups)
    def groupdict(self, default=None):
        """Return a dictionary containing all the named subgroups of the match.
        The default argument is used for groups that did not participate in the
        match (defaults to None)."""
        groupdict = {}
        for key, value in self.re.groupindex.items():
            groupdict[key] = self._get_slice(value, default)
        return groupdict
    def group(self, *args):
        """Returns one or more subgroups of the match. Each argument is either a
        group index or a group name."""
        if len(args) == 0:
            args = (0,)
        grouplist = []
        for group in args:
            grouplist.append(self._get_slice(self._get_index(group), None))
        if len(grouplist) == 1:
            return grouplist[0]
        else:
            return tuple(grouplist)
    def __copy__(self):
        # Bug fix: previously defined without `self`, so any call raised a
        # confusing TypeError about argument counts instead of this message.
        raise TypeError("cannot copy this pattern object")
    def __deepcopy__(self, memo=None):
        # Bug fix: previously defined without `self`/`memo`; copy.deepcopy
        # passes the memo dict, so accept (and ignore) it.
        raise TypeError("cannot copy this pattern object")
class _State(object):
    """Mutable matching state for one match attempt: the subject string,
    current positions, group marks and the stack of execution contexts."""
    def __init__(self, string, start, end, flags):
        self.string = string
        # clamp the caller-supplied bounds to the actual string
        if start < 0:
            start = 0
        if end > len(string):
            end = len(string)
        self.start = start
        self.string_position = self.start
        self.end = end
        self.pos = start
        self.flags = flags
        self.reset()
    def reset(self):
        # per-attempt state, cleared before every new match attempt
        self.marks = []
        self.lastindex = -1
        self.marks_stack = []
        self.context_stack = []
        self.repeat = None
    def match(self, pattern_codes):
        # Optimization: Check string length. pattern_codes[3] contains the
        # minimum length for a string to possibly match.
        if pattern_codes[0] == OPCODES["info"] and pattern_codes[3]:
            if self.end - self.string_position < pattern_codes[3]:
                #_log("reject (got %d chars, need %d)"
                #    % (self.end - self.string_position, pattern_codes[3]))
                return False
        # Drive the context stack until it empties; contexts that return
        # None from the dispatcher stay on the stack to be resumed later.
        dispatcher = _OpcodeDispatcher()
        self.context_stack.append(_MatchContext(self, pattern_codes))
        has_matched = None
        while len(self.context_stack) > 0:
            context = self.context_stack[-1]
            has_matched = dispatcher.match(context)
            if has_matched is not None: # don't pop if context isn't done
                self.context_stack.pop()
        return has_matched
    def search(self, pattern_codes):
        flags = 0
        if pattern_codes[0] == OPCODES["info"]:
            # optimization info block
            # <INFO> <1=skip> <2=flags> <3=min> <4=max> <5=prefix info>
            if pattern_codes[2] & SRE_INFO_PREFIX and pattern_codes[5] > 1:
                return self.fast_search(pattern_codes)
            flags = pattern_codes[2]
            pattern_codes = pattern_codes[pattern_codes[1] + 1:]
        string_position = self.start
        if pattern_codes[0] == OPCODES["literal"]:
            # Special case: Pattern starts with a literal character. This is
            # used for short prefixes
            character = pattern_codes[1]
            while True:
                while string_position < self.end \
                        and ord(self.string[string_position]) != character:
                    string_position += 1
                if string_position >= self.end:
                    return False
                self.start = string_position
                string_position += 1
                self.string_position = string_position
                if flags & SRE_INFO_LITERAL:
                    return True
                if self.match(pattern_codes[2:]):
                    return True
            return False
        # General case
        while string_position <= self.end:
            self.reset()
            self.start = self.string_position = string_position
            if self.match(pattern_codes):
                return True
            string_position += 1
        return False
    def fast_search(self, pattern_codes):
        """Skips forward in a string as fast as possible using information from
        an optimization info block."""
        # pattern starts with a known prefix
        # <5=length> <6=skip> <7=prefix data> <overlap data>
        flags = pattern_codes[2]
        prefix_len = pattern_codes[5]
        prefix_skip = pattern_codes[6] # don't really know what this is good for
        prefix = pattern_codes[7:7 + prefix_len]
        overlap = pattern_codes[7 + prefix_len - 1:pattern_codes[1] + 1]
        pattern_codes = pattern_codes[pattern_codes[1] + 1:]
        i = 0
        string_position = self.string_position
        # KMP-style scan: `overlap` tells how far back to fall on mismatch
        while string_position < self.end:
            while True:
                if ord(self.string[string_position]) != prefix[i]:
                    if i == 0:
                        break
                    else:
                        i = overlap[i]
                else:
                    i += 1
                    if i == prefix_len:
                        # found a potential match
                        self.start = string_position + 1 - prefix_len
                        self.string_position = string_position + 1 \
                            - prefix_len + prefix_skip
                        if flags & SRE_INFO_LITERAL:
                            return True # matched all of pure literal pattern
                        if self.match(pattern_codes[2 * prefix_skip:]):
                            return True
                        i = overlap[i]
                    break
            string_position += 1
        return False
    def set_mark(self, mark_nr, position):
        if mark_nr & 1:
            # This id marks the end of a group.
            self.lastindex = mark_nr / 2 + 1
        if mark_nr >= len(self.marks):
            self.marks.extend([None] * (mark_nr - len(self.marks) + 1))
        self.marks[mark_nr] = position
    def get_marks(self, group_index):
        marks_index = 2 * group_index
        if len(self.marks) > marks_index + 1:
            return self.marks[marks_index], self.marks[marks_index + 1]
        else:
            return None, None
    def marks_push(self):
        # save a copy of the marks plus lastindex for backtracking
        self.marks_stack.append((self.marks[:], self.lastindex))
    def marks_pop(self):
        self.marks, self.lastindex = self.marks_stack.pop()
    def marks_pop_keep(self):
        # restore the saved marks but leave them on the stack
        self.marks, self.lastindex = self.marks_stack[-1]
    def marks_pop_discard(self):
        self.marks_stack.pop()
    def lower(self, char_ord):
        return getlower(char_ord, self.flags)
class _MatchContext(object):
def __init__(self, state, pattern_codes):
self.state = state
self.pattern_codes = pattern_codes
self.string_position = state.string_position
self.code_position = 0
self.has_matched = None
def push_new_context(self, pattern_offset):
"""Creates a new child context of this context and pushes it on the
stack. pattern_offset is the offset off the current code position to
start interpreting from."""
child_context = _MatchContext(self.state,
self.pattern_codes[self.code_position + pattern_offset:])
self.state.context_stack.append(child_context)
return child_context
def peek_char(self, peek=0):
return self.state.string[self.string_position + peek]
def skip_char(self, skip_count):
self.string_position += skip_count
def remaining_chars(self):
return self.state.end - self.string_position
def peek_code(self, peek=0):
return self.pattern_codes[self.code_position + peek]
def skip_code(self, skip_count):
self.code_position += skip_count
def remaining_codes(self):
return len(self.pattern_codes) - self.code_position
def at_beginning(self):
return self.string_position == 0
def at_end(self):
return self.string_position == self.state.end
def at_linebreak(self):
return not self.at_end() and _is_linebreak(self.peek_char())
def at_boundary(self, word_checker):
if self.at_beginning() and self.at_end():
return False
that = not self.at_beginning() and word_checker(self.peek_char(-1))
this = not self.at_end() and word_checker(self.peek_char())
return this != that
class _RepeatContext(_MatchContext):
    """Context for a <REPEAT> block; keeps a link to the enclosing repeat
    so MAX_UNTIL/MIN_UNTIL can walk the chain of nested repeats."""
    def __init__(self, context):
        codes = context.pattern_codes[context.code_position:]
        _MatchContext.__init__(self, context.state, codes)
        self.count = -1               # repetitions matched so far
        self.last_position = None     # guards against zero-width loops
        self.previous = context.state.repeat
class _Dispatcher(object):
DISPATCH_TABLE = None
def dispatch(self, code, context):
method = self.DISPATCH_TABLE.get(code, self.__class__.unknown)
return method(self, context)
def unknown(self, code, ctx):
raise NotImplementedError()
def build_dispatch_table(cls, code_dict, method_prefix):
if cls.DISPATCH_TABLE is not None:
return
table = {}
for key, value in code_dict.items():
if hasattr(cls, "%s%s" % (method_prefix, key)):
table[value] = getattr(cls, "%s%s" % (method_prefix, key))
cls.DISPATCH_TABLE = table
build_dispatch_table = classmethod(build_dispatch_table)
class _OpcodeDispatcher(_Dispatcher):
    def __init__(self):
        # executing_contexts maps id(context) -> suspended generator for
        # opcodes that yielded mid-match; the three helpers dispatch the
        # AT, CATEGORY and charset sub-opcodes respectively.
        self.executing_contexts = {}
        self.at_dispatcher = _AtcodeDispatcher()
        self.ch_dispatcher = _ChcodeDispatcher()
        self.set_dispatcher = _CharsetDispatcher()
def match(self, context):
"""Returns True if the current context matches, False if it doesn't and
None if matching is not finished, ie must be resumed after child
contexts have been matched."""
while context.remaining_codes() > 0 and context.has_matched is None:
opcode = context.peek_code()
if not self.dispatch(opcode, context):
return None
if context.has_matched is None:
context.has_matched = False
return context.has_matched
    def dispatch(self, opcode, context):
        """Dispatches a context on a given opcode. Returns True if the context
        is done matching, False if it must be resumed when next encountered."""
        # A context suspended earlier resumes its stored generator instead
        # of re-entering the opcode handler from the top.
        if self.executing_contexts.has_key(id(context)):
            generator = self.executing_contexts[id(context)]
            del self.executing_contexts[id(context)]
            has_finished = generator.next()
        else:
            method = self.DISPATCH_TABLE.get(opcode, _OpcodeDispatcher.unknown)
            has_finished = method(self, context)
            if hasattr(has_finished, "next"): # avoid using the types module
                # handler is a generator: run it until its first yield
                generator = has_finished
                has_finished = generator.next()
        if not has_finished:
            # remember where to resume once the child context is matched
            self.executing_contexts[id(context)] = generator
        return has_finished
    def op_success(self, ctx):
        # end of pattern: record the final position and accept
        #self._log(ctx, "SUCCESS")
        ctx.state.string_position = ctx.string_position
        ctx.has_matched = True
        return True
    def op_failure(self, ctx):
        # immediate, unconditional failure
        #self._log(ctx, "FAILURE")
        ctx.has_matched = False
        return True
def general_op_literal(self, ctx, compare, decorate=lambda x: x):
if ctx.at_end() or not compare(decorate(ord(ctx.peek_char())),
decorate(ctx.peek_code(1))):
ctx.has_matched = False
ctx.skip_code(2)
ctx.skip_char(1)
    def op_literal(self, ctx):
        # match literal string
        # <LITERAL> <code>
        #self._log(ctx, "LITERAL", ctx.peek_code(1))
        self.general_op_literal(ctx, operator.eq)
        return True
    def op_not_literal(self, ctx):
        # match anything that is not the given literal character
        # <NOT_LITERAL> <code>
        #self._log(ctx, "NOT_LITERAL", ctx.peek_code(1))
        self.general_op_literal(ctx, operator.ne)
        return True
    def op_literal_ignore(self, ctx):
        # match literal regardless of case (both sides are lowered first)
        # <LITERAL_IGNORE> <code>
        #self._log(ctx, "LITERAL_IGNORE", ctx.peek_code(1))
        self.general_op_literal(ctx, operator.eq, ctx.state.lower)
        return True
    def op_not_literal_ignore(self, ctx):
        # match anything but the literal, regardless of case
        # <NOT_LITERAL_IGNORE> <code>
        #self._log(ctx, "NOT_LITERAL_IGNORE", ctx.peek_code(1))
        self.general_op_literal(ctx, operator.ne, ctx.state.lower)
        return True
def op_at(self, ctx):
# match at given position
# <AT> <code>
#self._log(ctx, "AT", ctx.peek_code(1))
if not self.at_dispatcher.dispatch(ctx.peek_code(1), ctx):
ctx.has_matched = False
return True
ctx.skip_code(2)
return True
def op_category(self, ctx):
# match at given category
# <CATEGORY> <code>
#self._log(ctx, "CATEGORY", ctx.peek_code(1))
if ctx.at_end() or not self.ch_dispatcher.dispatch(ctx.peek_code(1), ctx):
ctx.has_matched = False
return True
ctx.skip_code(2)
ctx.skip_char(1)
return True
def op_any(self, ctx):
# match anything (except a newline)
# <ANY>
#self._log(ctx, "ANY")
if ctx.at_end() or ctx.at_linebreak():
ctx.has_matched = False
return True
ctx.skip_code(1)
ctx.skip_char(1)
return True
def op_any_all(self, ctx):
# match anything
# <ANY_ALL>
#self._log(ctx, "ANY_ALL")
if ctx.at_end():
ctx.has_matched = False
return True
ctx.skip_code(1)
ctx.skip_char(1)
return True
def general_op_in(self, ctx, decorate=lambda x: x):
#self._log(ctx, "OP_IN")
if ctx.at_end():
ctx.has_matched = False
return
skip = ctx.peek_code(1)
ctx.skip_code(2) # set op pointer to the set code
if not self.check_charset(ctx, decorate(ord(ctx.peek_char()))):
ctx.has_matched = False
return
ctx.skip_code(skip - 1)
ctx.skip_char(1)
    def op_in(self, ctx):
        # match set member (or non_member)
        # <IN> <skip> <set>
        #self._log(ctx, "OP_IN")
        self.general_op_in(ctx)
        return True
    def op_in_ignore(self, ctx):
        # match set member (or non_member), disregarding case of current char
        # <IN_IGNORE> <skip> <set>
        #self._log(ctx, "OP_IN_IGNORE")
        self.general_op_in(ctx, ctx.state.lower)
        return True
    def op_jump(self, ctx):
        # jump forward unconditionally
        # <JUMP> <offset>
        #self._log(ctx, "JUMP", ctx.peek_code(1))
        ctx.skip_code(ctx.peek_code(1) + 1)
        return True
    # skip info
    # <INFO> <skip> -- the info block is skipped exactly like a jump
    op_info = op_jump
    def op_mark(self, ctx):
        # set mark: record the current string position for group <gid>
        # <MARK> <gid>
        #self._log(ctx, "OP_MARK", ctx.peek_code(1))
        ctx.state.set_mark(ctx.peek_code(1), ctx.string_position)
        ctx.skip_code(2)
        return True
    def op_branch(self, ctx):
        # alternation
        # <BRANCH> <0=skip> code <JUMP> ... <NULL>
        # NOTE: this is a generator; `yield False` suspends until the pushed
        # child context has been matched, `yield True` finishes the opcode.
        #self._log(ctx, "BRANCH")
        ctx.state.marks_push()
        ctx.skip_code(1)
        current_branch_length = ctx.peek_code(0)
        while current_branch_length:
            # The following tries to shortcut branches starting with a
            # (unmatched) literal. _sre.c also shortcuts charsets here.
            if not (ctx.peek_code(1) == OPCODES["literal"] and \
                    (ctx.at_end() or ctx.peek_code(2) != ord(ctx.peek_char()))):
                ctx.state.string_position = ctx.string_position
                child_context = ctx.push_new_context(1)
                yield False
                if child_context.has_matched:
                    ctx.has_matched = True
                    yield True
                ctx.state.marks_pop_keep()
            ctx.skip_code(current_branch_length)
            current_branch_length = ctx.peek_code(0)
        ctx.state.marks_pop_discard()
        ctx.has_matched = False
        yield True
    def op_repeat_one(self, ctx):
        # match repeated sequence (maximizing).
        # this operator only works if the repeated item is exactly one character
        # wide, and we're not already collecting backtracking points.
        # <REPEAT_ONE> <skip> <1=min> <2=max> item <SUCCESS> tail
        # NOTE: generator protocol as in op_branch: `yield False` suspends
        # until the child context is matched, `yield True` ends the opcode.
        mincount = ctx.peek_code(2)
        maxcount = ctx.peek_code(3)
        #self._log(ctx, "REPEAT_ONE", mincount, maxcount)
        if ctx.remaining_chars() < mincount:
            ctx.has_matched = False
            yield True
        ctx.state.string_position = ctx.string_position
        # greedily grab as many repetitions as possible, then back off
        count = self.count_repetitions(ctx, maxcount)
        ctx.skip_char(count)
        if count < mincount:
            ctx.has_matched = False
            yield True
        if ctx.peek_code(ctx.peek_code(1) + 1) == OPCODES["success"]:
            # tail is empty. we're finished
            ctx.state.string_position = ctx.string_position
            ctx.has_matched = True
            yield True
        ctx.state.marks_push()
        if ctx.peek_code(ctx.peek_code(1) + 1) == OPCODES["literal"]:
            # Special case: Tail starts with a literal. Skip positions where
            # the rest of the pattern cannot possibly match.
            char = ctx.peek_code(ctx.peek_code(1) + 2)
            while True:
                while count >= mincount and \
                        (ctx.at_end() or ord(ctx.peek_char()) != char):
                    ctx.skip_char(-1)
                    count -= 1
                if count < mincount:
                    break
                ctx.state.string_position = ctx.string_position
                child_context = ctx.push_new_context(ctx.peek_code(1) + 1)
                yield False
                if child_context.has_matched:
                    ctx.has_matched = True
                    yield True
                ctx.skip_char(-1)
                count -= 1
                ctx.state.marks_pop_keep()
        else:
            # General case: backtracking
            while count >= mincount:
                ctx.state.string_position = ctx.string_position
                child_context = ctx.push_new_context(ctx.peek_code(1) + 1)
                yield False
                if child_context.has_matched:
                    ctx.has_matched = True
                    yield True
                ctx.skip_char(-1)
                count -= 1
                ctx.state.marks_pop_keep()
        ctx.state.marks_pop_discard()
        ctx.has_matched = False
        yield True
    def op_min_repeat_one(self, ctx):
        # match repeated sequence (minimizing)
        # <MIN_REPEAT_ONE> <skip> <1=min> <2=max> item <SUCCESS> tail
        # NOTE: generator protocol as in op_branch: `yield False` suspends
        # until the child context is matched, `yield True` ends the opcode.
        mincount = ctx.peek_code(2)
        maxcount = ctx.peek_code(3)
        #self._log(ctx, "MIN_REPEAT_ONE", mincount, maxcount)
        if ctx.remaining_chars() < mincount:
            ctx.has_matched = False
            yield True
        ctx.state.string_position = ctx.string_position
        # take only the minimum number of repetitions up front
        if mincount == 0:
            count = 0
        else:
            count = self.count_repetitions(ctx, mincount)
            if count < mincount:
                ctx.has_matched = False
                yield True
            ctx.skip_char(count)
        if ctx.peek_code(ctx.peek_code(1) + 1) == OPCODES["success"]:
            # tail is empty. we're finished
            ctx.state.string_position = ctx.string_position
            ctx.has_matched = True
            yield True
        ctx.state.marks_push()
        # try the tail first, adding one more repetition after each failure
        while maxcount == MAXREPEAT or count <= maxcount:
            ctx.state.string_position = ctx.string_position
            child_context = ctx.push_new_context(ctx.peek_code(1) + 1)
            yield False
            if child_context.has_matched:
                ctx.has_matched = True
                yield True
            ctx.state.string_position = ctx.string_position
            if self.count_repetitions(ctx, 1) == 0:
                break
            ctx.skip_char(1)
            count += 1
            ctx.state.marks_pop_keep()
        ctx.state.marks_pop_discard()
        ctx.has_matched = False
        yield True
    def op_repeat(self, ctx):
        # create repeat context. all the hard work is done by the UNTIL
        # operator (MAX_UNTIL, MIN_UNTIL)
        # <REPEAT> <skip> <1=min> <2=max> item <UNTIL> tail
        #self._log(ctx, "REPEAT", ctx.peek_code(2), ctx.peek_code(3))
        # _RepeatContext links itself to the previously active repeat, so
        # nested repeats form a chain reachable via ctx.state.repeat.
        repeat = _RepeatContext(ctx)
        ctx.state.repeat = repeat
        ctx.state.string_position = ctx.string_position
        child_context = ctx.push_new_context(ctx.peek_code(1) + 1)
        yield False
        # restore the previously active repeat before reporting the result
        ctx.state.repeat = repeat.previous
        ctx.has_matched = child_context.has_matched
        yield True
    def op_max_until(self, ctx):
        # maximizing repeat
        # <REPEAT> <skip> <1=min> <2=max> item <MAX_UNTIL> tail
        # Greedy strategy: keep matching the repeated item as long as
        # allowed, then fall back to matching the tail.
        repeat = ctx.state.repeat
        if repeat is None:
            raise RuntimeError("Internal re error: MAX_UNTIL without REPEAT.")
        mincount = repeat.peek_code(2)
        maxcount = repeat.peek_code(3)
        ctx.state.string_position = ctx.string_position
        count = repeat.count + 1
        #self._log(ctx, "MAX_UNTIL", count)
        if count < mincount:
            # not enough matches
            repeat.count = count
            child_context = repeat.push_new_context(4)
            yield False
            ctx.has_matched = child_context.has_matched
            if not ctx.has_matched:
                # undo the tentative count bump on failure
                repeat.count = count - 1
                ctx.state.string_position = ctx.string_position
            yield True
        if (count < maxcount or maxcount == MAXREPEAT) \
                and ctx.state.string_position != repeat.last_position:
            # we may have enough matches, if we can match another item, do so
            repeat.count = count
            ctx.state.marks_push()
            save_last_position = repeat.last_position # zero-width match protection
            repeat.last_position = ctx.state.string_position
            child_context = repeat.push_new_context(4)
            yield False
            repeat.last_position = save_last_position
            if child_context.has_matched:
                ctx.state.marks_pop_discard()
                ctx.has_matched = True
                yield True
            ctx.state.marks_pop()
            repeat.count = count - 1
            ctx.state.string_position = ctx.string_position
        # cannot match more repeated items here. make sure the tail matches
        ctx.state.repeat = repeat.previous
        child_context = ctx.push_new_context(1)
        yield False
        ctx.has_matched = child_context.has_matched
        if not ctx.has_matched:
            # tail failed: reinstate this repeat so outer backtracking works
            ctx.state.repeat = repeat
            ctx.state.string_position = ctx.string_position
        yield True
    def op_min_until(self, ctx):
        # minimizing repeat
        # <REPEAT> <skip> <1=min> <2=max> item <MIN_UNTIL> tail
        # Lazy strategy: once the minimum is reached, prefer the tail and
        # only match another repeated item when the tail fails.
        repeat = ctx.state.repeat
        if repeat is None:
            raise RuntimeError("Internal re error: MIN_UNTIL without REPEAT.")
        mincount = repeat.peek_code(2)
        maxcount = repeat.peek_code(3)
        ctx.state.string_position = ctx.string_position
        count = repeat.count + 1
        #self._log(ctx, "MIN_UNTIL", count)
        if count < mincount:
            # not enough matches
            repeat.count = count
            child_context = repeat.push_new_context(4)
            yield False
            ctx.has_matched = child_context.has_matched
            if not ctx.has_matched:
                repeat.count = count - 1
                ctx.state.string_position = ctx.string_position
            yield True
        # see if the tail matches
        ctx.state.marks_push()
        ctx.state.repeat = repeat.previous
        child_context = ctx.push_new_context(1)
        yield False
        if child_context.has_matched:
            ctx.has_matched = True
            yield True
        # tail failed: restore state and try one more repeated item
        ctx.state.repeat = repeat
        ctx.state.string_position = ctx.string_position
        ctx.state.marks_pop()
        # match more until tail matches
        if count >= maxcount and maxcount != MAXREPEAT:
            ctx.has_matched = False
            yield True
        repeat.count = count
        child_context = repeat.push_new_context(4)
        yield False
        ctx.has_matched = child_context.has_matched
        if not ctx.has_matched:
            repeat.count = count - 1
            ctx.state.string_position = ctx.string_position
        yield True
    def general_op_groupref(self, ctx, decorate=lambda x: x):
        # Common implementation for GROUPREF and GROUPREF_IGNORE.
        # `decorate` maps character codes before comparison: identity for
        # case-sensitive references, state.lower for IGNORE.
        group_start, group_end = ctx.state.get_marks(ctx.peek_code(1))
        if group_start is None or group_end is None or group_end < group_start:
            # group did not participate in the match: the reference fails
            ctx.has_matched = False
            return True
        # compare the referenced span character by character
        while group_start < group_end:
            if ctx.at_end() or decorate(ord(ctx.peek_char())) \
                                != decorate(ord(ctx.state.string[group_start])):
                ctx.has_matched = False
                return True
            group_start += 1
            ctx.skip_char(1)
        ctx.skip_code(2)
        return True
    def op_groupref(self, ctx):
        # match backreference
        # <GROUPREF> <zero-based group index>
        #self._log(ctx, "GROUPREF", ctx.peek_code(1))
        # delegates to general_op_groupref with the identity decorator
        return self.general_op_groupref(ctx)
    def op_groupref_ignore(self, ctx):
        # match backreference case-insensitive
        # <GROUPREF_IGNORE> <zero-based group index>
        #self._log(ctx, "GROUPREF_IGNORE", ctx.peek_code(1))
        # delegates with the state's lowercasing function as the decorator
        return self.general_op_groupref(ctx, ctx.state.lower)
    def op_groupref_exists(self, ctx):
        # <GROUPREF_EXISTS> <group> <skip> codeyes <JUMP> codeno ...
        #self._log(ctx, "GROUPREF_EXISTS", ctx.peek_code(1))
        # conditional pattern (?(group)yes|no): jump to the "no" branch
        # when the group did not participate in the match.
        group_start, group_end = ctx.state.get_marks(ctx.peek_code(1))
        if group_start is None or group_end is None or group_end < group_start:
            ctx.skip_code(ctx.peek_code(2) + 1)
        else:
            ctx.skip_code(3)
        return True
    def op_assert(self, ctx):
        # assert subpattern (lookaround)
        # <ASSERT> <skip> <back> <pattern>
        #self._log(ctx, "ASSERT", ctx.peek_code(2))
        # rewind by <back> characters and try the asserted pattern there
        ctx.state.string_position = ctx.string_position - ctx.peek_code(2)
        if ctx.state.string_position < 0:
            # not enough characters before the current position
            ctx.has_matched = False
            yield True
        child_context = ctx.push_new_context(3)
        yield False
        if child_context.has_matched:
            # assertion holds: continue after the asserted pattern
            ctx.skip_code(ctx.peek_code(1) + 1)
        else:
            ctx.has_matched = False
        yield True
    def op_assert_not(self, ctx):
        # assert not subpattern (negative lookaround)
        # <ASSERT_NOT> <skip> <back> <pattern>
        #self._log(ctx, "ASSERT_NOT", ctx.peek_code(2))
        ctx.state.string_position = ctx.string_position - ctx.peek_code(2)
        if ctx.state.string_position >= 0:
            child_context = ctx.push_new_context(3)
            yield False
            if child_context.has_matched:
                # inner pattern matched, so the negative assertion fails
                ctx.has_matched = False
                yield True
        # a rewind before the string start counts as a (vacuous) success
        ctx.skip_code(ctx.peek_code(1) + 1)
        yield True
    def unknown(self, ctx):
        # Fallback handler: reaching this means the compiled pattern
        # contains an opcode this engine does not implement.
        #self._log(ctx, "UNKNOWN", ctx.peek_code())
        raise RuntimeError("Internal re error. Unknown opcode: %s" % ctx.peek_code())
    def check_charset(self, ctx, char):
        """Checks whether a character matches set of arbitrary length. Assumes
        the code pointer is at the first member of the set."""
        self.set_dispatcher.reset(char)
        save_position = ctx.code_position
        result = None
        # scan set members until a handler returns a definite answer;
        # handlers return None to mean "keep scanning"
        while result is None:
            result = self.set_dispatcher.dispatch(ctx.peek_code(), ctx)
        # the caller's code pointer must not be disturbed by the scan
        ctx.code_position = save_position
        return result
    def count_repetitions(self, ctx, maxcount):
        """Returns the number of repetitions of a single item, starting from the
        current string position. The code pointer is expected to point to a
        REPEAT_ONE operation (with the repeated 4 ahead)."""
        count = 0
        real_maxcount = ctx.state.end - ctx.string_position
        if maxcount < real_maxcount and maxcount != MAXREPEAT:
            real_maxcount = maxcount
        # XXX could special case every single character pattern here, as in C.
        # This is a general solution, a bit hackisch, but works and should be
        # efficient.
        code_position = ctx.code_position
        string_position = ctx.string_position
        ctx.skip_code(4)
        reset_position = ctx.code_position
        while count < real_maxcount:
            # this works because the single character pattern is followed by
            # a success opcode
            ctx.code_position = reset_position
            self.dispatch(ctx.peek_code(), ctx)
            if ctx.has_matched is False: # could be None as well
                break
            count += 1
        # restore the context: counting must not consume input or code
        ctx.has_matched = None
        ctx.code_position = code_position
        ctx.string_position = string_position
        return count
    def _log(self, context, opname, *args):
        # Format one trace line for the module-level _log() hook
        # (disabled by default; see _log at the bottom of the file).
        arg_string = ("%s " * len(args)) % args
        _log("|%s|%s|%s %s" % (context.pattern_codes,
            context.string_position, opname, arg_string))
# Populate _OpcodeDispatcher.dispatch with the op_* methods defined above.
_OpcodeDispatcher.build_dispatch_table(OPCODES, "op_")
class _CharsetDispatcher(_Dispatcher):
    """Dispatcher for the opcodes that make up an in-pattern character set.

    Protocol: reset() primes the dispatcher with the character code under
    test; each set_* handler either returns a final truth value (match
    decided) or skips its own operands and returns None so that
    check_charset() keeps scanning the set.  NEGATE flips ``self.ok``.
    """
    def __init__(self):
        self.ch_dispatcher = _ChcodeDispatcher()
    def reset(self, char):
        # char is the integer character code being tested
        self.char = char
        self.ok = True
    def set_failure(self, ctx):
        # end of the set reached without a hit
        return not self.ok
    def set_literal(self, ctx):
        # <LITERAL> <code>
        if ctx.peek_code(1) == self.char:
            return self.ok
        else:
            ctx.skip_code(2)
    def set_category(self, ctx):
        # <CATEGORY> <code>
        if self.ch_dispatcher.dispatch(ctx.peek_code(1), ctx):
            return self.ok
        else:
            ctx.skip_code(2)
    def set_charset(self, ctx):
        # <CHARSET> <bitmap> (16 bits per code word)
        char_code = self.char
        ctx.skip_code(1) # point to beginning of bitmap
        if CODESIZE == 2:
            if char_code < 256 and ctx.peek_code(char_code >> 4) \
                    & (1 << (char_code & 15)):
                return self.ok
            ctx.skip_code(16) # skip bitmap
        else:
            if char_code < 256 and ctx.peek_code(char_code >> 5) \
                    & (1 << (char_code & 31)):
                return self.ok
            ctx.skip_code(8) # skip bitmap
    def set_range(self, ctx):
        # <RANGE> <lower> <upper>
        if ctx.peek_code(1) <= self.char <= ctx.peek_code(2):
            return self.ok
        ctx.skip_code(3)
    def set_negate(self, ctx):
        # <NEGATE>: invert the meaning of all remaining set members
        self.ok = not self.ok
        ctx.skip_code(1)
    def set_bigcharset(self, ctx):
        # <BIGCHARSET> <blockcount> <256 blockindices> <blocks>
        char_code = self.char
        count = ctx.peek_code(1)
        ctx.skip_code(2)
        if char_code < 65536:
            block_index = char_code >> 8
            # NB: there are CODESIZE block indices per bytecode
            a = array.array("B")
            a.fromstring(array.array(CODESIZE == 2 and "H" or "I",
                    [ctx.peek_code(block_index / CODESIZE)]).tostring())
            block = a[block_index % CODESIZE]
            ctx.skip_code(256 / CODESIZE) # skip block indices
            block_value = ctx.peek_code(block * (32 / CODESIZE)
                    + ((char_code & 255) >> (CODESIZE == 2 and 4 or 5)))
            if block_value & (1 << (char_code & ((8 * CODESIZE) - 1))):
                return self.ok
        else:
            ctx.skip_code(256 / CODESIZE) # skip block indices
        ctx.skip_code(count * (32 / CODESIZE)) # skip blocks
    def unknown(self, ctx):
        # unrecognized opcode inside a set: treat as non-match
        return False

# Populate _CharsetDispatcher.dispatch with the set_* methods above.
_CharsetDispatcher.build_dispatch_table(OPCODES, "set_")
class _AtcodeDispatcher(_Dispatcher):
    """Dispatcher for the AT_* position assertions (^, $, \\A, \\Z, \\b, \\B
    and their locale/unicode variants).  Each handler returns whether the
    current position in ctx satisfies the assertion."""
    def at_beginning(self, ctx):
        return ctx.at_beginning()
    at_beginning_string = at_beginning
    def at_beginning_line(self, ctx):
        # ^ in multiline mode: start of string or right after a newline
        return ctx.at_beginning() or _is_linebreak(ctx.peek_char(-1))
    def at_end(self, ctx):
        # $: end of string, or just before a single trailing newline
        return (ctx.remaining_chars() == 1 and ctx.at_linebreak()) or ctx.at_end()
    def at_end_line(self, ctx):
        return ctx.at_linebreak() or ctx.at_end()
    def at_end_string(self, ctx):
        return ctx.at_end()
    def at_boundary(self, ctx):
        return ctx.at_boundary(_is_word)
    def at_non_boundary(self, ctx):
        return not ctx.at_boundary(_is_word)
    def at_loc_boundary(self, ctx):
        return ctx.at_boundary(_is_loc_word)
    def at_loc_non_boundary(self, ctx):
        return not ctx.at_boundary(_is_loc_word)
    def at_uni_boundary(self, ctx):
        return ctx.at_boundary(_is_uni_word)
    def at_uni_non_boundary(self, ctx):
        return not ctx.at_boundary(_is_uni_word)
    def unknown(self, ctx):
        return False

# Populate _AtcodeDispatcher.dispatch from the ATCODES table.
_AtcodeDispatcher.build_dispatch_table(ATCODES, "")
class _ChcodeDispatcher(_Dispatcher):
    """Dispatcher for CATEGORY_* opcodes (\\d, \\s, \\w and friends).
    Each handler tests the character at the current position of ctx."""
    def category_digit(self, ctx):
        return _is_digit(ctx.peek_char())
    def category_not_digit(self, ctx):
        return not _is_digit(ctx.peek_char())
    def category_space(self, ctx):
        return _is_space(ctx.peek_char())
    def category_not_space(self, ctx):
        return not _is_space(ctx.peek_char())
    def category_word(self, ctx):
        return _is_word(ctx.peek_char())
    def category_not_word(self, ctx):
        return not _is_word(ctx.peek_char())
    def category_linebreak(self, ctx):
        return _is_linebreak(ctx.peek_char())
    def category_not_linebreak(self, ctx):
        return not _is_linebreak(ctx.peek_char())
    def category_loc_word(self, ctx):
        return _is_loc_word(ctx.peek_char())
    def category_loc_not_word(self, ctx):
        return not _is_loc_word(ctx.peek_char())
    def category_uni_digit(self, ctx):
        # unicode-aware digit test delegates to the str/unicode object
        return ctx.peek_char().isdigit()
    def category_uni_not_digit(self, ctx):
        return not ctx.peek_char().isdigit()
    def category_uni_space(self, ctx):
        return ctx.peek_char().isspace()
    def category_uni_not_space(self, ctx):
        return not ctx.peek_char().isspace()
    def category_uni_word(self, ctx):
        return _is_uni_word(ctx.peek_char())
    def category_uni_not_word(self, ctx):
        return not _is_uni_word(ctx.peek_char())
    def category_uni_linebreak(self, ctx):
        return ord(ctx.peek_char()) in _uni_linebreaks
    def category_uni_not_linebreak(self, ctx):
        return ord(ctx.peek_char()) not in _uni_linebreaks
    def unknown(self, ctx):
        return False

# Populate _ChcodeDispatcher.dispatch from the CHCODES table.
_ChcodeDispatcher.build_dispatch_table(CHCODES, "")
_ascii_char_info = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 6, 2,
2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 0, 0, 0, 0, 0, 0, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 0, 0,
0, 0, 16, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 0, 0, 0, 0, 0 ]
def _is_digit(char):
code = ord(char)
return code < 128 and _ascii_char_info[code] & 1
def _is_space(char):
code = ord(char)
return code < 128 and _ascii_char_info[code] & 2
def _is_word(char):
# NB: non-ASCII chars aren't words according to _sre.c
code = ord(char)
return code < 128 and _ascii_char_info[code] & 16
def _is_loc_word(char):
return (not (ord(char) & ~255) and char.isalnum()) or char == '_'
def _is_uni_word(char):
return unichr(ord(char)).isalnum() or char == '_'
def _is_linebreak(char):
return char == "\n"
# Static list of all unicode codepoints reported by Py_UNICODE_ISLINEBREAK:
# LF, CR, FS, GS, RS, NEL, LINE SEPARATOR, PARAGRAPH SEPARATOR.
_uni_linebreaks = [10, 13, 28, 29, 30, 133, 8232, 8233]
def _log(message):
    # Debug trace hook for the engine; disabled by default.
    # Flip the 0 to 1 to get a per-opcode execution trace on stdout.
    if 0:
        print message
| Python |
# wrapper code generator for gecode library
# Load the outer .hh/.cc templates; their %(...)s slots are filled with the
# generated fragments below and written out by the __main__ section.
GECODE_WRAP_HH = file("gecode_wrap_tmpl.hh").read()
GECODE_WRAP_CC = file("gecode_wrap_tmpl.cc").read()
VAR_CLASS_DEF = """
class Py%(var_type)s : public PyVar {
public:
/* constructor */
Py%(var_type)s( PySpace* space, %(var_factory_args)s );
virtual void update( PySpace* space, bool share, Py%(var_type)s& _var );
virtual %(value_type)s val() { return var.val(); }
%(var_type)s var;
};
"""
VAR_CLASS_BODY = """
Py%(var_type)s::Py%(var_type)s( PySpace* space, %(var_factory_args)s ):var(space, %(var_args)s )
{
}
void
Py%(var_type)s::update( PySpace* space, bool share, Py%(var_type)s& _var )
{
var.update( space, share, _var );
}
"""
VARACCESS = """
%(var_type)s* get%(var_type)s( int i ) { return &(dynamic_cast<Py%(var_type)s*>(&vars[i])->var); }
"""
# Table of the Gecode variable kinds to generate wrappers for.  Each
# entry is augmented below with the derived template substitution fields.
VARTYPES = [ { 'var_type' : 'IntVar',
               'value_type' : 'int',
               'args' : [ ('int', 'min'), ('int', 'max') ],
               'propagators' : [],
               },
             { 'var_type' : 'BoolVar',
               'value_type' : 'int',
               'args' : [('int', 'min'), ('int', 'max') ],
               'propagators' : [],
               },
##              { 'var_type' : 'SetVar',
##                },
             ]

# Derive the template fields from the declarative 'args' list:
#   var_factory_args : C++ parameter list ("int _min, int _max")
#   var_args         : matching argument list ("_min, _max")
#   var_storage(_temp): names of the vectors holding the wrapped vars
for vardef in VARTYPES:
    arg_pairs = vardef['args']
    vardef['var_factory_args'] = ", ".join(["%s _%s" % (ctype, name)
                                            for ctype, name in arg_pairs])
    vardef['var_args'] = ", ".join(["_" + name for ctype, name in arg_pairs])
    vardef['var_storage'] = "_%s_vect" % vardef['var_type']
    vardef['var_storage_temp'] = "_%s_tmp_vect" % vardef['var_type']
VAR_FACTORY_DEF = """
int %(var_type)s( %(var_factory_args)s );
int %(var_type)s_temp( %(var_factory_args)s );
"""
# C++ bodies of the PySpace factory methods.  Each factory appends a new
# variable to its vector and returns the (1-based) index of that variable
# inside the vector it pushed onto.
VAR_FACTORY_BODY = """
int PySpace::%(var_type)s( %(var_factory_args)s ) {
	%(var_storage)s.push_back( %(var_type)s( %(var_args)s ) );
	return %(var_storage)s.size();
}
int PySpace::%(var_type)s_temp( %(var_factory_args)s ) {
	%(var_storage_temp)s.push_back( %(var_type)s( %(var_args)s ) );
	/* fixed: used to return the *main* vector's size, which is the
	   wrong index space for a variable pushed onto the temp vector */
	return %(var_storage_temp)s.size();
}
"""
VAR_ACCESSOR = """
void get%(var_type)sValues( int idx, int n, int* vars, %(var_type)s* values ) {
for(int i=0;i<n;++i) {
%(var_type)s* v = get%(var_type)s( vars[i] );
if (v) {
values[i] = v->val();
}
}
}
"""
PROPCOND = []
def create_var_subclasses( d ):
    """Render the PyVar subclass declarations and bodies for every entry
    of VARTYPES into d['var_subclasses_decl'] / d['var_subclasses_body']."""
    decls = [VAR_CLASS_DEF % vardef for vardef in VARTYPES]
    bodies = [VAR_CLASS_BODY % vardef for vardef in VARTYPES]
    d['var_subclasses_decl'] = "\n".join(decls)
    d['var_subclasses_body'] = "\n".join(bodies)
def create_var_factories( d ):
    """Render the PySpace factory declarations and bodies for every entry
    of VARTYPES into d['var_factories_decl'] / d['var_factories_body']."""
    decls = [VAR_FACTORY_DEF % vardef for vardef in VARTYPES]
    bodies = [VAR_FACTORY_BODY % vardef for vardef in VARTYPES]
    d['var_factories_decl'] = "\n".join(decls)
    d['var_factories_body'] = "\n".join(bodies)
def create_var_propagators( d ):
    """Placeholder: no propagator wrappers are generated yet, so both
    output slots receive empty strings."""
    decl_parts = []
    body_parts = []
    d['var_propagators_decl'] = "\n".join(decl_parts)
    d['var_propagators_body'] = "\n".join(body_parts)
if __name__ == "__main__":
    # Expand all template fragments and write the generated wrapper
    # header/source next to the script.
    wrapper_hh = file("_gecode_wrap.hh", "w")
    wrapper_cc = file("_gecode_wrap.cc", "w")
    d = {}
    create_var_subclasses( d )
    create_var_factories( d )
    create_var_propagators( d )
    wrapper_hh.write( GECODE_WRAP_HH % d )
    wrapper_cc.write( GECODE_WRAP_CC % d )
| Python |
from ctypes import *
# ctypes binding against the generated gecode wrapper shared library.
gecode = cdll.LoadLibrary("./libgecode_wrap.so")
# Integer relation type "not equal" (mirrors Gecode's IRT_NQ).
IRT_NQ = 1
# Gecode ExecStatus codes; ES_NOFIX and ES_OK deliberately share value 0,
# as in Gecode itself.
ES_FAILED = -1 # < Execution has resulted in failure
ES_NOFIX = 0 # < Propagation has not computed fixpoint
ES_OK = 0 # < Execution is okay
ES_FIX = 1 # < Propagation has computed fixpoint
ES_SUBSUMED = 2 # < %Propagator is subsumed (entailed)
import sys
# Signature of the propagator callback invoked from C++: int fn(void* prop).
PROPCB = CFUNCTYPE(c_int, c_void_p)
def null_propagator( prop ):
    # Demo propagator callback: reports whether its two int views are
    # assigned but never prunes anything; always signals success.
    x = gecode.int_view_assigned( prop, 0 )
    y = gecode.int_view_assigned( prop, 1 )
    print "Assigned", x, y
    return ES_OK
# Keep a reference to the ctypes callback so it is not garbage-collected
# while the C++ side still holds a pointer to it.
nullpropcb = PROPCB(null_propagator)

# --- N-queens model: board size comes from the command line ---
N = int(sys.argv[1])
spc = gecode.new_space()
Narray = c_int*N
Pair = c_int*2
# One variable per column, holding the queen's row in [0, N-1].
queens = [ gecode.new_int_var( spc, 0, 0, N-1 ) for i in range(N) ]
qvars = Narray(*queens)
# All queens on distinct rows.
gecode.space_alldiff( spc, N, qvars )
# No two queens on the same diagonal: q[i] - q[j] != i-j and != j-i.
coefs = Pair( 1, -1 )
for i in range(N):
    for j in range(i+1,N):
        qpair = Pair( i, j )
        gecode.space_linear( spc, 2, coefs, qpair, IRT_NQ, i-j )
        gecode.space_linear( spc, 2, coefs, qpair, IRT_NQ, j-i )

# Attach the demo propagator watching the first and last queen.
myprop = gecode.new_propagator( spc, nullpropcb )
gecode.propagator_create_int_view( myprop, 0 )
gecode.propagator_create_int_view( myprop, N-1 )

gecode.space_branch( spc )
# Depth-first search engine (the 5, 2 arguments are passed through to
# the wrapper; presumably c_d/a_d recomputation distances — confirm).
engine = gecode.new_dfs( spc, 5, 2 )
result = Narray( *([0]*N ) )
nsol = 0
while 1:
    sol = gecode.search_next( engine )
    if not sol:
        break
    # NOTE(review): nsol%1 == 0 is always true, so every solution is
    # printed; looks like a leftover print-throttling knob.
    if nsol%1 == 0:
        print "Sol", nsol
        gecode.space_values( sol, N, qvars, result )
        for i in result:
            print i,
        print
    gecode.space_release( sol )
    nsol+=1
| Python |
#
| Python |
#
# StringIO-based cStringIO implementation.
#
from StringIO import *
from StringIO import __doc__
class StringIO(StringIO):
    """StringIO subclass adding the reset() method that cStringIO objects
    provide; everything else is inherited from the pure-Python StringIO."""
    def reset(self):
        """
        reset() -- Reset the file position to the beginning
        """
        self.seek(0, 0)
| Python |
"""Functional tools for creating and using iterators.
Infinite iterators:
count([n]) --> n, n+1, n+2, ...
cycle(p) --> p0, p1, ... plast, p0, p1, ...
repeat(elem [,n]) --> elem, elem, elem, ... endlessly or up to n times
Iterators terminating on the shortest input sequence:
izip(p, q, ...) --> (p[0], q[0]), (p[1], q[1]), ...
ifilter(pred, seq) --> elements of seq where pred(elem) is True
ifilterfalse(pred, seq) --> elements of seq where pred(elem) is False
islice(seq, [start,] stop [, step]) --> elements from
seq[start:stop:step]
imap(fun, p, q, ...) --> fun(p0, q0), fun(p1, q1), ...
starmap(fun, seq) --> fun(*seq[0]), fun(*seq[1]), ...
tee(it, n=2) --> (it1, it2, ... itn) splits one iterator into n
chain(p, q, ...) --> p0, p1, ... plast, q0, q1, ...
takewhile(pred, seq) --> seq[0], seq[1], until pred fails
dropwhile(pred, seq) --> seq[n], seq[n+1], starting when pred fails
groupby(iterable[, keyfunc]) --> sub-iterators grouped by value of keyfunc(v)
"""
# Public API: mirrors the names exported by CPython 2's itertools module.
__all__ = ['chain', 'count', 'cycle', 'dropwhile', 'groupby', 'ifilter',
           'ifilterfalse', 'imap', 'islice', 'izip', 'repeat', 'starmap',
           'takewhile', 'tee']
class chain:
    """Make an iterator that returns elements from the first iterable
    until it is exhausted, then proceeds to the next iterable, until
    all of the iterables are exhausted. Used for treating consecutive
    sequences as a single sequence.

    Equivalent to :

    def chain(*iterables):
        for it in iterables:
            for element in it:
                yield element
    """
    def __init__(self, *iterables):
        self._iterables_iter = iter(map(iter, iterables))
        # little trick for the first chain.next() call
        self._cur_iterable_iter = iter([])
    def __iter__(self):
        return self
    def next(self):
        try:
            next_elt = self._cur_iterable_iter.next()
        except StopIteration:
            # The current list's iterator is exhausted, switch to next one
            self._cur_iterable_iter = iter(self._iterables_iter.next())
            # inner guard: the *new* iterator may lack next()
            try:
                next_elt = self._cur_iterable_iter.next()
            except AttributeError:
                # CPython raises a TypeError when next() is not defined
                raise TypeError('%s has no next() method' % \
                                (self._cur_iterable_iter))
        except AttributeError:
            # outer guard: same check for the iterator we started with
            # CPython raises a TypeError when next() is not defined
            raise TypeError('%s has no next() method' % \
                            (self._cur_iterable_iter))
        return next_elt
class count:
    """Iterator yielding consecutive integers starting at n (default 0).

    Plain machine-size ints only (no longs).  Commonly combined with
    imap() to number data points or with izip() to add sequence numbers.
    """
    def __init__(self, n=0):
        if not isinstance(n, int):
            raise TypeError('%s is not a regular integer' % n)
        # Keep the counter one below the next value so next() can
        # pre-increment.
        self.times = n - 1
    def __iter__(self):
        return self
    def next(self):
        nxt = self.times + 1
        self.times = nxt
        return nxt
    def __repr__(self):
        # shows the value the *next* call to next() will return
        return 'count(%d)' % (self.times + 1)
class cycle:
    """Make an iterator returning elements from the iterable and
    saving a copy of each. When the iterable is exhausted, return
    elements from the saved copy. Repeats indefinitely.

    Equivalent to :

    def cycle(iterable):
        saved = []
        for element in iterable:
            yield element
            saved.append(element)
        while saved:
            for element in saved:
                yield element
    """
    def __init__(self, iterable):
        self._cur_iter = iter(iterable)
        self._saved = []
        # record elements only during the first pass over the source
        self._must_save = True
    def __iter__(self):
        return self
    def next(self):
        # XXX Could probably be improved
        try:
            next_elt = self._cur_iter.next()
            if self._must_save:
                self._saved.append(next_elt)
        except StopIteration:
            # source exhausted: restart from the saved copy forever
            self._cur_iter = iter(self._saved)
            next_elt = self._cur_iter.next()
            self._must_save = False
        except AttributeError:
            # CPython raises a TypeError when next() is not defined
            raise TypeError('%s has no next() method' % \
                            (self._cur_iter))
        return next_elt
class dropwhile:
    """Make an iterator that drops elements from the iterable as long
    as the predicate is true; afterwards, returns every
    element. Note, the iterator does not produce any output until the
    predicate is true, so it may have a lengthy start-up time.

    Equivalent to :

    def dropwhile(predicate, iterable):
        iterable = iter(iterable)
        for x in iterable:
            if not predicate(x):
                yield x
                break
        for x in iterable:
            yield x
    """
    def __init__(self, predicate, iterable):
        self._predicate = predicate
        self._iter = iter(iterable)
        # becomes True once the first non-matching element is seen
        self._dropped = False
    def __iter__(self):
        return self
    def next(self):
        try:
            value = self._iter.next()
        except AttributeError:
            # CPython raises a TypeError when next() is not defined
            raise TypeError('%s has no next() method' % \
                            (self._iter))
        if self._dropped:
            return value
        # still in the dropping phase: skip while the predicate holds
        # (StopIteration from the inner next() simply propagates)
        while self._predicate(value):
            value = self._iter.next()
        self._dropped = True
        return value
class groupby:
    """Make an iterator that returns consecutive keys and groups from the
    iterable. The key is a function computing a key value for each
    element. If not specified or is None, key defaults to an identity
    function and returns the element unchanged. Generally, the
    iterable needs to already be sorted on the same key function.

    The returned group is itself an iterator that shares the
    underlying iterable with groupby(). Because the source is shared,
    when the groupby object is advanced, the previous group is no
    longer visible. So, if that data is needed later, it should be
    stored as a list:

       groups = []
       uniquekeys = []
       for k, g in groupby(data, keyfunc):
           groups.append(list(g)) # Store group iterator as a list
           uniquekeys.append(k)
    """
    def __init__(self, iterable, key=None):
        if key is None:
            key = lambda x: x
        self.keyfunc = key
        self.it = iter(iterable)
        # xrange(0) is a unique sentinel that never equals a real key
        self.tgtkey = self.currkey = self.currvalue = xrange(0)
    def __iter__(self):
        return self
    def next(self):
        # skip any elements left over from the previous (unconsumed) group
        while self.currkey == self.tgtkey:
            try:
                self.currvalue = self.it.next() # Exit on StopIteration
            except AttributeError:
                # CPython raises a TypeError when next() is not defined
                raise TypeError('%s has no next() method' % \
                                (self.it))
            self.currkey = self.keyfunc(self.currvalue)
        self.tgtkey = self.currkey
        return (self.currkey, self._grouper(self.tgtkey))
    def _grouper(self, tgtkey):
        # generator yielding elements while their key stays equal to tgtkey
        while self.currkey == tgtkey:
            yield self.currvalue
            self.currvalue = self.it.next() # Exit on StopIteration
            self.currkey = self.keyfunc(self.currvalue)
class _ifilter_base:
    """base class for ifilter and ifilterfalse"""
    def __init__(self, predicate, iterable):
        # Make sure iterable *IS* iterable
        self._iter = iter(iterable)
        # a None predicate means "use the element's own truth value"
        if predicate is None:
            self._predicate = bool
        else:
            self._predicate = predicate
    def __iter__(self):
        return self
class ifilter(_ifilter_base):
    """Make an iterator that filters elements from iterable returning
    only those for which the predicate is True.  If predicate is
    None, return the items that are true.

    Equivalent to :

    def ifilter:
        if predicate is None:
            predicate = bool
        for x in iterable:
            if predicate(x):
                yield x
    """
    def next(self):
        try:
            next_elt = self._iter.next()
        except AttributeError:
            # CPython raises a TypeError when next() is not defined
            raise TypeError('%s has no next() method' % \
                            (self._iter))
        # advance until an element passes the predicate; the inner
        # next() call lets StopIteration propagate to end iteration
        while True:
            if self._predicate(next_elt):
                return next_elt
            next_elt = self._iter.next()
class ifilterfalse(_ifilter_base):
    """Make an iterator that filters elements from iterable returning
    only those for which the predicate is False.  If predicate is
    None, return the items that are false.

    Equivalent to :

    def ifilterfalse(predicate, iterable):
        if predicate is None:
            predicate = bool
        for x in iterable:
            if not predicate(x):
                yield x
    """
    def next(self):
        try:
            next_elt = self._iter.next()
        except AttributeError:
            # CPython raises a TypeError when next() is not defined
            raise TypeError('%s has no next() method' % \
                            (self._iter))
        # mirror image of ifilter: keep elements failing the predicate
        while True:
            if not self._predicate(next_elt):
                return next_elt
            next_elt = self._iter.next()
class imap:
    """Make an iterator that computes the function using arguments
    from each of the iterables. If function is set to None, then
    imap() returns the arguments as a tuple. Like map() but stops
    when the shortest iterable is exhausted instead of filling in
    None for shorter iterables. The reason for the difference is that
    infinite iterator arguments are typically an error for map()
    (because the output is fully evaluated) but represent a common
    and useful way of supplying arguments to imap().

    Equivalent to :

    def imap(function, *iterables):
        iterables = map(iter, iterables)
        while True:
            args = [i.next() for i in iterables]
            if function is None:
                yield tuple(args)
            else:
                yield function(*args)
    """
    def __init__(self, function, iterable, *other_iterables):
        self._func = function
        self._iters = map(iter, (iterable, ) + other_iterables)
    def __iter__(self):
        return self
    def next(self):
        try:
            args = [it.next() for it in self._iters]
        except AttributeError:
            # CPython raises a TypeError when next() is not defined
            # (relies on the py2 list-comprehension variable `it`
            # leaking into this scope to name the offender)
            raise TypeError('%s has no next() method' % \
                            (it))
        if self._func is None:
            return tuple(args)
        else:
            return self._func(*args)
class islice:
    """Make an iterator that returns selected elements from the
    iterable. If start is non-zero, then elements from the iterable
    are skipped until start is reached. Afterward, elements are
    returned consecutively unless step is set higher than one which
    results in items being skipped. If stop is None, then iteration
    continues until the iterator is exhausted, if at all; otherwise,
    it stops at the specified position. Unlike regular slicing,
    islice() does not support negative values for start, stop, or
    step. Can be used to extract related fields from data where the
    internal structure has been flattened (for example, a multi-line
    report may list a name field on every third line).
    """
    def __init__(self, iterable, *args):
        # reuse slice() to normalize the (stop) / (start, stop[, step])
        # calling conventions
        s = slice(*args)
        self.start, self.stop, self.step = s.start or 0, s.stop, s.step
        if not isinstance(self.start, (int, long)):
           raise ValueError("Start argument must be an integer")
        if self.stop is not None and not isinstance(self.stop, (int,long)):
           raise ValueError("Stop argument must be an integer or None")
        if self.step is None:
            self.step = 1
        if self.start<0 or (self.stop is not None and self.stop<0
           ) or self.step<=0:
            raise ValueError, "indices for islice() must be positive"
        self.it = iter(iterable)
        self.donext = None
        # cnt counts how many elements have been consumed from the source
        self.cnt = 0
    def __iter__(self):
        return self
    def next(self):
        if self.donext is None:
            # bind the bound-method once; also validates the iterator
            try:
                self.donext = self.it.next
            except AttributeError:
                raise TypeError
        # skip forward to the next index to be emitted
        while self.cnt < self.start:
            self.donext()
            self.cnt += 1
        if self.stop is None or self.cnt < self.stop:
            # schedule the following emission `step` elements later
            self.start += self.step
            self.cnt += 1
            return self.donext()
        raise StopIteration
class izip:
    """Make an iterator that aggregates elements from each of the
    iterables.  Like zip() except that it returns an iterator instead
    of a list. Used for lock-step iteration over several iterables at
    a time.

    Equivalent to :

    def izip(*iterables):
        iterables = map(iter, iterables)
        while iterables:
            result = [i.next() for i in iterables]
            yield tuple(result)
    """
    def __init__(self, *iterables):
        self._iterators = map(iter, iterables)
        self._result = [None] * len(self._iterators)
    def __iter__(self):
        return self
    def next(self):
        # izip() with no arguments is an empty iterator
        if not self._iterators:
            raise StopIteration()
        try:
            return tuple([i.next() for i in self._iterators])
        except AttributeError:
            # CPython raises a TypeError when next() is not defined
            # (relies on the py2 list-comprehension variable `i` leak)
            raise TypeError('%s has no next() method' % (i))
class repeat:
    """Make an iterator that returns object over and over again.
    Runs indefinitely unless the times argument is specified. Used
    as argument to imap() for invariant parameters to the called
    function. Also used with izip() to create an invariant part of a
    tuple record.

    Equivalent to :

    def repeat(object, times=None):
        if times is None:
            while True:
                yield object
        else:
            for i in xrange(times):
                yield object
    """
    def __init__(self, obj, times=None):
        self._obj = obj
        if times is not None:
            xrange(times) # Raise a TypeError if times is not an integer
            # negative counts behave like 0 (empty iterator)
            if times < 0:
                times = 0
        self._times = times
    def __iter__(self):
        return self
    def next(self):
        # next() *need* to decrement self._times when consumed
        if self._times is not None:
            if self._times <= 0:
                raise StopIteration()
            self._times -= 1
        return self._obj
    def __repr__(self):
        if self._times is not None:
            return 'repeat(%r, %r)' % (self._obj, self._times)
        else:
            return 'repeat(%r)' % (self._obj,)
    def __len__(self):
        # A bounded repeat reports its remaining count; an unbounded one
        # has no length.  (The -1 guard is kept for safety although
        # __init__ clamps negative counts to 0.)
        if self._times == -1 or self._times is None:
            # bug fix: message used to read "uniszed"; CPython's wording
            # is "len() of unsized object"
            raise TypeError("len() of unsized object")
        return self._times
class starmap:
"""Make an iterator that computes the function using arguments
tuples obtained from the iterable. Used instead of imap() when
argument parameters are already grouped in tuples from a single
iterable (the data has been ``pre-zipped''). The difference
between imap() and starmap() parallels the distinction between
function(a,b) and function(*c).
Equivalent to :
def starmap(function, iterable):
iterable = iter(iterable)
while True:
yield function(*iterable.next())
"""
def __init__(self, function, iterable):
self._func = function
self._iter = iter(iterable)
def __iter__(self):
return self
def next(self):
# CPython raises a TypeError when the iterator doesn't return a tuple
try:
t = self._iter.next()
except AttributeError:
# CPython raises a TypeError when next() is not defined
raise TypeError('%s has no next() method' % self._iter)
if not isinstance(t, tuple):
raise TypeError("iterator must return a tuple")
return self._func(*t)
class takewhile:
"""Make an iterator that returns elements from the iterable as
long as the predicate is true.
Equivalent to :
def takewhile(predicate, iterable):
for x in iterable:
if predicate(x):
yield x
else:
break
"""
def __init__(self, predicate, iterable):
self._predicate = predicate
self._iter = iter(iterable)
def __iter__(self):
return self
def next(self):
try:
value = self._iter.next()
except AttributeError:
# CPython raises a TypeError when next() is not defined
raise TypeError('%s has no next() method' % \
(self._iter))
if not self._predicate(value):
raise StopIteration()
return value
class TeeData(object):
    """Shared cache of values already pulled from one source iterator.

    Several TeeObjects index into a single TeeData; __getitem__ lazily
    extends the cache until the requested position is available.
    """
    def __init__(self, iterator):
        self.data = []
        self._iter = iterator
    def __getitem__(self, i):
        cache = self.data
        # pull from the source until position i is cached
        while len(cache) <= i:
            try:
                nxt = self._iter.next()
            except AttributeError:
                # CPython raises a TypeError when next() is not defined
                raise TypeError('%s has no next() method' % self._iter)
            cache.append(nxt)
        return cache[i]
class TeeObject(object):
    """Iterables / Iterators as returned by the tee() function"""
    def __init__(self, iterable=None, tee_data=None):
        if tee_data:
            # attach to an existing shared cache at position 0
            self.tee_data = tee_data
            self.pos = 0
        # <=> Copy constructor
        elif isinstance(iterable, TeeObject):
            self.tee_data = iterable.tee_data
            self.pos = iterable.pos
        else:
            # fresh source: build a new shared cache around it
            self.tee_data = TeeData(iter(iterable))
            self.pos = 0
    def next(self):
        # indexing the shared cache advances/extends it as needed
        data = self.tee_data[self.pos]
        self.pos += 1
        return data
    def __iter__(self):
        return self
def tee(iterable, n=2):
    """Return n independent iterators from a single iterable.

    Once tee() has made a split, the original iterable should not be
    used anywhere else; otherwise it could get advanced without the tee
    objects being informed.  This may require significant auxiliary
    storage (depending on how much temporary data needs to be kept).
    If one iterator will consume most or all of the data before the
    others run, list() is faster than tee().
    """
    if isinstance(iterable, TeeObject):
        # Re-teeing a tee iterator: reuse it (and its shared cache), so
        # a,b = tee(seq); c,d = tee(a) leaves a is c.
        clones = [TeeObject(tee_data=iterable.tee_data) for i in xrange(n-1)]
        return tuple([iterable] + clones)
    shared = TeeData(iter(iterable))
    return tuple([TeeObject(tee_data=shared) for i in xrange(n)])
| Python |
# This __init__.py shows up in PyPy's app-level standard library.
# Let's try to prevent that confusion...
# Importing this package under any name other than 'pypy.lib' fails
# immediately, so it can't shadow a real stdlib package.
if __name__ != 'pypy.lib':
    raise ImportError, '__init__'
| Python |
"""Functions to convert between Python values and C structs.
Python strings are used to hold the data representing the C struct
and also as format strings to describe the layout of data in the C struct.
The optional first format char indicates byte order, size and alignment:
@: native order, size & alignment (default)
=: native order, std. size & alignment
<: little-endian, std. size & alignment
>: big-endian, std. size & alignment
!: same as >
The remaining chars indicate types of args and must match exactly;
these can be preceded by a decimal repeat count:
x: pad byte (no data);
c:char;
b:signed byte;
B:unsigned byte;
h:short;
H:unsigned short;
i:int;
I:unsigned int;
l:long;
L:unsigned long;
f:float;
d:double.
Special cases (preceding decimal count indicates length):
s:string (array of char); p: pascal string (with count byte).
Special case (only available in native format):
P:an integer type that is wide enough to hold a pointer.
Special case (not in native mode unless 'long long' in platform C):
q:long long;
Q:unsigned long long
Whitespace between formats is ignored.
The variable struct.error is an exception raised on errors."""
import math, sys
# TODO: XXX Find a way to get information on native sizes and alignments
class StructError(Exception):
    """Exception raised on struct packing/unpacking errors."""
    pass
error = StructError  # CPython-compatible alias: struct.error
def unpack_int(data, index, size, le):
    """Decode an unsigned integer from size bytes of data at index.

    le names the byte order of the encoded data: 'little' or 'big'.
    """
    octets = [ord(ch) for ch in data[index:index + size]]
    if le == 'little':
        # Normalize to big-endian order for the fold below.
        octets.reverse()
    value = 0
    for octet in octets:
        value = (value << 8) | octet
    return int(value)
def unpack_signed_int(data, index, size, le):
    """Decode a two's-complement signed integer from size bytes at index."""
    value = unpack_int(data, index, size, le)
    modulus = 2 ** (size * 8)
    # Values with the sign bit set map into the negative range.
    if value > modulus // 2 - 1:
        value = int(-1 * (modulus - value))
    return value
def unpack_float(data, index, size, le):
    """Decode an IEEE-754 single (size 4) or double (size 8) from data.

    le names the byte order of the encoded value: 'little' or 'big'.
    NOTE(review): an all-zero payload decodes to +0.0; denormals and
    non-finite values are not handled specially.
    """
    octets = [ord(ch) for ch in data[index:index + size]]
    if len(octets) != size:
        raise StructError("Not enough data to unpack")
    if max(octets) == 0:
        return 0.0
    # Normalize to little-endian order for the field extraction below.
    if le == 'big':
        octets.reverse()
    if size == 4:
        bias, exp, prec = 127, 8, 23
    else:
        bias, exp, prec = 1023, 11, 52
    # Mantissa: low bits of the next-to-last byte, then the lower bytes.
    mantissa = octets[size - 2] & (2 ** (15 - exp) - 1)
    for octet in octets[size - 3::-1]:
        mantissa = mantissa << 8 | octet
    mantissa = 1 + (1.0 * mantissa) / (2 ** prec)
    mantissa /= 2
    # Exponent: spread over the top two bytes, biased, implicit-one form.
    e = (octets[-1] & 0x7f) << (exp - 7)
    e += (octets[size - 2] >> (15 - exp)) & (2 ** (exp - 7) - 1)
    e = e - bias + 1
    sign = octets[-1] & 0x80
    value = math.ldexp(mantissa, e)
    if sign:
        value = -value
    return value
def unpack_char(data, index, size, le):
    """Return the raw size-byte slice of data starting at index."""
    return data[index:index + size]
def pack_int(number, size, le):
    """Encode number as size bytes in the given byte order ('little'/'big').

    Only the low size*8 bits are kept; negative numbers come out in
    two's complement (Python's arithmetic right shift sign-extends).
    """
    chunks = [chr((number >> (8 * i)) & 0xff) for i in range(size)]
    if le == 'big':
        chunks.reverse()
    return ''.join(chunks)
def pack_signed_int(number, size, le):
    """Range-check number for a signed size-byte field, then encode it."""
    if not isinstance(number, (int, long)):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    bound = 2 ** (8 * size - 1)
    if number > bound - 1 or number < -bound:
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)
def pack_unsigned_int(number, size, le):
    """Range-check number for an unsigned size-byte field, then encode it."""
    if not isinstance(number, (int, long)):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    if number < 0:
        raise TypeError("can't convert negative long to unsigned")
    if number > 2 ** (8 * size) - 1:
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)
def pack_char(char,size,le):
    """Pack a single character; the value is returned as a string unchanged."""
    return str(char)
def sane_float(man,e):
    # TODO: XXX Implement checks for floats
    # Always True for now, so pack_float trusts frexp()'s output unchecked
    # (inf/nan are therefore not handled).
    return True
def pack_float(number, size, le):
    """Pack a Python float into an IEEE-754 binary string.

    size selects single (4) or double (8) precision; le is 'little' or
    'big'.  NOTE(review): sane_float() is a stub that always returns
    True, so non-finite inputs are not packed correctly.
    """
    if number < 0:
        sign = 1
        number *= -1
    elif number == 0.0:
        # All-zero payload is the canonical +0.0 encoding.
        return "\x00" * size
    else:
        sign = 0
    if size == 4:
        bias = 127
        exp = 8
        prec = 23
    else:
        bias = 1023
        exp = 11
        prec = 52
    # frexp gives number = man * 2**e with 0.5 <= man < 1; rescale so
    # that 1 <= man < 2, the IEEE normalized (implicit leading 1) form.
    man, e = math.frexp(number)
    if 0.5 <= man and man < 1.0:
        man *= 2
        e -= 1
    if sane_float(man,e):
        man -= 1
        e += bias
        mantissa = int(2**prec *(man) +0.5)
        res=[]
        # Rounding may overflow the mantissa field; carry into the exponent.
        if mantissa >> prec :
            mantissa = 0
            e += 1
        # Low mantissa bytes, least significant first (little-endian order).
        for i in range(size-2):
            res += [ mantissa & 0xff]
            mantissa >>= 8
        # Next-to-last byte: top mantissa bits plus low exponent bits.
        res += [ (mantissa & (2**(15-exp)-1)) | ((e & (2**(exp-7)-1))<<(15-exp))]
        # Last byte: sign bit plus high exponent bits.
        res += [sign << 7 | e >> (exp - 7)]
        if le == 'big':
            res.reverse()
        return ''.join([chr(x) for x in res])
# TODO: What todo with insane floats/doubles. handle in sanefloat?
# Per-type-code table: element size in bytes, alignment (always 0 here,
# i.e. unaligned "standard" layout), and the pack/unpack helpers.  Codes
# with pack/unpack None ('x', 's', 'p') are special-cased in pack()/unpack().
big_endian_format = {
    'x':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
    'b':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'B':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'c':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_char, 'unpack' : unpack_char},
    's':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
    'p':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
    'h':{ 'size' : 2, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'H':{ 'size' : 2, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'i':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'I':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'l':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'L':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'q':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'Q':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'f':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_float, 'unpack' : unpack_float},
    'd':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_float, 'unpack' : unpack_float},
    }
default = big_endian_format
# Byte-order prefix -> (type-code table, endianness name).  All modes share
# the same "standard" sizes; only the byte order differs.
formatmode={ '<' : (default, 'little'),
             '>' : (default, 'big'),
             '!' : (default, 'big'),
             '=' : (default, sys.byteorder),
             '@' : (default, sys.byteorder)
           }
def getmode(fmt):
    """Split off the optional byte-order prefix of a format string.

    Returns (formatdef, endianness, index) where index is the position
    in fmt at which the type codes start.

    Fix: an empty format string used to raise a bare IndexError from
    fmt[0]; it now falls back to native mode ('@') like any format
    without a recognized prefix, so calcsize('') == 0 as in CPython.
    """
    try:
        formatdef, endianness = formatmode[fmt[0]]
        index = 1
    except (KeyError, IndexError):
        # No recognized prefix (or empty format): native mode, parse from 0.
        formatdef, endianness = formatmode['@']
        index = 0
    return formatdef, endianness, index
def getNum(fmt, i):
    """Parse an optional decimal repeat count in fmt starting at i.

    Returns (count, next_index); count is None when no digit is present.
    """
    count = None
    ch = fmt[i]
    while '0' <= ch <= '9':
        if count is None:
            count = int(ch)
        else:
            count = 10 * count + int(ch)
        i += 1
        ch = fmt[i]
    return count, i
def calcsize(fmt):
    """calcsize(fmt) -> int
    Return size of C struct described by format string fmt.
    See struct.__doc__ for more on format strings."""
    formatdef, endianness, i = getmode(fmt)
    total = 0
    while i < len(fmt):
        count, i = getNum(fmt, i)
        code = fmt[i]
        i += 1
        try:
            spec = formatdef[code]
        except KeyError:
            raise StructError("%s is not a valid format" % code)
        if count is None:
            total += spec['size']
        else:
            # A repeat count multiplies the element size.
            total += count * spec['size']
    return total
def pack(fmt,*args):
    """pack(fmt, v1, v2, ...) -> string
    Return string containing values v1, v2, ... packed according to fmt.
    See struct.__doc__ for more on format strings."""
    formatdef,endianness,i = getmode(fmt)
    args = list(args)
    n_args = len(args)  # NOTE(review): assigned but never used
    result = []
    while i<len(fmt):
        # Optional decimal repeat count, then one type code.
        num,i = getNum(fmt,i)
        cur = fmt[i]
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError,"%s is not a valid format"%cur
        if num == None :
            num_s = 0   # NOTE(review): num_s is assigned but never used
            num = 1
        else:
            num_s = num
        if cur == 'x':
            # Pad bytes consume no arguments.
            result += ['\0'*num]
        elif cur == 's':
            # 's': the count is the field width of ONE string argument,
            # truncated or NUL-padded to exactly num bytes.
            if isinstance(args[0], str):
                padding = num - len(args[0])
                result += [args[0][:num] + '\0'*padding]
                args.pop(0)
            else:
                raise StructError,"arg for string format not a string"
        elif cur == 'p':
            # 'p': Pascal string -- one length byte followed by at most
            # num-1 bytes of data, NUL-padded to the field width.
            if isinstance(args[0], str):
                padding = num - len(args[0]) - 1
                if padding > 0:
                    result += [chr(len(args[0])) + args[0][:num-1] + '\0'*padding]
                else:
                    # String fills (or overflows) the field: store the
                    # truncated length, capped at 255.
                    if num<255:
                        result += [chr(num-1) + args[0][:num-1]]
                    else:
                        result += [chr(255) + args[0][:num-1]]
                args.pop(0)
            else:
                raise StructError,"arg for string format not a string"
        else:
            # Numeric codes: the count means num separate arguments.
            if len(args) == 0:
                raise StructError,"insufficient arguments to pack"
            for var in args[:num]:
                result += [format['pack'](var,format['size'],endianness)]
            args=args[num:]
        num = None
        i += 1
    if len(args) != 0:
        raise StructError,"too many arguments for pack format"
    return ''.join(result)
def unpack(fmt,data):
    """unpack(fmt, string) -> (v1, v2, ...)
    Unpack the string, containing packed C structure data, according
    to fmt. Requires len(string)==calcsize(fmt).
    See struct.__doc__ for more on format strings."""
    formatdef,endianness,i = getmode(fmt)
    j = 0           # read offset into data
    num = 0
    result = []
    length= calcsize(fmt)
    if length != len (data):
        raise StructError,"unpack str size does not match format"
    while i<len(fmt):
        # Optional decimal repeat count, then one type code.
        num,i=getNum(fmt,i)
        cur = fmt[i]
        i += 1
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError,"%s is not a valid format"%cur
        if not num :
            num = 1
        if cur == 'x':
            # Pad bytes are skipped, producing no value.
            j += num
        elif cur == 's':
            # 's': the count is the width of ONE string field.
            result.append(data[j:j+num])
            j += num
        elif cur == 'p':
            # 'p': Pascal string -- first byte is the stored length,
            # clamped to the field width minus the length byte itself.
            n=ord(data[j])
            if n >= num:
                n = num-1
            result.append(data[j+1:j+n+1])
            j += num
        else:
            # Numeric codes: the count means num separate values.
            for n in range(num):
                result += [format['unpack'](data,j,format['size'],endianness)]
                j += format['size']
    return tuple(result)
| Python |
"""Concrete date/time and related types -- prototype implemented in Python.
See http://www.zope.org/Members/fdrake/DateTimeWiki/FrontPage
See also http://dir.yahoo.com/Reference/calendars/
For a primer on DST, including many current DST rules, see
http://webexhibits.org/daylightsaving/
For more about DST than you ever wanted to know, see
ftp://elsie.nci.nih.gov/pub/
Sources for time zone and DST data: http://www.twinsun.com/tz/tz-link.htm
This was originally copied from the sandbox of the CPython CVS repository.
Thanks to Tim Peters for suggesting using it.
"""
import time as _time
import math as _math
MINYEAR = 1
MAXYEAR = 9999
# Utility functions, adapted from Python's Demo/classes/Dates.py, which
# also assumes the current Gregorian calendar indefinitely extended in
# both directions. Difference: Dates.py calls January 1 of year 0 day
# number 1. The code here calls January 1 of year 1 day number 1. This is
# to match the definition of the "proleptic Gregorian" calendar in Dershowitz
# and Reingold's "Calendrical Calculations", where it's the base calendar
# for all computations. See the book for algorithms for converting between
# proleptic Gregorian ordinals and many other calendar systems.
# _DAYS_IN_MONTH[m] is the length of month m in a non-leap year; index 0
# is a placeholder so months can be indexed 1..12 directly.
_DAYS_IN_MONTH = [None, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
# _DAYS_BEFORE_MONTH[m]: days in a non-leap year before the 1st of month m.
_DAYS_BEFORE_MONTH = [None]
dbm = 0
for dim in _DAYS_IN_MONTH[1:]:
    _DAYS_BEFORE_MONTH.append(dbm)
    dbm += dim
del dbm, dim
def _is_leap(year):
"year -> 1 if leap year, else 0."
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
def _days_in_year(year):
    "year -> number of days in year (366 if a leap year, else 365)."
    # _is_leap returns a bool, which adds as 0 or 1.
    return 365 + _is_leap(year)
def _days_before_year(year):
"year -> number of days before January 1st of year."
y = year - 1
return y*365 + y//4 - y//100 + y//400
def _days_in_month(year, month):
    """Return the number of days in *month* of *year*."""
    assert 1 <= month <= 12, month
    if month != 2 or not _is_leap(year):
        return _DAYS_IN_MONTH[month]
    # February of a leap year.
    return 29
def _days_before_month(year, month):
    """Return the number of days in *year* preceding the 1st of *month*."""
    if not 1 <= month <= 12:
        raise ValueError('month must be in 1..12', month)
    # Add the leap day once February is past.
    extra = month > 2 and _is_leap(year)
    return _DAYS_BEFORE_MONTH[month] + extra
def _ymd2ord(year, month, day):
    """Return the proleptic Gregorian ordinal; 01-Jan-0001 is day 1."""
    if not 1 <= month <= 12:
        raise ValueError('month must be in 1..12', month)
    dim = _days_in_month(year, month)
    if not 1 <= day <= dim:
        raise ValueError('day must be in 1..%d' % dim, day)
    before = _days_before_year(year) + _days_before_month(year, month)
    return before + day
# Precomputed Gregorian cycle lengths, used by _ord2ymd().
_DI400Y = _days_before_year(401)    #  number of days in 400 years
_DI100Y = _days_before_year(101)    #    "    "   "   " 100   "
_DI4Y   = _days_before_year(5)      #    "    "   "   "   4   "
# A 4-year cycle has an extra leap day over what we'd get from pasting
# together 4 single years.
assert _DI4Y == 4 * 365 + 1
# Similarly, a 400-year cycle has an extra leap day over what we'd get from
# pasting together 4 100-year cycles.
assert _DI400Y == 4 * _DI100Y + 1
# OTOH, a 100-year cycle has one fewer leap day than we'd get from
# pasting together 25 4-year cycles.
assert _DI100Y == 25 * _DI4Y - 1
def _ord2ymd(n):
    "ordinal -> (year, month, day), considering 01-Jan-0001 as day 1."
    # n is a 1-based index, starting at 1-Jan-1.  The pattern of leap years
    # repeats exactly every 400 years.  The basic strategy is to find the
    # closest 400-year boundary at or before n, then work with the offset
    # from that boundary to n.  Life is much clearer if we subtract 1 from
    # n first -- then the values of n at 400-year boundaries are exactly
    # those divisible by _DI400Y:
    #
    #     D  M   Y            n              n-1
    #     -- --- ----        ----------     ----------------
    #     31 Dec -400        -_DI400Y       -_DI400Y -1
    #      1 Jan -399         -_DI400Y +1   -_DI400Y      400-year boundary
    #     ...
    #     30 Dec  000        -1             -2
    #     31 Dec  000         0             -1
    #      1 Jan  001         1              0            400-year boundary
    #      2 Jan  001         2              1
    #      3 Jan  001         3              2
    #     ...
    #     31 Dec  400         _DI400Y        _DI400Y -1
    #      1 Jan  401         _DI400Y +1     _DI400Y      400-year boundary
    n -= 1
    n400, n = divmod(n, _DI400Y)
    year = n400 * 400 + 1   # ..., -399, 1, 401, ...
    # Now n is the (non-negative) offset, in days, from January 1 of year, to
    # the desired date.  Now compute how many 100-year cycles precede n.
    # Note that it's possible for n100 to equal 4!  In that case 4 full
    # 100-year cycles precede the desired day, which implies the desired
    # day is December 31 at the end of a 400-year cycle.
    n100, n = divmod(n, _DI100Y)
    # Now compute how many 4-year cycles precede it.
    n4, n = divmod(n, _DI4Y)
    # And now how many single years.  Again n1 can be 4, and again meaning
    # that the desired day is December 31 at the end of the 4-year cycle.
    n1, n = divmod(n, 365)
    year += n100 * 100 + n4 * 4 + n1
    if n1 == 4 or n100 == 4:
        # December 31 of a cycle's last year; handle it directly.
        assert n == 0
        return year-1, 12, 31
    # Now the year is correct, and n is the offset from January 1.  We find
    # the month via an estimate that's either exact or one too large.
    leapyear = n1 == 3 and (n4 != 24 or n100 == 3)
    assert leapyear == _is_leap(year)
    month = (n + 50) >> 5
    preceding = _DAYS_BEFORE_MONTH[month] + (month > 2 and leapyear)
    if preceding > n:  # estimate is too large
        month -= 1
        preceding -= _DAYS_IN_MONTH[month] + (month == 2 and leapyear)
    n -= preceding
    assert 0 <= n < _days_in_month(year, month)
    # Now the year and month are correct, and n is the offset from the
    # start of that month:  we're done!
    return year, month, n+1
# Month and day names.  For localized versions, see the calendar module.
# Index 0 is a placeholder so month numbers 1..12 and ISO weekday
# numbers 1..7 index these lists directly.
_MONTHNAMES = [None, "Jan", "Feb", "Mar", "Apr", "May", "Jun",
                     "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
_DAYNAMES = [None, "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
def _build_struct_time(y, m, d, hh, mm, ss, dstflag):
    """Assemble a time.struct_time from broken-out date/time fields."""
    # struct_time weekday is 0=Monday; ordinal day 1 (01-Jan-0001) + 6 maps
    # onto that convention.
    weekday = (_ymd2ord(y, m, d) + 6) % 7
    yearday = _days_before_month(y, m) + d
    return _time.struct_time((y, m, d, hh, mm, ss, weekday, yearday, dstflag))
def _format_time(hh, mm, ss, us):
# Skip trailing microseconds when us==0.
result = "%02d:%02d:%02d" % (hh, mm, ss)
if us:
result += ".%06d" % us
return result
# Correctly substitute for %z and %Z escapes in strftime formats.
def _wrap_strftime(object, format, timetuple):
    """Call time.strftime after expanding %z/%Z from the object's tz info.

    %z becomes the signed HHMM UTC offset (via object._utcoffset()) and
    %Z the timezone name (via object.tzname()); both collapse to an
    empty string when unavailable.  Other escapes pass through.
    """
    year = timetuple[0]
    if year < 1900:
        # Platform strftime() support below 1900 is unreliable.
        raise ValueError("year=%d is before 1900; the datetime strftime() "
                         "methods require year >= 1900" % year)
    # Don't call _utcoffset() or tzname() unless actually needed.
    zreplace = None # the string to use for %z
    Zreplace = None # the string to use for %Z
    # Scan format for %z and %Z escapes, replacing as needed.
    newformat = []
    push = newformat.append
    i, n = 0, len(format)
    while i < n:
        ch = format[i]
        i += 1
        if ch == '%':
            if i < n:
                ch = format[i]
                i += 1
                if ch == 'z':
                    if zreplace is None:
                        # Compute once, lazily, on first %z.
                        zreplace = ""
                        if hasattr(object, "_utcoffset"):
                            offset = object._utcoffset()
                            if offset is not None:
                                sign = '+'
                                if offset < 0:
                                    offset = -offset
                                    sign = '-'
                                h, m = divmod(offset, 60)
                                zreplace = '%c%02d%02d' % (sign, h, m)
                    assert '%' not in zreplace
                    newformat.append(zreplace)
                elif ch == 'Z':
                    if Zreplace is None:
                        Zreplace = ""
                        if hasattr(object, "tzname"):
                            s = object.tzname()
                            if s is not None:
                                # strftime is going to have at this: escape %
                                Zreplace = s.replace('%', '%%')
                    newformat.append(Zreplace)
                else:
                    # Any other escape is left for strftime itself.
                    push('%')
                    push(ch)
            else:
                # Trailing lone '%'.
                push('%')
        else:
            push(ch)
    newformat = "".join(newformat)
    return _time.strftime(newformat, timetuple)
def _call_tzinfo_method(tzinfo, methname, tzinfoarg):
if tzinfo is None:
return None
return getattr(tzinfo, methname)(tzinfoarg)
# Just raise TypeError if the arg isn't None or a string.
def _check_tzname(name):
if name is not None and not isinstance(name, str):
raise TypeError("tzinfo.tzname() must return None or string, "
"not '%s'" % type(name))
# name is the offset-producing method, "utcoffset" or "dst".
# offset is what it returned.
# If offset isn't None or timedelta, raises TypeError.
# If offset is None, returns None.
# Else offset is checked for being in range, and a whole # of minutes.
# If it is, its integer value is returned.  Else ValueError is raised.
def _check_utc_offset(name, offset):
    assert name in ("utcoffset", "dst")
    if offset is None:
        return None
    if not isinstance(offset, timedelta):
        raise TypeError("tzinfo.%s() must return None "
                        "or timedelta, not '%s'" % (name, type(offset)))
    days = offset.days
    if days < -1 or days > 0:
        # More than a day in either direction can never be in range;
        # force the range check below to fail.
        offset = 1440  # trigger out-of-range
    else:
        seconds = days * 86400 + offset.seconds
        minutes, seconds = divmod(seconds, 60)
        if seconds or offset.microseconds:
            # Sub-minute offsets are rejected outright.
            raise ValueError("tzinfo.%s() must return a whole number "
                             "of minutes" % name)
        offset = minutes
    if -1440 < offset < 1440:
        return offset
    raise ValueError("%s()=%d, must be in -1439..1439" % (name, offset))
def _check_date_fields(year, month, day):
    # Validate date components; raises ValueError on the first bad field.
    if not MINYEAR <= year <= MAXYEAR:
        raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year)
    if not 1 <= month <= 12:
        raise ValueError('month must be in 1..12', month)
    # The day range depends on the (already validated) year and month.
    dim = _days_in_month(year, month)
    if not 1 <= day <= dim:
        raise ValueError('day must be in 1..%d' % dim, day)
def _check_time_fields(hour, minute, second, microsecond):
if not 0 <= hour <= 23:
raise ValueError('hour must be in 0..23', hour)
if not 0 <= minute <= 59:
raise ValueError('minute must be in 0..59', minute)
if not 0 <= second <= 59:
raise ValueError('second must be in 0..59', second)
if not 0 <= microsecond <= 999999:
raise ValueError('microsecond must be in 0..999999', microsecond)
def _check_tzinfo_arg(tz):
    # tz must be None or a tzinfo instance; anything else is a caller error.
    if tz is not None and not isinstance(tz, tzinfo):
        raise TypeError("tzinfo argument must be None or of a tzinfo subclass")
# Notes on comparison: In general, datetime module comparison operators raise
# TypeError when they don't know how to do a comparison themself. If they
# returned NotImplemented instead, comparison could (silently) fall back to
# the default compare-objects-by-comparing-their-memory-addresses strategy,
# and that's not helpful. There are two exceptions:
#
# 1. For date and datetime, if the other object has a "timetuple" attr,
# NotImplemented is returned. This is a hook to allow other kinds of
# datetime-like objects a chance to intercept the comparison.
#
# 2. Else __eq__ and __ne__ return False and True, respectively. This is
# so opertaions like
#
# x == y
# x != y
# x in sequence
# x not in sequence
# dict[x] = y
#
# don't raise annoying TypeErrors just because a datetime object
# is part of a heterogeneous collection. If there's no known way to
# compare X to a datetime, saying they're not equal is reasonable.
def _cmperror(x, y):
    # Helper: uniform TypeError for unsupported ordering comparisons.
    raise TypeError("can't compare '%s' to '%s'" % (
                    type(x).__name__, type(y).__name__))
# This is a start at a struct tm workalike. Goals:
#
# + Works the same way across platforms.
# + Handles all the fields datetime needs handled, without 1970-2038 glitches.
#
# Note: I suspect it's best if this flavor of tm does *not* try to
# second-guess timezones or DST. Instead fold whatever adjustments you want
# into the minutes argument (and the constructor will normalize).
_ORD1970 = _ymd2ord(1970, 1, 1) # base ordinal for UNIX epoch
class tmxxx:
    """struct tm workalike that normalizes out-of-range fields.

    Each out-of-range component carries into the next larger unit
    (microseconds into seconds, ..., days into month/year), so e.g.
    hour=24 rolls the date forward by one day.
    """
    # Cached proleptic Gregorian ordinal; computed lazily by toordinal().
    ordinal = None
    def __init__(self, year, month, day, hour=0, minute=0, second=0,
                 microsecond=0):
        # Normalize all the inputs, and store the normalized values.
        if not 0 <= microsecond <= 999999:
            carry, microsecond = divmod(microsecond, 1000000)
            second += carry
        if not 0 <= second <= 59:
            carry, second = divmod(second, 60)
            minute += carry
        if not 0 <= minute <= 59:
            carry, minute = divmod(minute, 60)
            hour += carry
        if not 0 <= hour <= 23:
            carry, hour = divmod(hour, 24)
            day += carry
        # That was easy.  Now it gets muddy:  the proper range for day
        # can't be determined without knowing the correct month and year,
        # but if day is, e.g., plus or minus a million, the current month
        # and year values make no sense (and may also be out of bounds
        # themselves).
        # Saying 12 months == 1 year should be non-controversial.
        if not 1 <= month <= 12:
            carry, month = divmod(month-1, 12)
            year += carry
            month += 1
            assert 1 <= month <= 12
        # Now only day can be out of bounds (year may also be out of bounds
        # for a datetime object, but we don't care about that here).
        # If day is out of bounds, what to do is arguable, but at least the
        # method here is principled and explainable.
        dim = _days_in_month(year, month)
        if not 1 <= day <= dim:
            # Move day-1 days from the first of the month.  First try to
            # get off cheap if we're only one day out of range (adjustments
            # for timezone alone can't be worse than that).
            if day == 0:    # move back a day
                month -= 1
                if month > 0:
                    day = _days_in_month(year, month)
                else:
                    year, month, day = year-1, 12, 31
            elif day == dim + 1:    # move forward a day
                month += 1
                day = 1
                if month > 12:
                    month = 1
                    year += 1
            else:
                # General case: convert via the ordinal and back.
                self.ordinal = _ymd2ord(year, month, 1) + (day - 1)
                year, month, day = _ord2ymd(self.ordinal)
        self.year, self.month, self.day = year, month, day
        self.hour, self.minute, self.second = hour, minute, second
        self.microsecond = microsecond
    def toordinal(self):
        """Return proleptic Gregorian ordinal for the year, month and day.
        January 1 of year 1 is day 1.  Only the year, month and day values
        contribute to the result.
        """
        if self.ordinal is None:
            self.ordinal = _ymd2ord(self.year, self.month, self.day)
        return self.ordinal
    def time(self):
        "Return Unixish timestamp, as a float (assuming UTC)."
        days = self.toordinal() - _ORD1970   # convert to UNIX epoch
        seconds = ((days * 24. + self.hour)*60. + self.minute)*60.
        return seconds + self.second + self.microsecond / 1e6
    def ctime(self):
        "Return ctime() style string."
        # ordinal % 7 maps day 1 (a Monday in the proleptic calendar) to 1,
        # indexing _DAYNAMES directly; 0 becomes 7 (Sunday).
        weekday = self.toordinal() % 7 or 7
        return "%s %s %2d %02d:%02d:%02d %04d" % (
            _DAYNAMES[weekday],
            _MONTHNAMES[self.month],
            self.day,
            self.hour, self.minute, self.second,
            self.year)
class timedelta(object):
    """Represent the difference between two datetime objects.
    Supported operators:
    - add, subtract timedelta
    - unary plus, minus, abs
    - compare to timedelta
    - multiply, divide by int/long
    In addition, datetime supports subtraction of two datetime objects
    returning a timedelta, and addition or subtraction of a datetime
    and a timedelta giving a datetime.
    Representation: (days, seconds, microseconds).  Why?  Because I
    felt like it.
    """
    def __new__(cls, days=0, seconds=0, microseconds=0,
                # XXX The following should only be used as keyword args:
                milliseconds=0, minutes=0, hours=0, weeks=0):
        # Doing this efficiently and accurately in C is going to be difficult
        # and error-prone, due to ubiquitous overflow possibilities, and that
        # C double doesn't have enough bits of precision to represent
        # microseconds over 10K years faithfully.  The code here tries to make
        # explicit where go-fast assumptions can be relied on, in order to
        # guide the C implementation; it's way more convoluted than speed-
        # ignoring auto-overflow-to-long idiomatic Python could be.
        # XXX Check that all inputs are ints, longs or floats.
        # Final values, all integer.
        # s and us fit in 32-bit signed ints; d isn't bounded.
        d = s = us = 0
        # Normalize everything to days, seconds, microseconds.
        days += weeks*7
        seconds += minutes*60 + hours*3600
        microseconds += milliseconds*1000
        # Get rid of all fractions, and normalize s and us.
        # Take a deep breath <wink>.
        if isinstance(days, float):
            # Split the fractional day into whole seconds + a fraction.
            dayfrac, days = _math.modf(days)
            daysecondsfrac, daysecondswhole = _math.modf(dayfrac * (24.*3600.))
            assert daysecondswhole == int(daysecondswhole)  # can't overflow
            s = int(daysecondswhole)
            assert days == long(days)
            d = long(days)
        else:
            daysecondsfrac = 0.0
            d = days
        assert isinstance(daysecondsfrac, float)
        assert abs(daysecondsfrac) <= 1.0
        assert isinstance(d, (int, long))
        assert abs(s) <= 24 * 3600
        # days isn't referenced again before redefinition
        if isinstance(seconds, float):
            secondsfrac, seconds = _math.modf(seconds)
            assert seconds == long(seconds)
            seconds = long(seconds)
            secondsfrac += daysecondsfrac
            assert abs(secondsfrac) <= 2.0
        else:
            secondsfrac = daysecondsfrac
        # daysecondsfrac isn't referenced again
        assert isinstance(secondsfrac, float)
        assert abs(secondsfrac) <= 2.0
        assert isinstance(seconds, (int, long))
        days, seconds = divmod(seconds, 24*3600)
        d += days
        s += int(seconds)    # can't overflow
        assert isinstance(s, int)
        assert abs(s) <= 2 * 24 * 3600
        # seconds isn't referenced again before redefinition
        usdouble = secondsfrac * 1e6
        assert abs(usdouble) < 2.1e6    # exact value not critical
        # secondsfrac isn't referenced again
        if isinstance(microseconds, float):
            # Fold the accumulated fractional seconds in before rounding.
            microseconds += usdouble
            microseconds = round(microseconds)
            seconds, microseconds = divmod(microseconds, 1e6)
            assert microseconds == int(microseconds)
            assert seconds == long(seconds)
            days, seconds = divmod(seconds, 24.*3600.)
            assert days == long(days)
            assert seconds == int(seconds)
            d += long(days)
            s += int(seconds)   # can't overflow
            assert isinstance(s, int)
            assert abs(s) <= 3 * 24 * 3600
        else:
            seconds, microseconds = divmod(microseconds, 1000000)
            days, seconds = divmod(seconds, 24*3600)
            d += days
            s += int(seconds)    # can't overflow
            assert isinstance(s, int)
            assert abs(s) <= 3 * 24 * 3600
            microseconds = float(microseconds)
            microseconds += usdouble
            microseconds = round(microseconds)
        assert abs(s) <= 3 * 24 * 3600
        assert abs(microseconds) < 3.1e6
        # Just a little bit of carrying possible for microseconds and seconds.
        assert isinstance(microseconds, float)
        assert int(microseconds) == microseconds
        us = int(microseconds)
        seconds, us = divmod(us, 1000000)
        s += seconds    # cant't overflow
        assert isinstance(s, int)
        days, s = divmod(s, 24*3600)
        d += days
        # Invariants of the normalized representation.
        assert isinstance(d, (int, long))
        assert isinstance(s, int) and 0 <= s < 24*3600
        assert isinstance(us, int) and 0 <= us < 1000000
        self = object.__new__(cls)
        # Name-mangled attributes (_timedelta__days, ...) keep the
        # normalized state private.
        self.__days = d
        self.__seconds = s
        self.__microseconds = us
        if abs(d) > 999999999:
            raise OverflowError("timedelta # of days is too large: %d" % d)
        return self
    def __repr__(self):
        # Emit the shortest constructor call that reproduces the value.
        if self.__microseconds:
            return "%s(%d, %d, %d)" % ('datetime.' + self.__class__.__name__,
                                       self.__days,
                                       self.__seconds,
                                       self.__microseconds)
        if self.__seconds:
            return "%s(%d, %d)" % ('datetime.' + self.__class__.__name__,
                                   self.__days,
                                   self.__seconds)
        return "%s(%d)" % ('datetime.' + self.__class__.__name__, self.__days)
    def __str__(self):
        # "[D day[s], ]H:MM:SS[.ffffff]" format.
        mm, ss = divmod(self.__seconds, 60)
        hh, mm = divmod(mm, 60)
        s = "%d:%02d:%02d" % (hh, mm, ss)
        if self.__days:
            def plural(n):
                return n, abs(n) != 1 and "s" or ""
            s = ("%d day%s, " % plural(self.__days)) + s
        if self.__microseconds:
            s = s + ".%06d" % self.__microseconds
        return s
    # Read-only accessors for the normalized components.
    days = property(lambda self: self.__days, doc="days")
    seconds = property(lambda self: self.__seconds, doc="seconds")
    microseconds = property(lambda self: self.__microseconds,
                            doc="microseconds")
    def __add__(self, other):
        if isinstance(other, timedelta):
            # for CPython compatibility, we cannot use
            # our __class__ here, but need a real timedelta
            return timedelta(self.__days + other.__days,
                             self.__seconds + other.__seconds,
                             self.__microseconds + other.__microseconds)
        return NotImplemented
    __radd__ = __add__
    def __sub__(self, other):
        if isinstance(other, timedelta):
            return self + -other
        return NotImplemented
    def __rsub__(self, other):
        if isinstance(other, timedelta):
            return -self + other
        return NotImplemented
    def __neg__(self):
        # for CPython compatibility, we cannot use
        # our __class__ here, but need a real timedelta
        return timedelta(-self.__days,
                         -self.__seconds,
                         -self.__microseconds)
    def __pos__(self):
        return self
    def __abs__(self):
        # Sign lives entirely in __days after normalization.
        if self.__days < 0:
            return -self
        else:
            return self
    def __mul__(self, other):
        if isinstance(other, (int, long)):
            # for CPython compatibility, we cannot use
            # our __class__ here, but need a real timedelta
            return timedelta(self.__days * other,
                             self.__seconds * other,
                             self.__microseconds * other)
        return NotImplemented
    __rmul__ = __mul__
    def __div__(self, other):
        if isinstance(other, (int, long)):
            # Convert to total microseconds, then floor-divide.
            usec = ((self.__days * (24*3600L) + self.__seconds) * 1000000 +
                    self.__microseconds)
            return timedelta(0, 0, usec // other)
        return NotImplemented
    __floordiv__ = __div__
    # Comparisons.  Equality against non-timedeltas is False/True rather
    # than an error; ordering against them raises TypeError.
    def __eq__(self, other):
        if isinstance(other, timedelta):
            return self.__cmp(other) == 0
        else:
            return False
    def __ne__(self, other):
        if isinstance(other, timedelta):
            return self.__cmp(other) != 0
        else:
            return True
    def __le__(self, other):
        if isinstance(other, timedelta):
            return self.__cmp(other) <= 0
        else:
            _cmperror(self, other)
    def __lt__(self, other):
        if isinstance(other, timedelta):
            return self.__cmp(other) < 0
        else:
            _cmperror(self, other)
    def __ge__(self, other):
        if isinstance(other, timedelta):
            return self.__cmp(other) >= 0
        else:
            _cmperror(self, other)
    def __gt__(self, other):
        if isinstance(other, timedelta):
            return self.__cmp(other) > 0
        else:
            _cmperror(self, other)
    def __cmp(self, other):
        # Normalized (days, seconds, microseconds) tuples compare correctly.
        assert isinstance(other, timedelta)
        return cmp(self.__getstate(), other.__getstate())
    def __hash__(self):
        return hash(self.__getstate())
    def __nonzero__(self):
        return (self.__days != 0 or
                self.__seconds != 0 or
                self.__microseconds != 0)
    # Pickle support.
    __safe_for_unpickling__ = True      # For Python 2.2
    def __getstate(self):
        return (self.__days, self.__seconds, self.__microseconds)
    def __reduce__(self):
        return (self.__class__, self.__getstate())
# Class attributes required by the datetime API: extreme values and the
# smallest representable difference.
timedelta.min = timedelta(-999999999)
timedelta.max = timedelta(days=999999999, hours=23, minutes=59, seconds=59,
                          microseconds=999999)
timedelta.resolution = timedelta(microseconds=1)
class date(object):
"""Concrete date type.
Constructors:
__new__()
fromtimestamp()
today()
fromordinal()
Operators:
__repr__, __str__
__cmp__, __hash__
__add__, __radd__, __sub__ (add/radd only with timedelta arg)
Methods:
timetuple()
toordinal()
weekday()
isoweekday(), isocalendar(), isoformat()
ctime()
strftime()
Properties (readonly):
year, month, day
"""
    def __new__(cls, year, month=None, day=None):
        """Constructor.
        Arguments:
        year, month, day (required, base 1)
        """
        if isinstance(year, str):
            # Pickle support: the pickled state arrives as a string in the
            # 'year' argument; __setstate is defined elsewhere in the class.
            self = object.__new__(cls)
            self.__setstate(year)
            return self
        _check_date_fields(year, month, day)
        self = object.__new__(cls)
        # Name-mangled attributes (_date__year, ...) keep the state private.
        self.__year = year
        self.__month = month
        self.__day = day
        return self
# Additional constructors
    def fromtimestamp(cls, t):
        "Construct a date from a POSIX timestamp (like time.time())."
        # Only the date components of the local-time conversion are kept.
        y, m, d, hh, mm, ss, weekday, jday, dst = _time.localtime(t)
        return cls(y, m, d)
    fromtimestamp = classmethod(fromtimestamp)
    def today(cls):
        "Construct a date from time.time()."
        # Delegate to fromtimestamp() with the current time.
        t = _time.time()
        return cls.fromtimestamp(t)
    today = classmethod(today)
def fromordinal(cls, n):
"""Contruct a date from a proleptic Gregorian ordinal.
January 1 of year 1 is day 1. Only the year, month and day are
non-zero in the result.
"""
y, m, d = _ord2ymd(n)
return cls(y, m, d)
fromordinal = classmethod(fromordinal)
# Conversions to string
def __repr__(self):
"Convert to formal string, for repr()."
return "%s(%d, %d, %d)" % ('datetime.' + self.__class__.__name__,
self.__year,
self.__month,
self.__day)
# XXX These shouldn't depend on time.localtime(), because that
# clips the usable dates to [1970 .. 2038). At least ctime() is
# easily done without using strftime() -- that's better too because
# strftime("%c", ...) is locale specific.
def ctime(self):
"Format a la ctime()."
return tmxxx(self.__year, self.__month, self.__day).ctime()
def strftime(self, fmt):
"Format using strftime()."
return _wrap_strftime(self, fmt, self.timetuple())
def isoformat(self):
"""Return the date formatted according to ISO.
This is 'YYYY-MM-DD'.
References:
- http://www.w3.org/TR/NOTE-datetime
- http://www.cl.cam.ac.uk/~mgk25/iso-time.html
"""
return "%04d-%02d-%02d" % (self.__year, self.__month, self.__day)
__str__ = isoformat
# Read-only field accessors
year = property(lambda self: self.__year,
doc="year (%d-%d)" % (MINYEAR, MAXYEAR))
month = property(lambda self: self.__month, doc="month (1-12)")
day = property(lambda self: self.__day, doc="day (1-31)")
# Standard conversions, __cmp__, __hash__ (and helpers)
def timetuple(self):
"Return local time tuple compatible with time.localtime()."
return _build_struct_time(self.__year, self.__month, self.__day,
0, 0, 0, -1)
def toordinal(self):
"""Return proleptic Gregorian ordinal for the year, month and day.
January 1 of year 1 is day 1. Only the year, month and day values
contribute to the result.
"""
return _ymd2ord(self.__year, self.__month, self.__day)
def replace(self, year=None, month=None, day=None):
"""Return a new date with new values for the specified fields."""
if year is None:
year = self.__year
if month is None:
month = self.__month
if day is None:
day = self.__day
_check_date_fields(year, month, day)
return date(year, month, day)
# Comparisons.
def __eq__(self, other):
if isinstance(other, date):
return self.__cmp(other) == 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
return False
def __ne__(self, other):
if isinstance(other, date):
return self.__cmp(other) != 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
return True
def __le__(self, other):
if isinstance(other, date):
return self.__cmp(other) <= 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
_cmperror(self, other)
def __lt__(self, other):
if isinstance(other, date):
return self.__cmp(other) < 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
_cmperror(self, other)
def __ge__(self, other):
if isinstance(other, date):
return self.__cmp(other) >= 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
_cmperror(self, other)
def __gt__(self, other):
if isinstance(other, date):
return self.__cmp(other) > 0
elif hasattr(other, "timetuple"):
return NotImplemented
else:
_cmperror(self, other)
def __cmp(self, other):
assert isinstance(other, date)
y, m, d = self.__year, self.__month, self.__day
y2, m2, d2 = other.__year, other.__month, other.__day
return cmp((y, m, d), (y2, m2, d2))
def __hash__(self):
"Hash."
return hash(self.__getstate())
# Computations
def _checkOverflow(self, year):
if not MINYEAR <= year <= MAXYEAR:
raise OverflowError("date +/-: result year %d not in %d..%d" %
(year, MINYEAR, MAXYEAR))
def __add__(self, other):
"Add a date to a timedelta."
if isinstance(other, timedelta):
t = tmxxx(self.__year,
self.__month,
self.__day + other.days)
self._checkOverflow(t.year)
result = date(t.year, t.month, t.day)
return result
raise TypeError
# XXX Should be 'return NotImplemented', but there's a bug in 2.2...
__radd__ = __add__
def __sub__(self, other):
"""Subtract two dates, or a date and a timedelta."""
if isinstance(other, timedelta):
return self + timedelta(-other.days)
if isinstance(other, date):
days1 = self.toordinal()
days2 = other.toordinal()
return timedelta(days1 - days2)
return NotImplemented
def weekday(self):
"Return day of the week, where Monday == 0 ... Sunday == 6."
return (self.toordinal() + 6) % 7
# Day-of-the-week and week-of-the-year, according to ISO
def isoweekday(self):
"Return day of the week, where Monday == 1 ... Sunday == 7."
# 1-Jan-0001 is a Monday
return self.toordinal() % 7 or 7
def isocalendar(self):
"""Return a 3-tuple containing ISO year, week number, and weekday.
The first ISO week of the year is the (Mon-Sun) week
containing the year's first Thursday; everything else derives
from that.
The first week is 1; Monday is 1 ... Sunday is 7.
ISO calendar algorithm taken from
http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm
"""
year = self.__year
week1monday = _isoweek1monday(year)
today = _ymd2ord(self.__year, self.__month, self.__day)
# Internally, week and day have origin 0
week, day = divmod(today - week1monday, 7)
if week < 0:
year -= 1
week1monday = _isoweek1monday(year)
week, day = divmod(today - week1monday, 7)
elif week >= 52:
if today >= _isoweek1monday(year+1):
year += 1
week = 0
return year, week+1, day+1
# Pickle support.
__safe_for_unpickling__ = True # For Python 2.2
def __getstate(self):
yhi, ylo = divmod(self.__year, 256)
return ("%c%c%c%c" % (yhi, ylo, self.__month, self.__day), )
def __setstate(self, string):
if len(string) != 4 or not (1 <= ord(string[2]) <= 12):
raise TypeError("not enough arguments")
yhi, ylo, self.__month, self.__day = map(ord, string)
self.__year = yhi * 256 + ylo
def __reduce__(self):
return (self.__class__, self.__getstate())
_date_class = date  # so functions w/ args named "date" can get at the class
# Class-level extrema and granularity of the date type.
date.min = date(1, 1, 1)
date.max = date(9999, 12, 31)
date.resolution = timedelta(days=1)
class tzinfo(object):
    """Abstract base class for time zone info classes.
    Subclasses must override the name(), utcoffset() and dst() methods.
    """
    def tzname(self, dt):
        "datetime -> string name of time zone."
        raise NotImplementedError("tzinfo subclass must override tzname()")
    def utcoffset(self, dt):
        "datetime -> minutes east of UTC (negative for west of UTC)"
        raise NotImplementedError("tzinfo subclass must override utcoffset()")
    def dst(self, dt):
        """datetime -> DST offset in minutes east of UTC.
        Return 0 if DST not in effect.  utcoffset() must include the DST
        offset.
        """
        raise NotImplementedError("tzinfo subclass must override dst()")
    def fromutc(self, dt):
        "datetime in UTC -> datetime in local time."
        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")
        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")
        dtoff = dt.utcoffset()
        if dtoff is None:
            raise ValueError("fromutc() requires a non-None utcoffset() "
                             "result")
        # See the long comment block at the end of this file for an
        # explanation of this algorithm.
        dtdst = dt.dst()
        if dtdst is None:
            raise ValueError("fromutc() requires a non-None dst() result")
        # Shift by the standard offset first, then correct by whatever
        # DST applies at the shifted time.
        delta = dtoff - dtdst
        if delta:
            dt += delta
            dtdst = dt.dst()
            if dtdst is None:
                raise ValueError("fromutc(): dt.dst gave inconsistent "
                                 "results; cannot convert")
        if dtdst:
            return dt + dtdst
        else:
            return dt
    # Pickle support.
    __safe_for_unpickling__ = True      # For Python 2.2
    def __reduce__(self):
        # Honor the optional __getinitargs__/__getstate__ protocol so
        # tzinfo subclasses pickle naturally.
        getinitargs = getattr(self, "__getinitargs__", None)
        if getinitargs:
            args = getinitargs()
        else:
            args = ()
        getstate = getattr(self, "__getstate__", None)
        if getstate:
            state = getstate()
        else:
            state = getattr(self, "__dict__", None) or None
        if state is None:
            return (self.__class__, args)
        else:
            return (self.__class__, args, state)
_tzinfo_class = tzinfo  # so functions w/ args named "tzinfo" can get at it
class time(object):
    """Time with time zone.
    Constructors:
    __new__()
    Operators:
    __repr__, __str__
    __cmp__, __hash__
    Methods:
    strftime()
    isoformat()
    utcoffset()
    tzname()
    dst()
    Properties (readonly):
    hour, minute, second, microsecond, tzinfo
    """
    def __new__(cls, hour=0, minute=0, second=0, microsecond=0, tzinfo=None):
        """Constructor.
        Arguments:
        hour, minute (required)
        second, microsecond (default to zero)
        tzinfo (default to None)
        A string first argument is the pickle path: it carries the packed
        6-byte state produced by __getstate().
        """
        self = object.__new__(cls)
        if isinstance(hour, str):
            # Pickle support
            self.__setstate(hour, minute or None)
            return self
        _check_tzinfo_arg(tzinfo)
        _check_time_fields(hour, minute, second, microsecond)
        self.__hour = hour
        self.__minute = minute
        self.__second = second
        self.__microsecond = microsecond
        self._tzinfo = tzinfo
        return self
    # Read-only field accessors
    hour = property(lambda self: self.__hour, doc="hour (0-23)")
    minute = property(lambda self: self.__minute, doc="minute (0-59)")
    second = property(lambda self: self.__second, doc="second (0-59)")
    microsecond = property(lambda self: self.__microsecond,
                           doc="microsecond (0-999999)")
    tzinfo = property(lambda self: self._tzinfo, doc="timezone info object")
    # Standard conversions, __hash__ (and helpers)
    # Comparisons.
    def __eq__(self, other):
        if isinstance(other, time):
            return self.__cmp(other) == 0
        else:
            return False
    def __ne__(self, other):
        if isinstance(other, time):
            return self.__cmp(other) != 0
        else:
            return True
    def __le__(self, other):
        if isinstance(other, time):
            return self.__cmp(other) <= 0
        else:
            _cmperror(self, other)
    def __lt__(self, other):
        if isinstance(other, time):
            return self.__cmp(other) < 0
        else:
            _cmperror(self, other)
    def __ge__(self, other):
        if isinstance(other, time):
            return self.__cmp(other) >= 0
        else:
            _cmperror(self, other)
    def __gt__(self, other):
        if isinstance(other, time):
            return self.__cmp(other) > 0
        else:
            _cmperror(self, other)
    def __cmp(self, other):
        # Naive comparison when both share the same tzinfo object (or
        # equal UTC offsets); otherwise compare in UTC terms.
        assert isinstance(other, time)
        mytz = self._tzinfo
        ottz = other._tzinfo
        myoff = otoff = None
        if mytz is ottz:
            base_compare = True
        else:
            myoff = self._utcoffset()
            otoff = other._utcoffset()
            base_compare = myoff == otoff
        if base_compare:
            return cmp((self.__hour, self.__minute, self.__second,
                        self.__microsecond),
                       (other.__hour, other.__minute, other.__second,
                        other.__microsecond))
        if myoff is None or otoff is None:
            # XXX Buggy in 2.2.2.
            raise TypeError("cannot compare naive and aware times")
        # Normalize hour/minute by the UTC offsets before comparing.
        myhhmm = self.__hour * 60 + self.__minute - myoff
        othhmm = other.__hour * 60 + other.__minute - otoff
        return cmp((myhhmm, self.__second, self.__microsecond),
                   (othhmm, other.__second, other.__microsecond))
    def __hash__(self):
        """Hash."""
        tzoff = self._utcoffset()
        if not tzoff: # zero or None
            return hash(self.__getstate()[0])
        # Hash the UTC-adjusted time so equal aware times hash equal.
        h, m = divmod(self.hour * 60 + self.minute - tzoff, 60)
        if 0 <= h < 24:
            return hash(time(h, m, self.second, self.microsecond))
        return hash((h, m, self.second, self.microsecond))
    # Conversion to string
    def _tzstr(self, sep=":"):
        """Return formatted timezone offset (+xx:xx) or None."""
        off = self._utcoffset()
        if off is not None:
            if off < 0:
                sign = "-"
                off = -off
            else:
                sign = "+"
            hh, mm = divmod(off, 60)
            assert 0 <= hh < 24
            off = "%s%02d%s%02d" % (sign, hh, sep, mm)
        return off
    def __repr__(self):
        """Convert to formal string, for repr()."""
        # Trailing zero fields are omitted from the repr.
        if self.__microsecond != 0:
            s = ", %d, %d" % (self.__second, self.__microsecond)
        elif self.__second != 0:
            s = ", %d" % self.__second
        else:
            s = ""
        s= "%s(%d, %d%s)" % ('datetime.' + self.__class__.__name__,
                             self.__hour, self.__minute, s)
        if self._tzinfo is not None:
            assert s[-1:] == ")"
            s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")"
        return s
    def isoformat(self):
        """Return the time formatted according to ISO.
        This is 'HH:MM:SS.mmmmmm+zz:zz', or 'HH:MM:SS+zz:zz' if
        self.microsecond == 0.
        """
        s = _format_time(self.__hour, self.__minute, self.__second,
                         self.__microsecond)
        tz = self._tzstr()
        if tz:
            s += tz
        return s
    __str__ = isoformat
    def strftime(self, fmt):
        """Format using strftime().  The date part of the timestamp passed
        to underlying strftime should not be used.
        """
        # The year must be >= 1900 else Python's strftime implementation
        # can raise a bogus exception.
        timetuple = (1900, 1, 1,
                     self.__hour, self.__minute, self.__second,
                     0, 1, -1)
        return _wrap_strftime(self, fmt, timetuple)
    # Timezone functions
    def utcoffset(self):
        """Return the timezone offset in minutes east of UTC (negative west of
        UTC)."""
        offset = _call_tzinfo_method(self._tzinfo, "utcoffset", None)
        offset = _check_utc_offset("utcoffset", offset)
        if offset is not None:
            offset = timedelta(minutes=offset)
        return offset
    # Return an integer (or None) instead of a timedelta (or None).
    def _utcoffset(self):
        offset = _call_tzinfo_method(self._tzinfo, "utcoffset", None)
        offset = _check_utc_offset("utcoffset", offset)
        return offset
    def tzname(self):
        """Return the timezone name.
        Note that the name is 100% informational -- there's no requirement that
        it mean anything in particular. For example, "GMT", "UTC", "-500",
        "-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies.
        """
        name = _call_tzinfo_method(self._tzinfo, "tzname", None)
        _check_tzname(name)
        return name
    def dst(self):
        """Return 0 if DST is not in effect, or the DST offset (in minutes
        eastward) if DST is in effect.
        This is purely informational; the DST offset has already been added to
        the UTC offset returned by utcoffset() if applicable, so there's no
        need to consult dst() unless you're interested in displaying the DST
        info.
        """
        offset = _call_tzinfo_method(self._tzinfo, "dst", None)
        offset = _check_utc_offset("dst", offset)
        if offset is not None:
            offset = timedelta(minutes=offset)
        return offset
    def replace(self, hour=None, minute=None, second=None, microsecond=None,
                tzinfo=True):
        """Return a new time with new values for the specified fields."""
        # tzinfo defaults to True (not None) so that None remains a
        # legitimate "remove the timezone" argument.
        if hour is None:
            hour = self.hour
        if minute is None:
            minute = self.minute
        if second is None:
            second = self.second
        if microsecond is None:
            microsecond = self.microsecond
        if tzinfo is True:
            tzinfo = self.tzinfo
        _check_time_fields(hour, minute, second, microsecond)
        _check_tzinfo_arg(tzinfo)
        return time(hour, minute, second, microsecond, tzinfo)
    # Return an integer (or None) instead of a timedelta (or None).
    def _dst(self):
        offset = _call_tzinfo_method(self._tzinfo, "dst", None)
        offset = _check_utc_offset("dst", offset)
        return offset
    def __nonzero__(self):
        # An aware time is false iff it is midnight UTC-equivalent.
        if self.second or self.microsecond:
            return 1
        offset = self._utcoffset() or 0
        return self.hour * 60 + self.minute - offset != 0
    # Pickle support.
    __safe_for_unpickling__ = True      # For Python 2.2
    def __getstate(self):
        # Pack h/m/s plus microsecond (3 bytes, big-endian) into 6 chars;
        # append tzinfo only when aware.
        us2, us3 = divmod(self.__microsecond, 256)
        us1, us2 = divmod(us2, 256)
        basestate = ("%c" * 6) % (self.__hour, self.__minute, self.__second,
                                  us1, us2, us3)
        if self._tzinfo is None:
            return (basestate,)
        else:
            return (basestate, self._tzinfo)
    def __setstate(self, string, tzinfo):
        if len(string) != 6 or ord(string[0]) >= 24:
            raise TypeError("an integer is required")
        self.__hour, self.__minute, self.__second, us1, us2, us3 = \
                                                            map(ord, string)
        self.__microsecond = (((us1 << 8) | us2) << 8) | us3
        self._tzinfo = tzinfo
    def __reduce__(self):
        return (time, self.__getstate())
_time_class = time  # so functions w/ args named "time" can get at the class
# Class-level extrema and granularity of the time type.
time.min = time(0, 0, 0)
time.max = time(23, 59, 59, 999999)
time.resolution = timedelta(microseconds=1)
class datetime(date):
# XXX needs docstrings
# See http://www.zope.org/Members/fdrake/DateTimeWiki/TimeZoneInfo
def __new__(cls, year, month=None, day=None, hour=0, minute=0, second=0,
microsecond=0, tzinfo=None):
if isinstance(year, str):
# Pickle support
self = date.__new__(cls, year[:4])
self.__setstate(year, month)
return self
_check_tzinfo_arg(tzinfo)
_check_time_fields(hour, minute, second, microsecond)
self = date.__new__(cls, year, month, day)
# XXX This duplicates __year, __month, __day for convenience :-(
self.__year = year
self.__month = month
self.__day = day
self.__hour = hour
self.__minute = minute
self.__second = second
self.__microsecond = microsecond
self._tzinfo = tzinfo
return self
# Read-only field accessors
hour = property(lambda self: self.__hour, doc="hour (0-23)")
minute = property(lambda self: self.__minute, doc="minute (0-59)")
second = property(lambda self: self.__second, doc="second (0-59)")
microsecond = property(lambda self: self.__microsecond,
doc="microsecond (0-999999)")
tzinfo = property(lambda self: self._tzinfo, doc="timezone info object")
def fromtimestamp(cls, t, tz=None):
"""Construct a datetime from a POSIX timestamp (like time.time()).
A timezone info object may be passed in as well.
"""
_check_tzinfo_arg(tz)
if tz is None:
converter = _time.localtime
else:
converter = _time.gmtime
y, m, d, hh, mm, ss, weekday, jday, dst = converter(t)
us = int((t % 1.0) * 1000000)
ss = min(ss, 59) # clamp out leap seconds if the platform has them
result = cls(y, m, d, hh, mm, ss, us, tz)
if tz is not None:
result = tz.fromutc(result)
return result
fromtimestamp = classmethod(fromtimestamp)
def utcfromtimestamp(cls, t):
"Construct a UTC datetime from a POSIX timestamp (like time.time())."
y, m, d, hh, mm, ss, weekday, jday, dst = _time.gmtime(t)
us = int((t % 1.0) * 1000000)
ss = min(ss, 59) # clamp out leap seconds if the platform has them
return cls(y, m, d, hh, mm, ss, us)
utcfromtimestamp = classmethod(utcfromtimestamp)
# XXX This is supposed to do better than we *can* do by using time.time(),
# XXX if the platform supports a more accurate way. The C implementation
# XXX uses gettimeofday on platforms that have it, but that isn't
# XXX available from Python. So now() may return different results
# XXX across the implementations.
def now(cls, tz=None):
"Construct a datetime from time.time() and optional time zone info."
t = _time.time()
return cls.fromtimestamp(t, tz)
now = classmethod(now)
def utcnow(cls):
"Construct a UTC datetime from time.time()."
t = _time.time()
return cls.utcfromtimestamp(t)
utcnow = classmethod(utcnow)
def combine(cls, date, time):
"Construct a datetime from a given date and a given time."
if not isinstance(date, _date_class):
raise TypeError("date argument must be a date instance")
if not isinstance(time, _time_class):
raise TypeError("time argument must be a time instance")
return cls(date.year, date.month, date.day,
time.hour, time.minute, time.second, time.microsecond,
time.tzinfo)
combine = classmethod(combine)
def timetuple(self):
"Return local time tuple compatible with time.localtime()."
dst = self._dst()
if dst is None:
dst = -1
elif dst:
dst = 1
return _build_struct_time(self.year, self.month, self.day,
self.hour, self.minute, self.second,
dst)
def utctimetuple(self):
"Return UTC time tuple compatible with time.gmtime()."
y, m, d = self.year, self.month, self.day
hh, mm, ss = self.hour, self.minute, self.second
offset = self._utcoffset()
if offset: # neither None nor 0
tm = tmxxx(y, m, d, hh, mm - offset)
y, m, d = tm.year, tm.month, tm.day
hh, mm = tm.hour, tm.minute
return _build_struct_time(y, m, d, hh, mm, ss, 0)
def date(self):
"Return the date part."
return date(self.__year, self.__month, self.__day)
def time(self):
"Return the time part, with tzinfo None."
return time(self.hour, self.minute, self.second, self.microsecond)
def timetz(self):
"Return the time part, with same tzinfo."
return time(self.hour, self.minute, self.second, self.microsecond,
self._tzinfo)
def replace(self, year=None, month=None, day=None, hour=None,
minute=None, second=None, microsecond=None, tzinfo=True):
"""Return a new datetime with new values for the specified fields."""
if year is None:
year = self.year
if month is None:
month = self.month
if day is None:
day = self.day
if hour is None:
hour = self.hour
if minute is None:
minute = self.minute
if second is None:
second = self.second
if microsecond is None:
microsecond = self.microsecond
if tzinfo is True:
tzinfo = self.tzinfo
_check_date_fields(year, month, day)
_check_time_fields(hour, minute, second, microsecond)
_check_tzinfo_arg(tzinfo)
return datetime(year, month, day, hour, minute, second,
microsecond, tzinfo)
def astimezone(self, tz):
if not isinstance(tz, tzinfo):
raise TypeError("tz argument must be an instance of tzinfo")
mytz = self.tzinfo
if mytz is None:
raise ValueError("astimezone() requires an aware datetime")
if tz is mytz:
return self
# Convert self to UTC, and attach the new time zone object.
myoffset = self.utcoffset()
if myoffset is None:
raise ValuError("astimezone() requires an aware datetime")
utc = (self - myoffset).replace(tzinfo=tz)
# Convert from UTC to tz's local time.
return tz.fromutc(utc)
# Ways to produce a string.
def ctime(self):
"Format a la ctime()."
t = tmxxx(self.__year, self.__month, self.__day, self.__hour,
self.__minute, self.__second)
return t.ctime()
def isoformat(self, sep='T'):
"""Return the time formatted according to ISO.
This is 'YYYY-MM-DD HH:MM:SS.mmmmmm', or 'YYYY-MM-DD HH:MM:SS' if
self.microsecond == 0.
If self.tzinfo is not None, the UTC offset is also attached, giving
'YYYY-MM-DD HH:MM:SS.mmmmmm+HH:MM' or 'YYYY-MM-DD HH:MM:SS+HH:MM'.
Optional argument sep specifies the separator between date and
time, default 'T'.
"""
s = ("%04d-%02d-%02d%c" % (self.__year, self.__month, self.__day,
sep) +
_format_time(self.__hour, self.__minute, self.__second,
self.__microsecond))
off = self._utcoffset()
if off is not None:
if off < 0:
sign = "-"
off = -off
else:
sign = "+"
hh, mm = divmod(off, 60)
s += "%s%02d:%02d" % (sign, hh, mm)
return s
def __repr__(self):
"Convert to formal string, for repr()."
L = [self.__year, self.__month, self.__day, # These are never zero
self.__hour, self.__minute, self.__second, self.__microsecond]
while L[-1] == 0:
del L[-1]
s = ", ".join(map(str, L))
s = "%s(%s)" % ('datetime.' + self.__class__.__name__, s)
if self._tzinfo is not None:
assert s[-1:] == ")"
s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")"
return s
def __str__(self):
"Convert to string, for str()."
return self.isoformat(sep=' ')
def utcoffset(self):
"""Return the timezone offset in minutes east of UTC (negative west of
UTC)."""
offset = _call_tzinfo_method(self._tzinfo, "utcoffset", self)
offset = _check_utc_offset("utcoffset", offset)
if offset is not None:
offset = timedelta(minutes=offset)
return offset
# Return an integer (or None) instead of a timedelta (or None).
def _utcoffset(self):
offset = _call_tzinfo_method(self._tzinfo, "utcoffset", self)
offset = _check_utc_offset("utcoffset", offset)
return offset
def tzname(self):
"""Return the timezone name.
Note that the name is 100% informational -- there's no requirement that
it mean anything in particular. For example, "GMT", "UTC", "-500",
"-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies.
"""
name = _call_tzinfo_method(self._tzinfo, "tzname", self)
_check_tzname(name)
return name
def dst(self):
"""Return 0 if DST is not in effect, or the DST offset (in minutes
eastward) if DST is in effect.
This is purely informational; the DST offset has already been added to
the UTC offset returned by utcoffset() if applicable, so there's no
need to consult dst() unless you're interested in displaying the DST
info.
"""
offset = _call_tzinfo_method(self._tzinfo, "dst", self)
offset = _check_utc_offset("dst", offset)
if offset is not None:
offset = timedelta(minutes=offset)
return offset
# Return an integer (or None) instead of a timedelta (or None).1573
def _dst(self):
offset = _call_tzinfo_method(self._tzinfo, "dst", self)
offset = _check_utc_offset("dst", offset)
return offset
# Comparisons.
def __eq__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) == 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
return False
def __ne__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) != 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
return True
def __le__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) <= 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __lt__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) < 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __ge__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) >= 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __gt__(self, other):
if isinstance(other, datetime):
return self.__cmp(other) > 0
elif hasattr(other, "timetuple") and not isinstance(other, date):
return NotImplemented
else:
_cmperror(self, other)
def __cmp(self, other):
assert isinstance(other, datetime)
mytz = self._tzinfo
ottz = other._tzinfo
myoff = otoff = None
if mytz is ottz:
base_compare = True
else:
if mytz is not None:
myoff = self._utcoffset()
if ottz is not None:
otoff = other._utcoffset()
base_compare = myoff == otoff
if base_compare:
return cmp((self.__year, self.__month, self.__day,
self.__hour, self.__minute, self.__second,
self.__microsecond),
(other.__year, other.__month, other.__day,
other.__hour, other.__minute, other.__second,
other.__microsecond))
if myoff is None or otoff is None:
# XXX Buggy in 2.2.2.
raise TypeError("cannot compare naive and aware datetimes")
# XXX What follows could be done more efficiently...
diff = self - other # this will take offsets into account
if diff.days < 0:
return -1
return diff and 1 or 0
def __add__(self, other):
"Add a datetime and a timedelta."
if not isinstance(other, timedelta):
return NotImplemented
t = tmxxx(self.__year,
self.__month,
self.__day + other.days,
self.__hour,
self.__minute,
self.__second + other.seconds,
self.__microsecond + other.microseconds)
self._checkOverflow(t.year)
result = datetime(t.year, t.month, t.day,
t.hour, t.minute, t.second,
t.microsecond, tzinfo=self._tzinfo)
return result
__radd__ = __add__
def __sub__(self, other):
"Subtract two datetimes, or a datetime and a timedelta."
if not isinstance(other, datetime):
if isinstance(other, timedelta):
return self + -other
return NotImplemented
days1 = self.toordinal()
days2 = other.toordinal()
secs1 = self.__second + self.__minute * 60 + self.__hour * 3600
secs2 = other.__second + other.__minute * 60 + other.__hour * 3600
base = timedelta(days1 - days2,
secs1 - secs2,
self.__microsecond - other.__microsecond)
if self._tzinfo is other._tzinfo:
return base
myoff = self._utcoffset()
otoff = other._utcoffset()
if myoff == otoff:
return base
if myoff is None or otoff is None:
raise TypeError, "cannot mix naive and timezone-aware time"
return base + timedelta(minutes = otoff-myoff)
def __hash__(self):
tzoff = self._utcoffset()
if tzoff is None:
return hash(self.__getstate()[0])
days = _ymd2ord(self.year, self.month, self.day)
seconds = self.hour * 3600 + (self.minute - tzoff) * 60 + self.second
return hash(timedelta(days, seconds, self.microsecond))
# Pickle support.
__safe_for_unpickling__ = True # For Python 2.2
def __getstate(self):
yhi, ylo = divmod(self.__year, 256)
us2, us3 = divmod(self.__microsecond, 256)
us1, us2 = divmod(us2, 256)
basestate = ("%c" * 10) % (yhi, ylo, self.__month, self.__day,
self.__hour, self.__minute, self.__second,
us1, us2, us3)
if self._tzinfo is None:
return (basestate,)
else:
return (basestate, self._tzinfo)
def __setstate(self, string, tzinfo):
(yhi, ylo, self.__month, self.__day, self.__hour,
self.__minute, self.__second, us1, us2, us3) = map(ord, string)
self.__year = yhi * 256 + ylo
self.__microsecond = (((us1 << 8) | us2) << 8) | us3
self._tzinfo = tzinfo
def __reduce__(self):
return (self.__class__, self.__getstate())
# Class-level extrema and granularity of the datetime type.
datetime.min = datetime(1, 1, 1)
datetime.max = datetime(9999, 12, 31, 23, 59, 59, 999999)
datetime.resolution = timedelta(microseconds=1)
def _isoweek1monday(year):
    """Return the ordinal of the Monday starting ISO week 1 of *year*.

    ISO week 1 is the Mon-Sun week containing the year's first Thursday.
    """
    # XXX This could be done more efficiently
    THURSDAY = 3
    jan1 = _ymd2ord(year, 1, 1)
    jan1_weekday = (jan1 + 6) % 7       # Monday == 0 (see weekday() above)
    monday = jan1 - jan1_weekday
    if jan1_weekday > THURSDAY:
        # Jan 1 falls Fri-Sun: that week belongs to the previous ISO year.
        monday += 7
    return monday
"""
Some time zone algebra. For a datetime x, let
x.n = x stripped of its timezone -- its naive time.
x.o = x.utcoffset(), and assuming that doesn't raise an exception or
return None
x.d = x.dst(), and assuming that doesn't raise an exception or
return None
x.s = x's standard offset, x.o - x.d
Now some derived rules, where k is a duration (timedelta).
1. x.o = x.s + x.d
This follows from the definition of x.s.
2. If x and y have the same tzinfo member, x.s = y.s.
This is actually a requirement, an assumption we need to make about
sane tzinfo classes.
3. The naive UTC time corresponding to x is x.n - x.o.
This is again a requirement for a sane tzinfo class.
4. (x+k).s = x.s
This follows from #2, and the fact that datetime+timedelta preserves tzinfo.
5. (x+k).n = x.n + k
Again follows from how arithmetic is defined.
Now we can explain tz.fromutc(x). Let's assume it's an interesting case
(meaning that the various tzinfo methods exist, and don't blow up or return
None when called).
The function wants to return a datetime y with timezone tz, equivalent to x.
x is already in UTC.
By #3, we want
y.n - y.o = x.n [1]
The algorithm starts by attaching tz to x.n, and calling that y. So
x.n = y.n at the start. Then it wants to add a duration k to y, so that [1]
becomes true; in effect, we want to solve [2] for k:
(y+k).n - (y+k).o = x.n [2]
By #1, this is the same as
(y+k).n - ((y+k).s + (y+k).d) = x.n [3]
By #5, (y+k).n = y.n + k, which equals x.n + k because x.n=y.n at the start.
Substituting that into [3],
x.n + k - (y+k).s - (y+k).d = x.n; the x.n terms cancel, leaving
k - (y+k).s - (y+k).d = 0; rearranging,
k = (y+k).s - (y+k).d; by #4, (y+k).s == y.s, so
k = y.s - (y+k).d
On the RHS, (y+k).d can't be computed directly, but y.s can be, and we
approximate k by ignoring the (y+k).d term at first. Note that k can't be
very large, since all offset-returning methods return a duration of magnitude
less than 24 hours. For that reason, if y is firmly in std time, (y+k).d must
be 0, so ignoring it has no consequence then.
In any case, the new value is
z = y + y.s [4]
It's helpful to step back and look at [4] from a higher level: it's simply
mapping from UTC to tz's standard time.
At this point, if
z.n - z.o = x.n [5]
we have an equivalent time, and are almost done. The insecurity here is
at the start of daylight time. Picture US Eastern for concreteness. The wall
time jumps from 1:59 to 3:00, and wall hours of the form 2:MM don't make good
sense then. The docs ask that an Eastern tzinfo class consider such a time to
be EDT (because it's "after 2"), which is a redundant spelling of 1:MM EST
on the day DST starts. We want to return the 1:MM EST spelling because that's
the only spelling that makes sense on the local wall clock.
In fact, if [5] holds at this point, we do have the standard-time spelling,
but that takes a bit of proof. We first prove a stronger result. What's the
difference between the LHS and RHS of [5]? Let
diff = x.n - (z.n - z.o) [6]
Now
z.n = by [4]
(y + y.s).n = by #5
y.n + y.s = since y.n = x.n
x.n + y.s = since z and y have the same tzinfo member,
y.s = z.s by #2
x.n + z.s
Plugging that back into [6] gives
diff =
x.n - ((x.n + z.s) - z.o) = expanding
x.n - x.n - z.s + z.o = cancelling
- z.s + z.o = by #2
z.d
So diff = z.d.
If [5] is true now, diff = 0, so z.d = 0 too, and we have the standard-time
spelling we wanted in the endcase described above. We're done. Contrarily,
if z.d = 0, then we have a UTC equivalent, and are also done.
If [5] is not true now, diff = z.d != 0, and z.d is the offset we need to
add to z (in effect, z is in tz's standard time, and we need to shift the
local clock into tz's daylight time).
Let
z' = z + z.d = z + diff [7]
and we can again ask whether
z'.n - z'.o = x.n [8]
If so, we're done. If not, the tzinfo class is insane, according to the
assumptions we've made. This also requires a bit of proof. As before, let's
compute the difference between the LHS and RHS of [8] (and skipping some of
the justifications for the kinds of substitutions we've done several times
already):
diff' = x.n - (z'.n - z'.o) = replacing z'.n via [7]
x.n - (z.n + diff - z'.o) = replacing diff via [6]
x.n - (z.n + x.n - (z.n - z.o) - z'.o) =
x.n - z.n - x.n + z.n - z.o + z'.o = cancel x.n
- z.n + z.n - z.o + z'.o = cancel z.n
- z.o + z'.o = #1 twice
-z.s - z.d + z'.s + z'.d = z and z' have same tzinfo
z'.d - z.d
So z' is UTC-equivalent to x iff z'.d = z.d at this point. If they are equal,
we've found the UTC-equivalent so are done. In fact, we stop with [7] and
return z', not bothering to compute z'.d.
How could z.d and z'.d differ? z' = z + z.d [7], so merely moving z' by
a dst() offset, and starting *from* a time already in DST (we know z.d != 0),
would have to change the result dst() returns: we start in DST, and moving
a little further into it takes us out of DST.
There isn't a sane case where this can happen. The closest it gets is at
the end of DST, where there's an hour in UTC with no spelling in a hybrid
tzinfo class. In US Eastern, that's 5:MM UTC = 0:MM EST = 1:MM EDT. During
that hour, on an Eastern clock 1:MM is taken as being in standard time (6:MM
UTC) because the docs insist on that, but 0:MM is taken as being in daylight
time (4:MM UTC). There is no local time mapping to 5:MM UTC. The local
clock jumps from 1:59 back to 1:00 again, and repeats the 1:MM hour in
standard time. Since that's what the local clock *does*, we want to map both
UTC hours 5:MM and 6:MM to 1:MM Eastern. The result is ambiguous
in local time, but so it goes -- it's the way the local clock works.
When x = 5:MM UTC is the input to this algorithm, x.o=0, y.o=-5 and y.d=0,
so z=0:MM. z.d=60 (minutes) then, so [5] doesn't hold and we keep going.
z' = z + z.d = 1:MM then, and z'.d=0, and z'.d - z.d = -60 != 0 so [8]
(correctly) concludes that z' is not UTC-equivalent to x.
Because we know z.d said z was in daylight time (else [5] would have held and
we would have stopped then), and we know z.d != z'.d (else [8] would have held
and we would have stopped then), and there are only 2 possible values dst() can
return in Eastern, it follows that z'.d must be 0 (which it is in the example,
but the reasoning doesn't depend on the example -- it depends on there being
two possible dst() outcomes, one zero and the other non-zero). Therefore
z' must be in standard time, and is the spelling we want in this case.
Note again that z' is not UTC-equivalent as far as the hybrid tzinfo class is
concerned (because it takes z' as being in standard time rather than the
daylight time we intend here), but returning it gives the real-life "local
clock repeats an hour" behavior when mapping the "unspellable" UTC hour into
tz.
When the input is 6:MM, z=1:MM and z.d=0, and we stop at once, again with
the 1:MM standard time spelling we want.
So how can this break? One of the assumptions must be violated. Two
possibilities:
1) [2] effectively says that y.s is invariant across all y belong to a given
time zone. This isn't true if, for political reasons or continental drift,
a region decides to change its base offset from UTC.
2) There may be versions of "double daylight" time where the tail end of
the analysis gives up a step too early. I haven't thought about that
enough to say.
In any case, it's clear that the default fromutc() is strong enough to handle
"almost all" time zones: so long as the standard offset is invariant, it
doesn't matter if daylight time transition points change from year to year, or
if daylight time is skipped in some years; it doesn't matter how large or
small dst() may get within its bounds; and it doesn't even matter if some
perverse time zone returns a negative dst()). So a breaking case must be
pretty bizarre, and a tzinfo subclass can override fromutc() if it is.
"""
| Python |
import parser, operator
def binaryVisit(operation):
    """Build a visitor method that constant-folds a binary AST node.

    The returned method replaces ``left <op> right`` with a single
    ASTConst when both operands are constants of the same type;
    otherwise the node is returned unchanged.
    """
    def visit(self, node):
        lhs, rhs = node.left, node.right
        both_const = (isinstance(lhs, parser.ASTConst)
                      and isinstance(rhs, parser.ASTConst))
        # same-type check avoids folding e.g. int <op> float surprises
        if both_const and type(lhs.value) == type(rhs.value):
            return parser.ASTConst(operation(lhs.value, rhs.value))
        return node
    return visit
def bitopVisit(astclass, operation):
    """Build a visitor method that folds an n-ary bit-op node.

    Constant children at the head of the node are reduced with
    ``operation``; if every child is constant the whole node collapses
    to one ASTConst, otherwise a new ``astclass`` node is built from
    the folded prefix plus the remaining children.
    """
    def compress(values):
        # fold the accumulated constant values left-to-right
        acc = values[0]
        for v in values[1:]:
            acc = operation(acc, v)
        return acc
    def visit(self, node):
        consts = []
        for idx, child in enumerate(node.nodes):
            if not isinstance(child, parser.ASTConst):
                if consts:
                    # fold the constant prefix, keep the rest untouched
                    return astclass([compress(consts)] + node.nodes[idx:])
                return node
            consts.append(child.value)
        return parser.ASTConst(compress(consts))
    return visit
class Folder:
    # Constant-folding visitor used with ast.mutate(); each visitX
    # method returns the (possibly replaced) node.  Node types without
    # an explicit rule fall back to defaultvisit via __getattr__.
    def __init__(self):
        pass
    def defaultvisit(self, node):
        # no folding rule for this node type: leave it unchanged
        return node
    def __getattr__(self, attrname):
        # any visitFoo we don't define explicitly becomes a no-op
        if attrname.startswith('visit'):
            return self.defaultvisit
        raise AttributeError(attrname)
    visitAdd = binaryVisit(operator.add)
    visitSub = binaryVisit(operator.sub)
    visitMul = binaryVisit(operator.mul)
    visitDiv = binaryVisit(operator.div)  # Python 2 classic division
    visitBitand = bitopVisit(parser.ASTBitand, operator.and_)
    visitBitor = bitopVisit(parser.ASTBitor, operator.or_)
    visitBitxor = bitopVisit(parser.ASTBitxor, operator.xor)
    def visitTuple(self, node):
        # fold a tuple display whose elements are all constants
        contents = []
        for n in node.nodes:
            if not isinstance(n, parser.ASTConst):
                return node
            contents.append(n.value)
        return parser.ASTConst(tuple(contents))
    def visitDiscard(self, node):
        # NOTE(review): this tests the Discard node itself, which is
        # never an ASTConst -- presumably node.expr was intended, so
        # constant-expression statements are never removed; confirm.
        if isinstance(node, parser.ASTConst):
            return None
        else:
            return node
def hook(ast, enc):
    # Compiler-hook entry point: fold constants throughout the tree.
    # `enc` (the source encoding) is part of the hook signature but is
    # not needed for folding.
    return ast.mutate(Folder())
| Python |
"""This module is always available. It provides access to mathematical
functions for complex numbers."""
# Complex math module
# much code borrowed from mathmodule.c
import math
from math import e, pi
# constants
_one = complex(1., 0.)    # complex 1
_half = complex(0.5, 0.)  # complex 1/2
_i = complex(0., 1.)      # imaginary unit
_halfi = complex(0., 0.5) # i/2
# internal function not available from Python
def _prodi(x):
x = complex(x, 0)
real = -x.imag
imag = x.real
return complex(real, imag)
def acos(x):
    """acos(x)

    Return the arc cosine of x."""
    # acos(x) = -i * log(x + i*sqrt(1 - x**2))
    inner = x + _i * sqrt(_one - x * x)
    return -_prodi(log(inner))
def acosh(x):
    """acosh(x)

    Return the hyperbolic arccosine of x."""
    # Fixed: removed a dead `z = complex()` assignment whose value was
    # immediately overwritten.
    # The sqrt(1/2) factor folded into the log is compensated by the
    # final doubling (z + z == 2*z).
    z = sqrt(_half)
    z = log(z*(sqrt(x+_one)+sqrt(x-_one)))
    return z+z
def asin(x):
    """asin(x)

    Return the arc sine of x."""
    # asin(x) = -i * log(sqrt(1 - x**2) + i*x)
    root = sqrt(_one - x * x)
    return -_prodi(log(root + _prodi(x)))
def asinh(x):
    """asinh(x)

    Return the hyperbolic arc sine of x."""
    # Fixed: removed a dead `z = complex()` assignment whose value was
    # immediately overwritten.
    # The sqrt(1/2) factor folded into the log is compensated by the
    # final doubling (z + z == 2*z).
    z = sqrt(_half)
    z = log((z * (sqrt(x+_i)+sqrt((x-_i))) ))
    return z+z
def atan(x):
    """atan(x)

    Return the arc tangent of x."""
    # atan(x) = (i/2) * log((i + x) / (i - x))
    ratio = (_i + x) / (_i - x)
    return _halfi * log(ratio)
def atanh(x):
    """atanh(x)

    Return the hyperbolic arc tangent of x."""
    # atanh(x) = log((1 + x) / (1 - x)) / 2
    ratio = (_one + x) / (_one - x)
    return _half * log(ratio)
def cos(x):
    """cos(x)

    Return the cosine of x."""
    # cos(a+bi) = cos a cosh b - i sin a sinh b
    z = complex(x, 0)
    re = math.cos(z.real) * math.cosh(z.imag)
    im = -math.sin(z.real) * math.sinh(z.imag)
    return complex(re, im)
def cosh(x):
    """cosh(x)

    Return the hyperbolic cosine of x."""
    # cosh(a+bi) = cosh a cos b + i sinh a sin b
    z = complex(x, 0)
    re = math.cos(z.imag) * math.cosh(z.real)
    im = math.sin(z.imag) * math.sinh(z.real)
    return complex(re, im)
def exp(x):
    """exp(x)

    Return the exponential value e**x."""
    # e**(a+bi) = e**a * (cos b + i sin b)
    z = complex(x, 0)
    mag = math.exp(z.real)
    return complex(mag * math.cos(z.imag), mag * math.sin(z.imag))
def log(x, base=None):
    """log(x[, base])

    Return the logarithm of x: natural log unless a base is given."""
    if base is not None:
        # change of base: log_b(x) = ln(x) / ln(base)
        return log(x) / log(base)
    z = complex(x, 0)
    # ln(x) = ln|x| + i*arg(x)
    magnitude = math.hypot(z.real, z.imag)
    phase = math.atan2(z.imag, z.real)
    return complex(math.log(magnitude), phase)
def log10(x):
    """log10(x)

    Return the base-10 logarithm of x."""
    z = complex(x, 0)
    magnitude = math.hypot(z.real, z.imag)
    # the phase is rescaled by ln(10) to match the base-10 real part
    phase = math.atan2(z.imag, z.real) / math.log(10.)
    return complex(math.log10(magnitude), phase)
def sin(x):
    """sin(x)

    Return the sine of x."""
    # sin(a+bi) = sin a cosh b + i cos a sinh b
    z = complex(x, 0)
    re = math.sin(z.real) * math.cosh(z.imag)
    im = math.cos(z.real) * math.sinh(z.imag)
    return complex(re, im)
def sinh(x):
    """sinh(x)

    Return the hyperbolic sine of x."""
    # sinh(a+bi) = sinh a cos b + i cosh a sin b
    z = complex(x, 0)
    re = math.cos(z.imag) * math.sinh(z.real)
    im = math.sin(z.imag) * math.cosh(z.real)
    return complex(re, im)
def sqrt(x):
    """sqrt(x)

    Return the square root of x."""
    z = complex(x, 0)
    if z.real == 0. and z.imag == 0.:
        return complex(0, 0)
    # s is the magnitude of the dominant component of the root;
    # d is the other component, derived from  2*s*d == imag.
    s = math.sqrt(0.5*(math.fabs(z.real) + math.hypot(z.real, z.imag)))
    d = 0.5*z.imag/s
    if z.real > 0.:
        return complex(s, d)
    if z.imag >= 0.:
        return complex(d, s)
    # lower-left quadrant: pick the root with non-positive imag part
    return complex(-d, -s)
def tan(x):
    """tan(x)

    Return the tangent of x."""
    # evaluate sin/cos component-wise and divide as complex numbers
    z = complex(x, 0)
    sr, cr = math.sin(z.real), math.cos(z.real)
    shi, chi = math.sinh(z.imag), math.cosh(z.imag)
    num_re, num_im = sr * chi, cr * shi          # sin(z)
    den_re, den_im = cr * chi, -sr * shi         # cos(z)
    denom = den_re*den_re + den_im*den_im
    return complex((num_re*den_re + num_im*den_im) / denom,
                   (num_im*den_re - num_re*den_im) / denom)
def tanh(x):
    """tanh(x)

    Return the hyperbolic tangent of x."""
    # evaluate sinh/cosh component-wise and divide as complex numbers
    # (local renamed from `chr`, which shadowed the builtin)
    z = complex(x, 0)
    si, ci = math.sin(z.imag), math.cos(z.imag)
    shr, chr_ = math.sinh(z.real), math.cosh(z.real)
    num_re, num_im = ci * shr, si * chr_         # sinh(z)
    den_re, den_im = ci * chr_, si * shr         # cosh(z)
    denom = den_re*den_re + den_im*den_im
    return complex((num_re*den_re + num_im*den_im) / denom,
                   (num_im*den_re - num_re*den_im) / denom)
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Bah, this would be easier to test if curses/terminfo didn't have so
# much non-introspectable global state.
from pyrepl import keymap
from pyrepl.console import Event
import curses
from termios import tcgetattr, VERASE
# Map from pyrepl's symbolic key names to the terminfo capability
# names whose escape sequences identify those keys on this terminal.
_keynames = {
    "delete" : "kdch1",
    "down" : "kcud1",
    "end" : "kend",
    "enter" : "kent",
    "f1" : "kf1", "f2" : "kf2", "f3" : "kf3", "f4" : "kf4",
    "f5" : "kf5", "f6" : "kf6", "f7" : "kf7", "f8" : "kf8",
    "f9" : "kf9", "f10" : "kf10", "f11" : "kf11", "f12" : "kf12",
    "f13" : "kf13", "f14" : "kf14", "f15" : "kf15", "f16" : "kf16",
    "f17" : "kf17", "f18" : "kf18", "f19" : "kf19", "f20" : "kf20",
    "home" : "khome",
    "insert" : "kich1",
    "left" : "kcub1",
    "page down" : "knp",
    "page up" : "kpp",
    "right" : "kcuf1",
    "up" : "kcuu1",
    }
class EventQueue(object):
    # Turns raw terminal input characters into pyrepl key Events,
    # recognising multi-byte key sequences via terminfo capabilities.
    def __init__(self, fd):
        # fd: the tty file descriptor; its termios VERASE setting gives
        # the user's configured erase (backspace) character.
        our_keycodes = {}
        for key, tiname in _keynames.items():
            keycode = curses.tigetstr(tiname)
            if keycode:
                our_keycodes[keycode] = unicode(key)
        our_keycodes[tcgetattr(fd)[6][VERASE]] = u'backspace'
        # self.k walks down the compiled keymap trie as chars arrive;
        # self.ck is the trie root, restored after each emitted event.
        self.k = self.ck = keymap.compile_keymap(our_keycodes)
        self.events = []
        self.buf = []
    def get(self):
        # Pop and return the oldest pending event, or None.
        if self.events:
            return self.events.pop(0)
        else:
            return None
    def empty(self):
        return not self.events
    def insert(self, event):
        self.events.append(event)
    def push(self, char):
        # Feed one input character; may queue zero or more events.
        if char in self.k:
            k = self.k[char]
            if isinstance(k, dict):
                # prefix of a longer sequence: buffer and descend
                self.buf.append(char)
                self.k = k
            else:
                # a complete key sequence was recognised
                self.events.append(Event('key', k, ''.join(self.buf) + char))
                self.buf = []
                self.k = self.ck
        elif self.buf:
            # dead end: flush buffered chars as plain keys, then retry
            # this char against the trie root
            self.events.extend([Event('key', c, c) for c in self.buf])
            self.buf = []
            self.k = self.ck
            self.push(char)
        else:
            self.events.append(Event('key', char, char))
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Wedge pyrepl behaviour into cmd.Cmd-derived classes.
replize, when given a subclass of cmd.Cmd, returns a class that
behaves almost identically to the supplied class, except that it uses
pyrepl instead of raw_input.
It was designed to let you do this:
>>> import pdb
>>> from pyrepl import replize
>>> pdb.Pdb = replize(pdb.Pdb)
which is in fact done by the `pythoni' script that comes with
pyrepl."""
from __future__ import nested_scopes
from pyrepl import completing_reader as cr, reader, completer
from pyrepl.completing_reader import CompletingReader as CR
import cmd
class CmdReader(CR):
    # CompletingReader variant for cmd.Cmd loops: a plain newline
    # accepts the line, and completion candidates come from a fixed
    # list supplied at construction time.
    def collect_keymap(self):
        return super(CmdReader, self).collect_keymap() + (
            ("\\M-\\n", "invalid-key"),
            ("\\n", "accept"))

    # stash the base __init__ so our own __init__ (which takes a
    # different signature) can invoke it; Python 2 unbound-method idiom,
    # hence the explicit self argument below.
    CR_init = CR.__init__
    def __init__(self, completions):
        self.CR_init(self)
        self.completions = completions
    def get_completions(self, stem):
        # only offer completions for the word ending at the cursor
        if len(stem) != self.pos:
            return []
        return cr.uniqify([s for s in self.completions
                           if s.startswith(stem)])
def replize(klass, history_across_invocations=1):
"""Return a subclass of the cmd.Cmd-derived klass that uses
pyrepl instead of readline.
Raises a ValueError if klass does not derive from cmd.Cmd.
The optional history_across_invocations parameter (default 1)
controls whether instances of the returned class share
histories."""
completions = [s[3:]
for s in completer.get_class_members(klass)
if s.startswith("do_")]
if not issubclass(klass, cmd.Cmd):
raise Exception
# if klass.cmdloop.im_class is not cmd.Cmd:
# print "this may not work"
class CmdRepl(klass):
k_init = klass.__init__
if history_across_invocations:
_CmdRepl__history = []
def __init__(self, *args, **kw):
self.k_init(*args, **kw)
self.__reader = CmdReader(completions)
self.__reader.history = CmdRepl._CmdRepl__history
self.__reader.historyi = len(CmdRepl._CmdRepl__history)
else:
def __init__(self, *args, **kw):
self.k_init(*args, **kw)
self.__reader = CmdReader(completions)
def cmdloop(self, intro=None):
self.preloop()
if intro is not None:
self.intro = intro
if self.intro:
print self.intro
stop = None
while not stop:
if self.cmdqueue:
line = self.cmdqueue[0]
del self.cmdqueue[0]
else:
try:
self.__reader.ps1 = self.prompt
line = self.__reader.readline()
except EOFError:
line = "EOF"
line = self.precmd(line)
stop = self.onecmd(line)
stop = self.postcmd(stop, line)
self.postloop()
CmdRepl.__name__ = "replize(%s.%s)"%(klass.__module__, klass.__name__)
return CmdRepl
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# the pygame console is currently thoroughly broken.
# there's a fundamental difference from the UnixConsole: here we're
# the terminal emulator too, in effect. This means, e.g., for pythoni
# we really need a separate process (or thread) to monitor for ^C
# during command execution and zap the executor process. Making this
# work on non-Unix is expected to be even more entertaining.
from pygame.locals import *
from pyrepl.console import Console, Event
from pyrepl import pygame_keymap
import pygame
import types
# pixel margins around the text area of the 800x600 window
lmargin = 5
rmargin = 5
tmargin = 5
bmargin = 5

try:
    bool
except NameError:
    # fallback for ancient Pythons without the bool builtin
    def bool(x):
        return not not x
# pygame key constants that are pure modifier keys; key-down events
# for these are skipped by get_event()
modcolors = {K_LCTRL:1,
             K_RCTRL:1,
             K_LMETA:1,
             K_RMETA:1,
             K_LALT:1,
             K_RALT:1,
             K_LSHIFT:1,
             K_RSHIFT:1}
class colors:
    # RGB palette used when rendering the console
    fg = 250,240,230      # foreground text
    bg = 5, 5, 5          # background
    cursor = 230, 0, 230  # cursor block
    margin = 5, 5, 15     # window margins
class FakeStdout:
    """File-like stand-in for sys.stdout that renders via the console."""
    def __init__(self, con):
        # con: the console object that actually draws the text
        self.con = con
    def write(self, text):
        # delegate straight to the console's write
        self.con.write(text)
    def flush(self):
        # nothing is buffered at this level
        pass
class FakeStdin:
    """File-like stand-in for sys.stdin that reads via a pyrepl Reader."""
    def __init__(self, con):
        self.con = con
    def read(self, n=None):
        # argh!  no sane way to do raw reads through the reader
        raise NotImplementedError
    def readline(self, n=None):
        from reader import Reader
        try:
            # this isn't quite right: it will clobber any prompt that's
            # been printed.  Not sure how to get around this...
            return Reader(self.con).readline()
        except EOFError:
            return ''
class PyGameConsole(Console):
    """Console implementation that draws into a pygame window.

    Attributes:

      (keymap),
      (fd),
      screen,
      height,
      width,
    """
    def __init__(self):
        # fixed-size 800x600 window; all pixel arithmetic below assumes it
        self.pygame_screen = pygame.display.set_mode((800, 600))
        pygame.font.init()
        pygame.key.set_repeat(500, 30)
        # hard-coded monospace font path -- raises if the file is absent
        self.font = pygame.font.Font(
            "/usr/X11R6/lib/X11/fonts/TTF/luximr.ttf", 15)
        self.fw, self.fh = self.fontsize = self.font.size("X")
        self.cursor = pygame.Surface(self.fontsize)
        self.cursor.fill(colors.cursor)
        self.clear()
        self.curs_vis = 1
        self.height, self.width = self.getheightwidth()
        pygame.display.update()
        # only key-down events are of interest to the console
        pygame.event.set_allowed(None)
        pygame.event.set_allowed(KEYDOWN)
    def install_keymap(self, keymap):
        """Install a given keymap.

        keymap is a tuple of 2-element tuples; each small tuple is a
        pair (keyspec, event-name).  The format for keyspec is
        modelled on that used by readline (so read that manual for
        now!)."""
        self.k = self.keymap = pygame_keymap.compile_keymap(keymap)
    def char_rect(self, x, y):
        # bounding rect of the character cell at column x, row y
        return self.char_pos(x, y), self.fontsize
    def char_pos(self, x, y):
        # pixel position of the character cell at column x, row y,
        # accounting for the current scroll offset
        return (lmargin + x*self.fw,
                tmargin + y*self.fh + self.cur_top + self.scroll)
    def paint_margin(self):
        # repaint the four margin strips around the text area
        s = self.pygame_screen
        c = colors.margin
        s.fill(c, [0, 0, 800, tmargin])
        s.fill(c, [0, 0, lmargin, 600])
        s.fill(c, [0, 600 - bmargin, 800, bmargin])
        s.fill(c, [800 - rmargin, 0, lmargin, 600])
    def refresh(self, screen, (cx, cy)):
        # redraw the whole editing area; (cx, cy) is the cursor cell
        self.screen = screen
        self.pygame_screen.fill(colors.bg,
                                [0, tmargin + self.cur_top + self.scroll,
                                 800, 600])
        self.paint_margin()
        line_top = self.cur_top
        width, height = self.fontsize
        self.cxy = (cx, cy)
        cp = self.char_pos(cx, cy)
        # scroll if the cursor has moved off the top or bottom edge
        if cp[1] < tmargin:
            self.scroll = - (cy*self.fh + self.cur_top)
            self.repaint()
        elif cp[1] + self.fh > 600 - bmargin:
            self.scroll += (600 - bmargin) - (cp[1] + self.fh)
            self.repaint()
        if self.curs_vis:
            self.pygame_screen.blit(self.cursor, self.char_pos(cx, cy))
        for line in screen:
            # only render lines that fall inside the visible area
            if 0 <= line_top + self.scroll <= (600 - bmargin - tmargin - self.fh):
                if line:
                    ren = self.font.render(line, 1, colors.fg)
                    self.pygame_screen.blit(ren, (lmargin,
                                                  tmargin + line_top + self.scroll))
            line_top += self.fh
        pygame.display.update()
    def prepare(self):
        # reset per-readline state before an editing session starts
        self.cmd_buf = ''
        self.k = self.keymap
        self.height, self.width = self.getheightwidth()
        self.curs_vis = 1
        self.cur_top = self.pos[0]
        self.event_queue = []
    def restore(self):
        pass
    def blit_a_char(self, linen, charn):
        # redraw the single character at row linen, column charn
        line = self.screen[linen]
        if charn < len(line):
            text = self.font.render(line[charn], 1, colors.fg)
            self.pygame_screen.blit(text, self.char_pos(charn, linen))
    def move_cursor(self, x, y):
        cp = self.char_pos(x, y)
        if cp[1] < tmargin or cp[1] + self.fh > 600 - bmargin:
            # target cell is off-screen: force a full refresh instead
            self.event_queue.append(Event('refresh', '', ''))
        else:
            if self.curs_vis:
                # erase the cursor at its old cell, restore that char
                cx, cy = self.cxy
                self.pygame_screen.fill(colors.bg, self.char_rect(cx, cy))
                self.blit_a_char(cy, cx)
                # draw the cursor block at the new cell
                self.pygame_screen.blit(self.cursor, cp)
                self.blit_a_char(y, x)
                pygame.display.update()
            self.cxy = (x, y)
    def set_cursor_vis(self, vis):
        self.curs_vis = vis
        if vis:
            self.move_cursor(*self.cxy)
        else:
            # hide: paint background over the cursor, restore the char
            cx, cy = self.cxy
            self.pygame_screen.fill(colors.bg, self.char_rect(cx, cy))
            self.blit_a_char(cy, cx)
            pygame.display.update()
    def getheightwidth(self):
        """Return (height, width) where height and width are the height
        and width of the terminal window in characters."""
        return ((600 - tmargin - bmargin)/self.fh,
                (800 - lmargin - rmargin)/self.fw)
    def tr_event(self, pyg_event):
        # translate a pygame key event via the keymap; falls back to
        # the raw key code, then to "invalid-key"
        shift = bool(pyg_event.mod & KMOD_SHIFT)
        ctrl = bool(pyg_event.mod & KMOD_CTRL)
        meta = bool(pyg_event.mod & (KMOD_ALT|KMOD_META))
        try:
            return self.k[(pyg_event.unicode, meta, ctrl)], pyg_event.unicode
        except KeyError:
            try:
                return self.k[(pyg_event.key, meta, ctrl)], pyg_event.unicode
            except KeyError:
                return "invalid-key", pyg_event.unicode
    def get_event(self, block=1):
        """Return an Event instance.  Returns None if |block| is false
        and there is no event pending, otherwise waits for the
        completion of an event."""
        while 1:
            if self.event_queue:
                return self.event_queue.pop(0)
            elif block:
                pyg_event = pygame.event.wait()
            else:
                pyg_event = pygame.event.poll()
                if pyg_event.type == NOEVENT:
                    return
            # skip bare modifier presses
            if pyg_event.key in modcolors:
                continue
            k, c = self.tr_event(pyg_event)
            self.cmd_buf += c.encode('ascii', 'replace')
            self.k = k
            # a dict means we are mid-way through a multi-key sequence;
            # anything else is a completed command name
            if not isinstance(k, types.DictType):
                e = Event(k, self.cmd_buf, [])
                self.k = self.keymap
                self.cmd_buf = ''
                return e
    def beep(self):
        # uhh, can't be bothered now.
        # pygame.sound.something, I guess.
        pass
    def clear(self):
        """Wipe the screen"""
        self.pygame_screen.fill(colors.bg)
        #self.screen = []
        self.pos = [0, 0]
        self.grobs = []
        self.cur_top = 0
        self.scroll = 0
    def finish(self):
        """Move the cursor to the end of the display and otherwise get
        ready for end.  XXX could be merged with restore?  Hmm."""
        if self.curs_vis:
            cx, cy = self.cxy
            self.pygame_screen.fill(colors.bg, self.char_rect(cx, cy))
            self.blit_a_char(cy, cx)
        for line in self.screen:
            self.write_line(line, 1)
        if self.curs_vis:
            self.pygame_screen.blit(self.cursor,
                                    (lmargin + self.pos[1],
                                     tmargin + self.pos[0] + self.scroll))
        pygame.display.update()
    def flushoutput(self):
        """Flush all output to the screen (assuming there's some
        buffering going on somewhere)"""
        # no buffering here, ma'am (though perhaps there should be!)
        pass
    def forgetinput(self):
        """Forget all pending, but not yet processed input."""
        while pygame.event.poll().type <> NOEVENT:
            pass
    def getpending(self):
        """Return the characters that have been typed but not yet
        processed."""
        events = []
        while 1:
            event = pygame.event.poll()
            if event.type == NOEVENT:
                break
            events.append(event)
        return events
    def wait(self):
        """Wait for an event."""
        raise Exception, "erp!"
    def repaint(self):
        # perhaps we should consolidate grobs?
        # redraw every recorded graphical object at its scrolled position
        self.pygame_screen.fill(colors.bg)
        self.paint_margin()
        for (y, x), surf, text in self.grobs:
            if surf and 0 < y + self.scroll:
                self.pygame_screen.blit(surf, (lmargin + x,
                                               tmargin + y + self.scroll))
        pygame.display.update()
    def write_line(self, line, ret):
        # render one logical line, wrapping when it exceeds the width;
        # ret != 0 means advance to the start of the next line after
        charsleft = (self.width*self.fw - self.pos[1])/self.fw
        while len(line) > charsleft:
            self.write_line(line[:charsleft], 1)
            line = line[charsleft:]
        if line:
            ren = self.font.render(line, 1, colors.fg, colors.bg)
            self.grobs.append((self.pos[:], ren, line))
            self.pygame_screen.blit(ren,
                                    (lmargin + self.pos[1],
                                     tmargin + self.pos[0] + self.scroll))
        else:
            self.grobs.append((self.pos[:], None, line))
        if ret:
            self.pos[0] += self.fh
            # scroll if the new line would fall below the bottom margin
            if tmargin + self.pos[0] + self.scroll + self.fh > 600 - bmargin:
                self.scroll = 600 - bmargin - self.pos[0] - self.fh - tmargin
                self.repaint()
            self.pos[1] = 0
        else:
            self.pos[1] += self.fw*len(line)
    def write(self, text):
        lines = text.split("\n")
        if self.curs_vis:
            # erase the cursor cell before writing over it
            self.pygame_screen.fill(colors.bg,
                                    (lmargin + self.pos[1],
                                     tmargin + self.pos[0] + self.scroll,
                                     self.fw, self.fh))
        for line in lines[:-1]:
            self.write_line(line, 1)
        self.write_line(lines[-1], 0)
        if self.curs_vis:
            self.pygame_screen.blit(self.cursor,
                                    (lmargin + self.pos[1],
                                     tmargin + self.pos[0] + self.scroll))
        pygame.display.update()
    def flush(self):
        pass
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import __builtin__
class Completer:
    """Readline-style name completer drawing from a namespace dict."""
    def __init__(self, ns):
        # ns: namespace mapping (e.g. a module's __dict__)
        self.ns = ns
    def complete(self, text):
        """Return the completions for text: attribute completions if it
        contains a dot, global-name completions otherwise."""
        if "." in text:
            return self.attr_matches(text)
        else:
            return self.global_matches(text)
    def global_matches(self, text):
        """Compute matches when text is a simple name.

        Return a list of all keywords, built-in functions and names
        currently defined in the namespace that match.
        """
        import keyword
        matches = []
        n = len(text)
        # Fixed: the loop variables used to shadow the builtin `list`
        for wordlist in [keyword.kwlist,
                         __builtin__.__dict__.keys(),
                         self.ns.keys()]:
            for word in wordlist:
                if word[:n] == text and word != "__builtins__":
                    matches.append(word)
        return matches
    def attr_matches(self, text):
        """Compute matches when text contains a dot.

        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluatable in the namespace, it will be evaluated and its
        attributes (as revealed by dir()) are used as possible
        completions.  (For class instances, class members are also
        considered.)

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.
        """
        import re
        m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text)
        if not m:
            return []
        expr, attr = m.group(1, 3)
        # Fixed: local renamed from `object`, which shadowed the builtin
        obj = eval(expr, self.ns)
        words = dir(obj)
        if hasattr(obj, '__class__'):
            words.append('__class__')
            words = words + get_class_members(obj.__class__)
        matches = []
        n = len(attr)
        for word in words:
            if word[:n] == attr and word != "__builtins__":
                matches.append("%s.%s" % (expr, word))
        return matches
def get_class_members(klass):
    """Return dir(klass) plus, recursively, the members of its bases.

    Duplicates are not removed; the caller is expected to uniquify.
    """
    members = list(dir(klass))
    for base in getattr(klass, '__bases__', ()):
        members += get_class_members(base)
    return members
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from pyrepl.console import Event
from pyrepl.tests.infrastructure import ReaderTestCase, EA, run_testcase
# this test case should contain as-verbatim-as-possible versions of
# (applicable) bug reports
class BugsTestCase(ReaderTestCase):
    # regression: transposing characters at the very start of the
    # buffer; EA matches whatever error indication the reader shows
    def test_transpose_at_start(self):
        self.run_test([( 'transpose', [EA, '']),
                       ( 'accept', [''])])
def test():
    # entry point used by the __main__ guard below
    run_testcase(BugsTestCase)
if __name__ == '__main__':
test()
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from pyrepl.console import Event
from pyrepl.tests.infrastructure import ReaderTestCase, EA, run_testcase
class SimpleTestCase(ReaderTestCase):
    # Basic reader behaviour.  Each test is a list of
    # (event-or-(event, data), expected-screen) pairs; None means
    # "don't check the screen after this event".
    def test_basic(self):
        self.run_test([(('self-insert', 'a'), ['a']),
                       ( 'accept', ['a'])])
    def test_repeat(self):
        # digit-arg applies a repeat count to the next command
        self.run_test([(('digit-arg', '3'), ['']),
                       (('self-insert', 'a'), ['aaa']),
                       ( 'accept', ['aaa'])])
    def test_kill_line(self):
        self.run_test([(('self-insert', 'abc'), ['abc']),
                       ( 'left', None),
                       ( 'kill-line', ['ab']),
                       ( 'accept', ['ab'])])
    def test_unix_line_discard(self):
        self.run_test([(('self-insert', 'abc'), ['abc']),
                       ( 'left', None),
                       ( 'unix-word-rubout', ['c']),
                       ( 'accept', ['c'])])
    def test_kill_word(self):
        self.run_test([(('self-insert', 'ab cd'), ['ab cd']),
                       ( 'beginning-of-line', ['ab cd']),
                       ( 'kill-word', [' cd']),
                       ( 'accept', [' cd'])])
    def test_backward_kill_word(self):
        self.run_test([(('self-insert', 'ab cd'), ['ab cd']),
                       ( 'backward-kill-word', ['ab ']),
                       ( 'accept', ['ab '])])
    def test_yank(self):
        # yank re-inserts the most recently killed text
        self.run_test([(('self-insert', 'ab cd'), ['ab cd']),
                       ( 'backward-kill-word', ['ab ']),
                       ( 'beginning-of-line', ['ab ']),
                       ( 'yank', ['cdab ']),
                       ( 'accept', ['cdab '])])
    def test_yank_pop(self):
        # yank-pop cycles back through earlier kills
        self.run_test([(('self-insert', 'ab cd'), ['ab cd']),
                       ( 'backward-kill-word', ['ab ']),
                       ( 'left', ['ab ']),
                       ( 'backward-kill-word', [' ']),
                       ( 'yank', ['ab ']),
                       ( 'yank-pop', ['cd ']),
                       ( 'accept', ['cd '])])
    def test_interrupt(self):
        # the interrupt event must propagate as KeyboardInterrupt
        try:
            self.run_test([( 'interrupt', [''])])
        except KeyboardInterrupt:
            pass
        else:
            self.fail('KeyboardInterrupt got lost')
    # test_suspend -- hah
    def test_up(self):
        self.run_test([(('self-insert', 'ab\ncd'), ['ab', 'cd']),
                       ( 'up', ['ab', 'cd']),
                       (('self-insert', 'e'), ['abe', 'cd']),
                       ( 'accept', ['abe', 'cd'])])
    def test_down(self):
        self.run_test([(('self-insert', 'ab\ncd'), ['ab', 'cd']),
                       ( 'up', ['ab', 'cd']),
                       (('self-insert', 'e'), ['abe', 'cd']),
                       ( 'down', ['abe', 'cd']),
                       (('self-insert', 'f'), ['abe', 'cdf']),
                       ( 'accept', ['abe', 'cdf'])])
    def test_left(self):
        self.run_test([(('self-insert', 'ab'), ['ab']),
                       ( 'left', ['ab']),
                       (('self-insert', 'c'), ['acb']),
                       ( 'accept', ['acb'])])
    def test_right(self):
        self.run_test([(('self-insert', 'ab'), ['ab']),
                       ( 'left', ['ab']),
                       (('self-insert', 'c'), ['acb']),
                       ( 'right', ['acb']),
                       (('self-insert', 'd'), ['acbd']),
                       ( 'accept', ['acbd'])])
def test():
    # entry point used by the __main__ guard below
    run_testcase(SimpleTestCase)
if __name__ == '__main__':
test()
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from pyrepl.reader import Reader
from pyrepl.console import Console, Event
import unittest
import sys
class EqualsAnything(object):
    """Wildcard that compares equal to every value.

    Used in expected-screen specs where part of the output is
    irrelevant.  __ne__ is defined too: under Python 2, `!=` does not
    fall back to a negated __eq__, so without it `EA != x` would be
    True -- inconsistent with `EA == x`.
    """
    def __eq__(self, other):
        return True
    def __ne__(self, other):
        # keep != consistent with ==
        return False
    # keep instances hashable even on Pythons where defining __eq__
    # would otherwise reset __hash__ to None
    __hash__ = object.__hash__
EA = EqualsAnything()
class TestConsole(Console):
    # Scripted console: feeds a canned list of (event, expected-screen)
    # pairs to the reader, and on each refresh checks the screen against
    # the expectation recorded by the preceding get_event().
    height = 24
    width = 80
    encoding = 'utf-8'
    def __init__(self, events, testcase, verbose=False):
        # events: list of (event-spec, expected-screen-or-None)
        self.events = events
        self.next_screen = None
        self.verbose = verbose
        self.testcase = testcase
    def refresh(self, screen, xy):
        # a None expectation means "don't check this refresh"
        if self.next_screen is not None:
            self.testcase.assertEqual(
                screen, self.next_screen,
                "[ %s != %s after %r ]"%(screen, self.next_screen,
                                         self.last_event_name))
    def get_event(self, block=1):
        ev, sc = self.events.pop(0)
        self.next_screen = sc
        # a bare string is shorthand for an event with no data
        if not isinstance(ev, tuple):
            ev = (ev,)
        self.last_event_name = ev[0]
        if self.verbose:
            print "event", ev
        return Event(*ev)
class TestReader(Reader):
    # Reader with no prompt that forces a repaint after every refresh,
    # so each event's screen expectation gets checked.
    def get_prompt(self, lineno, cursor_on_line):
        return ''
    def refresh(self):
        Reader.refresh(self)
        self.dirty = True
class ReaderTestCase(unittest.TestCase):
    # Base class for reader tests; subclasses call run_test with a list
    # of (event, expected-screen) pairs.
    def run_test(self, test_spec, reader_class=TestReader):
        # remember to finish your test_spec with 'accept' or similar!
        con = TestConsole(test_spec, self)
        reader = reader_class(con)
        reader.readline()
class BasicTestRunner:
    """Minimal unittest runner: run the test, return its TestResult."""
    def run(self, test):
        outcome = unittest.TestResult()
        test(outcome)
        return outcome
def run_testcase(testclass):
suite = unittest.makeSuite(testclass)
runner = unittest.TextTestRunner(sys.stdout, verbosity=1)
result = runner.run(suite)
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from pyrepl.console import Event
from pyrepl.tests.infrastructure import ReaderTestCase, EA, run_testcase
# this test case should contain as-verbatim-as-possible versions of
# (applicable) feature requests
class WishesTestCase(ReaderTestCase):
    # wish: a repeat count should apply to quoted-insert as well
    def test_quoted_insert_repeat(self):
        self.run_test([(('digit-arg', '3'), ['']),
                       ( 'quoted-insert', ['']),
                       (('self-insert', '\033'), ['^[^[^[']),
                       ( 'accept', None)])
def test():
    # entry point used by the __main__ guard below
    run_testcase(WishesTestCase)
if __name__ == '__main__':
test()
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# moo
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
class Event:
    """A console event.  `evt' names the kind of event ('key' or
    similar), `data' carries its payload, and `raw' holds the raw
    input characters that produced it."""
    def __init__(self, evt, data, raw=''):
        self.evt = evt
        self.data = data
        self.raw = raw
    def __repr__(self):
        return 'Event(%r, %r)' % (self.evt, self.data)
class Console:
    """Abstract base class encapsulating the OS-dependent console.
    Attributes:
    screen,
    height,
    width,
    """
    def refresh(self, screen, xy):
        """Redraw `screen' (a list of lines) with the cursor at `xy'."""
        pass
    def prepare(self):
        """Put the console into reader (raw) mode."""
        pass
    def restore(self):
        """Undo prepare(): return the console to normal mode."""
        pass
    def move_cursor(self, x, y):
        """Move the cursor to screen position (x, y)."""
        pass
    def set_cursor_vis(self, vis):
        """Show (true) or hide (false) the cursor."""
        pass
    def getheightwidth(self):
        """Return (height, width) where height and width are the height
        and width of the terminal window in characters."""
        pass
    def get_event(self, block=1):
        """Return an Event instance.  Returns None if |block| is false
        and there is no event pending, otherwise waits for the
        completion of an event."""
        pass
    def beep(self):
        """Emit an audible alert."""
        pass
    def clear(self):
        """Wipe the screen"""
        pass
    def finish(self):
        """Move the cursor to the end of the display and otherwise get
        ready for end.  XXX could be merged with restore?  Hmm."""
        pass
    def flushoutput(self):
        """Flush all output to the screen (assuming there's some
        buffering going on somewhere)."""
        pass
    def forgetinput(self):
        """Forget all pending, but not yet processed input."""
        pass
    def getpending(self):
        """Return the characters that have been typed but not yet
        processed."""
        pass
    def wait(self):
        """Wait for an event."""
        pass
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
functions for parsing keyspecs
Support for turning keyspecs into appropriate sequences.
pyrepl uses its own bastardized keyspec format, which is meant to be
a strict superset of readline's \"KEYSEQ\" format (which is to say
that if you can come up with a spec readline accepts that this
doesn't, you've found a bug and should tell me about it).
Note that this is the `\\C-o' style of readline keyspec, not the
`Control-o' sort.
A keyspec is a string representing a sequence of keypresses that can
be bound to a command.
All characters other than the backslash represent themselves. In the
traditional manner, a backslash introduces a escape sequence.
The extension to readline is that the sequence \\<KEY> denotes the
sequence of characters produced by hitting KEY.
Examples:
`a' - what you get when you hit the `a' key
`\\EOA' - Escape - O - A (up, on my terminal)
`\\<UP>' - the up arrow key
`\\<up>' - ditto (keynames are case insensitive)
`\\C-o', `\\c-o' - control-o
`\\M-.' - meta-period
`\\E.' - ditto (that's how meta works for pyrepl)
`\\<tab>', `\\<TAB>', `\\t', `\\011', '\\x09', '\\X09', '\\C-i', '\\C-I'
- all of these are the tab character. Can you think of any more?
"""
from curses import ascii
_escapes = {
'\\':'\\',
"'":"'",
'"':'"',
'a':'\a',
'b':'\h',
'e':'\033',
'f':'\f',
'n':'\n',
'r':'\r',
't':'\t',
'v':'\v'
}
# Map of symbolic key names (as used in \<name> keyspecs) to either the
# character they produce or the canonical name of the special key.
_keynames = {
    'backspace': 'backspace',
    'delete': 'delete',
    'down': 'down',
    'end': 'end',
    'enter': '\r',
    'escape': '\033',
    'f1' : 'f1', 'f2' : 'f2', 'f3' : 'f3', 'f4' : 'f4',
    'f5' : 'f5', 'f6' : 'f6', 'f7' : 'f7', 'f8' : 'f8',
    'f9' : 'f9', 'f10': 'f10', 'f11': 'f11', 'f12': 'f12',
    'f13': 'f13', 'f14': 'f14', 'f15': 'f15', 'f16': 'f16',
    'f17': 'f17', 'f18': 'f18', 'f19': 'f19', 'f20': 'f20',
    'home': 'home',
    'insert': 'insert',
    'left': 'left',
    'page down': 'page down',
    'page up': 'page up',
    'return': '\r',
    'right': 'right',
    'space': ' ',
    'tab': '\t',
    'up': 'up',
    }
class KeySpecError(Exception):
    """Raised for malformed or unrecognised keyspec strings."""
    pass
def _parse_key1(key, s):
    """Parse a single (possibly escaped) key out of the keyspec string
    `key', starting at index `s'.  Returns (keys, new_s) where keys is
    a list of one or two keys -- two when a \\M- prefix expands to a
    leading ESC."""
    ctrl = 0
    meta = 0
    ret = ''
    while not ret and s < len(key):
        if key[s] == '\\':
            c = key[s+1].lower()
            if _escapes.has_key(c):
                ret = _escapes[c]
                s += 2
            elif c == "c":
                if key[s + 2] != '-':
                    raise KeySpecError, \
                          "\\C must be followed by `-' (char %d of %s)"%(
                        s + 2, repr(key))
                if ctrl:
                    raise KeySpecError, "doubled \\C- (char %d of %s)"%(
                        s + 1, repr(key))
                ctrl = 1
                s += 3
            elif c == "m":
                if key[s + 2] != '-':
                    raise KeySpecError, \
                          "\\M must be followed by `-' (char %d of %s)"%(
                        s + 2, repr(key))
                if meta:
                    raise KeySpecError, "doubled \\M- (char %d of %s)"%(
                        s + 1, repr(key))
                meta = 1
                s += 3
            elif c.isdigit():
                # octal escape; assumes exactly three octal digits
                # follow the backslash -- TODO confirm malformed specs
                # (e.g. \8) are meant to raise ValueError here
                n = key[s+1:s+4]
                ret = chr(int(n, 8))
                s += 4
            elif c == 'x':
                # hex escape: exactly two hex digits
                n = key[s+2:s+4]
                ret = chr(int(n, 16))
                s += 4
            elif c == '<':
                # symbolic key name, e.g. \<up>
                t = key.find('>', s)
                if t == -1:
                    raise KeySpecError, \
                          "unterminated \\< starting at char %d of %s"%(
                        s + 1, repr(key))
                ret = key[s+2:t].lower()
                if ret not in _keynames:
                    raise KeySpecError, \
                          "unrecognised keyname `%s' at char %d of %s"%(
                        ret, s + 2, repr(key))
                ret = _keynames[ret]
                s = t + 1
            else:
                raise KeySpecError, \
                      "unknown backslash escape %s at char %d of %s"%(
                    `c`, s + 2, repr(key))
        else:
            # a plain character stands for itself
            ret = key[s]
            s += 1
    if ctrl:
        if len(ret) > 1:
            raise KeySpecError, "\\C- must be followed by a character"
        ret = ascii.ctrl(ret)
    if meta:
        # meta is encoded as a leading ESC
        ret = ['\033', ret]
    else:
        ret = [ret]
    return ret, s
def parse_keys(key):
    """Expand the keyspec string `key' into a flat list of keys."""
    result = []
    pos = 0
    end = len(key)
    while pos < end:
        keys, pos = _parse_key1(key, pos)
        result.extend(keys)
    return result
def compile_keymap(keymap, empty=''):
    """Compile a {key-sequence: command} map into a nested prefix tree
    of dicts: each level is keyed by the first key of the sequence,
    with `empty' marking a completed binding at that level."""
    r = {}
    for key, value in keymap.items():
        r.setdefault(key[0], {})[key[1:]] = value
    for key, value in r.items():
        if empty in value:
            # a full binding ends here; it must not also be a prefix
            # of a longer binding
            if len(value) <> 1:
                raise KeySpecError, \
                      "key definitions for %s clash"%(value.values(),)
            else:
                r[key] = value[empty]
        else:
            r[key] = compile_keymap(value, empty)
    return r
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import types, unicodedata
from pyrepl import commands
from curses import ascii
from pyrepl import input
def _make_unctrl_map():
    """Build a map from each unicode character in range(256) to its
    printable representation: printable characters map to themselves,
    control characters to caret notation (^A ...), DEL to ^?, and the
    remainder to octal escapes."""
    uc_map = {}
    for c in map(unichr, range(256)):
        if unicodedata.category(c)[0] <> 'C':
            uc_map[c] = c
    for i in range(32):
        c = unichr(i)
        # ord('A') + i - 1 == i + 64, i.e. NUL -> ^@, ^A .. ^_
        uc_map[c] = u'^' + unichr(ord('A') + i - 1)
    uc_map['\177'] = u'^?'
    for i in range(256):
        c = unichr(i)
        if not uc_map.has_key(c):
            uc_map[c] = u'\\%03o'%i
    return uc_map
# disp_str proved to be a bottleneck for large inputs, so it's been
# rewritten in C; it's not required though.
try:
    # deliberately fall through to the pure-Python version below:
    raise ImportError # currently it's borked by the unicode support
    from _pyrepl_utils import disp_str, init_unctrl_map
    init_unctrl_map(_make_unctrl_map())
    del init_unctrl_map
except ImportError:
    def _my_unctrl(c, u=_make_unctrl_map()):
        # the map is built exactly once, at function-definition time,
        # via the default argument
        import unicodedata
        if c in u:
            return u[c]
        else:
            if unicodedata.category(c).startswith('C'):
                # NOTE(review): in a non-unicode string '\u' is a
                # literal backslash-u, so this renders e.g. \u0085
                # textually -- confirm that is the intended output
                return '\u%04x'%(ord(c),)
            else:
                return c
    def disp_str(buffer, join=''.join, uc=_my_unctrl):
        """ disp_str(buffer:string) -> (string, [int])
        Return the string that should be the printed representation of
        |buffer| and a list detailing where the characters of |buffer|
        get used up.  E.g.:
        >>> disp_str(chr(3))
        ('^C', [1, 0])
        the list always contains 0s or 1s at present; it could conceivably
        go higher as and when unicode support happens."""
        s = map(uc, buffer)
        return (join(s),
                map(ord, join(map(lambda x:'\001'+(len(x)-1)*'\000', s))))
    del _my_unctrl
del _make_unctrl_map
# syntax classes:
# used by the word-motion methods (Reader.bow/eow) to classify each
# buffer character
[SYNTAX_WHITESPACE,
 SYNTAX_WORD,
 SYNTAX_SYMBOL] = range(3)
def make_default_syntax_table():
    """Build the default character -> syntax-class table: alphabetic
    characters are words, newline and space are whitespace, and every
    other character in range(256) is a symbol."""
    # XXX perhaps should use some unicodedata here?
    table = {}
    for code in range(256):
        ch = unichr(code)
        if ch.isalpha():
            table[ch] = SYNTAX_WORD
        else:
            table[ch] = SYNTAX_SYMBOL
    table[u'\n'] = SYNTAX_WHITESPACE
    table[u' '] = SYNTAX_WHITESPACE
    return table
# The default keymap: emacs-style bindings plus every printable
# character bound to self-insert.  The \EOF/\EOH entries paper over
# xterm terminfo oddities for end/home.
default_keymap = tuple(
    [(r'\C-a', 'beginning-of-line'),
     (r'\C-b', 'left'),
     (r'\C-c', 'interrupt'),
     (r'\C-d', 'delete'),
     (r'\C-e', 'end-of-line'),
     (r'\C-f', 'right'),
     (r'\C-g', 'cancel'),
     (r'\C-h', 'backspace'),
     (r'\C-j', 'accept'),
     (r'\<return>', 'accept'),
     (r'\C-k', 'kill-line'),
     (r'\C-l', 'clear-screen'),
     (r'\C-m', 'accept'),
     (r'\C-q', 'quoted-insert'),
     (r'\C-t', 'transpose-characters'),
     (r'\C-u', 'unix-line-discard'),
     (r'\C-v', 'quoted-insert'),
     (r'\C-w', 'unix-word-rubout'),
     (r'\C-x\C-u', 'upcase-region'),
     (r'\C-y', 'yank'),
     (r'\C-z', 'suspend'),
     (r'\M-b', 'backward-word'),
     (r'\M-c', 'capitalize-word'),
     (r'\M-d', 'kill-word'),
     (r'\M-f', 'forward-word'),
     (r'\M-l', 'downcase-word'),
     (r'\M-t', 'transpose-words'),
     (r'\M-u', 'upcase-word'),
     (r'\M-y', 'yank-pop'),
     (r'\M--', 'digit-arg'),
     (r'\M-0', 'digit-arg'),
     (r'\M-1', 'digit-arg'),
     (r'\M-2', 'digit-arg'),
     (r'\M-3', 'digit-arg'),
     (r'\M-4', 'digit-arg'),
     (r'\M-5', 'digit-arg'),
     (r'\M-6', 'digit-arg'),
     (r'\M-7', 'digit-arg'),
     (r'\M-8', 'digit-arg'),
     (r'\M-9', 'digit-arg'),
     (r'\M-\n', 'insert-nl'),
     ('\\\\', 'self-insert')] + \
    [(c, 'self-insert')
     for c in map(chr, range(32, 127)) if c <> '\\'] + \
    [(c, 'self-insert')
     for c in map(chr, range(128, 256)) if c.isalpha()] + \
    [(r'\<up>', 'up'),
     (r'\<down>', 'down'),
     (r'\<left>', 'left'),
     (r'\<right>', 'right'),
     (r'\<insert>', 'quoted-insert'),
     (r'\<delete>', 'delete'),
     (r'\<backspace>', 'backspace'),
     (r'\M-\<backspace>', 'backward-kill-word'),
     (r'\<end>', 'end'),
     (r'\<home>', 'home'),
     (r'\<f1>', 'help'),
     (r'\EOF', 'end'),  # the entries in the terminfo database for xterms
     (r'\EOH', 'home'), # seem to be wrong.  this is a less than ideal
                        # workaround
     ])
del c # from the listcomps
class Reader(object):
    """The Reader class implements the bare bones of a command reader,
    handling such details as editing and cursor motion.  What it does
    not support are such things as completion or history support -
    these are implemented elsewhere.
    Instance variables of note include:
     * buffer:
       A *list* (*not* a string at the moment :-) containing all the
       characters that have been entered.
     * console:
       Hopefully encapsulates the OS dependent stuff.
     * pos:
       A 0-based index into `buffer' for where the insertion point
       is.
     * screeninfo:
       Ahem.  This list contains some info needed to move the
       insertion point around reasonably efficiently.  I'd like to
       get rid of it, because its contents are obtuse (to put it
       mildly) but I haven't worked out if that is possible yet.
     * cxy, lxy:
       the position of the insertion point in screen ... XXX
     * syntax_table:
       Dictionary mapping characters to `syntax class'; read the
       emacs docs to see what this means :-)
     * commands:
       Dictionary mapping command names to command classes.
     * arg:
       The emacs-style prefix argument.  It will be None if no such
       argument has been provided.
     * dirty:
       True if we need to refresh the display.
     * kill_ring:
       The emacs-style kill-ring; manipulated with yank & yank-pop
     * ps1, ps2, ps3, ps4:
       prompts.  ps1 is the prompt for a one-line input; for a
       multiline input it looks like:
           ps2> first line of input goes here
           ps3> second and further
           ps3> lines get ps3
           ...
           ps4> and the last one gets ps4
       As with the usual top-level, you can set these to instances if
       you like; str() will be called on them (once) at the beginning
       of each command.  Don't put really long or newline containing
       strings here, please!
       This is just the default policy; you can change it freely by
       overriding get_prompt() (and indeed some standard subclasses
       do).
     * finished:
       handle1 will set this to a true value if a command signals
       that we're done.
    """
    help_text = """\
This is pyrepl. Hear my roar.
Helpful text may appear here at some point in the future when I'm
feeling more loquacious than I am now."""
    def __init__(self, console):
        """Initialize reader state and build the command table from
        every lower-case-named Command subclass exported by the
        commands module (registered under both underscore and hyphen
        spellings)."""
        self.buffer = []
        self.ps1 = "->> "
        self.ps2 = "/>> "
        self.ps3 = "|.. "
        self.ps4 = "\__ "
        self.kill_ring = []
        self.arg = None
        self.finished = 0
        self.console = console
        self.commands = {}
        self.msg = ''
        for v in vars(commands).values():
            if ( isinstance(v, type)
                 and issubclass(v, commands.Command)
                 and v.__name__[0].islower() ):
                self.commands[v.__name__] = v
                self.commands[v.__name__.replace('_', '-')] = v
        self.syntax_table = make_default_syntax_table()
        self.input_trans_stack = []
        self.keymap = self.collect_keymap()
        self.input_trans = input.KeymapTranslator(
            self.keymap,
            invalid_cls='invalid-key',
            character_cls='self-insert')
    def collect_keymap(self):
        # subclasses override this to extend or replace the default
        # keymap
        return default_keymap
    def calc_screen(self):
        """The purpose of this method is to translate changes in
        self.buffer into changes in self.screen.  Currently it rips
        everything down and starts from scratch, which whilst not
        especially efficient is certainly simple(r).
        """
        lines = self.get_unicode().split("\n")
        screen = []
        screeninfo = []
        w = self.console.width - 1
        p = self.pos
        for ln, line in zip(range(len(lines)), lines):
            ll = len(line)
            if 0 <= p <= ll:
                # the cursor lies on this logical line: emit any
                # pending message above it and record the logical
                # cursor coordinates
                if self.msg:
                    for mline in self.msg.split("\n"):
                        screen.append(mline)
                        screeninfo.append((0, []))
                self.lxy = p, ln
            prompt = self.get_prompt(ln, ll >= p >= 0)
            p -= ll + 1
            lp = len(prompt)
            l, l2 = disp_str(line)
            # integer division: number of extra screen rows this
            # logical line wraps onto
            wrapcount = (len(l) + lp) / w
            if wrapcount == 0:
                screen.append(prompt + l)
                screeninfo.append((lp, l2+[1]))
            else:
                # wrapped line: each continuation row ends in a
                # backslash marker
                screen.append(prompt + l[:w-lp] + "\\")
                screeninfo.append((lp, l2[:w-lp]))
                for i in range(-lp + w, -lp + wrapcount*w, w):
                    screen.append(l[i:i+w] + "\\")
                    screeninfo.append((0, l2[i:i + w]))
                screen.append(l[wrapcount*w - lp:])
                screeninfo.append((0, l2[wrapcount*w - lp:]+[1]))
        self.screeninfo = screeninfo
        self.cxy = self.pos2xy(self.pos)
        return screen
    def bow(self, p=None):
        """Return the 0-based index of the word break preceding p most
        immediately.
        p defaults to self.pos; word boundaries are determined using
        self.syntax_table."""
        if p is None:
            p = self.pos
        st = self.syntax_table
        b = self.buffer
        p -= 1
        # skip non-word characters, then the word itself
        while p >= 0 and st.get(b[p], SYNTAX_WORD) <> SYNTAX_WORD:
            p -= 1
        while p >= 0 and st.get(b[p], SYNTAX_WORD) == SYNTAX_WORD:
            p -= 1
        return p + 1
    def eow(self, p=None):
        """Return the 0-based index of the word break following p most
        immediately.
        p defaults to self.pos; word boundaries are determined using
        self.syntax_table."""
        if p is None:
            p = self.pos
        st = self.syntax_table
        b = self.buffer
        while p < len(b) and st.get(b[p], SYNTAX_WORD) <> SYNTAX_WORD:
            p += 1
        while p < len(b) and st.get(b[p], SYNTAX_WORD) == SYNTAX_WORD:
            p += 1
        return p
    def bol(self, p=None):
        """Return the 0-based index of the line break preceding p most
        immediately.
        p defaults to self.pos."""
        # XXX there are problems here.
        if p is None:
            p = self.pos
        b = self.buffer
        p -= 1
        while p >= 0 and b[p] <> '\n':
            p -= 1
        return p + 1
    def eol(self, p=None):
        """Return the 0-based index of the line break following p most
        immediately.
        p defaults to self.pos."""
        if p is None:
            p = self.pos
        b = self.buffer
        while p < len(b) and b[p] <> '\n':
            p += 1
        return p
    def get_arg(self, default=1):
        """Return any prefix argument that the user has supplied,
        returning `default' if there is None.  `default' defaults
        (groan) to 1."""
        if self.arg is None:
            return default
        else:
            return self.arg
    def get_prompt(self, lineno, cursor_on_line):
        """Return what should be in the left-hand margin for line
        `lineno'."""
        if self.arg is not None and cursor_on_line:
            # show the pending prefix argument on the cursor's line
            return "(arg: %s) "%self.arg
        if "\n" in self.buffer:
            if lineno == 0:
                return self._ps2
            elif lineno == self.buffer.count("\n"):
                return self._ps4
            else:
                return self._ps3
        else:
            return self._ps1
    def push_input_trans(self, itrans):
        # temporarily divert input through `itrans' (see quoted-insert)
        self.input_trans_stack.append(self.input_trans)
        self.input_trans = itrans
    def pop_input_trans(self):
        # undo the most recent push_input_trans
        self.input_trans = self.input_trans_stack.pop()
    def pos2xy(self, pos):
        """Return the x, y coordinates of position 'pos'."""
        # this *is* incomprehensible, yes.
        y = 0
        assert 0 <= pos <= len(self.buffer)
        if pos == len(self.buffer):
            y = len(self.screeninfo) - 1
            p, l2 = self.screeninfo[y]
            return p + len(l2) - 1, y
        else:
            # find the screen row containing pos by consuming the
            # per-row character counts
            for p, l2 in self.screeninfo:
                l = l2.count(1)
                if l > pos:
                    break
                else:
                    pos -= l
                    y += 1
            c = 0
            i = 0
            while c < pos:
                c += l2[i]
                i += 1
            while l2[i] == 0:
                i += 1
            return p + i, y
    def insert(self, text):
        """Insert 'text' at the insertion point."""
        self.buffer[self.pos:self.pos] = list(text)
        self.pos += len(text)
        self.dirty = 1
    def update_cursor(self):
        """Move the cursor to reflect changes in self.pos"""
        self.cxy = self.pos2xy(self.pos)
        self.console.move_cursor(*self.cxy)
    def after_command(self, cmd):
        """This function is called to allow post command cleanup."""
        if getattr(cmd, "kills_digit_arg", 1):
            # most commands consume any pending prefix argument
            if self.arg is not None:
                self.dirty = 1
            self.arg = None
    def prepare(self):
        """Get ready to run.  Call restore when finished.  You must not
        write to the console in between the calls to prepare and
        restore."""
        try:
            self.console.prepare()
            self.arg = None
            self.screeninfo = []
            self.finished = 0
            del self.buffer[:]
            self.pos = 0
            self.dirty = 1
            self.last_command = None
            self._ps1, self._ps2, self._ps3, self._ps4 = \
                           map(str, [self.ps1, self.ps2, self.ps3, self.ps4])
        except:
            self.restore()
            raise
    def last_command_is(self, klass):
        # true if the last executed command class relates to `klass'
        # NOTE(review): the issubclass argument order here (klass
        # first) looks inverted relative to the usual idiom -- confirm
        if not self.last_command:
            return 0
        return issubclass(klass, self.last_command)
    def restore(self):
        """Clean up after a run."""
        self.console.restore()
    def finish(self):
        """Called when a command signals that we're finished."""
        pass
    def error(self, msg="none"):
        # show `msg' in the message area and beep
        self.msg = "! " + msg + " "
        self.dirty = 1
        self.console.beep()
    def refresh(self):
        """Recalculate and refresh the screen."""
        # this call sets up self.cxy, so call it first.
        screen = self.calc_screen()
        self.console.refresh(screen, self.cxy)
        self.dirty = 0 # forgot this for a while (blush)
    def do_cmd(self, cmd):
        """Instantiate and run one (command-name-or-class, event)
        pair, then refresh the display or just reposition the cursor
        as appropriate."""
        #print cmd
        if isinstance(cmd[0], str):
            cmd = self.commands.get(cmd[0],
                                    commands.invalid_command)(self, cmd)
        elif isinstance(cmd[0], type):
            cmd = cmd[0](self, cmd)
        cmd.do()
        self.after_command(cmd)
        if self.dirty:
            self.refresh()
        else:
            self.update_cursor()
        if not isinstance(cmd, commands.digit_arg):
            # digit-arg is transparent to last_command tracking
            self.last_command = cmd.__class__
        self.finished = cmd.finish
        if self.finished:
            self.console.finish()
            self.finish()
    def handle1(self, block=1):
        """Handle a single event.  Wait as long as it takes if block
        is true (the default), otherwise return None if no event is
        pending."""
        if self.msg:
            self.msg = ''
            self.dirty = 1
        while 1:
            event = self.console.get_event(block)
            if not event: # can only happen if we're not blocking
                return None
            if event.evt == 'key':
                self.input_trans.push(event)
            elif event.evt == 'scroll':
                self.refresh()
            elif event.evt == 'resize':
                self.refresh()
            else:
                pass
            cmd = self.input_trans.get()
            if cmd is None:
                if block:
                    continue
                else:
                    return None
            self.do_cmd(cmd)
            return 1
    def push_char(self, char):
        # feed one character of input and process it without blocking
        self.console.push_char(char)
        self.handle1(0)
    def readline(self):
        """Read a line.  The implementation of this method also shows
        how to drive Reader if you want more control over the event
        loop."""
        self.prepare()
        try:
            self.refresh()
            while not self.finished:
                self.handle1()
            return self.get_buffer()
        finally:
            self.restore()
    def bind(self, spec, command):
        # add a binding and rebuild the input translator
        self.keymap = self.keymap + ((spec, command),)
        self.input_trans = input.KeymapTranslator(
            self.keymap,
            invalid_cls='invalid-key',
            character_cls='self-insert')
    def get_buffer(self, encoding=None):
        # NOTE(review): the `encoding' local computed below is never
        # used; the encode call always uses self.console.encoding, so
        # the parameter has no effect -- looks like a bug
        if encoding is None:
            encoding = self.console.encoding
        return u''.join(self.buffer).encode(self.console.encoding)
    def get_unicode(self):
        """Return the current buffer as a unicode string."""
        return u''.join(self.buffer)
def test():
    # Interactive smoke test: drive a Reader on a real Unix console
    # with distinctive prompts until an empty line is read.
    from pyrepl.unix_console import UnixConsole
    reader = Reader(UnixConsole())
    reader.ps1 = "**> "
    reader.ps2 = "/*> "
    reader.ps3 = "|*> "
    reader.ps4 = "\*> "
    while reader.readline():
        pass
if __name__=='__main__':
    # allow running this module directly for a manual test
    test()
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# keyspec parsing for a pygame console. currently this is simply copy
# n' change from the unix (ie. trad terminal) variant; probably some
# refactoring will happen when I work out how it will work best.
# A key is represented as *either*
# a) a (keycode, meta, ctrl) sequence (used for special keys such as
# f1, the up arrow key, etc)
# b) a (unichar, meta, ctrl) sequence (used for printable chars)
# Because we allow keystrokes like '\\C-xu', I'll use the same trick as
# the unix keymap module uses.
# '\\C-a' --> (K_a, 0, 1)
# XXX it's actually possible to test this module, so it should have a
# XXX test suite.
from curses import ascii
from pygame.locals import *
# Map of backslash escape letters to pygame key constants.  Escapes
# with no pygame equivalent remain commented out.
_escapes = {
    '\\': K_BACKSLASH,
    "'" : K_QUOTE,
    '"' : K_QUOTEDBL,
#    'a' : '\a',
    'b' : K_BACKSPACE,  # was K_BACKSLASH -- the 'b' escape denotes
                        # backspace, not backslash
    'e' : K_ESCAPE,
#    'f' : '\f',
    'n' : K_RETURN,
    'r' : K_RETURN,
    't' : K_TAB,
#    'v' : '\v'
    }
# Map of symbolic key names (as used in \<name> keyspecs) to pygame
# key constants.
_keynames = {
    'backspace' : K_BACKSPACE,
    'delete' : K_DELETE,
    'down' : K_DOWN,
    'end' : K_END,
    'enter' : K_KP_ENTER,
    'escape' : K_ESCAPE,
    'f1' : K_F1, 'f2' : K_F2, 'f3' : K_F3, 'f4' : K_F4,
    'f5' : K_F5, 'f6' : K_F6, 'f7' : K_F7, 'f8' : K_F8,
    'f9' : K_F9, 'f10': K_F10,'f11': K_F11,'f12': K_F12,
    'f13': K_F13,'f14': K_F14,'f15': K_F15,
    'home' : K_HOME,
    'insert' : K_INSERT,
    'left' : K_LEFT,
    'pgdown' : K_PAGEDOWN, 'page down' : K_PAGEDOWN,
    'pgup' : K_PAGEUP, 'page up' : K_PAGEUP,
    'return' : K_RETURN,
    'right' : K_RIGHT,
    'space' : K_SPACE,
    'tab' : K_TAB,
    'up' : K_UP,
    }
class KeySpecError(Exception):
    """Raised for malformed or unrecognised keyspec strings."""
    pass
def _parse_key1(key, s):
    """Parse one key out of keyspec `key' starting at index `s'.
    Returns ((key-or-keycode, meta, ctrl), new_s); returns (None, s)
    for names with no pygame equivalent."""
    ctrl = 0
    meta = 0
    ret = ''
    while not ret and s < len(key):
        if key[s] == '\\':
            c = key[s+1].lower()
            if _escapes.has_key(c):
                ret = _escapes[c]
                s += 2
            elif c == "c":
                if key[s + 2] != '-':
                    raise KeySpecError, \
                          "\\C must be followed by `-' (char %d of %s)"%(
                        s + 2, repr(key))
                if ctrl:
                    raise KeySpecError, "doubled \\C- (char %d of %s)"%(
                        s + 1, repr(key))
                ctrl = 1
                s += 3
            elif c == "m":
                if key[s + 2] != '-':
                    raise KeySpecError, \
                          "\\M must be followed by `-' (char %d of %s)"%(
                        s + 2, repr(key))
                if meta:
                    raise KeySpecError, "doubled \\M- (char %d of %s)"%(
                        s + 1, repr(key))
                meta = 1
                s += 3
            elif c.isdigit():
                # three-digit octal escape
                n = key[s+1:s+4]
                ret = chr(int(n, 8))
                s += 4
            elif c == 'x':
                # two-digit hex escape
                n = key[s+2:s+4]
                ret = chr(int(n, 16))
                s += 4
            elif c == '<':
                # symbolic key name, e.g. \<up>
                t = key.find('>', s)
                if t == -1:
                    raise KeySpecError, \
                          "unterminated \\< starting at char %d of %s"%(
                        s + 1, repr(key))
                try:
                    ret = _keynames[key[s+2:t].lower()]
                    s = t + 1
                except KeyError:
                    raise KeySpecError, \
                          "unrecognised keyname `%s' at char %d of %s"%(
                       key[s+2:t], s + 2, repr(key))
                if ret is None:
                    # NOTE(review): _keynames as defined above never
                    # maps to None, so this looks like dead code kept
                    # for names with no pygame equivalent -- confirm
                    return None, s
            else:
                raise KeySpecError, \
                      "unknown backslash escape %s at char %d of %s"%(
                    `c`, s + 2, repr(key))
        else:
            # plain character; apply any pending ctrl modifier now
            if ctrl:
                ret = unicode(ascii.ctrl(key[s]))
            else:
                ret = unicode(key[s])
            s += 1
    return (ret, meta, ctrl), s
def parse_keys(key):
    """Parse the keyspec string `key' into a tuple of (key, meta,
    ctrl) triples, or None if the spec names a key that has no pygame
    equivalent."""
    triples = []
    pos = 0
    while pos < len(key):
        triple, pos = _parse_key1(key, pos)
        if triple is None:
            return None
        triples.append(triple)
    return tuple(triples)
def _compile_keymap(keymap):
    """Compile a {key-tuple: command} map into a nested prefix tree of
    dicts; a completed binding is stored at each level under ()."""
    r = {}
    for key, value in keymap.items():
        r.setdefault(key[0], {})[key[1:]] = value
    for key, value in r.items():
        if value.has_key(()):
            # a full binding ends here; it must not also be a prefix
            # of a longer binding
            if len(value) <> 1:
                raise KeySpecError, \
                      "key definitions for %s clash"%(value.values(),)
            else:
                r[key] = value[()]
        else:
            r[key] = _compile_keymap(value)
    return r
def compile_keymap(keymap):
    """Compile a [(keyspec, command)] list into a nested dispatch
    tree (see _compile_keymap).  A command of None unbinds the key
    sequence; keyspecs with no pygame equivalent are dropped."""
    r = {}
    for key, value in keymap:
        k = parse_keys(key)
        if value is None and k in r:
            # unbind: remove the entry and do NOT fall through (the
            # original re-stored None into the map after deleting it)
            del r[k]
        elif k is not None:
            r[k] = value
    return _compile_keymap(r)
def keyname(key):
    """Return (name, length) for the longest named key sequence that
    is a prefix of `key', or (None, 0) if there is none.
    NOTE(review): this reads a global `keyset' that is not defined
    anywhere in this module (and _keynames here maps to pygame
    keycodes, not strings) -- calling this as-is raises NameError;
    it appears to be an unadapted copy from the terminal variant.
    """
    longest_match = ''
    longest_match_name = ''
    for name, keyseq in keyset.items():
        if keyseq and key.startswith(keyseq) and \
               len(keyseq) > len(longest_match):
            longest_match = keyseq
            longest_match_name = name
    if len(longest_match) > 0:
        return longest_match_name, len(longest_match)
    else:
        return None, 0
# Reverse map: characters to their printable keyspec spellings, used
# when rendering key sequences back into text.
_unescapes = {'\r':'\\r', '\n':'\\n', '\177':'^?'}
#for k,v in _escapes.items():
#    _unescapes[v] = k
def unparse_key(keyseq):
    """Render the key sequence `keyseq' back into keyspec notation."""
    if not keyseq:
        return ''
    name, s = keyname(keyseq)
    if name:
        if name <> 'escape' or s == len(keyseq):
            return '\\<' + name + '>' + unparse_key(keyseq[s:])
        else:
            # a lone leading escape is rendered as a meta prefix
            return '\\M-' + unparse_key(keyseq[1:])
    else:
        c = keyseq[0]
        r = keyseq[1:]
        if c == '\\':
            p = '\\\\'
        elif _unescapes.has_key(c):
            p = _unescapes[c]
        elif ord(c) < ord(' '):
            # control character -> \C-x spelling
            p = '\\C-%s'%(chr(ord(c)+96),)
        elif ord(' ') <= ord(c) <= ord('~'):
            p = c
        else:
            # anything else as an octal escape
            p = '\\%03o'%(ord(c),)
        return p + unparse_key(r)
def _unparse_keyf(keyseq):
    """Like unparse_key, but return a list of printable key names
    rather than a single keyspec string."""
    if not keyseq:
        return []
    name, s = keyname(keyseq)
    if name:
        if name <> 'escape' or s == len(keyseq):
            return [name] + _unparse_keyf(keyseq[s:])
        else:
            # a lone leading escape becomes a meta prefix on the
            # following key
            rest = _unparse_keyf(keyseq[1:])
            return ['M-'+rest[0]] + rest[1:]
    else:
        c = keyseq[0]
        r = keyseq[1:]
        if c == '\\':
            p = '\\'
        elif _unescapes.has_key(c):
            p = _unescapes[c]
        elif ord(c) < ord(' '):
            p = 'C-%s'%(chr(ord(c)+96),)
        elif ord(' ') <= ord(c) <= ord('~'):
            p = c
        else:
            p = '\\%03o'%(ord(c),)
        return [p] + _unparse_keyf(r)
def unparse_keyf(keyseq):
    """Render `keyseq' as a space-separated, human-readable key list."""
    parts = _unparse_keyf(keyseq)
    return " ".join(parts)
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from curses import ascii
import sys, os
# Categories of actions:
# killing
# yanking
# motion
# editing
# history
# finishing
# [completion]
class Command(object):
    """Base class for all reader commands.
    `finish' is true for commands that end the current readline call;
    `kills_digit_arg' is false for commands (like digit-arg) that
    must not reset the pending prefix argument."""
    finish = 0
    kills_digit_arg = 1
    def __init__(self, reader, (event_name, event)):
        # note the Python 2 tuple-unpacking parameter above
        self.reader = reader
        self.event = event
        self.event_name = event_name
    def do(self):
        """Execute the command; overridden by subclasses."""
        pass
class KillCommand(Command):
    """Base class for commands that delete text into the kill ring."""
    def kill_range(self, start, end):
        """Remove buffer[start:end], saving it in the kill ring; a
        kill immediately following another kill merges into the same
        ring entry (emacs behaviour)."""
        if start == end:
            return
        reader = self.reader
        buf = reader.buffer
        text = buf[start:end]
        del buf[start:end]
        if is_kill(reader.last_command):
            # merge with the previous kill: prepend for backward
            # kills, append for forward ones
            if start < reader.pos:
                reader.kill_ring[-1] = text + reader.kill_ring[-1]
            else:
                reader.kill_ring[-1] = reader.kill_ring[-1] + text
        else:
            reader.kill_ring.append(text)
        reader.pos = start
        reader.dirty = 1
class YankCommand(Command):
    """Marker base class for commands that insert from the kill ring."""
    pass
class MotionCommand(Command):
    """Marker base class for cursor-motion commands."""
    pass
class EditCommand(Command):
    """Marker base class for buffer-editing commands."""
    pass
class FinishCommand(Command):
    """Marker base class for commands that end the readline call."""
    finish = 1
    pass
def is_kill(command):
    # true when `command' (a command class or None) is a kill; used to
    # merge consecutive kills into one kill-ring entry
    return command and issubclass(command, KillCommand)
def is_yank(command):
    # true when `command' (a command class or None) is a yank; used by
    # yank-pop to check the previous command
    return command and issubclass(command, YankCommand)
# etc
class digit_arg(Command):
    """Accumulate an emacs-style numeric prefix argument one digit at
    a time; "-" negates the argument (or starts it at -1)."""
    kills_digit_arg = 0
    def do(self):
        reader = self.reader
        ch = self.event[-1]
        if ch == "-":
            if reader.arg is None:
                reader.arg = -1
            else:
                reader.arg = -reader.arg
        else:
            digit = int(ch)
            if reader.arg is None:
                reader.arg = digit
            elif reader.arg < 0:
                reader.arg = 10*reader.arg - digit
            else:
                reader.arg = 10*reader.arg + digit
        reader.dirty = 1
class clear_screen(Command):
    """Clear the console and force a redraw."""
    def do(self):
        reader = self.reader
        reader.console.clear()
        reader.dirty = 1
class refresh(Command):
    """Force a screen recalculation on the next pass."""
    def do(self):
        self.reader.dirty = 1
class repaint(Command):
    """Force a full repaint: mark the reader dirty and tell the
    console to discard what it thinks is on screen."""
    def do(self):
        self.reader.dirty = 1
        self.reader.console.repaint_prep()
class kill_line(KillCommand):
    """Kill to the end of the line; if only whitespace remains on the
    line, kill through the line break as well (emacs C-k behaviour)."""
    def do(self):
        r = self.reader
        b = r.buffer
        eol = r.eol()
        for c in b[r.pos:eol]:
            if not ascii.isspace(c):
                self.kill_range(r.pos, eol)
                return
        else:
            # nothing but whitespace: also consume the newline
            self.kill_range(r.pos, eol+1)
class unix_line_discard(KillCommand):
    """Kill from the start of the current line back to point."""
    def do(self):
        reader = self.reader
        self.kill_range(reader.bol(), reader.pos)
# XXX unix_word_rubout and backward_kill_word should actually
# do different things...
class unix_word_rubout(KillCommand):
    """Kill backwards from point to the start of the current word,
    repeated per the prefix argument."""
    def do(self):
        reader = self.reader
        for _ in range(reader.get_arg()):
            self.kill_range(reader.bow(), reader.pos)
class kill_word(KillCommand):
    """Kill forward from point to the end of the current word,
    repeated per the prefix argument."""
    def do(self):
        reader = self.reader
        for _ in range(reader.get_arg()):
            self.kill_range(reader.pos, reader.eow())
class backward_kill_word(KillCommand):
    """Kill backwards from point to the start of the current word.
    XXX currently identical to unix-word-rubout."""
    def do(self):
        reader = self.reader
        for _ in range(reader.get_arg()):
            self.kill_range(reader.bow(), reader.pos)
class yank(YankCommand):
    """Insert the most recent kill-ring entry at point."""
    def do(self):
        reader = self.reader
        if not reader.kill_ring:
            reader.error("nothing to yank")
            return
        reader.insert(reader.kill_ring[-1])
class yank_pop(YankCommand):
    """Replace just-yanked text with the next older kill-ring entry,
    rotating the ring (emacs M-y); only valid right after a yank."""
    def do(self):
        r = self.reader
        b = r.buffer
        if not r.kill_ring:
            r.error("nothing to yank")
            return
        if not is_yank(r.last_command):
            r.error("previous command was not a yank")
            return
        repl = len(r.kill_ring[-1])
        # rotate the ring so the previous entry becomes current
        r.kill_ring.insert(0, r.kill_ring.pop())
        t = r.kill_ring[-1]
        b[r.pos - repl:r.pos] = t
        r.pos = r.pos - repl + len(t)
        r.dirty = 1
class interrupt(FinishCommand):
    """Finish the console and deliver SIGINT to this process."""
    def do(self):
        import signal
        self.reader.console.finish()
        os.kill(os.getpid(), signal.SIGINT)
class suspend(Command):
    """Suspend the process with SIGSTOP, restoring the reader state
    when it is resumed."""
    def do(self):
        import signal
        r = self.reader
        p = r.pos
        r.console.finish()
        os.kill(os.getpid(), signal.SIGSTOP)
        ## this should probably be done
        ## in a handler for SIGCONT?
        r.pos = p
        r.posxy = 0, 0
        r.dirty = 1
        r.console.screen = []
class up(MotionCommand):
    """Move up one line, or to the previous history item when already
    on the first line.  NOTE(review): relies on r.historyi and
    r.select_item, which are provided by a history-aware Reader
    subclass, not by Reader itself -- confirm."""
    def do(self):
        r = self.reader
        for i in range(r.get_arg()):
            bol1 = r.bol()
            if bol1 == 0:
                if r.historyi > 0:
                    r.select_item(r.historyi - 1)
                    return
                r.pos = 0
                r.error("start of buffer")
                return
            bol2 = r.bol(bol1-1)
            line_pos = r.pos - bol1
            if line_pos > bol1 - bol2 - 1:
                # the line above is shorter than the current column
                # NOTE(review): sticky_y is assigned but apparently
                # never read in this file -- confirm it is used
                r.sticky_y = line_pos
                r.pos = bol1 - 1
            else:
                r.pos = bol2 + line_pos
class down(MotionCommand):
    """Move down one line, or to the next history item when already on
    the last line.  NOTE(review): relies on history attributes
    (historyi, history, select_item) from a Reader subclass."""
    def do(self):
        r = self.reader
        b = r.buffer
        for i in range(r.get_arg()):
            bol1 = r.bol()
            eol1 = r.eol()
            if eol1 == len(b):
                if r.historyi < len(r.history):
                    r.select_item(r.historyi + 1)
                    r.pos = r.eol(0)
                    return
                r.pos = len(b)
                r.error("end of buffer")
                return
            eol2 = r.eol(eol1+1)
            if r.pos - bol1 > eol2 - eol1 - 1:
                # the line below is shorter than the current column
                r.pos = eol2
            else:
                r.pos = eol1 + (r.pos - bol1) + 1
class left(MotionCommand):
    """Move point one character left, repeated per the prefix arg."""
    def do(self):
        reader = self.reader
        for _ in range(reader.get_arg()):
            newpos = reader.pos - 1
            if newpos < 0:
                self.reader.error("start of buffer")
            else:
                reader.pos = newpos
class right(MotionCommand):
    """Move point one character right, repeated per the prefix arg."""
    def do(self):
        reader = self.reader
        buf = reader.buffer
        for _ in range(reader.get_arg()):
            newpos = reader.pos + 1
            if newpos > len(buf):
                self.reader.error("end of buffer")
            else:
                reader.pos = newpos
class beginning_of_line(MotionCommand):
    """Move point to the start of the current line."""
    def do(self):
        reader = self.reader
        reader.pos = reader.bol()
class end_of_line(MotionCommand):
    """Move point to the end of the current line."""
    def do(self):
        # (the original bound self.reader to an unused local `r')
        self.reader.pos = self.reader.eol()
class home(MotionCommand):
    """Move point to the very start of the buffer."""
    def do(self):
        self.reader.pos = 0
class end(MotionCommand):
    """Move point to the very end of the buffer."""
    def do(self):
        reader = self.reader
        reader.pos = len(reader.buffer)
class forward_word(MotionCommand):
    """Move point forward one word, repeated per the prefix argument."""
    def do(self):
        reader = self.reader
        for _ in range(reader.get_arg()):
            reader.pos = reader.eow()
class backward_word(MotionCommand):
    """Move point back one word, repeated per the prefix argument."""
    def do(self):
        reader = self.reader
        for _ in range(reader.get_arg()):
            reader.pos = reader.bow()
class self_insert(EditCommand):
    """Insert the triggering character(s) at point, repeated per the
    prefix argument."""
    def do(self):
        reader = self.reader
        reader.insert(self.event * reader.get_arg())
class insert_nl(EditCommand):
    """Insert a literal newline at point, repeated per the prefix
    argument."""
    def do(self):
        reader = self.reader
        reader.insert("\n" * reader.get_arg())
class transpose_characters(EditCommand):
    """Drag the character before point forward over the following
    characters (emacs C-t), moving point with it."""
    def do(self):
        r = self.reader
        b = r.buffer
        s = r.pos - 1
        if s < 0:
            r.error("cannot transpose at start of buffer")
        else:
            if s == len(b):
                # NOTE(review): since s == r.pos - 1 and r.pos is at
                # most len(b), this branch looks unreachable -- confirm
                s -= 1
            t = min(s + r.get_arg(), len(b) - 1)
            c = b[s]
            del b[s]
            b.insert(t, c)
            r.pos = t
            r.dirty = 1
class backspace(EditCommand):
    """Delete the character before point, repeated per the prefix
    argument."""
    def do(self):
        reader = self.reader
        buf = reader.buffer
        for _ in range(reader.get_arg()):
            if reader.pos == 0:
                self.reader.error("can't backspace at start")
            else:
                reader.pos -= 1
                del buf[reader.pos]
                reader.dirty = 1
class delete(EditCommand):
    # Delete the character under the cursor; C-d on an empty buffer
    # signals end-of-file instead.
    def do(self):
        r = self.reader
        b = r.buffer
        if ( r.pos == 0 and len(b) == 0 # this is something of a hack
             and self.event[-1] == "\004"):
            # C-d with nothing typed: behave like EOF on the terminal
            r.console.finish()
            raise EOFError
        for i in range(r.get_arg()):
            if r.pos != len(b):
                del b[r.pos]
                r.dirty = 1
            else:
                self.reader.error("end of buffer")
class accept(FinishCommand):
    """Terminate the edit and accept the buffer contents."""
    def do(self):
        # FinishCommand's machinery does all the work; nothing to add.
        pass
class help(Command):
    """Show the reader's help text in the message area."""
    def do(self):
        reader = self.reader
        reader.msg = reader.help_text
        reader.dirty = 1
class invalid_key(Command):
    """Report an unbound key sequence to the user."""
    def do(self):
        pending = self.reader.console.getpending()
        sequence = ''.join(self.event) + pending.data
        self.reader.error("`%r' not bound"%sequence)
class invalid_command(Command):
    """Report an unknown command name to the user."""
    def do(self):
        name = self.event_name
        self.reader.error("command `%s' not known"%name)
class qIHelp(Command):
    # Invoked (via QITrans) for the keystroke following a quoted-insert:
    # inserts the raw key data literally, then restores the normal
    # input translator.
    def do(self):
        r = self.reader
        r.insert((self.event + r.console.getpending().data) * r.get_arg())
        r.pop_input_trans()
from pyrepl import input
class QITrans(object):
    """One-shot input translator used while a quoted insert is pending.

    Remembers the most recently pushed event and always translates it
    into the 'qIHelp' command carrying the event's raw data.
    """
    def push(self, evt):
        # just remember the event; get() consumes it
        self.evt = evt
    def get(self):
        last = self.evt
        return ('qIHelp', last.raw)
class quoted_insert(Command):
    """Arrange for the next keystroke to be inserted literally."""
    kills_digit_arg = 0
    def do(self):
        translator = QITrans()
        self.reader.push_input_trans(translator)
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from pyrepl import commands, reader
from pyrepl.reader import Reader
def uniqify(l):
    """Return a sorted list of the distinct elements of *l*."""
    # a dict provides uniqueness; sorted() iterates the keys directly,
    # which (unlike binding d.keys() and calling .sort() on it) also
    # works when keys() is a view rather than a list.
    d = {}
    for i in l:
        d[i] = 1
    return sorted(d)
def prefix(wordlist, j = 0):
    """Return the longest common prefix of the words in *wordlist*,
    with comparison starting at index *j* (the already-shared stem
    length); the returned string excludes the first *j* characters.
    """
    i = j
    try:
        while 1:
            seen = {}
            for word in wordlist:
                seen[word[i]] = 1
            if len(seen) > 1:
                # the words diverge at position i
                return wordlist[0][j:i]
            i += 1
    except IndexError:
        # some word ended here, so the common prefix stops
        return wordlist[0][j:i]
def build_menu(cons, wordlist, start):
    # Lay *wordlist* out as a grid of "[ word ]" cells sized to the
    # console, starting from index *start*.  Returns (menu_lines, next),
    # where next is the index a subsequent page should start from
    # (0 once the whole list has been shown).
    maxlen = min(max(map(len, wordlist)), cons.width - 4)
    cols = cons.width / (maxlen + 4)    # integer division under Python 2
    rows = (len(wordlist) - 1)/cols + 1
    menu = []
    i = start
    for r in range(rows):
        row = []
        for col in range(cols):
            row.append("[ %-*s ]"%(maxlen, wordlist[i][:maxlen]))
            i += 1
            if i >= len(wordlist):
                break
        menu.append( ''.join(row) )
        if i >= len(wordlist):
            # everything shown: wrap the page counter around
            i = 0
            break
        if r + 5 > cons.height:
            # don't fill the whole screen; note how many remain
            menu.append(" %d more... "%(len(wordlist) - i))
            break
    return menu, i
# this gets somewhat user interface-y, and as a result the logic gets
# very convoluted.
#
# To summarise the summary of the summary:- people are a problem.
# -- The Hitch-Hikers Guide to the Galaxy, Episode 12
#### Desired behaviour of the completions commands.
# the considerations are:
# (1) how many completions are possible
# (2) whether the last command was a completion
#
# if there's no possible completion, beep at the user and point this out.
# this is easy.
#
# if there's only one possible completion, stick it in. if the last thing
# user did was a completion, point out that he isn't getting anywhere.
#
# now it gets complicated.
#
# for the first press of a completion key:
# if there's a common prefix, stick it in.
# irrespective of whether anything got stuck in, if the word is now
# complete, show the "complete but not unique" message
# if there's no common prefix and if the word is not now complete,
# beep.
# common prefix -> yes no
# word complete \/
# yes "cbnu" "cbnu"
# no - beep
# for the second bang on the completion key
# there will necessarily be no common prefix
# show a menu of the choices.
# for subsequent bangs, rotate the menu around (if there are sufficient
# choices).
class complete(commands.Command):
    # The tab-completion command; see the behaviour discussion in the
    # comment block above for the intended UI.
    def do(self):
        r = self.reader
        stem = r.get_stem()
        # on repeated presses reuse the previous choices so the menu pages
        if r.last_command_is(self.__class__):
            completions = r.cmpltn_menu_choices
        else:
            r.cmpltn_menu_choices = completions = \
                                        r.get_completions(stem)
        if len(completions) == 0:
            r.error("no matches")
        elif len(completions) == 1:
            if len(completions[0]) == len(stem) and \
                   r.last_command_is(self.__class__):
                # second press with nothing left to add
                r.msg = "[ sole completion ]"
                r.dirty = 1
            r.insert(completions[0][len(stem):])
        else:
            p = prefix(completions, len(stem))
            if p <> '':
                r.insert(p)
            if r.last_command_is(self.__class__):
                # second consecutive tab: show (or advance) the menu
                if not r.cmpltn_menu_vis:
                    r.cmpltn_menu_vis = 1
                r.cmpltn_menu, r.cmpltn_menu_end = build_menu(
                    r.console, completions, r.cmpltn_menu_end)
                r.dirty = 1
            elif stem + p in completions:
                r.msg = "[ complete but not unique ]"
                r.dirty = 1
            else:
                r.msg = "[ not unique ]"
                r.dirty = 1
class self_insert(commands.self_insert):
    """Insert the typed character and, if a completion menu is showing,
    narrow the menu down to the completions matching the new stem."""
    def do(self):
        commands.self_insert.do(self)
        reader = self.reader
        if not reader.cmpltn_menu_vis:
            return
        stem = reader.get_stem()
        if len(stem) < 1:
            reader.cmpltn_reset()
            return
        matches = [word for word in reader.cmpltn_menu_choices
                   if word.startswith(stem)]
        if not matches:
            reader.cmpltn_reset()
        else:
            reader.cmpltn_menu, reader.cmpltn_menu_end = build_menu(
                reader.console, matches, 0)
class CompletingReader(Reader):
    """Reader subclass that adds tab-completion support.

    Adds instance variables:
      * cmpltn_menu         -- list of strings currently shown as the menu
      * cmpltn_menu_vis     -- true while the completion menu is displayed
      * cmpltn_menu_end     -- index into the choices where the next menu
                               page starts
      * cmpltn_menu_choices -- the full list of candidate completions
    """
    def collect_keymap(self):
        # bind tab to the complete command on top of the base keymap
        return super(CompletingReader, self).collect_keymap() + (
            (r'\t', 'complete'),)
    def __init__(self, console):
        super(CompletingReader, self).__init__(console)
        self.cmpltn_menu = ["[ menu 1 ]", "[ menu 2 ]"]
        self.cmpltn_menu_vis = 0
        self.cmpltn_menu_end = 0
        # register both underscore and hyphen spellings of the commands
        for c in [complete, self_insert]:
            self.commands[c.__name__] = c
            self.commands[c.__name__.replace('_', '-')] = c
    def after_command(self, cmd):
        super(CompletingReader, self).after_command(cmd)
        # any command other than completing/typing dismisses the menu
        if not isinstance(cmd, complete) and not isinstance(cmd, self_insert):
            self.cmpltn_reset()
    def calc_screen(self):
        screen = super(CompletingReader, self).calc_screen()
        if self.cmpltn_menu_vis:
            # splice the menu lines in just below the cursor line
            ly = self.lxy[1]
            screen[ly:ly] = self.cmpltn_menu
            self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu)
            self.cxy = self.cxy[0], self.cxy[1] + len(self.cmpltn_menu)
        return screen
    def finish(self):
        super(CompletingReader, self).finish()
        self.cmpltn_reset()
    def cmpltn_reset(self):
        # hide the menu and forget any cached completion state
        self.cmpltn_menu = []
        self.cmpltn_menu_vis = 0
        self.cmpltn_menu_end = 0
        self.cmpltn_menu_choices = []
    def get_stem(self):
        # scan backwards from the cursor over word-syntax characters
        st = self.syntax_table
        SW = reader.SYNTAX_WORD
        b = self.buffer
        p = self.pos - 1
        while p >= 0 and st.get(b[p], SW) == SW:
            p -= 1
        return u''.join(b[p+1:self.pos])
    def get_completions(self, stem):
        # hook for subclasses; the base reader completes nothing
        return []
def test():
    # Interactive smoke test: completes over words previously entered
    # into the history.
    class TestReader(CompletingReader):
        def get_completions(self, stem):
            return [s for l in map(lambda x:x.split(),self.history)
                    for s in l if s and s.startswith(stem)]
    # NOTE(review): CompletingReader.__init__ requires a console
    # argument, but none is passed here -- this looks like it would
    # raise a TypeError; confirm how TestReader should get a console.
    reader = TestReader()
    reader.ps1 = "c**> "
    reader.ps2 = "c/*> "
    reader.ps3 = "c|*> "
    reader.ps4 = "c\*> "
    while reader.readline():
        pass
if __name__=='__main__':
    test()
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# (naming modules after builtin functions is not such a hot idea...)
# an KeyTrans instance translates Event objects into Command objects
# hmm, at what level do we want [C-i] and [tab] to be equivalent?
# [meta-a] and [esc a]? obviously, these are going to be equivalent
# for the UnixConsole, but should they be for PygameConsole?
# it would in any situation seem to be a bad idea to bind, say, [tab]
# and [C-i] to *different* things... but should binding one bind the
# other?
# executive, temporary decision: [tab] and [C-i] are distinct, but
# [meta-key] is identified with [esc key]. We demand that any console
# class does quite a lot towards emulating a unix terminal.
import unicodedata
class InputTranslator(object):
    # Abstract interface for turning Event objects into pending
    # (command, keyseq) results; concrete translators (e.g.
    # KeymapTranslator below) implement all three methods.
    def push(self, evt):
        # feed one event into the translator
        pass
    def get(self):
        # return the next translated result, if any
        pass
    def empty(self):
        # return whether no translated results are pending
        pass
class KeymapTranslator(InputTranslator):
    # Translates events into commands by walking a keymap compiled into
    # a trie of nested dicts (see pyrepl.keymap.compile_keymap).
    def __init__(self, keymap, verbose=0,
                 invalid_cls=None, character_cls=None):
        self.verbose = verbose
        from pyrepl.keymap import compile_keymap, parse_keys
        self.keymap = keymap
        self.invalid_cls = invalid_cls      # command for unbound sequences
        self.character_cls = character_cls  # command for plain characters
        d = {}
        for keyspec, command in keymap:
            keyseq = tuple(parse_keys(keyspec))
            d[keyseq] = command
        if self.verbose:
            print d
        # self.ck is the root of the trie; self.k is the current node
        self.k = self.ck = compile_keymap(d, ())
        self.results = []
        self.stack = []
    def push(self, evt):
        if self.verbose:
            print "pushed", evt.data,
        key = evt.data
        d = self.k.get(key)
        if isinstance(d, dict):
            # prefix of a longer binding: descend into the trie
            if self.verbose:
                print "transition"
            self.stack.append(key)
            self.k = d
        else:
            if d is None:
                if self.verbose:
                    print "invalid"
                if self.stack or len(key) > 1 or unicodedata.category(key) == 'C':
                    self.results.append(
                        (self.invalid_cls, self.stack + [key]))
                else:
                    # small optimization:
                    self.k[key] = self.character_cls
                    self.results.append(
                        (self.character_cls, [key]))
            else:
                if self.verbose:
                    print "matched", d
                self.results.append((d, self.stack + [key]))
            # either way, restart matching from the trie root
            self.stack = []
            self.k = self.ck
    def get(self):
        if self.results:
            return self.results.pop(0)
        else:
            return None
    def empty(self):
        return not self.results
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import commands
import reader
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Default emacs-flavour bindings for the basic Reader.  Each entry is a
# (keyspec, command-name) pair; see pyrepl.keymap for keyspec syntax.
# (The comparison below originally used the `<>` operator; `!=` is
# identical in Python 2 and survives into Python 3.)
reader_emacs_keymap = tuple(
    [(r'\C-a', 'beginning-of-line'),
     (r'\C-b', 'left'),
     (r'\C-c', 'interrupt'),
     (r'\C-d', 'delete'),
     (r'\C-e', 'end-of-line'),
     (r'\C-f', 'right'),
     (r'\C-g', 'cancel'),
     (r'\C-h', 'backspace'),
     (r'\C-j', 'self-insert'),
     (r'\<return>', 'accept'),
     (r'\C-k', 'kill-line'),
     (r'\C-l', 'clear-screen'),
#     (r'\C-m', 'accept'),
     (r'\C-q', 'quoted-insert'),
     (r'\C-t', 'transpose-characters'),
     (r'\C-u', 'unix-line-discard'),
     (r'\C-v', 'quoted-insert'),
     (r'\C-w', 'unix-word-rubout'),
     (r'\C-x\C-u', 'upcase-region'),
     (r'\C-y', 'yank'),
     (r'\C-z', 'suspend'),
     (r'\M-b', 'backward-word'),
     (r'\M-c', 'capitalize-word'),
     (r'\M-d', 'kill-word'),
     (r'\M-f', 'forward-word'),
     (r'\M-l', 'downcase-word'),
     (r'\M-t', 'transpose-words'),
     (r'\M-u', 'upcase-word'),
     (r'\M-y', 'yank-pop'),
     (r'\M--', 'digit-arg'),
     (r'\M-0', 'digit-arg'),
     (r'\M-1', 'digit-arg'),
     (r'\M-2', 'digit-arg'),
     (r'\M-3', 'digit-arg'),
     (r'\M-4', 'digit-arg'),
     (r'\M-5', 'digit-arg'),
     (r'\M-6', 'digit-arg'),
     (r'\M-7', 'digit-arg'),
     (r'\M-8', 'digit-arg'),
     (r'\M-9', 'digit-arg'),
     (r'\M-\n', 'self-insert'),
     (r'\<backslash>', 'self-insert')] + \
    [(c, 'self-insert')
     for c in map(chr, range(32, 127)) if c != '\\'] + \
    [(c, 'self-insert')
     for c in map(chr, range(128, 256)) if c.isalpha()] + \
    [(r'\<up>', 'up'),
     (r'\<down>', 'down'),
     (r'\<left>', 'left'),
     (r'\<right>', 'right'),
     (r'\<insert>', 'quoted-insert'),
     (r'\<delete>', 'delete'),
     (r'\<backspace>', 'backspace'),
     (r'\M-\<backspace>', 'backward-kill-word'),
     (r'\<end>', 'end'),
     (r'\<home>', 'home'),
     (r'\<f1>', 'help'),
     (r'\EOF', 'end'),  # the entries in the terminfo database for xterms
     (r'\EOH', 'home'), # seem to be wrong. this is a less than ideal
                        # workaround
     ])
# history-navigation bindings layered on top of the basic reader keymap
hist_emacs_keymap = reader_emacs_keymap + (
    (r'\C-n', 'next-history'),
    (r'\C-p', 'previous-history'),
    (r'\C-o', 'operate-and-get-next'),
    (r'\C-r', 'reverse-history-isearch'),
    (r'\C-s', 'forward-history-isearch'),
    (r'\M-r', 'restore-history'),
    (r'\M-.', 'yank-arg'),
    (r'\<page down>', 'last-history'),
    (r'\<page up>', 'first-history'))
# completion bindings on top of the history keymap
comp_emacs_keymap = hist_emacs_keymap + (
    (r'\t', 'complete'),)
# the full keymap used by the Python reader
python_emacs_keymap = comp_emacs_keymap + (
    (r'\n', 'maybe-accept'),
    (r'\M-\n', 'self-insert'))
# Bindings for vi insert mode.  The original list ended with a stray
# ('') entry -- a bare empty string, not a (keyspec, command) pair --
# which would crash KeymapTranslator's "for keyspec, command in keymap"
# unpacking; it has been removed.  (`<>` also replaced with `!=`.)
reader_vi_insert_keymap = tuple(
    [(c, 'self-insert')
     for c in map(chr, range(32, 127)) if c != '\\'] + \
    [(c, 'self-insert')
     for c in map(chr, range(128, 256)) if c.isalpha()] + \
    [(r'\C-d', 'delete'),
     (r'\<backspace>', 'backspace'),
     ])
# Bindings for vi command mode.
reader_vi_command_keymap = tuple(
    [
    ('E', 'enter-emacs-mode'),
    ('R', 'enter-replace-mode'),
    ('dw', 'delete-word'),
    ('dd', 'delete-line'),
    ('h', 'left'),
    ('i', 'enter-insert-mode'),
    ('j', 'down'),
    ('k', 'up'),
    ('l', 'right'),
    ('r', 'replace-char'),
    ('w', 'forward-word'),
    ('x', 'delete'),
    ('.', 'repeat-edit'), # argh!
    (r'\<insert>', 'enter-insert-mode'),
    ] +
    # the original string was '01234567689' -- a typo with a duplicate
    # '6'; the duplicate mapped to the same command so behaviour is
    # unchanged by fixing it
    [(c, 'digit-arg') for c in '0123456789'] +
    [])
# keymaps indexed by editing-mode name
reader_keymaps = {
    'emacs' : reader_emacs_keymap,
    'vi-insert' : reader_vi_insert_keymap,
    'vi-command' : reader_vi_command_keymap
    }
del c # from the listcomps (Python 2 list comprehensions leak their
      # loop variable into the enclosing scope)
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import new
def copy_code_with_changes(codeobject,
                           argcount=None,
                           nlocals=None,
                           stacksize=None,
                           flags=None,
                           code=None,
                           consts=None,
                           names=None,
                           varnames=None,
                           filename=None,
                           name=None,
                           firstlineno=None,
                           lnotab=None):
    """Return a copy of *codeobject* with the given fields replaced.

    Any keyword left as None keeps the corresponding co_* attribute of
    the original code object.
    """
    # snapshot the keyword arguments by name before we create locals
    overrides = locals()
    ctor_args = []
    for attr in ('argcount', 'nlocals', 'stacksize', 'flags', 'code',
                 'consts', 'names', 'varnames', 'filename', 'name',
                 'firstlineno', 'lnotab'):
        value = overrides[attr]
        if value is None:
            value = getattr(codeobject, 'co_' + attr)
        ctor_args.append(value)
    return new.code(*ctor_args)
code_attrs=['argcount',
'nlocals',
'stacksize',
'flags',
'code',
'consts',
'names',
'varnames',
'filename',
'name',
'firstlineno',
'lnotab']
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import termios, curses, select, os, struct, errno
import signal, re, time, sys
from fcntl import ioctl
from pyrepl.fancy_termios import tcgetattr, tcsetattr
from pyrepl.console import Console, Event
from pyrepl import unix_eventqueue
# there are arguments for changing this to "refresh"
SIGWINCH_EVENT = 'repaint'
FIONREAD = getattr(termios, "FIONREAD", None)
TIOCGWINSZ = getattr(termios, "TIOCGWINSZ", None)
def _my_getstr(cap, optional=0):
r = curses.tigetstr(cap)
if not optional and r is None:
raise RuntimeError, \
"terminal doesn't have the required '%s' capability"%cap
return r
# at this point, can we say: AAAAAAAAAAAAAAAAAAAAAARGH!
# Build ratedict: a map from the termios output-speed constants the
# platform defines (termios.B<rate>) back to their numeric baud rate;
# used by __tputs below to emulate terminfo padding.
ratedict = {}
for _rate in (0, 50, 75, 110, 134, 150, 200, 300, 600, 1200, 1800,
              2400, 4800, 9600, 19200, 38400, 57600, 115200, 230400,
              460800):
    _name = 'B%d' % _rate
    if hasattr(termios, _name):
        ratedict[getattr(termios, _name)] = _rate
del _rate, _name
# matches terminfo padding directives like "$<5>" or "$<5*/>"
delayprog = re.compile("\\$<([0-9]+)((?:/|\\*){0,2})>")
try:
    poll = select.poll
except AttributeError:
    # this is exactly the minimum necessary to support what we
    # do with poll objects
    class poll:
        def __init__(self):
            pass
        def register(self, fd, flag):
            # only ever registered with a single fd; just remember it
            self.fd = fd
        def poll(self, timeout=None):
            r,w,e = select.select([self.fd],[],[],timeout)
            return r
POLLIN = getattr(select, "POLLIN", None)
class UnixConsole(Console):
def __init__(self, f_in=0, f_out=1, term=None, encoding=None):
if encoding is None:
encoding = sys.getdefaultencoding()
self.encoding = encoding
if isinstance(f_in, int):
self.input_fd = f_in
else:
self.input_fd = f_in.fileno()
if isinstance(f_out, int):
self.output_fd = f_out
else:
self.output_fd = f_out.fileno()
self.pollob = poll()
self.pollob.register(self.input_fd, POLLIN)
curses.setupterm(term, self.output_fd)
self.term = term
self._bel = _my_getstr("bel")
self._civis = _my_getstr("civis", optional=1)
self._clear = _my_getstr("clear")
self._cnorm = _my_getstr("cnorm", optional=1)
self._cub = _my_getstr("cub", optional=1)
self._cub1 = _my_getstr("cub1", 1)
self._cud = _my_getstr("cud", 1)
self._cud1 = _my_getstr("cud1", 1)
self._cuf = _my_getstr("cuf", 1)
self._cuf1 = _my_getstr("cuf1", 1)
self._cup = _my_getstr("cup")
self._cuu = _my_getstr("cuu", 1)
self._cuu1 = _my_getstr("cuu1", 1)
self._dch1 = _my_getstr("dch1", 1)
self._dch = _my_getstr("dch", 1)
self._el = _my_getstr("el")
self._hpa = _my_getstr("hpa", 1)
self._ich = _my_getstr("ich", 1)
self._ich1 = _my_getstr("ich1", 1)
self._ind = _my_getstr("ind", 1)
self._pad = _my_getstr("pad", 1)
self._ri = _my_getstr("ri", 1)
self._rmkx = _my_getstr("rmkx", 1)
self._smkx = _my_getstr("smkx", 1)
## work out how we're going to sling the cursor around
if 0 and self._hpa: # hpa don't work in windows telnet :-(
self.__move_x = self.__move_x_hpa
elif self._cub and self._cuf:
self.__move_x = self.__move_x_cub_cuf
elif self._cub1 and self._cuf1:
self.__move_x = self.__move_x_cub1_cuf1
else:
raise RuntimeError, "insufficient terminal (horizontal)"
if self._cuu and self._cud:
self.__move_y = self.__move_y_cuu_cud
elif self._cuu1 and self._cud1:
self.__move_y = self.__move_y_cuu1_cud1
else:
raise RuntimeError, "insufficient terminal (vertical)"
if self._dch1:
self.dch1 = self._dch1
elif self._dch:
self.dch1 = curses.tparm(self._dch, 1)
else:
self.dch1 = None
if self._ich1:
self.ich1 = self._ich1
elif self._ich:
self.ich1 = curses.tparm(self._ich, 1)
else:
self.ich1 = None
self.__move = self.__move_short
self.event_queue = unix_eventqueue.EventQueue(self.input_fd)
self.partial_char = ''
def change_encoding(self, encoding):
self.encoding = encoding
def refresh(self, screen, (cx, cy)):
# this function is still too long (over 90 lines)
self.__maybe_write_code(self._civis)
if not self.__gone_tall:
while len(self.screen) < min(len(screen), self.height):
self.__move(0, len(self.screen) - 1)
self.__write("\n")
self.__posxy = 0, len(self.screen)
self.screen.append("")
else:
while len(self.screen) < len(screen):
self.screen.append("")
if len(screen) > self.height:
self.__gone_tall = 1
self.__move = self.__move_tall
px, py = self.__posxy
old_offset = offset = self.__offset
height = self.height
if 0:
global counter
try:
counter
except NameError:
counter = 0
self.__write_code(curses.tigetstr("setaf"), counter)
counter += 1
if counter > 8:
counter = 0
# we make sure the cursor is on the screen, and that we're
# using all of the screen if we can
if cy < offset:
offset = cy
elif cy >= offset + height:
offset = cy - height + 1
elif offset > 0 and len(screen) < offset + height:
offset = max(len(screen) - height, 0)
screen.append([])
oldscr = self.screen[old_offset:old_offset + height]
newscr = screen[offset:offset + height]
# use hardware scrolling if we have it.
if old_offset > offset and self._ri:
self.__write_code(self._cup, 0, 0)
self.__posxy = 0, old_offset
for i in range(old_offset - offset):
self.__write_code(self._ri)
oldscr.pop(-1)
oldscr.insert(0, "")
elif old_offset < offset and self._ind:
self.__write_code(self._cup, self.height - 1, 0)
self.__posxy = 0, old_offset + self.height - 1
for i in range(offset - old_offset):
self.__write_code(self._ind)
oldscr.pop(0)
oldscr.append("")
self.__offset = offset
for y, oldline, newline, in zip(range(offset, offset + height),
oldscr,
newscr):
if oldline != newline:
self.write_changed_line(y, oldline, newline, px)
y = len(newscr)
while y < len(oldscr):
self.__move(0, y)
self.__posxy = 0, y
self.__write_code(self._el)
y += 1
self.__maybe_write_code(self._cnorm)
self.flushoutput()
self.screen = screen
self.move_cursor(cx, cy)
def write_changed_line(self, y, oldline, newline, px):
# this is frustrating; there's no reason to test (say)
# self.dch1 inside the loop -- but alternative ways of
# structuring this function are equally painful (I'm trying to
# avoid writing code generators these days...)
x = 0
minlen = min(len(oldline), len(newline))
while x < minlen and oldline[x] == newline[x]:
x += 1
if oldline[x:] == newline[x+1:] and self.ich1:
if ( y == self.__posxy[1] and x > self.__posxy[0]
and oldline[px:x] == newline[px+1:x+1] ):
x = px
self.__move(x, y)
self.__write_code(self.ich1)
self.__write(newline[x])
self.__posxy = x + 1, y
elif x < minlen and oldline[x + 1:] == newline[x + 1:]:
self.__move(x, y)
self.__write(newline[x])
self.__posxy = x + 1, y
elif (self.dch1 and self.ich1 and len(newline) == self.width
and x < len(newline) - 2
and newline[x+1:-1] == oldline[x:-2]):
self.__move(self.width - 2, y)
self.__posxy = self.width - 2, y
self.__write_code(self.dch1)
self.__move(x, y)
self.__write_code(self.ich1)
self.__write(newline[x])
self.__posxy = x + 1, y
else:
self.__move(x, y)
if len(oldline) > len(newline):
self.__write_code(self._el)
self.__write(newline[x:])
self.__posxy = len(newline), y
self.flushoutput()
def __write(self, text):
self.__buffer.append((text, 0))
def __write_code(self, fmt, *args):
self.__buffer.append((curses.tparm(fmt, *args), 1))
def __maybe_write_code(self, fmt, *args):
if fmt:
self.__write_code(fmt, *args)
def __move_y_cuu1_cud1(self, y):
dy = y - self.__posxy[1]
if dy > 0:
self.__write_code(dy*self._cud1)
elif dy < 0:
self.__write_code((-dy)*self._cuu1)
def __move_y_cuu_cud(self, y):
dy = y - self.__posxy[1]
if dy > 0:
self.__write_code(self._cud, dy)
elif dy < 0:
self.__write_code(self._cuu, -dy)
def __move_x_hpa(self, x):
if x != self.__posxy[0]:
self.__write_code(self._hpa, x)
def __move_x_cub1_cuf1(self, x):
dx = x - self.__posxy[0]
if dx > 0:
self.__write_code(self._cuf1*dx)
elif dx < 0:
self.__write_code(self._cub1*(-dx))
def __move_x_cub_cuf(self, x):
dx = x - self.__posxy[0]
if dx > 0:
self.__write_code(self._cuf, dx)
elif dx < 0:
self.__write_code(self._cub, -dx)
def __move_short(self, x, y):
self.__move_x(x)
self.__move_y(y)
def __move_tall(self, x, y):
assert 0 <= y - self.__offset < self.height, y - self.__offset
self.__write_code(self._cup, y - self.__offset, x)
def move_cursor(self, x, y):
if y < self.__offset or y >= self.__offset + self.height:
self.event_queue.insert(Event('scroll', None))
else:
self.__move(x, y)
self.__posxy = x, y
self.flushoutput()
def prepare(self):
# per-readline preparations:
self.__svtermstate = tcgetattr(self.input_fd)
raw = self.__svtermstate.copy()
raw.iflag &=~ (termios.BRKINT | termios.INPCK |
termios.ISTRIP | termios.IXON)
raw.oflag &=~ (termios.OPOST)
raw.cflag &=~ (termios.CSIZE|termios.PARENB)
raw.cflag |= (termios.CS8)
raw.lflag &=~ (termios.ICANON|termios.ECHO|
termios.IEXTEN|(termios.ISIG*1))
raw.cc[termios.VMIN] = 1
raw.cc[termios.VTIME] = 0
tcsetattr(self.input_fd, termios.TCSADRAIN, raw)
self.screen = []
self.height, self.width = self.getheightwidth()
self.__buffer = []
self.__posxy = 0, 0
self.__gone_tall = 0
self.__move = self.__move_short
self.__offset = 0
self.__maybe_write_code(self._smkx)
self.old_sigwinch = signal.signal(
signal.SIGWINCH, self.__sigwinch)
def restore(self):
self.__maybe_write_code(self._rmkx)
self.flushoutput()
tcsetattr(self.input_fd, termios.TCSADRAIN, self.__svtermstate)
signal.signal(signal.SIGWINCH, self.old_sigwinch)
def __sigwinch(self, signum, frame):
self.height, self.width = self.getheightwidth()
self.event_queue.insert(Event('resize', None))
def push_char(self, char):
self.partial_char += char
try:
c = unicode(self.partial_char, self.encoding)
except UnicodeError, e:
if len(e.args) > 4 and \
e.args[4] == 'unexpected end of data':
pass
else:
raise
else:
self.partial_char = ''
self.event_queue.push(c)
def get_event(self, block=1):
while self.event_queue.empty():
while 1: # All hail Unix!
try:
self.push_char(os.read(self.input_fd, 1))
except IOError, err:
if err.errno == errno.EINTR:
if not self.event_queue.empty():
return self.event_queue.get()
else:
continue
else:
raise
else:
break
if not block:
break
return self.event_queue.get()
def wait(self):
self.pollob.poll()
def set_cursor_vis(self, vis):
if vis:
self.__maybe_write_code(self._cnorm)
else:
self.__maybe_write_code(self._civis)
def repaint_prep(self):
if not self.__gone_tall:
self.__posxy = 0, self.__posxy[1]
self.__write("\r")
ns = len(self.screen)*['\000'*self.width]
self.screen = ns
else:
self.__posxy = 0, self.__offset
self.__move(0, self.__offset)
ns = self.height*['\000'*self.width]
self.screen = ns
if TIOCGWINSZ:
def getheightwidth(self):
try:
return int(os.environ["LINES"]), int(os.environ["COLUMNS"])
except KeyError:
height, width = struct.unpack(
"hhhh", ioctl(self.input_fd, TIOCGWINSZ, "\000"*8))[0:2]
if not height: return 25, 80
return height, width
else:
def getheightwidth(self):
try:
return int(os.environ["LINES"]), int(os.environ["COLUMNS"])
except KeyError:
return 25, 80
def forgetinput(self):
termios.tcflush(self.input_fd, termios.TCIFLUSH)
def flushoutput(self):
for text, iscode in self.__buffer:
if iscode:
self.__tputs(text)
else:
os.write(self.output_fd, text.encode(self.encoding))
del self.__buffer[:]
def __tputs(self, fmt, prog=delayprog):
"""A Python implementation of the curses tputs function; the
curses one can't really be wrapped in a sane manner.
I have the strong suspicion that this is complexity that
will never do anyone any good."""
# using .get() means that things will blow up
# only if the bps is actually needed (which I'm
# betting is pretty unlkely)
bps = ratedict.get(self.__svtermstate.ospeed)
while 1:
m = prog.search(fmt)
if not m:
os.write(self.output_fd, fmt)
break
x, y = m.span()
os.write(self.output_fd, fmt[:x])
fmt = fmt[y:]
delay = int(m.group(1))
if '*' in m.group(2):
delay *= self.height
if self._pad:
nchars = (bps*delay)/1000
os.write(self.output_fd, self._pad*nchars)
else:
time.sleep(float(delay)/1000.0)
def finish(self):
y = len(self.screen) - 1
while y >= 0 and not self.screen[y]:
y -= 1
self.__move(0, min(y, self.height + self.__offset - 1))
self.__write("\n\r")
self.flushoutput()
def beep(self):
self.__maybe_write_code(self._bel)
self.flushoutput()
if FIONREAD:
def getpending(self):
e = Event('key', '', '')
while not self.event_queue.empty():
e2 = self.event_queue.get()
e.data += e2.data
e.raw += e.raw
amount = struct.unpack(
"i", ioctl(self.input_fd, FIONREAD, "\0\0\0\0"))[0]
raw = unicode(os.read(self.input_fd, amount), self.encoding, 'replace')
e.data += raw
e.raw += raw
return e
else:
def getpending(self):
e = Event('key', '', '')
while not self.event_queue.empty():
e2 = self.event_queue.get()
e.data += e2.data
e.raw += e.raw
amount = 10000
raw = unicode(os.read(self.input_fd, amount), self.encoding, 'replace')
e.data += raw
e.raw += raw
return e
def clear(self):
self.__write_code(self._clear)
self.__gone_tall = 1
self.__move = self.__move_tall
self.__posxy = 0, 0
self.screen = []
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# one impressive collections of imports:
from pyrepl.completing_reader import CompletingReader
from pyrepl.historical_reader import HistoricalReader
from pyrepl import completing_reader, reader
from pyrepl import copy_code, commands, completer
from pyrepl import module_lister
import new, sys, os, re, code, traceback
import atexit, warnings
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import imp
imp.find_module("twisted")
from twisted.internet import reactor
from twisted.internet.abstract import FileDescriptor
except ImportError:
default_interactmethod = "interact"
else:
default_interactmethod = "twistedinteract"
CommandCompiler = code.CommandCompiler
def eat_it(*args):
    """Swallow whatever is passed in (used to silence warnings)."""
    # deliberately ignore everything
    return None
class maybe_accept(commands.Command):
    # Accept the buffer if it compiles as a complete statement;
    # otherwise insert a newline and keep editing.
    def do(self):
        r = self.reader
        text = r.get_unicode()
        try:
            # ooh, look at the hack:
            code = r.compiler("#coding:utf-8\n"+text.encode('utf-8'))
        except (OverflowError, SyntaxError, ValueError):
            # malformed input: accept it so the error gets reported
            self.finish = 1
        else:
            if code is None:
                # statement is incomplete: continue on a new line
                r.insert("\n")
            else:
                self.finish = 1
# Regexes recognising import statements, used to drive module-name
# completion.  Raw strings keep the \s escapes literal for the re module
# (in a plain string they are fragile and deprecated in later Pythons).
from_line_prog = re.compile(
    r"^from\s+(?P<mod>[A-Za-z_.0-9]*)\s+import\s+(?P<name>[A-Za-z_.0-9]*)")
import_line_prog = re.compile(
    r"^(?:import|from)\s+(?P<mod>[A-Za-z_.0-9]*)\s*$")
def mk_saver(reader):
    """Return a callable that pickles *reader*'s history to ~/.pythoni.hist.

    Intended for registration with atexit.  Saving is best-effort: failure
    to open the history file is silently ignored.
    """
    def saver(reader=reader):
        try:
            file = open(os.path.expanduser("~/.pythoni.hist"), "w")
        except IOError:
            pass
        else:
            try:
                pickle.dump(reader.history, file)
            finally:
                # close even if pickling fails so the fd is not leaked
                file.close()
    return saver
class PythonicReader(CompletingReader, HistoricalReader):
    """A line reader specialised for editing Python source: newline may
    accept the input, history persists in ~/.pythoni.hist, and completion
    understands import statements and object attributes."""
    def collect_keymap(self):
        # plain newline tries to accept; M-newline always inserts one
        return super(PythonicReader, self).collect_keymap() + (
            (r'\n', 'maybe-accept'),
            (r'\M-\n', 'insert-nl'))
    def __init__(self, console, locals,
                 compiler=None):
        super(PythonicReader, self).__init__(console)
        self.completer = completer.Completer(locals)
        st = self.syntax_table
        # treat dots and digits as word characters so dotted names
        # complete as a unit
        for c in "._0123456789":
            st[c] = reader.SYNTAX_WORD
        self.locals = locals
        if compiler is None:
            self.compiler = CommandCompiler()
        else:
            self.compiler = compiler
        # load persisted history, best-effort: any unpickling problem
        # just leaves us with an empty history
        try:
            file = open(os.path.expanduser("~/.pythoni.hist"))
        except IOError:
            pass
        else:
            try:
                self.history = pickle.load(file)
            except:
                self.history = []
            self.historyi = len(self.history)
            file.close()
        atexit.register(mk_saver(self))
        # register commands under both underscore and dash spellings
        for c in [maybe_accept]:
            self.commands[c.__name__] = c
            self.commands[c.__name__.replace('_', '-')] = c
    def get_completions(self, stem):
        """Return completion candidates for *stem*, special-casing
        'import ...' and 'from ... import ...' lines."""
        b = self.get_unicode()
        m = import_line_prog.match(b)
        if m:
            # completing a module name; build the module list lazily
            if not self._module_list_ready:
                module_lister._make_module_list()
                self._module_list_ready = True
            mod = m.group("mod")
            try:
                return module_lister.find_modules(mod)
            except ImportError:
                pass
        m = from_line_prog.match(b)
        if m:
            mod, name = m.group("mod", "name")
            try:
                l = module_lister._packages[mod]
            except KeyError:
                # not a known package: import it and complete attributes
                try:
                    mod = __import__(mod, self.locals, self.locals, [''])
                    return [x for x in dir(mod) if x.startswith(name)]
                except ImportError:
                    pass
            else:
                return [x[len(mod) + 1:]
                        for x in l if x.startswith(mod + '.' + name)]
        # fall back to ordinary name/attribute completion
        try:
            l = completing_reader.uniqify(self.completer.complete(stem))
            return l
        except (NameError, AttributeError):
            return []
class ReaderConsole(code.InteractiveInterpreter):
    """An interactive interpreter whose input comes from a PythonicReader.

    Offers several interaction styles: a plain blocking loop (interact),
    a Tk-aware loop (tkinteract), a Twisted-reactor-driven loop
    (twistedinteract) and a Cocoa run-loop hookup (cocoainteract).
    """
    II_init = code.InteractiveInterpreter.__init__
    def __init__(self, console, locals=None):
        if locals is None:
            locals = {}
        self.II_init(locals)
        self.compiler = CommandCompiler()
        self.compile = self.compiler.compiler
        self.reader = PythonicReader(console, locals, self.compiler)
        # expose the reader itself to the interactive namespace
        locals['Reader'] = self.reader
    def run_user_init_file(self):
        """Exec the first existing $PYREPLSTARTUP / $PYTHONSTARTUP file,
        printing (but not propagating) any exception it raises."""
        for key in "PYREPLSTARTUP", "PYTHONSTARTUP":
            initfile = os.environ.get(key)
            if initfile is not None and os.path.exists(initfile):
                break
        else:
            return
        try:
            execfile(initfile, self.locals, self.locals)
        except:
            etype, value, tb = sys.exc_info()
            # tb.tb_next hides this frame from the traceback
            traceback.print_exception(etype, value, tb.tb_next)
    def execute(self, text):
        """Compile and run one chunk of user input in 'single' mode."""
        try:
            # ooh, look at the hack:
            code = self.compile("# coding:utf8\n"+text.encode('utf-8'),
                                '<input>', 'single')
        except (OverflowError, SyntaxError, ValueError):
            self.showsyntaxerror("<input>")
        else:
            self.runcode(code)
            sys.stdout.flush()
    def interact(self):
        """Plain read-eval loop; EOF ends it, C-c aborts the current line."""
        while 1:
            try: # catches EOFError's and KeyboardInterrupts during execution
                try: # catches KeyboardInterrupts during editing
                    try: # warning saver
                        # can't have warnings spewed onto terminal
                        sv = warnings.showwarning
                        warnings.showwarning = eat_it
                        l = unicode(self.reader.readline(), 'utf-8')
                    finally:
                        warnings.showwarning = sv
                except KeyboardInterrupt:
                    print "KeyboardInterrupt"
                else:
                    if l:
                        self.execute(l)
            except EOFError:
                break
            except KeyboardInterrupt:
                continue
    def prepare(self):
        # silence warnings and put the reader in control of the terminal
        self.sv_sw = warnings.showwarning
        warnings.showwarning = eat_it
        self.reader.prepare()
        self.reader.refresh() # we want :after methods...
    def restore(self):
        # give the terminal back and re-enable warnings
        self.reader.restore()
        warnings.showwarning = self.sv_sw
    def handle1(self, block=1):
        """Process one input event; execute the line when it is finished."""
        try:
            r = 1
            r = self.reader.handle1(block)
        except KeyboardInterrupt:
            self.restore()
            print "KeyboardInterrupt"
            self.prepare()
        else:
            if self.reader.finished:
                text = self.reader.get_unicode()
                self.restore()
                if text:
                    self.execute(text)
                self.prepare()
        return r
    def tkfilehandler(self, file, mask):
        # called by Tk when stdin is readable; stash exceptions so the
        # Tk loop can re-raise them (Tk swallows them otherwise)
        try:
            self.handle1(block=0)
        except:
            self.exc_info = sys.exc_info()
    # how the <expletive> do you get this to work on Windows (without
    # createfilehandler)?  threads, I guess
    def really_tkinteract(self):
        """Pump Tk events, handling console input via a Tk file handler."""
        import _tkinter
        _tkinter.createfilehandler(
            self.reader.console.input_fd, _tkinter.READABLE,
            self.tkfilehandler)
        self.exc_info = None
        while 1:
            # dooneevent will return 0 without blocking if there are
            # no Tk windows, 1 after blocking until an event otherwise
            # so the following does what we want (this wasn't expected
            # to be obvious).
            if not _tkinter.dooneevent(_tkinter.ALL_EVENTS):
                self.handle1(block=1)
            if self.exc_info:
                type, value, tb = self.exc_info
                self.exc_info = None
                raise type, value, tb
    def tkinteract(self):
        """Run a Tk-aware Python interactive session.

        This function simulates the Python top-level in a way that
        allows Tk's mainloop to run."""
        # attempting to understand the control flow of this function
        # without help may cause internal injuries.  so, some
        # explanation.
        # The outer while loop is there to restart the interaction if
        # the user types control-c when execution is deep in our
        # innards.  I'm not sure this can't leave internals in an
        # inconsistent state, but it's a good start.
        # then the inside loop keeps calling self.handle1 until
        # _tkinter gets imported; then control shifts to
        # self.really_tkinteract, above.
        # this function can only return via an exception; we mask
        # EOFErrors (but they end the interaction) and
        # KeyboardInterrupts cause a restart.  All other exceptions
        # are likely bugs in pyrepl (well, 'cept for SystemExit, of
        # course).
        while 1:
            try:
                try:
                    self.prepare()
                    try:
                        while 1:
                            if sys.modules.has_key("_tkinter"):
                                self.really_tkinteract()
                                # really_tkinteract is not expected to
                                # return except via an exception, but:
                                break
                            self.handle1()
                    except EOFError:
                        pass
                finally:
                    self.restore()
            except KeyboardInterrupt:
                continue
            else:
                break
    def twistedinteract(self):
        """Interact by registering stdin as a reader with the Twisted
        reactor; EOF stops the reactor."""
        from twisted.internet import reactor
        from twisted.internet.abstract import FileDescriptor
        import signal
        outerself = self
        class Me(FileDescriptor):
            def fileno(self):
                """ We want to select on FD 0 """
                return 0
            def doRead(self):
                """called when input is ready"""
                try:
                    outerself.handle1()
                except EOFError:
                    reactor.stop()
        reactor.addReader(Me())
        # restore default SIGINT handling once the reactor is running
        reactor.callWhenRunning(signal.signal,
                                signal.SIGINT,
                                signal.default_int_handler)
        self.prepare()
        try:
            reactor.run()
        finally:
            self.restore()
    def cocoainteract(self, inputfilehandle=None, outputfilehandle=None):
        # only call this when there's a run loop already going!
        # note that unlike the other *interact methods, this returns immediately
        from cocoasupport import CocoaInteracter
        self.cocoainteracter = CocoaInteracter.alloc().init(self, inputfilehandle, outputfilehandle)
def main(use_pygame_console=0, interactmethod=default_interactmethod, print_banner=True, clear_main=True):
    """Set up a console, build a ReaderConsole on __main__'s namespace and
    run the chosen interaction method; stdio is restored on exit."""
    si, se, so = sys.stdin, sys.stderr, sys.stdout
    try:
        if 0 and use_pygame_console: # pygame currently borked
            from pyrepl.pygame_console import PyGameConsole, FakeStdin, FakeStdout
            con = PyGameConsole()
            sys.stderr = sys.stdout = FakeStdout(con)
            sys.stdin = FakeStdin(con)
        else:
            from pyrepl.unix_console import UnixConsole
            # figure out the terminal's encoding, best-effort
            try:
                import locale
            except ImportError:
                encoding = None
            else:
                if hasattr(locale, 'nl_langinfo') \
                       and hasattr(locale, 'CODESET'):
                    encoding = locale.nl_langinfo(locale.CODESET)
                elif os.environ.get('TERM_PROGRAM') == 'Apple_Terminal':
                    # /me whistles innocently...
                    code = int(os.popen(
                        "defaults read com.apple.Terminal StringEncoding"
                        ).read())
                    if code == 4:
                        encoding = 'utf-8'
                        # More could go here -- and what's here isn't
                        # bulletproof.  What would be?  AppleScript?
                        # Doesn't seem to be possible.
                    else:
                        encoding = None
                else:
                    encoding = None # so you get ASCII...
            con = UnixConsole(0, 1, None, encoding)
        if print_banner:
            print "Python", sys.version, "on", sys.platform
            print 'Type "help", "copyright", "credits" or "license" '\
                  'for more information.'
        sys.path.insert(0, os.getcwd())
        # optionally run in a fresh __main__ module so we don't pollute
        # the caller's namespace
        if clear_main and __name__ != '__main__':
            mainmod = new.module('__main__')
            sys.modules['__main__'] = mainmod
        else:
            mainmod = sys.modules['__main__']
        rc = ReaderConsole(con, mainmod.__dict__)
        rc.reader._module_list_ready = False
        rc.run_user_init_file()
        getattr(rc, interactmethod)()
    finally:
        sys.stdin, sys.stderr, sys.stdout = si, se, so
# run an interactive session when executed as a script
if __name__ == '__main__':
    main()
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from pyrepl.completing_reader import uniqify
import os, sys
# for the completion support.
# this is all quite nastily written.
# Maps a dotted package name ('' for the top level) to the list of
# module names importable from it; filled lazily by _make_module_list().
_packages = {}
def _make_module_list_dir(dir, suffs, prefix=''):
    """Return a sorted, duplicate-free list of module names found
    directly under *dir*.

    File names are stripped of the first matching suffix from *suffs*;
    subdirectories containing an __init__.py count as packages, are
    recursed into, and their contents cached in _packages.
    """
    names = []
    for entry in os.listdir(dir):
        path = os.path.join(dir, entry)
        if os.path.isfile(path):
            # strip the first matching extension, e.g. foo.py -> foo
            for suffix in suffs:
                if entry.endswith(suffix):
                    names.append(prefix + entry[:-len(suffix)])
                    break
        elif os.path.isdir(path) \
             and os.path.exists(os.path.join(path, "__init__.py")):
            # a package: record it and descend into it
            names.append(prefix + entry)
            _packages[prefix + entry] = _make_module_list_dir(
                path, suffs, prefix + entry + '.')
    names = uniqify(names)
    names.sort()
    return names
def _make_module_list():
    """Populate the _packages cache for every directory on sys.path."""
    import imp
    suffs = [x[0] for x in imp.get_suffixes() if x[0] != '.pyc']
    # longest suffixes first (ties broken reverse-lexically) so the most
    # specific extension is stripped
    suffs.sort(key=lambda suffix: (len(suffix), suffix), reverse=True)
    _packages[''] = list(sys.builtin_module_names)
    for dir in sys.path:
        if dir == '':
            dir = '.'
        if os.path.isdir(dir):
            _packages[''] += _make_module_list_dir(dir, suffs)
    _packages[''].sort()
def find_modules(stem):
    """Return every known module whose dotted name starts with *stem*.

    Raises ImportError when the containing package is unknown.
    """
    parts = stem.split('.')
    pack = '.'.join(parts[:-1])
    if pack not in _packages:
        raise ImportError("can't find \"%s\" package" % pack)
    return [mod for mod in _packages[pack] if mod.startswith(stem)]
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from pyrepl import reader, commands
from pyrepl.reader import Reader as R
# Keymap used while in incremental-search mode: printable characters
# extend the search term, C-r/C-s repeat the search, C-c/C-g cancel it,
# and any other key ends the search.
isearch_keymap = tuple(
    [('\\%03o'%c, 'isearch-end') for c in range(256) if chr(c) != '\\'] + \
    [(c, 'isearch-add-character')
     for c in map(chr, range(32, 127)) if c != '\\'] + \
    [('\\%03o'%c, 'isearch-add-character')
     for c in range(256) if chr(c).isalpha() and chr(c) != '\\'] + \
    [('\\\\', 'self-insert'),
     (r'\C-r', 'isearch-backwards'),
     (r'\C-s', 'isearch-forwards'),
     (r'\C-c', 'isearch-cancel'),
     (r'\C-g', 'isearch-cancel'),
     (r'\<backspace>', 'isearch-backspace')])
# NOTE: relies on Python 2 list comprehensions leaking their loop
# variable into the enclosing (module) scope.
del c
# Incremental-search state: idle, searching backwards, searching forwards.
ISEARCH_DIRECTION_NONE = ''
ISEARCH_DIRECTION_BACKWARDS = 'r'
ISEARCH_DIRECTION_FORWARDS = 'f'
class next_history(commands.Command):
    """Move one entry forward in the history list."""
    def do(self):
        reader = self.reader
        if reader.historyi == len(reader.history):
            reader.error("end of history list")
        else:
            reader.select_item(reader.historyi + 1)
class previous_history(commands.Command):
    """Move one entry backward in the history list."""
    def do(self):
        reader = self.reader
        if reader.historyi == 0:
            reader.error("start of history list")
        else:
            reader.select_item(reader.historyi - 1)
class restore_history(commands.Command):
    """Discard edits to the current history entry, restoring its
    original text."""
    def do(self):
        reader = self.reader
        if reader.historyi != len(reader.history):
            original = reader.history[reader.historyi]
            if reader.get_unicode() != original:
                reader.buffer = list(original)
                reader.pos = len(reader.buffer)
                reader.dirty = 1
class first_history(commands.Command):
    """Jump to the oldest history entry."""
    def do(self):
        reader = self.reader
        reader.select_item(0)
class last_history(commands.Command):
    """Jump past the newest history entry (the line being edited)."""
    def do(self):
        reader = self.reader
        reader.select_item(len(reader.history))
class operate_and_get_next(commands.FinishCommand):
    """Accept the current line and pre-load the following history
    entry for the next prompt."""
    def do(self):
        reader = self.reader
        reader.next_history = reader.historyi + 1
class yank_arg(commands.Command):
    """Insert an argument (word) from a previous history entry at point;
    repeated invocations step back through older entries, replacing the
    previously yanked word."""
    def do(self):
        r = self.reader
        # repeating the command walks further back in history
        if r.last_command is self.__class__:
            r.yank_arg_i += 1
        else:
            r.yank_arg_i = 0
        if r.historyi < r.yank_arg_i:
            r.error("beginning of history list")
            return
        # numeric argument selects which word; default -1 is the last one
        a = r.get_arg(-1)
        # XXX how to split?
        words = r.get_item(r.historyi - r.yank_arg_i - 1).split()
        if a < -len(words) or a >= len(words):
            r.error("no such arg")
            return
        w = words[a]
        b = r.buffer
        # on repeat, overwrite the word yanked by the previous invocation
        if r.yank_arg_i > 0:
            o = len(r.yank_arg_yanked)
        else:
            o = 0
        b[r.pos - o:r.pos] = list(w)
        r.yank_arg_yanked = w
        r.pos += len(w) - o
        r.dirty = 1
class forward_history_isearch(commands.Command):
    """Begin an incremental search towards newer history entries."""
    def do(self):
        reader = self.reader
        reader.isearch_direction = ISEARCH_DIRECTION_FORWARDS
        # remember where we started so the search can be cancelled
        reader.isearch_start = reader.historyi, reader.pos
        reader.isearch_term = ''
        reader.dirty = 1
        reader.push_input_trans(reader.isearch_trans)
class reverse_history_isearch(commands.Command):
    """Begin an incremental search towards older history entries."""
    def do(self):
        reader = self.reader
        reader.isearch_direction = ISEARCH_DIRECTION_BACKWARDS
        reader.dirty = 1
        reader.isearch_term = ''
        reader.push_input_trans(reader.isearch_trans)
        # remember where we started so the search can be cancelled
        reader.isearch_start = reader.historyi, reader.pos
class isearch_cancel(commands.Command):
    """Abort the incremental search and return to where it started."""
    def do(self):
        reader = self.reader
        reader.isearch_direction = ISEARCH_DIRECTION_NONE
        reader.pop_input_trans()
        reader.select_item(reader.isearch_start[0])
        reader.pos = reader.isearch_start[1]
        reader.dirty = 1
class isearch_add_character(commands.Command):
    """Append the typed character to the search term, re-searching only
    when the current match no longer extends under the cursor."""
    def do(self):
        r = self.reader
        b = r.buffer
        r.isearch_term += self.event[-1]
        r.dirty = 1
        # position the new final character of the term would occupy
        p = r.pos + len(r.isearch_term) - 1
        if b[p:p+1] != [r.isearch_term[-1]]:
            r.isearch_next()
class isearch_backspace(commands.Command):
    """Remove the last character from the incremental-search term."""
    def do(self):
        reader = self.reader
        if not len(reader.isearch_term) > 0:
            reader.error("nothing to rubout")
        else:
            reader.isearch_term = reader.isearch_term[:-1]
            reader.dirty = 1
class isearch_forwards(commands.Command):
    """Repeat the incremental search towards newer entries."""
    def do(self):
        reader = self.reader
        reader.isearch_direction = ISEARCH_DIRECTION_FORWARDS
        reader.isearch_next()
class isearch_backwards(commands.Command):
    """Repeat the incremental search towards older entries."""
    def do(self):
        reader = self.reader
        reader.isearch_direction = ISEARCH_DIRECTION_BACKWARDS
        reader.isearch_next()
class isearch_end(commands.Command):
    """Leave incremental-search mode, staying at the found position."""
    def do(self):
        reader = self.reader
        reader.isearch_direction = ISEARCH_DIRECTION_NONE
        reader.console.forgetinput()
        reader.pop_input_trans()
        reader.dirty = 1
class HistoricalReader(R):
    """Adds history support (with incremental history searching) to the
    Reader class.

    Adds the following instance variables:
      * history: a list of strings (accepted past inputs)
      * historyi: index of the entry being edited; len(history) means
        the fresh, not-yet-saved line
      * transient_history: dict of in-progress edits keyed by history
        index, thrown away when a line is accepted
      * next_history: entry to pre-load at the next prompt (used by
        operate-and-get-next)
      * isearch_direction, isearch_term, isearch_start: incremental
        search state
      * yank_arg_i, yank_arg_yanked:
        used by the yank-arg command; not actually manipulated by any
        HistoricalReader instance methods.
    """
    def collect_keymap(self):
        return super(HistoricalReader, self).collect_keymap() + (
            (r'\C-n', 'next-history'),
            (r'\C-p', 'previous-history'),
            (r'\C-o', 'operate-and-get-next'),
            (r'\C-r', 'reverse-history-isearch'),
            (r'\C-s', 'forward-history-isearch'),
            (r'\M-r', 'restore-history'),
            (r'\M-.', 'yank-arg'),
            (r'\<page down>', 'last-history'),
            (r'\<page up>', 'first-history'))
    def __init__(self, console):
        super(HistoricalReader, self).__init__(console)
        self.history = []
        self.historyi = 0
        self.transient_history = {}
        self.next_history = None
        self.isearch_direction = ISEARCH_DIRECTION_NONE
        # register each command under both underscore and dash names
        # (isearch_add_character appears twice below; harmless duplicate)
        for c in [next_history, previous_history, restore_history,
                  first_history, last_history, yank_arg,
                  forward_history_isearch, reverse_history_isearch,
                  isearch_end, isearch_add_character, isearch_cancel,
                  isearch_add_character, isearch_backspace,
                  isearch_forwards, isearch_backwards, operate_and_get_next]:
            self.commands[c.__name__] = c
            self.commands[c.__name__.replace('_', '-')] = c
        from pyrepl import input
        self.isearch_trans = input.KeymapTranslator(
            isearch_keymap, invalid_cls=isearch_end,
            character_cls=isearch_add_character)
    def select_item(self, i):
        """Switch the buffer to history entry *i*, preserving the current
        edit in transient_history."""
        self.transient_history[self.historyi] = self.get_unicode()
        buf = self.transient_history.get(i)
        if buf is None:
            buf = self.history[i]
        self.buffer = list(buf)
        self.historyi = i
        self.pos = len(self.buffer)
        self.dirty = 1
    def get_item(self, i):
        """Return entry *i*'s text, preferring an in-progress edit."""
        if i <> len(self.history):
            return self.transient_history.get(i, self.history[i])
        else:
            return self.transient_history.get(i, self.get_unicode())
    def prepare(self):
        super(HistoricalReader, self).prepare()
        try:
            self.transient_history = {}
            # honour a pending operate-and-get-next request
            if self.next_history is not None \
                   and self.next_history < len(self.history):
                self.historyi = self.next_history
                self.buffer[:] = list(self.history[self.next_history])
                self.pos = len(self.buffer)
                self.transient_history[len(self.history)] = ''
            else:
                self.historyi = len(self.history)
            self.next_history = None
        except:
            self.restore()
            raise
    def get_prompt(self, lineno, cursor_on_line):
        # show an emacs-style "(r-search `term') " prompt while searching
        if cursor_on_line and self.isearch_direction <> ISEARCH_DIRECTION_NONE:
            d = 'rf'[self.isearch_direction == ISEARCH_DIRECTION_FORWARDS]
            return "(%s-search `%s') "%(d, self.isearch_term)
        else:
            return super(HistoricalReader, self).get_prompt(lineno, cursor_on_line)
    def isearch_next(self):
        """Find the next occurrence of the search term, walking through
        history in the current search direction."""
        st = self.isearch_term
        p = self.pos
        i = self.historyi
        s = self.get_unicode()
        forwards = self.isearch_direction == ISEARCH_DIRECTION_FORWARDS
        while 1:
            if forwards:
                p = s.find(st, p + 1)
            else:
                p = s.rfind(st, 0, p + len(st) - 1)
            if p != -1:
                self.select_item(i)
                self.pos = p
                return
            elif ((forwards and i == len(self.history) - 1)
                  or (not forwards and i == 0)):
                self.error("not found")
                return
            else:
                # move to the next entry and restart the scan there
                if forwards:
                    i += 1
                    s = self.get_item(i)
                    p = -1
                else:
                    i -= 1
                    s = self.get_item(i)
                    p = len(s)
    def finish(self):
        super(HistoricalReader, self).finish()
        ret = self.get_unicode()
        # commit edits made to older entries, then append the new line
        for i, t in self.transient_history.items():
            if i < len(self.history) and i != self.historyi:
                self.history[i] = t
        if ret:
            self.history.append(ret)
def test():
    """Manual smoke test: read lines with history support until EOF."""
    from pyrepl.unix_console import UnixConsole
    reader = HistoricalReader(UnixConsole())
    reader.ps1 = "h**> "
    reader.ps2 = "h/*> "
    reader.ps3 = "h|*> "
    reader.ps4 = "h\*> "
    while reader.readline():
        pass
# run the interactive smoke test when executed as a script
if __name__=='__main__':
    test()
| Python |
# Copyright 2000-2004 Michael Hudson mwh@python.net
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import termios
class TermState:
    """Value object wrapping the 7-element attribute sequence used by
    termios.tcgetattr()/tcsetattr()."""
    def __init__(self, tuples):
        (self.iflag, self.oflag, self.cflag, self.lflag,
         self.ispeed, self.ospeed, self.cc) = tuples
    def as_list(self):
        """Return the attributes in the order termios expects."""
        return [self.iflag, self.oflag, self.cflag, self.lflag,
                self.ispeed, self.ospeed, self.cc]
    def copy(self):
        """Return a new instance carrying the same attribute values."""
        return self.__class__(self.as_list())
def tcgetattr(fd):
    # fetch the terminal attributes of fd, wrapped in a TermState
    return TermState(termios.tcgetattr(fd))
def tcsetattr(fd, when, attrs):
    # apply a TermState to fd; `when` is a termios.TCSA* constant
    termios.tcsetattr(fd, when, attrs.as_list())
class Term(TermState):
    """TermState bound to a file descriptor, with a save/restore stack
    for nested terminal-mode changes."""
    TS__init__ = TermState.__init__
    def __init__(self, fd=0):
        self.TS__init__(termios.tcgetattr(fd))
        self.fd = fd
        self.stack = []
    def save(self):
        # push the current attribute values for a later restore()
        self.stack.append( self.as_list() )
    def set(self, when=termios.TCSANOW):
        # write our attributes back to the terminal
        termios.tcsetattr(self.fd, when, self.as_list())
    def restore(self):
        self.TS__init__(self.stack.pop())
        self.set()
| Python |
class Error(Exception):
    """Raised for malformed input data (mirrors binascii.Error)."""
    pass
class Done(Exception):
    """Internal signal: the hqx end-of-data marker was encountered."""
    pass
class Incomplete(Exception):
    """Defined for binascii API compatibility (incomplete input)."""
    pass
def a2b_uu(s):
    """Decode one line of uuencoded text, returning the binary string.

    The first byte encodes the payload length; raises Error on illegal
    characters or non-NUL trailing garbage.
    """
    if not s:
        return ''
    length = (ord(s[0]) - 0x20) % 64
    def groups_of_four(data):
        # yield 4 encoded bytes at a time, space-padding the final group
        while data:
            quad = data[:4]
            if len(quad) < 4:
                quad = quad + ' ' * (4 - len(quad))
            yield ord(quad[0]), ord(quad[1]), ord(quad[2]), ord(quad[3])
            data = data[4:]
    try:
        decoded = ''.join(
            chr((a - 0x20) << 2 | (((b - 0x20) >> 4) & 0x3)) +
            chr(((b - 0x20) & 0xf) << 4 | (((c - 0x20) >> 2) & 0xf)) +
            chr(((c - 0x20) & 0x3) << 6 | ((d - 0x20) & 0x3f))
            for a, b, c, d in groups_of_four(s[1:].rstrip()))
    except ValueError:
        raise Error('Illegal char')
    # anything beyond the declared length must be NUL padding
    if decoded[length:].strip('\x00'):
        raise Error('Trailing garbage')
    decoded = decoded[:length]
    if len(decoded) < length:
        decoded = decoded + (length - len(decoded)) * '\x00'
    return decoded
def b2a_uu(s):
    """Encode at most 45 bytes as one line of uuencoded text.

    Returns the line including the leading length byte and the trailing
    newline.  Raises Error for inputs longer than 45 bytes.
    """
    length = len(s)
    if length > 45:
        raise Error('At most 45 bytes at once')
    def triples_gen(s):
        # yield 3 input bytes at a time, NUL-padding the final group
        while s:
            try:
                yield ord(s[0]), ord(s[1]), ord(s[2])
            except IndexError:
                s += '\0\0'
                yield ord(s[0]), ord(s[1]), ord(s[2])
                return
            s = s[3:]
    result = [''.join(
        [chr(0x20 + (( A >> 2 ) & 0x3F)),
         chr(0x20 + (((A << 4) | ((B >> 4) & 0xF)) & 0x3F)),
         chr(0x20 + (((B << 2) | ((C >> 6) & 0x3)) & 0x3F)),
         chr(0x20 + (( C ) & 0x3F))])
              for A, B, C in triples_gen(s)]
    # 63 replaces the original literal 077: octal with a bare leading
    # zero is Python-2-only syntax; the value is identical and portable.
    return chr(ord(' ') + (length & 63)) + ''.join(result) + '\n'
# Decoding table: base64 alphabet character -> 6-bit value (0..63).
table_a2b_base64 = dict(
    (char, value)
    for value, char in enumerate(
        "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        "abcdefghijklmnopqrstuvwxyz"
        "0123456789+/"))
def a2b_base64(s):
    """Decode a base64 string, ignoring invalid characters, and return
    the binary data."""
    s = s.rstrip()
    # clean out all invalid characters, this also strips the final '=' padding
    clean_s = []
    for item in s:
        if item in table_a2b_base64:
            clean_s.append(item)
    s = ''.join(clean_s)
    # Add '=' padding back into the string
    if len(s) % 4:
        s = s + ('=' * (4 - len(s) % 4))
    def quadruplets_gen(s):
        # yield 4 decoded 6-bit values at a time
        while s:
            yield (table_a2b_base64[s[0]],
                   table_a2b_base64[s[1]],
                   table_a2b_base64[s[2]],
                   table_a2b_base64[s[3]])
            s = s[4:]
    # decode all full (non-final) groups to 3 bytes each
    result = [
            chr(A << 2 | ((B >> 4) & 0x3)) +
            chr((B & 0xf) << 4 | ((C >> 2 ) & 0xf)) +
            chr((C & 0x3) << 6 | D )
            for A, B, C, D in quadruplets_gen(s[:-4])]
    if s:
        # the final group may carry 1, 2 or 3 payload bytes depending
        # on where the '=' padding starts
        final = s[-4:]
        if final[2] == '=':
            A = table_a2b_base64[final[0]]
            B = table_a2b_base64[final[1]]
            snippet = chr(A << 2 | ((B >> 4) & 0x3))
        elif final[3] == '=':
            A = table_a2b_base64[final[0]]
            B = table_a2b_base64[final[1]]
            C = table_a2b_base64[final[2]]
            snippet = chr(A << 2 | ((B >> 4) & 0x3)) + \
                      chr((B & 0xf) << 4 | ((C >> 2 ) & 0xf))
        else:
            A = table_a2b_base64[final[0]]
            B = table_a2b_base64[final[1]]
            C = table_a2b_base64[final[2]]
            D = table_a2b_base64[final[3]]
            snippet = chr(A << 2 | ((B >> 4) & 0x3)) + \
                      chr((B & 0xf) << 4 | ((C >> 2 ) & 0xf)) + \
                      chr((C & 0x3) << 6 | D )
        result.append(snippet)
    return ''.join(result)
# Encoding table: 6-bit value -> base64 alphabet character.
table_b2a_base64 = \
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
def b2a_base64(s):
    """Base64-encode *s* and return the text plus a trailing newline."""
    length = len(s)
    leftover = length % 3          # 0, 1 or 2 bytes needing '=' padding
    def triples(data):
        # data's length is always a multiple of 3 here
        index = 0
        while index < len(data):
            yield (ord(data[index]),
                   ord(data[index + 1]),
                   ord(data[index + 2]))
            index += 3
    encoded = []
    for A, B, C in triples(s[:length - leftover]):
        encoded.append(
            table_b2a_base64[( A >> 2 ) & 0x3F] +
            table_b2a_base64[((A << 4) | ((B >> 4) & 0xF)) & 0x3F] +
            table_b2a_base64[((B << 2) | ((C >> 6) & 0x3)) & 0x3F] +
            table_b2a_base64[( C ) & 0x3F])
    tail = s[length - leftover:]
    if leftover == 0:
        padding = ''
    elif leftover == 1:
        a = ord(tail[0])
        padding = (table_b2a_base64[(a >> 2 ) & 0x3F] +
                   table_b2a_base64[(a << 4 ) & 0x3F] + '==')
    else:
        a = ord(tail[0])
        b = ord(tail[1])
        padding = (table_b2a_base64[(a >> 2) & 0x3F] +
                   table_b2a_base64[((a << 4) | (b >> 4) & 0xF) & 0x3F] +
                   table_b2a_base64[(b << 2) & 0x3F] + '=')
    return ''.join(encoded) + padding + '\n'
def a2b_qp(s, header=False):
    """Decode quoted-printable data; with header=True, '_' decodes to a
    space (RFC 2047 header encoding)."""
    parts = s.rstrip('\t ')
    if header:
        parts = ' '.join(parts.split('_'))
    # split on '='; each following part starts with the escaped payload
    parts = parts.split('=')
    # Change the parts in-place
    for index, part in enumerate(parts[1:]):
        # '=' + newline is a soft line break: drop it
        if len(part) and part[0] == '\n':
            parts[index + 1] = part[1:]
            continue
        if len(part) > 1 and part[0] == '\r' and part[1] == '\n':
            parts[index + 1] = part[2:]
            continue
        # '=XX' with two hex digits decodes to one byte
        if len(part) > 1 and part[0] in hex_numbers and part[1] in hex_numbers:
            parts[index + 1] = chr(strhex_to_int(part[0:2])) + part[2:]
            # NOTE(review): compares the whole remaining part to '_';
            # presumably meant to special-case a decoded underscore in
            # header mode -- confirm against RFC 2045/2047 behaviour.
            if parts[index + 1] == '_' and header:
                parts[index + 1] = ' '
        elif index == len(parts) - 2 and len(part) < 2:
            # trailing lone '=' (truncated escape): drop it
            parts[index + 1] = ''
        else:
            # not a valid escape: keep the '=' literally
            parts[index + 1] = '=' + parts[index + 1]
    return ''.join(parts)
def b2a_qp(s, quotetabs=False, istext=True, header=False):
    """quotetabs=True means that tab and space characters are always
    quoted.
    istext=False means that \r and \n are treated as regular characters
    header=True encodes space characters with '_' and requires
    real '_' characters to be quoted.
    """
    # detect the dominant line-break convention of the input
    crlf = s.find('\r\n')
    lf = s.find('\n')
    linebreak = None
    if crlf >= 0 and crlf <= lf:
        linebreak = '\r\n'
    elif lf > 0:
        linebreak = '\n'
    # if linebreak and linebreak == '\r\n':
    # The above is more efficient for files with \n linebreaks,
    # but fails badly on files with mixed linebreak encoding
    if linebreak:
        # normalise so we can split on plain '\n' below
        s = s.replace('\r\n', '\n')
    else:
        linebreak = '\n'
    lines = s.split('\n')
    soft_lbr = '=' + linebreak
    result = []
    for line in lines:
        charlist = []
        count = 0   # output column, used to honour the 76-char limit
        for c in line:
            # Don't quote
            if '!' <= c <= '<' or '>' <= c <= '^' or '`' <= c <= '~' or (
                c == '_' and not header) or (c in '\n\r' and istext):
                if count >= 75:
                    charlist.append(soft_lbr)
                    count = 0
                charlist.append(c)
                count += 1
            elif not quotetabs and c in '\t ':
                if count >= 72:
                    charlist.append(soft_lbr)
                    count = 0
                if count >= 71: # Quote
                    count += 3
                    charlist.append('=' + two_hex_digits(ord(c)))
                else: # Don't quote
                    if c == ' ' and header:
                        charlist.append('_')
                    else:
                        charlist.append(c)
                    count += 1
            else: # Quote
                if count >= 72:
                    charlist.append(soft_lbr)
                    count = 0
                count += 3
                charlist.append('=' + two_hex_digits(ord(c)))
        if charlist and charlist[-1] in '\t ':
            # Whitespace at end of line has to be quoted
            charlist[-1] = '=' + two_hex_digits(ord(charlist[-1]))
        result.append(''.join(charlist))
    return linebreak.join(result)
hex_numbers = '0123456789ABCDEF'
def hex(n):
    """Return the uppercase hexadecimal representation of integer *n*
    without any '0x' prefix, e.g. hex(255) == 'FF' and hex(-26) == '-1A'.

    Note: intentionally shadows the builtin hex(); the name is part of
    this module's interface.
    """
    if n == 0:
        return '0'
    if n < 0:
        n = -n
        sign = '-'
    else:
        sign = ''
    digits = []
    while n:
        digits.append(hex_numbers[n % 0x10])
        # floor division: the original used '/', which produces floats
        # (and an infinite loop) under Python 3 or future division
        n = n // 0x10
    # digits were produced least-significant first
    digits.reverse()
    return sign + ''.join(digits)
def two_hex_digits(n):
    """Return the two-digit uppercase hex form of a byte value
    (0 <= n < 256), e.g. two_hex_digits(10) == '0A'."""
    # floor division: '/' would produce a float index under Python 3
    return hex_numbers[n // 0x10] + hex_numbers[n % 0x10]
def strhex_to_int(s):
    """Parse a string of uppercase hex digits into an integer."""
    value = 0
    for digit in s:
        value = value * 0x10 + hex_numbers.index(digit)
    return value
# The 64-character binhex4 ("hqx") alphabet.
hqx_encoding = '!"#$%&\'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr'
# Sentinel codes used in table_a2b_hqx below:
DONE = 0x7f   # ':' -- end-of-data marker
SKIP = 0x7e   # whitespace to ignore
FAIL = 0x7d   # character not in the alphabet
# Decoding table indexed by byte value: a 6-bit value for alphabet
# characters, or one of the DONE/SKIP/FAIL sentinels above.
table_a2b_hqx = [
    #^@      ^A      ^B      ^C      ^D      ^E      ^F      ^G
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    #\b      \t      \n      ^K      ^L      \r      ^N      ^O
    FAIL, FAIL, SKIP, FAIL, FAIL, SKIP, FAIL, FAIL,
    #^P      ^Q      ^R      ^S      ^T      ^U      ^V      ^W
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    #^X      ^Y      ^Z      ^[      ^\      ^]      ^^      ^_
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    #        !       "       #       $       %       &       '
    FAIL, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06,
    #(       )       *       +       ,       -       .       /
    0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, FAIL, FAIL,
    #0       1       2       3       4       5       6       7
    0x0D, 0x0E, 0x0F, 0x10, 0x11, 0x12, 0x13, FAIL,
    #8       9       :       ;       <       =       >       ?
    0x14, 0x15, DONE, FAIL, FAIL, FAIL, FAIL, FAIL,
    #@       A       B       C       D       E       F       G
    0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D,
    #H       I       J       K       L       M       N       O
    0x1E, 0x1F, 0x20, 0x21, 0x22, 0x23, 0x24, FAIL,
    #P       Q       R       S       T       U       V       W
    0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x2B, FAIL,
    #X       Y       Z       [       \       ]       ^       _
    0x2C, 0x2D, 0x2E, 0x2F, FAIL, FAIL, FAIL, FAIL,
    #`       a       b       c       d       e       f       g
    0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, FAIL,
    #h       i       j       k       l       m       n       o
    0x37, 0x38, 0x39, 0x3A, 0x3B, 0x3C, FAIL, FAIL,
    #p       q       r       s       t       u       v       w
    0x3D, 0x3E, 0x3F, FAIL, FAIL, FAIL, FAIL, FAIL,
    #x       y       z       {       |       }       ~      ^?
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    # bytes 0x80-0xFF are never part of the hqx alphabet
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
    FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
]
def a2b_hqx(s):
    """Decode binhex4-encoded data.

    Returns (decoded_string, done_flag); done_flag is 1 when the ':'
    end-of-data marker was seen.  Raises Error on illegal characters.
    """
    result = []
    def quadruples_gen(s):
        # accumulate 6-bit values four at a time; SKIP bytes are
        # ignored, DONE flushes the partial group and stops decoding
        t = []
        for c in s:
            res = table_a2b_hqx[ord(c)]
            if res == SKIP:
                continue
            elif res == FAIL:
                raise Error('Illegal character')
            elif res == DONE:
                yield t
                raise Done
            else:
                t.append(res)
            if len(t) == 4:
                yield t
                t = []
        yield t
    done = 0
    try:
        for snippet in quadruples_gen(s):
            length = len(snippet)
            # 4, 3 or 2 accumulated values decode to 3, 2 or 1 bytes
            if length == 4:
                result.append(chr(((snippet[0] & 0x3f) << 2) | (snippet[1] >> 4)))
                result.append(chr(((snippet[1] & 0x0f) << 4) | (snippet[2] >> 2)))
                result.append(chr(((snippet[2] & 0x03) << 6) | (snippet[3])))
            elif length == 3:
                result.append(chr(((snippet[0] & 0x3f) << 2) | (snippet[1] >> 4)))
                result.append(chr(((snippet[1] & 0x0f) << 4) | (snippet[2] >> 2)))
            elif length == 2:
                result.append(chr(((snippet[0] & 0x3f) << 2) | (snippet[1] >> 4)))
    except Done:
        done = 1
    except Error:
        raise
    return (''.join(result), done)
def b2a_hqx(s):
    """Encode binary data with the binhex4 alphabet.

    Incomplete trailing groups (1 or 2 bytes) are encoded without
    padding characters.
    """
    out = []
    def triples(data):
        # yield up to 3 byte values per group; the last may be short
        while data:
            chunk = data[:3]
            yield tuple([ord(ch) for ch in chunk])
            data = data[3:]
    for group in triples(s):
        size = len(group)
        out.append(hqx_encoding[(group[0] & 0xfc) >> 2])
        if size == 1:
            out.append(hqx_encoding[(group[0] & 0x03) << 4])
            continue
        out.append(hqx_encoding[
            ((group[0] & 0x03) << 4) | ((group[1] & 0xf0) >> 4)])
        if size == 2:
            out.append(hqx_encoding[(group[1] & 0x0f) << 2])
        else:
            out.append(hqx_encoding[
                (group[1] & 0x0f) << 2 | ((group[2] & 0xc0) >> 6)])
            out.append(hqx_encoding[group[2] & 0x3f])
    return ''.join(out)
# 256-entry lookup table for the CRC-CCITT (polynomial 0x1021) checksum
# used by binhex4; consumed by crc_hqx() below.
crctab_hqx = [
        0x0000, 0x1021, 0x2042, 0x3063, 0x4084, 0x50a5, 0x60c6, 0x70e7,
        0x8108, 0x9129, 0xa14a, 0xb16b, 0xc18c, 0xd1ad, 0xe1ce, 0xf1ef,
        0x1231, 0x0210, 0x3273, 0x2252, 0x52b5, 0x4294, 0x72f7, 0x62d6,
        0x9339, 0x8318, 0xb37b, 0xa35a, 0xd3bd, 0xc39c, 0xf3ff, 0xe3de,
        0x2462, 0x3443, 0x0420, 0x1401, 0x64e6, 0x74c7, 0x44a4, 0x5485,
        0xa56a, 0xb54b, 0x8528, 0x9509, 0xe5ee, 0xf5cf, 0xc5ac, 0xd58d,
        0x3653, 0x2672, 0x1611, 0x0630, 0x76d7, 0x66f6, 0x5695, 0x46b4,
        0xb75b, 0xa77a, 0x9719, 0x8738, 0xf7df, 0xe7fe, 0xd79d, 0xc7bc,
        0x48c4, 0x58e5, 0x6886, 0x78a7, 0x0840, 0x1861, 0x2802, 0x3823,
        0xc9cc, 0xd9ed, 0xe98e, 0xf9af, 0x8948, 0x9969, 0xa90a, 0xb92b,
        0x5af5, 0x4ad4, 0x7ab7, 0x6a96, 0x1a71, 0x0a50, 0x3a33, 0x2a12,
        0xdbfd, 0xcbdc, 0xfbbf, 0xeb9e, 0x9b79, 0x8b58, 0xbb3b, 0xab1a,
        0x6ca6, 0x7c87, 0x4ce4, 0x5cc5, 0x2c22, 0x3c03, 0x0c60, 0x1c41,
        0xedae, 0xfd8f, 0xcdec, 0xddcd, 0xad2a, 0xbd0b, 0x8d68, 0x9d49,
        0x7e97, 0x6eb6, 0x5ed5, 0x4ef4, 0x3e13, 0x2e32, 0x1e51, 0x0e70,
        0xff9f, 0xefbe, 0xdfdd, 0xcffc, 0xbf1b, 0xaf3a, 0x9f59, 0x8f78,
        0x9188, 0x81a9, 0xb1ca, 0xa1eb, 0xd10c, 0xc12d, 0xf14e, 0xe16f,
        0x1080, 0x00a1, 0x30c2, 0x20e3, 0x5004, 0x4025, 0x7046, 0x6067,
        0x83b9, 0x9398, 0xa3fb, 0xb3da, 0xc33d, 0xd31c, 0xe37f, 0xf35e,
        0x02b1, 0x1290, 0x22f3, 0x32d2, 0x4235, 0x5214, 0x6277, 0x7256,
        0xb5ea, 0xa5cb, 0x95a8, 0x8589, 0xf56e, 0xe54f, 0xd52c, 0xc50d,
        0x34e2, 0x24c3, 0x14a0, 0x0481, 0x7466, 0x6447, 0x5424, 0x4405,
        0xa7db, 0xb7fa, 0x8799, 0x97b8, 0xe75f, 0xf77e, 0xc71d, 0xd73c,
        0x26d3, 0x36f2, 0x0691, 0x16b0, 0x6657, 0x7676, 0x4615, 0x5634,
        0xd94c, 0xc96d, 0xf90e, 0xe92f, 0x99c8, 0x89e9, 0xb98a, 0xa9ab,
        0x5844, 0x4865, 0x7806, 0x6827, 0x18c0, 0x08e1, 0x3882, 0x28a3,
        0xcb7d, 0xdb5c, 0xeb3f, 0xfb1e, 0x8bf9, 0x9bd8, 0xabbb, 0xbb9a,
        0x4a75, 0x5a54, 0x6a37, 0x7a16, 0x0af1, 0x1ad0, 0x2ab3, 0x3a92,
        0xfd2e, 0xed0f, 0xdd6c, 0xcd4d, 0xbdaa, 0xad8b, 0x9de8, 0x8dc9,
        0x7c26, 0x6c07, 0x5c64, 0x4c45, 0x3ca2, 0x2c83, 0x1ce0, 0x0cc1,
        0xef1f, 0xff3e, 0xcf5d, 0xdf7c, 0xaf9b, 0xbfba, 0x8fd9, 0x9ff8,
        0x6e17, 0x7e36, 0x4e55, 0x5e74, 0x2e93, 0x3eb2, 0x0ed1, 0x1ef0,
]
def crc_hqx(s, crc):
    """Return the binhex4 CRC-16 of string *s*, seeded with *crc*.

    One table-driven step per input character; only the low 16 bits of
    the running CRC are kept.
    """
    for byte in map(ord, s):
        crc = ((crc << 8) & 0xff00) ^ crctab_hqx[((crc >> 8) & 0xff) ^ byte]
    return crc
def rlecode_hqx(s):
    """
    Run length encoding for binhex4.
    The CPython implementation does not do run length encoding
    of \x90 characters. This implementation does.
    """
    if not s:
        return ''
    out = []
    length = len(s)
    pos = 0
    while pos < length:
        ch = s[pos]
        # measure the run starting at pos; runs are capped at 255
        # because the repeat count must fit in a single byte
        run = 1
        while pos + run < length and s[pos + run] == ch and run < 255:
            run += 1
        # a literal \x90 must itself be escaped as \x90\x00
        if ch == '\x90':
            unit = ['\x90', '\x00']
        else:
            unit = [ch]
        if run < 4:
            # short runs are cheaper spelled out than RLE-encoded
            out.extend(unit * run)
        else:
            out.extend(unit + ['\x90', chr(run)])
        pos += run
    return ''.join(out)
def rledecode_hqx(s):
    """
    Decode binhex4 run-length encoding.

    '\x90' is the escape character: '\x90\x00' decodes to a literal
    '\x90', while '\x90' + chr(n) with n > 0 repeats the previously
    decoded character n-1 additional times.

    Malformed input (a repeat escape with no preceding character, or a
    string ending in a bare '\x90') raises an exception, as before.

    Fix: the repeat escape now refers to the last *decoded* character.
    The previous code tracked the raw chunk instead, so back-to-back
    escapes ('a\x90\x04\x90\x02') repeated the count byte, and a
    literal-\x90 escape followed by text ('\x90\x00ab\x90\x03')
    repeated '\x90' instead of 'b'.
    """
    chunks = s.split('\x90')
    result = [chunks[0]]
    # `last` holds the text whose final character a repeat refers to
    last = chunks[0]
    for chunk in chunks[1:]:
        count = ord(chunk[0])  # raises on a truncated trailing escape
        if count:
            # last[-1] raises IndexError when there is nothing to repeat
            ch = last[-1]
            result.append(ch * (count - 1))
            last = ch
        else:
            # \x90\x00 is an escaped literal \x90
            result.append('\x90')
            last = '\x90'
        tail = chunk[1:]
        result.append(tail)
        if tail:
            last = tail
    return ''.join(result)
# Precomputed lookup table for the standard CRC-32 (IEEE 802.3,
# reflected polynomial 0xEDB88320): entry i is the CRC of the single
# byte i.  Used by crc32() below, one table step per input byte.
crc_32_tab = [
    0x00000000L, 0x77073096L, 0xee0e612cL, 0x990951baL, 0x076dc419L,
    0x706af48fL, 0xe963a535L, 0x9e6495a3L, 0x0edb8832L, 0x79dcb8a4L,
    0xe0d5e91eL, 0x97d2d988L, 0x09b64c2bL, 0x7eb17cbdL, 0xe7b82d07L,
    0x90bf1d91L, 0x1db71064L, 0x6ab020f2L, 0xf3b97148L, 0x84be41deL,
    0x1adad47dL, 0x6ddde4ebL, 0xf4d4b551L, 0x83d385c7L, 0x136c9856L,
    0x646ba8c0L, 0xfd62f97aL, 0x8a65c9ecL, 0x14015c4fL, 0x63066cd9L,
    0xfa0f3d63L, 0x8d080df5L, 0x3b6e20c8L, 0x4c69105eL, 0xd56041e4L,
    0xa2677172L, 0x3c03e4d1L, 0x4b04d447L, 0xd20d85fdL, 0xa50ab56bL,
    0x35b5a8faL, 0x42b2986cL, 0xdbbbc9d6L, 0xacbcf940L, 0x32d86ce3L,
    0x45df5c75L, 0xdcd60dcfL, 0xabd13d59L, 0x26d930acL, 0x51de003aL,
    0xc8d75180L, 0xbfd06116L, 0x21b4f4b5L, 0x56b3c423L, 0xcfba9599L,
    0xb8bda50fL, 0x2802b89eL, 0x5f058808L, 0xc60cd9b2L, 0xb10be924L,
    0x2f6f7c87L, 0x58684c11L, 0xc1611dabL, 0xb6662d3dL, 0x76dc4190L,
    0x01db7106L, 0x98d220bcL, 0xefd5102aL, 0x71b18589L, 0x06b6b51fL,
    0x9fbfe4a5L, 0xe8b8d433L, 0x7807c9a2L, 0x0f00f934L, 0x9609a88eL,
    0xe10e9818L, 0x7f6a0dbbL, 0x086d3d2dL, 0x91646c97L, 0xe6635c01L,
    0x6b6b51f4L, 0x1c6c6162L, 0x856530d8L, 0xf262004eL, 0x6c0695edL,
    0x1b01a57bL, 0x8208f4c1L, 0xf50fc457L, 0x65b0d9c6L, 0x12b7e950L,
    0x8bbeb8eaL, 0xfcb9887cL, 0x62dd1ddfL, 0x15da2d49L, 0x8cd37cf3L,
    0xfbd44c65L, 0x4db26158L, 0x3ab551ceL, 0xa3bc0074L, 0xd4bb30e2L,
    0x4adfa541L, 0x3dd895d7L, 0xa4d1c46dL, 0xd3d6f4fbL, 0x4369e96aL,
    0x346ed9fcL, 0xad678846L, 0xda60b8d0L, 0x44042d73L, 0x33031de5L,
    0xaa0a4c5fL, 0xdd0d7cc9L, 0x5005713cL, 0x270241aaL, 0xbe0b1010L,
    0xc90c2086L, 0x5768b525L, 0x206f85b3L, 0xb966d409L, 0xce61e49fL,
    0x5edef90eL, 0x29d9c998L, 0xb0d09822L, 0xc7d7a8b4L, 0x59b33d17L,
    0x2eb40d81L, 0xb7bd5c3bL, 0xc0ba6cadL, 0xedb88320L, 0x9abfb3b6L,
    0x03b6e20cL, 0x74b1d29aL, 0xead54739L, 0x9dd277afL, 0x04db2615L,
    0x73dc1683L, 0xe3630b12L, 0x94643b84L, 0x0d6d6a3eL, 0x7a6a5aa8L,
    0xe40ecf0bL, 0x9309ff9dL, 0x0a00ae27L, 0x7d079eb1L, 0xf00f9344L,
    0x8708a3d2L, 0x1e01f268L, 0x6906c2feL, 0xf762575dL, 0x806567cbL,
    0x196c3671L, 0x6e6b06e7L, 0xfed41b76L, 0x89d32be0L, 0x10da7a5aL,
    0x67dd4accL, 0xf9b9df6fL, 0x8ebeeff9L, 0x17b7be43L, 0x60b08ed5L,
    0xd6d6a3e8L, 0xa1d1937eL, 0x38d8c2c4L, 0x4fdff252L, 0xd1bb67f1L,
    0xa6bc5767L, 0x3fb506ddL, 0x48b2364bL, 0xd80d2bdaL, 0xaf0a1b4cL,
    0x36034af6L, 0x41047a60L, 0xdf60efc3L, 0xa867df55L, 0x316e8eefL,
    0x4669be79L, 0xcb61b38cL, 0xbc66831aL, 0x256fd2a0L, 0x5268e236L,
    0xcc0c7795L, 0xbb0b4703L, 0x220216b9L, 0x5505262fL, 0xc5ba3bbeL,
    0xb2bd0b28L, 0x2bb45a92L, 0x5cb36a04L, 0xc2d7ffa7L, 0xb5d0cf31L,
    0x2cd99e8bL, 0x5bdeae1dL, 0x9b64c2b0L, 0xec63f226L, 0x756aa39cL,
    0x026d930aL, 0x9c0906a9L, 0xeb0e363fL, 0x72076785L, 0x05005713L,
    0x95bf4a82L, 0xe2b87a14L, 0x7bb12baeL, 0x0cb61b38L, 0x92d28e9bL,
    0xe5d5be0dL, 0x7cdcefb7L, 0x0bdbdf21L, 0x86d3d2d4L, 0xf1d4e242L,
    0x68ddb3f8L, 0x1fda836eL, 0x81be16cdL, 0xf6b9265bL, 0x6fb077e1L,
    0x18b74777L, 0x88085ae6L, 0xff0f6a70L, 0x66063bcaL, 0x11010b5cL,
    0x8f659effL, 0xf862ae69L, 0x616bffd3L, 0x166ccf45L, 0xa00ae278L,
    0xd70dd2eeL, 0x4e048354L, 0x3903b3c2L, 0xa7672661L, 0xd06016f7L,
    0x4969474dL, 0x3e6e77dbL, 0xaed16a4aL, 0xd9d65adcL, 0x40df0b66L,
    0x37d83bf0L, 0xa9bcae53L, 0xdebb9ec5L, 0x47b2cf7fL, 0x30b5ffe9L,
    0xbdbdf21cL, 0xcabac28aL, 0x53b39330L, 0x24b4a3a6L, 0xbad03605L,
    0xcdd70693L, 0x54de5729L, 0x23d967bfL, 0xb3667a2eL, 0xc4614ab8L,
    0x5d681b02L, 0x2a6f2b94L, 0xb40bbe37L, 0xc30c8ea1L, 0x5a05df1bL,
    0x2d02ef8dL
]
def crc32(s, crc=0):
    """Compute the CRC-32 checksum of string *s*, starting from *crc*.

    Returns a *signed* 32-bit integer (negative when the high bit of
    the unsigned result is set), matching CPython's binascii.crc32.

    Fixes: the signed conversion used '> 2**31', so an unsigned result
    of exactly 2**31 was returned unchanged instead of wrapping to
    -2**31; also drops a dead 'result = 0' initializer and redundant
    long() coercions (ints promote automatically in Python 2).
    """
    crc = ~crc & 0xffffffff
    for c in s:
        # (crc >> 8) zero-fills on the left: crc is masked to 32 bits
        # and always non-negative here
        crc = crc_32_tab[(crc ^ ord(c)) & 0xff] ^ (crc >> 8)
    result = crc ^ 0xffffffff
    if result >= 2**31:
        # reinterpret the unsigned 32-bit value as signed
        result -= 2**32
    return result
def b2a_hex(s):
    """Hexlify: return two lowercase hex digits for every character
    of *s* (high nibble first)."""
    digits = '0123456789abcdef'
    out = []
    for ch in s:
        code = ord(ch)
        out.append(digits[(code >> 4) & 0xf])
        out.append(digits[code & 0xf])
    return ''.join(out)
hexlify = b2a_hex
# Map an ASCII code (0-127) to the value of that hex digit, or -1 when
# the character is not a hex digit.  Codes >= 128 are out of range and
# must be rejected by the caller.
table_hex = [
  -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
  -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
  -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
   0, 1, 2, 3,  4, 5, 6, 7,  8, 9,-1,-1, -1,-1,-1,-1,
  -1,10,11,12, 13,14,15,-1, -1,-1,-1,-1, -1,-1,-1,-1,
  -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
  -1,10,11,12, 13,14,15,-1, -1,-1,-1,-1, -1,-1,-1,-1,
  -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1
]
def a2b_hex(t):
    """Unhexlify: decode a string of hex digit pairs back into the
    characters they encode.

    Raises TypeError('Odd-length string') when len(t) is odd, and
    TypeError('Non-hexadecimal digit found') for any non-hex character.

    Fix: characters with ord() >= 128 used to raise IndexError inside
    the table lookup, which the old generator caught and misreported as
    'Odd-length string'; they are now rejected as non-hex digits.
    """
    if len(t) % 2:
        raise TypeError('Odd-length string')
    result = []
    for i in range(0, len(t), 2):
        hi = ord(t[i])
        lo = ord(t[i + 1])
        a = b = -1
        if hi < 128:
            a = table_hex[hi]
        if lo < 128:
            b = table_hex[lo]
        if a < 0 or b < 0:
            raise TypeError('Non-hexadecimal digit found')
        result.append(chr((a << 4) + b))
    return ''.join(result)
unhexlify = a2b_hex
| Python |
"""
The Stackless module allows you to do multitasking without using threads.
The essential objects are tasklets and channels.
Please refer to their documentation.
"""
import traceback
import sys
try:
from _stackless import coroutine, greenlet
except ImportError: # we are running from CPython
from py.magic import greenlet
try:
    from functools import partial
except ImportError: # we are not running python 2.5
    class partial(object):
        # just enough of 'partial' to be useful: binds a callable and
        # its arguments at construction time; instances are invoked
        # with no further arguments (coroutine.bind only needs that)
        def __init__(self, func, *argl, **argd):
            self.func = func
            self.argl = argl
            self.argd = argd
        def __call__(self):
            return self.func(*self.argl, **self.argd)
class GWrap(greenlet):
    """A trivial greenlet subclass.

    greenlet instances do not accept new attributes; subclassing lets
    us attach a back-reference (``coro``) from the greenlet to the
    coroutine object that owns it.
    """
class MWrap(object):
    """Attribute-delegating wrapper.

    Used to dress the pre-existing main greenlet up as a coroutine
    frame, since it cannot be re-created as a GWrap.
    """
    def __init__(self, something):
        self.something = something
    def __getattr__(self, name):
        # only reached for attributes missing on the wrapper itself
        return getattr(self.something, name)
class coroutine(object):
    "we can't have greenlet as a base, because greenlets can't be rebound"
    def __init__(self):
        # _frame is the GWrap greenlet actually running the coroutine
        # (or an MWrap around the main greenlet); None until bind()
        self._frame = None
        self.is_zombie = False
    def __getattr__(self, attr):
        # delegate everything else (dead, run, ...) to the greenlet
        return getattr(self._frame, attr)
    def __del__(self):
        self.is_zombie = True
        del self._frame
        self._frame = None
    def bind(self, func, *argl, **argd):
        """coro.bind(f, *argl, **argd) -> None.
           binds function f to coro. f will be called with
           arguments *argl, **argd
        """
        if self._frame is None or self._frame.dead:
            # (re)create the underlying greenlet with a back-reference
            self._frame = frame = GWrap()
            frame.coro = self
        if hasattr(self._frame, 'run') and self._frame.run:
            raise ValueError("cannot bind a bound coroutine")
        # the greenlet's run attribute is its entry point on switch
        self._frame.run = partial(func, *argl, **argd)
    def switch(self):
        """coro.switch() -> returnvalue
           switches to coroutine coro. If the bound function
           f finishes, the returnvalue is that of f, otherwise
           None is returned
        """
        try:
            return greenlet.switch(self._frame)
        except TypeError, exp: # self._frame is the main coroutine
            # the main frame is an MWrap, not a real greenlet; switch
            # to the wrapped greenlet instead
            return greenlet.switch(self._frame.something)
    def kill(self):
        """coro.kill() : kill coroutine coro"""
        # raises GreenletExit inside the coroutine's greenlet
        self._frame.throw()
    def _is_alive(self):
        if self._frame is None:
            return False
        return not self._frame.dead
    is_alive = property(_is_alive)
    del _is_alive
    def getcurrent():
        """coroutine.getcurrent() -> the currently running coroutine"""
        try:
            return greenlet.getcurrent().coro
        except AttributeError:
            # the main greenlet carries no .coro back-reference
            return _maincoro
    getcurrent = staticmethod(getcurrent)
# Bootstrap: the greenlet we are running on right now becomes the main
# coroutine.  It already exists, so it cannot be a GWrap; wrap it in an
# attribute-delegating MWrap and attach the usual back-reference.
_maincoro = coroutine()
maingreenlet = greenlet.getcurrent()
_maincoro._frame = frame = MWrap(maingreenlet)
frame.coro = _maincoro
del frame
del maingreenlet
from collections import deque
import operator
__all__ = 'run getcurrent getmain schedule tasklet channel coroutine \
TaskletExit greenlet'.split()
# ---- module-level scheduler state (initialized by _init() below) ----
_global_task_id = 0        # id handed out to the next tasklet created
_squeue = None             # deque of runnable tasklets; head is running
_main_tasklet = None       # the tasklet wrapping the main coroutine
_main_coroutine = None     # set only on pypy-c (see _init)
_last_task = None          # tasklet most recently switched to
_channel_callback = None   # hook fired on every channel send/receive
_schedule_callback = None  # hook fired on every scheduler switch
def _scheduler_remove(value):
    """Remove the first occurrence of *value* from the run queue;
    silently do nothing when it is not queued."""
    try:
        position = operator.indexOf(_squeue, value)
    except ValueError:
        return
    del _squeue[position]
def _scheduler_append(value, normal=True):
    """Enqueue *value*: at the tail when *normal* is true, otherwise
    directly behind the current head so it runs next."""
    if not normal:
        # rotate the head out of the way, push value to the front,
        # then rotate the head back: head, value, rest...
        _squeue.rotate(-1)
        _squeue.appendleft(value)
        _squeue.rotate(1)
    else:
        _squeue.append(value)
def _scheduler_contains(value):
    """Return True when *value* is in the run queue.

    Uses the membership operator directly instead of probing with
    operator.indexOf and catching ValueError -- same equality-based
    search, without the exception round-trip.
    """
    return value in _squeue
def _scheduler_switch(current, next):
    """Hand control from *current* to *next*.

    Fires the schedule callback (when installed and the task actually
    changes), records *next* as the last scheduled task, and performs
    the coroutine switch.
    """
    global _last_task
    previous = _last_task
    if _schedule_callback is not None and previous is not next:
        _schedule_callback(previous, next)
    _last_task = next
    assert not next.blocked
    if next is not current:
        next.switch()
    return current
class TaskletExit(Exception):
    """Raised inside a tasklet to unwind its frame and end it silently."""
def set_schedule_callback(callback):
    """Install *callback*, invoked as callback(prev, next) on every
    scheduler switch; pass None to disable monitoring."""
    global _schedule_callback
    _schedule_callback = callback
def set_channel_callback(callback):
    """Install *callback*, invoked as callback(channel, tasklet,
    sending, willblock) on every channel action; None disables it."""
    global _channel_callback
    _channel_callback = callback
def getruncount():
    """Return the number of currently runnable tasklets."""
    return len(_squeue)
class bomb(object):
    """Carrier for an exception triple.

    When a tasklet receives a bomb as its tempval, raise_() re-raises
    the stored exception in that tasklet's context.
    """
    def __init__(self, exp_type=None, exp_value=None, exp_traceback=None):
        self.type = exp_type
        self.value = exp_value
        self.traceback = exp_traceback
    def raise_(self):
        # Python 2 three-argument raise: restores the original traceback
        raise self.type, self.value, self.traceback
class channel(object):
    """
    A channel object is used for communication between tasklets.
    By sending on a channel, a tasklet that is waiting to receive
    is resumed. If there is no waiting receiver, the sender is suspended.
    By receiving from a channel, a tasklet that is waiting to send
    is resumed. If there is no waiting sender, the receiver is suspended.
    """
    def __init__(self, label=''):
        # balance > 0: blocked senders queued; < 0: blocked receivers
        self.balance = 0
        self.closing = False
        self.queue = deque()   # blocked tasklets, FIFO
        self.label = label
    def __str__(self):
        return 'channel[%s](%s,%s)' % (self.label, self.balance, self.queue)
    def close(self):
        """
        channel.close() -- stops the channel from enlarging its queue.
        If the channel is not empty, the flag 'closing' becomes true.
        If the channel is empty, the flag 'closed' becomes true.
        """
        self.closing = True
    @property
    def closed(self):
        # closed once close() was called and the queue has drained
        return self.closing and not self.queue
    def open(self):
        """
        channel.open() -- reopen a channel. See channel.close.
        """
        self.closing = False
    def receive(self):
        """
        channel.receive() -- receive a value over the channel.
        If no other tasklet is already sending on the channel,
        the receiver will be blocked. Otherwise, the receiver will
        continue immediately, and the sender is put at the end of
        the runnables list.
        The above policy can be changed by setting channel flags.
        """
        receiver = getcurrent()
        willblock = not self.balance > 0
        if _channel_callback is not None:
            _channel_callback(self, receiver, 0, willblock)
        if self.balance > 0: # somebody is already sending
            self.balance -= 1
            sender = self.queue.popleft()
            sender.blocked = False
            # take the sender's value and make the sender runnable again
            receiver.tempval = sender.tempval
            _scheduler_append(sender)
        else: # nobody is waiting
            self.balance -= 1
            self.queue.append(receiver)
            receiver.blocked = True
            # suspend until a sender deposits into receiver.tempval
            _scheduler_remove(getcurrent())
            schedule()
            assert not receiver.blocked
        msg = receiver.tempval
        if isinstance(msg, bomb):
            # a delivered bomb explodes (re-raises) in the receiver
            msg.raise_()
        return msg
    def send_exception(self, exp_type, msg):
        # deliver an exception to the receiver instead of a value
        self.send(bomb(exp_type, exp_type(msg)))
    def send_sequence(self, iterable):
        # send every item of the iterable, in order
        for item in iterable:
            self.send(item)
    def send(self, msg):
        """
        channel.send(value) -- send a value over the channel.
        If no other tasklet is already receiving on the channel,
        the sender will be blocked. Otherwise, the receiver will
        be activated immediately, and the sender is put at the end of
        the runnables list.
        """
        sender = getcurrent()
        sender.tempval = msg
        willblock = not self.balance < 0
        if _channel_callback is not None:
            _channel_callback(self, sender, 1, willblock)
        if self.balance < 0: # somebody is already waiting
            receiver = self.queue.popleft()
            receiver.blocked = False
            self.balance += 1
            receiver.tempval = msg
            # schedule the receiver to run next (insert at the head)
            _scheduler_append(receiver, False)
            schedule()
        else: # nobody is waiting
            self.queue.append(sender)
            sender.blocked = True
            self.balance += 1
            # suspend until a receiver picks up sender.tempval
            _scheduler_remove(getcurrent())
            schedule()
            assert not sender.blocked
class tasklet(coroutine):
    """
    A tasklet object represents a tiny task in a Python thread.
    At program start, there is always one running main tasklet.
    New tasklets can be created with methods from the stackless
    module.
    """
    tempval = None
    def __new__(cls, func=None, label=''):
        return coroutine.__new__(cls)
    def __init__(self, func=None, label=''):
        coroutine.__init__(self)
        self._init(func, label)
    def _init(self, func=None, label=''):
        # also called directly on the main tasklet during _init()
        global _global_task_id
        self.func = func
        self.alive = False
        self.blocked = False
        self._task_id = _global_task_id
        self.label = label
        _global_task_id += 1
    def __str__(self):
        return '<tasklet[%s, %s]>' % (self.label,self._task_id)
    __repr__ = __str__
    def __call__(self, *argl, **argd):
        # calling a tasklet supplies its arguments and schedules it
        return self.setup(*argl, **argd)
    def bind(self, func):
        """
        Binding a tasklet to a callable object.
        The callable is usually passed in to the constructor.
        In some cases, it makes sense to be able to re-bind a tasklet,
        after it has been run, in order to keep its identity.
        Note that a tasklet can only be bound when it doesn't have a frame.
        """
        if not callable(func):
            raise TypeError('tasklet function must be a callable')
        self.func = func
    def kill(self):
        """
        tasklet.kill -- raise a TaskletExit exception for the tasklet.
        Note that this is a regular exception that can be caught.
        The tasklet is immediately activated.
        If the exception passes the toplevel frame of the tasklet,
        the tasklet will silently die.
        """
        if not self.is_zombie:
            coroutine.kill(self)
        # always deschedule and mark dead, even for zombies
        _scheduler_remove(self)
        self.alive = False
    def setup(self, *argl, **argd):
        """
        supply the parameters for the callable
        """
        if self.func is None:
            raise TypeError('cframe function must be callable')
        func = self.func
        def _func():
            # wrapper run inside the coroutine: swallow TaskletExit and
            # always deschedule/mark-dead on the way out
            try:
                try:
                    func(*argl, **argd)
                except TaskletExit:
                    pass
            finally:
                _scheduler_remove(self)
                self.alive = False
        self.func = None
        coroutine.bind(self, _func)
        self.alive = True
        _scheduler_append(self)
        return self
    def run(self):
        # make the tasklet runnable if it is not queued already
        if _scheduler_contains(self):
            return
        else:
            _scheduler_append(self)
    def __reduce__(self):
        # pickle as (factory, args, state); state is handled below
        one, two, three = coroutine.__reduce__(self)
        assert one is coroutine
        assert two == ()
        return tasklet, (), (three, self.alive, self.tempval)
    def __setstate__(self, (coro_state, alive, tempval)):
        coroutine.__setstate__(self, coro_state)
        self.alive = alive
        self.tempval = tempval
def getmain():
    """
    getmain() -- return the main tasklet.
    """
    return _main_tasklet
def getcurrent():
    """
    getcurrent() -- return the currently executing tasklet.
    """
    running = coroutine.getcurrent()
    # on pypy-c the main coroutine is not a tasklet; report its proxy
    if running is _main_coroutine:
        return _main_tasklet
    return running
# Stack of tasklets currently inside run(), waiting for the queue to drain.
_run_calls = []
def run():
    """
    run() -- run scheduled tasklets until none are left.
    The calling tasklet is put aside while the tasklets are running and
    is re-inserted into the queue once it has drained.  Tasklets must
    provide cooperative schedule() calls.
    If an exception occurs, it will be passed to the main tasklet.
    (The original docstring described a 'timeout' parameter that this
    implementation does not take.)
    """
    curr = getcurrent()
    _run_calls.append(curr)
    _scheduler_remove(curr)
    try:
        schedule()
        assert not _squeue
    finally:
        _scheduler_append(curr)
def schedule_remove(retval=None):
    """
    schedule_remove(retval=stackless.current) -- switch to the next
    runnable tasklet and remove the current one from the run queue.
    The return value for this call is retval, with the current
    tasklet as default.
    """
    _scheduler_remove(getcurrent())
    return schedule(retval)
def schedule(retval=None):
    """
    schedule(retval=stackless.current) -- switch to the next runnable tasklet.
    The return value for this call is retval, with the current
    tasklet as default.
    schedule_remove(retval=stackless.current) -- ditto, and remove self.

    (Removed an unused local 'mtask = getmain()' -- getmain() only
    reads a module global and the value was never referenced.)
    """
    curr = getcurrent()
    if retval is None:
        retval = curr
    while True:
        if _squeue:
            if _squeue[0] is curr:
                # If the current is at the head, skip it.
                _squeue.rotate(-1)
            task = _squeue[0]
        elif _run_calls:
            # queue drained: resume the most recent run() caller
            task = _run_calls.pop()
        else:
            raise RuntimeError('No runnable tasklets left.')
        _scheduler_switch(curr, task)
        if curr is _last_task:
            # We are in the tasklet we want to resume at this point.
            return retval
def _init():
    """Module bootstrap: turn the running context into the main
    tasklet and create the run queue."""
    global _main_tasklet
    global _global_task_id
    global _squeue
    global _last_task
    _global_task_id = 0
    _main_tasklet = coroutine.getcurrent()
    try:
        # on CPython+greenlet the main coroutine can simply be retyped
        _main_tasklet.__class__ = tasklet
    except TypeError: # we are running pypy-c
        class TaskletProxy(object):
            """TaskletProxy is needed to give the _main_coroutine tasklet behaviour"""
            def __init__(self, coro):
                self._coro = coro
            def __getattr__(self,attr):
                return getattr(self._coro,attr)
            def __str__(self):
                return '<tasklet %s a:%s>' % (self._task_id, self.is_alive)
            def __reduce__(self):
                # pickle as 'the main tasklet'
                return getmain, ()
            __repr__ = __str__
        global _main_coroutine
        _main_coroutine = _main_tasklet
        _main_tasklet = TaskletProxy(_main_tasklet)
        assert _main_tasklet.is_alive and not _main_tasklet.is_zombie
    _last_task = _main_tasklet
    # initialize tasklet attributes on the (possibly proxied) main tasklet
    tasklet._init.im_func(_main_tasklet, label='main')
    _squeue = deque()
    _scheduler_append(_main_tasklet)
_init()
| Python |
"""
The Stackless module allows you to do multitasking without using threads.
The essential objects are tasklets and channels.
Please refer to their documentation.
"""
import traceback
import sys
try:
deadtask = set()
except NameError:
from sets import Set as set
deadtask = set()
switches = 0
try:
from _stackless import coroutine, greenlet
except ImportError: # we are running from CPython
# you must have coroutine from
# http://codespeak.net/svn/user/stephan/hacks/coroutine/
# in your path in order to get the following to work
from py.magic import greenlet
from coroutine import coroutine
__all__ = 'run getcurrent getmain schedule tasklet \
channel TaskletExit coroutine greenlet'.split()
# ---- module-level scheduler state ----
main_tasklet = main_coroutine = None
scheduler = None             # the module-wide scheduler instance
channel_hook = None          # user callback fired on channel actions
schedlock = False            # guards against recursive channel callbacks
_schedule_fasthook = None    # internal adapter actually called on switches
_schedule_hook = None        # user-supplied schedule callback
class TaskletExit(Exception):
    """Delivered to a tasklet to make it terminate silently."""
def SETNEXT(obj, val):
    "debugging helper: assign obj.next (plain assignment, easy to breakpoint)"
    obj.next = val
def SETPREV(obj, val):
    "debugging helper: assign obj.prev (plain assignment, easy to breakpoint)"
    obj.prev = val
def SETNONE(obj):
    "debugging helper: unlink obj from its doubly-linked list"
    obj.prev = obj.next = None
def SWAPVAL(task1, task2):
    """Debugging helper: exchange the tempval attributes of two tasks."""
    assert task1 is not None
    assert task2 is not None
    tmp = task1.tempval
    task1.tempval = task2.tempval
    task2.tempval = tmp
def SETVAL(task, val):
    "debugging helper: store val in task.tempval"
    assert task is not None
    task.tempval = val
# id assigned to the most recently created tasklet (0 is the main tasklet)
last_task_id = 0
def restore_exception(etype, value, stack):
    """Re-raise the exception triple with its original traceback
    (Python 2 three-argument raise)."""
    raise etype, value, stack
class TaskletProxy(object):
    """TaskletProxy is needed to give the main_coroutine tasklet behaviour.

    The main coroutine cannot be retyped, so every tasklet attribute is
    mirrored here with main-tasklet defaults and everything else is
    delegated to the wrapped coroutine.
    """
    def __init__(self, coro):
        # mirror of tasklet.__slots__, initialized for the main tasklet
        self.alive = True
        self.atomic = False
        self.blocked = 0
        self.block_trap = False
        self.frame = None
        self.ignore_nesting = False
        self.is_current = False
        self.is_main = False
        self.nesting_level = 0
        self.next = self.prev = None
        self.paused = False
        self.recursion_depth = 0
        self.restorable = False
        self.scheduled = False
        self.task_id = 0
        self.tempval = None
        self._coro = coro
    def __repr__(self):
        # borrow tasklet's textual representation
        return tasklet.__str__(self)
    __str__ = __repr__
    def __getattr__(self,attr):
        # anything not mirrored above comes from the wrapped coroutine
        return getattr(self._coro,attr)
    def __reduce__(self):
        # pickle as 'the main tasklet'
        return getmain, ()
class bomb(object):
    """
    A bomb object is used to hold exceptions in tasklets.
    Whenever a tasklet is activated and its tempval is a bomb,
    it will explode as an exception.
    You can create a bomb by hand and attach it to a tasklet if you like.
    Note that bombs are 'sloppy' about the argument list, which means that
    the following works, although you should use '*sys.exc_info()'.
    from stackless import *; import sys
    t = tasklet(lambda:42)()
    try: 1/0
    except: b = bomb(sys.exc_info())
    t.tempval = b
    t.run() # let the bomb explode
    """
    # class-level defaults so unset fields are always present
    traceback = None
    type = None
    value = None
    def __init__(self,etype=None, value=None, traceback=None):
        self.type = etype
        self.value = value
        self.traceback = traceback
    def _explode(self):
        # re-raise the stored exception in the current context
        restore_exception(self.type, self.value, self.traceback)
def make_deadlock_bomb():
    """Build the bomb delivered when the last runnable tasklet blocks."""
    error = RuntimeError(
        "Deadlock: the last runnable tasklet cannot be blocked.")
    return bomb(RuntimeError, error, None)
def curexc_to_bomb():
    """Wrap the exception currently being handled in a bomb."""
    return bomb(*sys.exc_info())
def enable_softswitch(flag):
    """
    enable_softswitch(flag) -- control the switching behavior.
    Tasklets can be either switched by moving C stack slices around
    or by avoiding stack changes at all. The latter is only possible
    in the top interpreter level. Switching it off is for timing and
    debugging purposes. This flag exists once for the whole process.
    For inquiry only, use the phrase
    ret = enable_softswitch(0); enable_softswitch(ret)
    By default, soft switching is enabled.
    This is not implemented yet!!!!
    """
    # intentionally a no-op: kept for API compatibility with Stackless
    pass
def get_thread_info(task_id):
    """
    get_thread_info(task_id) -- return a 3-tuple of the thread's
    main tasklet, current tasklet and runcount.
    To obtain a list of all thread infos, use
    map (stackless.get_thread_info, stackless.threads)
    This is not implemented yet!!!!
    """
    # intentionally a no-op: kept for API compatibility with Stackless
    pass
def set_channel_callback(callable):
    """
    set_channel_callback(callable) -- install a callback for channels.
    Every send/receive action will call the callback function.
    Example:
    def channel_cb(channel, tasklet, sending, willblock):
        ...
    sending and willblock are booleans.
    Pass None to switch monitoring off again.
    """
    global channel_hook
    channel_hook = callable
def _schedule_callback(prev, next):
    """Internal adapter installed as _schedule_fasthook; forwards the
    switch notification to the user-supplied _schedule_hook.

    (Removed the 'global _schedule_hook' declaration -- the hook is
    only read here, never assigned, so the statement was a no-op.)
    """
    return _schedule_hook(prev, next)
def set_schedule_callback(func):
    """
    set_schedule_callback(callable) -- install a callback for scheduling.
    Every explicit or implicit schedule will call the callback function
    right before the switch is actually done.
    Example:
    def schedule_cb(prev, next):
        ...
    When a tasklet is dying, next is None.
    When main starts up or after death, prev is None.
    Pass None to switch monitoring off again.

    Fixes: corrected the error-message typo ('nust' -> 'must') and
    dropped the stray 'global _schedule_callback' declaration (that
    name is a module function and is never assigned here).
    """
    global _schedule_fasthook
    global _schedule_hook
    if func is not None and not callable(func):
        raise TypeError("schedule callback must be callable")
    _schedule_hook = func
    if func is None:
        _schedule_fasthook = None
    else:
        # route switches through the internal adapter
        _schedule_fasthook = _schedule_callback
def run(timeout=0):
    """
    run_watchdog(timeout) -- run tasklets until they are all
    done, or timeout instructions have passed. Tasklets must
    provide cooperative schedule() calls.
    If the timeout is met, the function returns.
    The calling tasklet is put aside while the tasklets are running.
    It is inserted back after the function stops, right before the
    tasklet that caused a timeout, if any.
    If an exception occours, it will be passed to the main tasklet.
    Please note that the 'timeout' feature is not yet implemented
    """
    me = scheduler.current_remove()
    if me is not main_tasklet:
        # The old message was built with a backslash continuation inside
        # the string literal, which embedded the following line's
        # leading whitespace into the text; use implicit concatenation.
        raise RuntimeError("run() must be run from the main thread's "
                           "main tasklet")
    return scheduler.schedule_task(me, scheduler._head)
def getcurrent():
    """
    getcurrent() -- return the currently executing tasklet.
    """
    running = coroutine.getcurrent()
    # the main coroutine is represented to callers by the main tasklet
    if running is main_coroutine:
        return main_tasklet
    return running
def getmain():
    """getmain() -- return the main tasklet."""
    return main_tasklet
def _do_schedule(retval=None, remove=False):
    """Common implementation behind schedule()/schedule_remove().

    Switches from the current head tasklet to its successor, optionally
    removing the current one from the queue first.  Returns *retval*
    when given, otherwise whatever the scheduler switch returned.
    """
    prev = scheduler._head
    next = prev.next
    if remove:
        scheduler.current_remove()
    result = scheduler.schedule_task(prev, next)
    if retval is not None:
        return retval
    return result
def schedule_remove(retval=None):
    """
    schedule_remove(retval=stackless.current) -- switch to the next
    runnable tasklet and remove the current tasklet from the queue.
    The return value for this call is retval, with the current
    tasklet as default.
    """
    return _do_schedule(retval, remove=True)
def schedule(retval=None):
    """
    schedule(retval=stackless.current) -- switch to the next runnable
    tasklet, leaving the current one in the queue.
    The return value for this call is retval, with the current
    tasklet as default.
    """
    return _do_schedule(retval, remove=False)
class tasklet(coroutine):
    """
    A tasklet object represents a tiny task in a Python thread.
    At program start, there is always one running main tasklet.
    New tasklets can be created with methods from the stackless
    module.
    """
    # fixed attribute set: keeps instances small and mirrors the
    # attribute list that TaskletProxy fakes for the main tasklet
    __slots__ = ['alive','atomic','blocked','block_trap','frame',
                 'ignore_nesting','is_current','is_main',
                 'nesting_level','next','paused','prev','recursion_depth',
                 'restorable','scheduled','tempval','task_id']
    def __new__(cls, func=None):
        return super(tasklet,cls).__new__(cls)
    def __init__(self, func=None):
        global last_task_id
        super(tasklet,self).__init__()
        self.alive = False
        self.atomic = False
        # blocked is the blocking direction: +1 sending, -1 receiving, 0 free
        self.blocked = 0
        self.block_trap = False
        self.frame = None
        self.ignore_nesting = False
        self.is_current = False
        self.is_main = False
        self.nesting_level = 0
        # prev/next link the tasklet into a scheduler or channel list
        self.next = self.prev = None
        self.paused = False
        self.recursion_depth = 0
        self.restorable = False
        self.scheduled = False
        last_task_id += 1
        self.task_id = last_task_id
        self.tempval = None
        if func is not None:
            self.bind(func)
    def __call__(self, *argl, **argd):
        # calling a tasklet supplies its arguments and schedules it
        self.setup(*argl, **argd)
        return self
    def __repr__(self):
        next = None
        if self.next is not None:
            next = self.next.task_id
        prev = None
        if self.prev is not None:
            prev = self.prev.task_id
        if self.blocked:
            bs = 'b'
        else:
            bs = '-'
        return 'T%s(%s) (%s, %s)' % (self.task_id, bs, next, prev)
    __str__ = __repr__
    def bind(self, func):
        """
        Binding a tasklet to a callable object.
        The callable is usually passed in to the constructor.
        In some cases, it makes sense to be able to re-bind a tasklet,
        after it has been run, in order to keep its identity.
        Note that a tasklet can only be bound when it doesn't have a frame.
        """
        if not callable(func):
            raise TypeError('tasklet function must be a callable')
        # the callable is parked in tempval until setup() binds it
        SETVAL(self, func)
    def insert(self):
        """
        Insert this tasklet at the end of the scheduler list,
        given that it isn't blocked.
        Blocked tasklets need to be reactivated by channels.
        """
        if self.blocked:
            raise RuntimeError('You cannot run a blocked tasklet')
        if self.is_zombie:
            raise RuntimeError('You cannot run an unbound(dead) tasklet')
        # next is None only while unlinked from any list
        if self.next is None:
            scheduler.current_insert(self)
    def kill(self):
        """
        tasklet.kill -- raise a TaskletExit exception for the tasklet.
        Note that this is a regular exception that can be caught.
        The tasklet is immediately activated.
        If the exception passes the toplevel frame of the tasklet,
        the tasklet will silently die.
        """
        if not self.is_zombie:
            coroutine.kill(self)
        return self.raise_exception(TaskletExit, TaskletExit())
    def raise_exception(self, exc, value):
        """
        tasklet.raise_exception(exc, value) -- raise an exception for the
        tasklet. exc must be a subclass of Exception.
        The tasklet is immediately activated.
        """
        # deliver a bomb as the tasklet's tempval and switch to it
        b = bomb(exc, value)
        SETVAL(self, b)
        return scheduler.schedule_task(getcurrent(), self)
    def remove(self):
        """
        Removing a tasklet from the runnables queue.
        Note: If this tasklet has a non-trivial C stack attached,
        it will be destructed when the containing thread state is destroyed.
        Since this will happen in some unpredictable order, it may cause
        unwanted side-effects. Therefore it is recommended to either run
        tasklets to the end or to explicitly kill() them.
        """
        scheduler.current_remove(self)
    def run(self):
        """
        Run this tasklet, given that it isn't blocked.
        Blocked tasks need to be reactivated by channels.
        """
        scheduler.schedule_task(getcurrent(), self)
    def set_atomic(self, val):
        """
        t.set_atomic(flag) -- set tasklet atomic status and return current one.
        If set, the tasklet will not be auto-scheduled.
        This flag is useful for critical sections which should not be
        interrupted.
        usage:
            tmp = t.set_atomic(1)
            # do critical stuff
            t.set_atomic(tmp)
        Note: Whenever a new tasklet is created, the atomic flag is initialized
        with the atomic flag of the current tasklet.Atomic behavior is
        additionally influenced by the interpreter nesting level.
        See set_ignore_nesting.
        """
        tmpval = self.atomic
        self.atomic = val
        return tmpval
    def set_ignore_nesting(self, flag):
        """
        t.set_ignore_nesting(flag) -- set tasklet ignore_nesting status and
        return current one. If set, the tasklet may be auto-scheduled,
        even if its nesting_level is > 0.
        This flag makes sense if you know that nested interpreter levels are
        safe for auto-scheduling. This is on your own risk, handle with care!
        usage:
            tmp = t.set_ignore_nesting(1)
            # do critical stuff
            t.set_ignore_nesting(tmp)
        Please note that this piece of code does effectively nothing.
        """
        tmpval = self.ignore_nesting
        self.ignore_nesting = flag
        return tmpval
    def finished(self, excinfo):
        """called, when coroutine is finished. This gives the tasklet
        a chance to clean up after himself."""
        if self.alive:
            self.alive = False
            # pick the successor to run: the next queued tasklet, or
            # main when this was the only one
            if self.next is not self:
                next = self.next
            else:
                next = getmain()
            scheduler.remove_task(self)
            deadtask.add(self)
            prev = self
            if excinfo[0] is not None:
                # an uncaught exception travels to the main tasklet
                et = excinfo[0]
                ev = excinfo[1]
                tr = excinfo[2]
                b = bomb(et, et(ev), tr)
                next = getmain()
                SETVAL(next, b)
            scheduler.schedule_task(prev, next)
    def setup(self, *argl, **argd):
        """
        supply the parameters for the callable
        """
        # bind() parked the callable in tempval; None means unbound
        if self.tempval is None:
            raise TypeError('cframe function must be callable')
        coroutine.bind(self,self.tempval,*argl,**argd)
        SETVAL(self, None)
        self.alive = True
        self.insert()
    def __reduce__(self):
        # xxx save more
        one, two, three = coroutine.__reduce__(self)
        assert one is coroutine
        assert two == ()
        return tasklet, (), (three, self.alive, self.tempval)
    def __setstate__(self, (coro_state, alive, tempval)):
        coroutine.__setstate__(self, coro_state)
        self.alive = alive
        self.tempval = tempval
def channel_callback(chan, task, sending, willblock):
    """Invoke the user-installed channel hook (see set_channel_callback)."""
    return channel_hook(chan, task, sending, willblock)
class channel(object):
"""
A channel object is used for communication between tasklets.
By sending on a channel, a tasklet that is waiting to receive
is resumed. If there is no waiting receiver, the sender is suspended.
By receiving from a channel, a tasklet that is waiting to send
is resumed. If there is no waiting sender, the receiver is suspended.
"""
    def __init__(self):
        # balance > 0: blocked senders queued; < 0: blocked receivers
        self.balance = 0
        self.closing = False
        # NOTE(review): -1 appears to prefer the receiving side in
        # _channel_action's 'preference == -d' test -- confirm
        self.preference = -1
        # circular doubly-linked queue of blocked tasklets; an empty
        # queue is represented by next/prev pointing at the channel
        self.next = self.prev = self
        self.schedule_all = False
        self.task_id = -2   # sentinel id (a channel is not a tasklet)
    def __str__(self):
        # render the balance and the ids of all queued tasklets
        parts = ['%s' % x.task_id for x in self._content()]
        return 'channel(' + str(self.balance) + '): ['+' -> '.join(parts)+']'
def _get_closed(self):
return self.closing and self.next is None
closed = property(_get_closed)
    def _channel_insert(self, task, d):
        # enqueue the task and record the direction it is blocked on
        # (d = +1 blocked sending, d = -1 blocked receiving)
        self._ins(task)
        self.balance += d
        task.blocked = d
    def _content(self):
        # Walk the circular queue starting after the channel itself and
        # collect the queued tasklets; the visited set guards against
        # following a corrupted (cyclic) chain forever.
        visited = set((self,))
        items = []
        next = self.next
        if next is not self:
            while next is not None and next not in visited:
                items.append(next)
                visited.add(next)
                next = next.next
        return items
def _queue(self):
if self.next is self:
return None
else:
return self.next
    def _channel_remove(self, d):
        # pop the first queued tasklet, adjust the balance by the
        # caller-supplied direction, and mark the tasklet unblocked
        ret = self.next
        assert isinstance(ret, (tasklet, TaskletProxy))
        self.balance -= d
        self._rem(ret)
        ret.blocked = 0
        return ret
    def channel_remove_specific(self, d, task):
        # note: we assume that the task is in the channel
        self.balance -= d
        self._rem(task)
        task.blocked = 0
        return task
    def _ins(self, task):
        # splice the task in at the tail of the circular list
        # (between self.prev and self); the task must be unlinked
        if (task.next is not None) or (task.prev is not None):
            raise AssertionError('task.next and task.prev must be None')
        # insert at end
        SETPREV(task, self.prev)
        SETNEXT(task, self)
        SETNEXT(self.prev, task)
        SETPREV(self, task)
    def _rem(self, task):
        # unsplice the task from the circular list and clear its links
        assert task.next is not None
        assert task.prev is not None
        #remove at end
        SETPREV(task.next, task.prev)
        SETNEXT(task.prev, task.next)
        SETNONE(task)
def _notify(self, task, d, cando, res):
global schedlock
global channel_hook
if channel_hook is not None:
if schedlock:
raise RuntimeError('Recursive channel call due to callbacks!')
schedlock = 1
channel_callback(self, task, d > 0, not cando)
schedlock = 0
def _channel_action(self, arg, d, stackl):
source = scheduler._head
target = self.next
assert source is getcurrent()
interthread = 0 # no interthreading at the moment
if d > 0:
cando = self.balance < 0
else:
cando = self.balance > 0
assert abs(d) == 1
SETVAL(source, arg)
if not interthread:
self._notify(source, d, cando, None)
if cando:
# communication 1): there is somebody waiting
target = self._channel_remove(-d)
SWAPVAL(source, target)
if interthread:
raise Exception('no interthreading: I can not be reached...')
else:
if self.schedule_all:
scheduler.current_insert(target)
target = source.next
elif self.preference == -d:
scheduler._set_head(source.next)
scheduler.current_insert(target)
scheduler._set_head(source)
else:
scheduler.current_insert(target)
target = source
else:
# communication 2): there is nobody waiting
if source.block_trap:
raise RuntimeError("this tasklet does not like to be blocked")
if self.closing:
raise StopIteration()
scheduler.current_remove()
self._channel_insert(source, d)
target = scheduler._head
retval = scheduler.schedule_task(source, target)
if interthread:
self._notify(source, d, cando, None)
return retval
def close(self):
"""
channel.close() -- stops the channel from enlarging its queue.
If the channel is not empty, the flag 'closing' becomes true.
If the channel is empty, the flag 'closed' becomes true.
"""
self.closing = True
def next(self):
"""
x.next() -> the next value, or raise StopIteration
"""
if self.closing and not self.balance:
raise StopIteration()
yield self.receive()
def open(self):
"""
channel.open() -- reopen a channel. See channel.close.
"""
self.closing = False
def receive(self):
"""
channel.receive() -- receive a value over the channel.
If no other tasklet is already sending on the channel,
the receiver will be blocked. Otherwise, the receiver will
continue immediately, and the sender is put at the end of
the runnables list.
The above policy can be changed by setting channel flags.
"""
return self._channel_action(None, -1, 1)
def send(self, msg):
"""
channel.send(value) -- send a value over the channel.
If no other tasklet is already receiving on the channel,
the sender will be blocked. Otherwise, the receiver will
be activated immediately, and the sender is put at the end of
the runnables list.
"""
return self._channel_action(msg, 1, 1)
def send_exception(self, exc, value):
"""
channel.send_exception(exc, value) -- send an exception over the
channel. exc must be a subclass of Exception.
Behavior is like channel.send, but that the receiver gets an exception.
"""
b = bomb(exc, value)
self.send(bomb)
def send_sequence(self, value):
"""
channel.send_sequence(seq) -- sed a stream of values
over the channel. Combined with a generator, this is
a very efficient way to build fast pipes.
"""
for item in value:
self.send(item)
class Scheduler(object):
    """The singleton Scheduler. Provides mostly scheduling convenience
    functions. In normal circumstances, scheduler._head point the
    current running tasklet. _head and current_tasklet might be
    out of sync just before the actual task switch takes place.

    The runnables queue is a circular doubly-linked list of tasklets;
    _head is the entry point into it (or None when the queue is empty).
    """
    def __init__(self):
        self._set_head(getcurrent())
    def _cleanup(self, task):
        # Called when 'task' has finished: mark it dead, unlink it, and
        # ensure somebody keeps running (the main tasklet if the queue
        # would otherwise be empty).
        task.alive = False
        self.remove_task(task)
        if self._head is None:
            self.current_insert(main_tasklet)
        self.schedule_task(getcurrent(), self._head)
    def _set_head(self, task):
        # 'task' (or None) becomes the head of the runnables queue.
        self._head = task
    def reset(self):
        # Forget the current queue and restart from the running tasklet.
        self.__init__()
    def __len__(self):
        return len(self._content())
    def _content(self):
        "convenience method to get the tasklets that are in the queue"
        # The 'visited' set terminates the walk around the circular list.
        # NOTE(review): '_head is not self' looks like a copy from
        # channel._content; _head is a tasklet, never the scheduler itself,
        # so the guard is presumably always true -- confirm.
        visited = set()
        items = []
        next = self._head
        if next is not self:
            while next is not None and next not in visited:
                items.append(next)
                visited.add(next)
                next = next.next
        return items
    def __str__(self):
        parts = ['%s' % x.task_id for x in self._content()]
        if self._head is not self:
            currid = self._head.task_id
        else:
            currid = -1
        return 'Scheduler: [' + ' -> '.join(parts) + ']'
    def _chain_insert(self, task):
        # Insert 'task' at the end of the circular queue (i.e. just
        # before _head), or make it a one-element queue if empty.
        assert task.next is None
        assert task.prev is None
        if self._head is None:
            SETNEXT(task, task)
            SETPREV(task, task)
            self._set_head(task)
        else:
            r = self._head
            l = r.prev
            SETNEXT(l, task)
            SETPREV(r, task)
            SETPREV(task, l)
            SETNEXT(task, r)
    def remove_task(self, task):
        # Unlink 'task' from the circular queue; the head moves to the
        # task after it, or becomes None if the queue is now empty.
        l = task.prev
        r = task.next
        SETNEXT(l, r)
        SETPREV(r, l)
        self._set_head(r)
        if r == task:
            self._set_head(None)
        SETNONE(task)
        return task
    def _chain_remove(self):
        # Remove and return the head tasklet, or None if the queue is empty.
        if self._head is None:
            return None
        return self.remove_task(self._head)
    def current_insert(self, task):
        "insert 'task' at end of running queue"
        self._chain_insert(task)
    def current_insert_after(self, task):
        "insert 'task' just after the current one"
        # Temporarily advance the head so _chain_insert lands the task
        # right behind the current one, then restore the head.
        if self._head is not None:
            curr = self._head
            self._set_head(curr.next)
            self._chain_insert(task)
            self._set_head(curr)
        else:
            self.current_insert(task)
    def current_remove(self):
        "remove current tasklet from queue"
        return self._chain_remove()
    def channel_remove_slow(self, task):
        # 'task' is blocked on some channel; walk backwards along the
        # blocked-queue links until we hit the channel sentinel itself,
        # then remove the task from that channel.
        prev = task.prev
        while not isinstance(prev, channel):
            prev = prev.prev
        chan = prev
        assert chan.balance
        if chan.balance > 0:
            d = 1
        else:
            d = -1
        return chan.channel_remove_specific(d, task)
    def bomb_explode(self, task):
        # A bomb stored in task.tempval re-raises its exception here.
        thisbomb = task.tempval
        assert isinstance(thisbomb, bomb)
        SETVAL(task, None)
        thisbomb._explode()
#        try:
#            thisbomb._explode()
#        finally:
#            if getcurrent() == main_tasklet:
#                sys.excepthook(thisbomb.type,
#                               thisbomb.value,
#                               thisbomb.traceback)
#                sys.exit()
    def _notify_schedule(self, prev, next, errflag):
        # Run the fast schedule hook, guarding against recursion.
        if _schedule_fasthook is not None:
            global schedlock
            if schedlock:
                raise RuntimeError('Recursive scheduler call due to callbacks!')
            schedlock = True
            ret = _schedule_fasthook(prev, next)
            schedlock = False
            if ret:
                return errflag
    def schedule_task_block(self, prev):
        # Nothing is runnable: either wake the main tasklet (possibly
        # forwarding a pending bomb), or declare deadlock.
        if main_tasklet.next is None:
            if isinstance(prev.tempval, bomb):
                SETVAL(main_tasklet, prev.tempval)
            return self.schedule_task(prev, main_tasklet)
        retval = make_deadlock_bomb()
        SETVAL(prev, retval)
        return self.schedule_task(prev, prev)
    def schedule_task(self, prev, next):
        # Switch execution from 'prev' to 'next', exploding any bomb
        # found in the resumed tasklet's tempval.
        global switches
        switches += 1
        myswitch = switches
        if next is None:
            return self.schedule_task_block(prev)
        if next.blocked:
            self.channel_remove_slow(next)
            self.current_insert(next)
        elif next.next is None:
            self.current_insert(next)
        if prev is next:
            retval = prev.tempval
            if isinstance(retval, bomb):
                self.bomb_explode(prev)
            return retval
        self._notify_schedule(prev, next, None)
        self._set_head(next)
        try:
            res = next.switch()
        except:
            pass
        # Reap tasklets that died while we were switched away.
        for dead in tuple(deadtask):
            deadtask.discard(dead)
            # the code below should work, but doesn't
            #if not dead.is_zombie:
            #    coroutine.kill(dead)
            #    del dead
        retval = prev.tempval
        if isinstance(retval, bomb):
            self.bomb_explode(prev)
        return retval
    def schedule_callback(self, prev, next):
        # Adapter giving the slow hook a 0/-1 style return value.
        ret = _schedule_hook(prev, next)
        if ret:
            return 0
        else:
            return -1
    def __reduce__(self):
        # Pickling: the singleton unpickles back to the global 'scheduler'.
        if self is scheduler:
            return _return_sched, (), ()
def _return_sched():
    """Unpickling helper: the Scheduler is a singleton, so simply hand
    back the global instance."""
    return scheduler
def __init():
    # Module bootstrap: wrap the currently running coroutine in a
    # TaskletProxy acting as the main tasklet, link it into a one-element
    # circular queue, and create the singleton Scheduler.
    global main_tasklet
    global main_coroutine
    global scheduler
    main_coroutine = c = coroutine.getcurrent()
    main_tasklet = TaskletProxy(c)
    SETNEXT(main_tasklet, main_tasklet)
    SETPREV(main_tasklet, main_tasklet)
    main_tasklet.is_main = True
    scheduler = Scheduler()
# Run the bootstrap at import time and keep the old public alias.
__init()
_init = __init # compatibility to stackless_new
| Python |
"""
Arguments objects.
"""
from pypy.interpreter.error import OperationError
class AbstractArguments:
    """Abstract base class for the different representations of the
    arguments of a call.  Subclasses supply unpack(), fixedunpack(),
    _match_signature() and friends; this class implements the common
    signature-parsing entry points on top of them.

    A 'signature' is a tuple (argnames, varargname, kwargname) where the
    last two are None when the code object has no *arg / **kwarg.
    """
    def parse(self, fnname, signature, defaults_w=[]):
        """Parse args and kwargs to initialize a frame
        according to the signature of code object.

        Raises an app-level TypeError (wrapping the ArgErr message,
        prefixed with the function name) on mismatch.
        """
        try:
            return self.match_signature(signature, defaults_w)
        except ArgErr, e:
            raise OperationError(self.space.w_TypeError,
                                 self.space.wrap(e.getmsg(fnname)))
    def parse_into_scope(self, scope_w, fnname, signature, defaults_w=[]):
        """Parse args and kwargs to initialize a frame
        according to the signature of code object.
        Store the argumentvalues into scope_w.
        scope_w must be big enough for signature.
        """
        argnames, varargname, kwargname = signature
        has_vararg = varargname is not None
        has_kwarg = kwargname is not None
        try:
            return self._match_signature(scope_w, argnames, has_vararg,
                                         has_kwarg, defaults_w, 0, None)
        except ArgErr, e:
            raise OperationError(self.space.w_TypeError,
                                 self.space.wrap(e.getmsg(fnname)))
    def frompacked(space, w_args=None, w_kwds=None):
        """Convenience static method to build an Arguments
        from a wrapped sequence and a wrapped dictionary."""
        return Arguments(space, [], w_stararg=w_args, w_starstararg=w_kwds)
    frompacked = staticmethod(frompacked)
    def topacked(self):
        """Express the Argument object as a pair of wrapped w_args, w_kwds."""
        space = self.space
        args_w, kwds_w = self.unpack()
        w_args = space.newtuple(args_w)
        w_kwds = space.newdict()
        for key, w_value in kwds_w.items():
            space.setitem(w_kwds, space.wrap(key), w_value)
        return w_args, w_kwds
    def fromshape(space, (shape_cnt,shape_keys,shape_star,shape_stst), data_w):
        """Static method: rebuild an Arguments from a flattened
        (shape, data_w) pair as produced by flatten().  data_w holds, in
        order: shape_cnt positional values, one value per shape_keys
        keyword, then the *arg and **kwarg values if the flags are set."""
        args_w = data_w[:shape_cnt]
        p = shape_cnt
        kwds_w = {}
        for i in range(len(shape_keys)):
            kwds_w[shape_keys[i]] = data_w[p]
            p += 1
        if shape_star:
            w_star = data_w[p]
            p += 1
        else:
            w_star = None
        if shape_stst:
            w_starstar = data_w[p]
            p += 1
        else:
            w_starstar = None
        return Arguments(space, args_w, kwds_w, w_star, w_starstar)
    fromshape = staticmethod(fromshape)
    def prepend(self, w_firstarg):
        "Return a new Arguments with a new argument inserted first."
        return ArgumentsPrepended(self, w_firstarg)
    def popfirst(self):
        """For optimization only: might return (w_firstarg, args_with_rest),
        or might just raise IndexError.
        """
        raise IndexError
    def match_signature(self, signature, defaults_w):
        """Parse args and kwargs according to the signature of a code object,
        or raise an ArgErr in case of failure.
        """
        argnames, varargname, kwargname = signature
        scopelen = len(argnames)
        has_vararg = varargname is not None
        has_kwarg = kwargname is not None
        # One extra scope slot each for the *arg tuple and the **kwarg dict.
        if has_vararg:
            scopelen += 1
        if has_kwarg:
            scopelen += 1
        scope_w = [None] * scopelen
        self._match_signature(scope_w, argnames, has_vararg, has_kwarg, defaults_w, 0, None)
        return scope_w
    def unmatch_signature(self, signature, data_w):
        """kind of inverse of match_signature"""
        # Given the already-matched scope values data_w, rebuild an
        # Arguments carrying the same positional count and keyword names
        # as 'self'.
        args_w, kwds_w = self.unpack()
        need_cnt = len(args_w)
        need_kwds = kwds_w.keys()
        space = self.space
        argnames, varargname, kwargname = signature
        cnt = len(argnames)
        data_args_w = data_w[:cnt]
        if varargname:
            data_w_stararg = data_w[cnt]
            cnt += 1
        else:
            data_w_stararg = space.newtuple([])
        unfiltered_kwds_w = {}
        if kwargname:
            data_w_starargarg = data_w[cnt]
            for w_key in space.unpackiterable(data_w_starargarg):
                key = space.str_w(w_key)
                w_value = space.getitem(data_w_starargarg, w_key)
                unfiltered_kwds_w[key] = w_value
            cnt += 1
        assert len(data_w) == cnt
        ndata_args_w = len(data_args_w)
        if ndata_args_w >= need_cnt:
            # Surplus positional values were originally passed by keyword.
            args_w = data_args_w[:need_cnt]
            for argname, w_arg in zip(argnames[need_cnt:], data_args_w[need_cnt:]):
                unfiltered_kwds_w[argname] = w_arg
            assert not space.is_true(data_w_stararg)
        else:
            # Some of the needed positionals ended up in the *arg tuple.
            args_w = data_args_w[:]
            for w_stararg in space.unpackiterable(data_w_stararg):
                args_w.append(w_stararg)
            assert len(args_w) == need_cnt
        kwds_w = {}
        for key in need_kwds:
            kwds_w[key] = unfiltered_kwds_w[key]
        return Arguments(self.space, args_w, kwds_w)
    def normalize(self):
        """Return an instance of the Arguments class.  (Instances of other
        classes may not be suitable for long-term storage or multiple
        usage.)  Also force the type and validity of the * and ** arguments
        to be checked now.
        """
        args_w, kwds_w = self.unpack()
        return Arguments(self.space, args_w, kwds_w)
class ArgumentsPrepended(AbstractArguments):
    """An Arguments with one extra positional argument inserted first,
    without copying the underlying Arguments.  Used for bound-method
    calls, where w_firstarg is the hidden 'self'."""
    def __init__(self, args, w_firstarg):
        self.space = args.space
        self.args = args               # the wrapped AbstractArguments
        self.w_firstarg = w_firstarg   # the prepended argument
    def firstarg(self):
        "Return the first argument for inspection."
        return self.w_firstarg
    def popfirst(self):
        # Cheap: the prepended argument pops off without touching args.
        return self.w_firstarg, self.args
    def __repr__(self):
        return 'ArgumentsPrepended(%r, %r)' % (self.args, self.w_firstarg)
    def has_keywords(self):
        return self.args.has_keywords()
    def unpack(self):
        arguments_w, kwds_w = self.args.unpack()
        return ([self.w_firstarg] + arguments_w), kwds_w
    def fixedunpack(self, argcount):
        if argcount <= 0:
            raise ValueError, "too many arguments (%d expected)" % argcount # XXX: Incorrect
        return [self.w_firstarg] + self.args.fixedunpack(argcount - 1)
    def _rawshape(self, nextra=0):
        return self.args._rawshape(nextra + 1)
    def _match_signature(self, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0, extravarargs=None):
        """Parse args and kwargs according to the signature of a code object,
        or raise an ArgErr in case of failure.
        Return the number of arguments filled in.
        """
        # Place w_firstarg either into the next free scope slot or, if the
        # formals are already exhausted, into the extra-varargs overflow
        # list, then delegate with blindargs incremented.
        if blindargs < len(argnames):
            scope_w[blindargs] = self.w_firstarg
        else:
            if extravarargs is None:
                extravarargs = [ self.w_firstarg ]
            else:
                extravarargs.append(self.w_firstarg)
        return self.args._match_signature(scope_w, argnames, has_vararg,
                                          has_kwarg, defaults_w,
                                          blindargs + 1, extravarargs)
    def flatten(self):
        # Same shape/data format as Arguments.flatten(), with the
        # prepended argument counted as one more positional.
        (shape_cnt, shape_keys, shape_star, shape_stst), data_w = self.args.flatten()
        data_w.insert(0, self.w_firstarg)
        return (shape_cnt + 1, shape_keys, shape_star, shape_stst), data_w
    def num_args(self):
        return self.args.num_args() + 1
    def num_kwds(self):
        return self.args.num_kwds()
class ArgumentsFromValuestack(AbstractArguments):
"""
Collects the arguments of a function call as stored on a PyFrame
valuestack.
Only for the case of purely positional arguments, for now.
"""
def __init__(self, space, frame, nargs=0):
self.space = space
self.frame = frame
self.nargs = nargs
def firstarg(self):
if self.nargs <= 0:
return None
return self.frame.peekvalue(self.nargs - 1)
def popfirst(self):
if self.nargs <= 0:
raise IndexError
frame = self.frame
newnargs = self.nargs-1
return (frame.peekvalue(newnargs),
ArgumentsFromValuestack(self.space, frame, newnargs))
def __repr__(self):
return 'ArgumentsFromValuestack(%r, %r)' % (self.frame, self.nargs)
def has_keywords(self):
return False
def unpack(self):
args_w = [None] * self.nargs
for i in range(self.nargs):
args_w[i] = self.frame.peekvalue(self.nargs - 1 - i)
return args_w, {}
def fixedunpack(self, argcount):
if self.nargs > argcount:
raise ValueError, "too many arguments (%d expected)" % argcount
elif self.nargs < argcount:
raise ValueError, "not enough arguments (%d expected)" % argcount
data_w = [None] * self.nargs
nargs = self.nargs
for i in range(nargs):
data_w[i] = self.frame.peekvalue(nargs - 1 - i)
return data_w
def _rawshape(self, nextra=0):
return nextra + self.nargs, (), False, False
def _match_signature(self, scope_w, argnames, has_vararg=False, has_kwarg=False, defaults_w=[], blindargs=0, extravarargs=None):
"""Parse args and kwargs according to the signature of a code object,
or raise an ArgErr in case of failure.
Return the number of arguments filled in.
"""
co_argcount = len(argnames)
if blindargs + self.nargs + len(defaults_w) < co_argcount:
raise ArgErrCount(blindargs + self.nargs , 0,
(co_argcount, has_vararg, has_kwarg),
defaults_w, co_argcount - blindargs -
self.nargs - len(defaults_w))
if blindargs + self.nargs > co_argcount and not has_vararg:
raise ArgErrCount(blindargs + self.nargs, 0,
(co_argcount, has_vararg, has_kwarg),
defaults_w, 0)
if blindargs + self.nargs >= co_argcount:
for i in range(co_argcount - blindargs):
scope_w[i + blindargs] = self.frame.peekvalue(self.nargs - 1 - i)
if has_vararg:
if blindargs > co_argcount:
stararg_w = extravarargs
for i in range(self.nargs):
stararg_w.append(self.frame.peekvalue(self.nargs - 1 - i))
else:
stararg_w = [None] * (self.nargs + blindargs - co_argcount)
for i in range(co_argcount - blindargs, self.nargs):
stararg_w[i - co_argcount + blindargs] = self.frame.peekvalue(self.nargs - 1 - i)
scope_w[co_argcount] = self.space.newtuple(stararg_w)
co_argcount += 1
else:
for i in range(self.nargs):
scope_w[i + blindargs] = self.frame.peekvalue(self.nargs - 1 - i)
ndefaults = len(defaults_w)
missing = co_argcount - self.nargs - blindargs
first_default = ndefaults - missing
for i in range(missing):
scope_w[self.nargs + blindargs + i] = defaults_w[first_default + i]
if has_vararg:
scope_w[co_argcount] = self.space.newtuple([])
co_argcount += 1
if has_kwarg:
scope_w[co_argcount] = self.space.newdict()
co_argcount += 1
return co_argcount
def flatten(self):
data_w = [None] * self.nargs
for i in range(self.nargs):
data_w[i] = self.frame.peekvalue(self.nargs - 1 - i)
return nextra + self.nargs, (), False, False, data_w
def num_args(self):
return self.nargs
def num_kwds(self):
return 0
class Arguments(AbstractArguments):
    """
    Collects the arguments of a function call.
    Instances should be considered immutable.
    """
    ### Construction ###
    def __init__(self, space, args_w, kwds_w=None,
                 w_stararg=None, w_starstararg=None):
        self.space = space
        self.arguments_w = args_w           # list of wrapped positional args
        self.kwds_w = kwds_w                # {keyword: wrapped value} or None
        self.w_stararg = w_stararg          # wrapped *arg, not yet unpacked
        self.w_starstararg = w_starstararg  # wrapped **kwarg, not yet unpacked
    def num_args(self):
        # Forces _unpack() so the counts include *arg contents.
        self._unpack()
        return len(self.arguments_w)
    def num_kwds(self):
        self._unpack()
        return len(self.kwds_w)
    def __repr__(self):
        if self.w_starstararg is not None:
            return 'Arguments(%s, %s, %s, %s)' % (self.arguments_w,
                                                  self.kwds_w,
                                                  self.w_stararg,
                                                  self.w_starstararg)
        if self.w_stararg is None:
            if not self.kwds_w:
                return 'Arguments(%s)' % (self.arguments_w,)
            else:
                return 'Arguments(%s, %s)' % (self.arguments_w, self.kwds_w)
        else:
            return 'Arguments(%s, %s, %s)' % (self.arguments_w,
                                              self.kwds_w,
                                              self.w_stararg)
    ### Manipulation ###
    def unpack(self):
        "Return a ([w1,w2...], {'kw':w3...}) pair."
        self._unpack()
        return self.arguments_w, self.kwds_w
    def popfirst(self):
        # May raise IndexError if there are no positional arguments.
        self._unpack()
        return self.arguments_w[0], Arguments(self.space, self.arguments_w[1:],
                                              kwds_w = self.kwds_w)
    def _unpack(self):
        "unpack the *arg and **kwd into w_arguments and kwds_w"
        # --- unpack the * argument now ---
        if self.w_stararg is not None:
            self.arguments_w += self.space.unpackiterable(self.w_stararg)
            self.w_stararg = None
        # --- unpack the ** argument now ---
        if self.kwds_w is None:
            self.kwds_w = {}
        if self.w_starstararg is not None:
            space = self.space
            w_starstararg = self.w_starstararg
            # maybe we could allow general mappings?
            if not space.is_true(space.isinstance(w_starstararg, space.w_dict)):
                raise OperationError(space.w_TypeError,
                                     space.wrap("argument after ** must be "
                                                "a dictionary"))
            # don't change the original yet,
            # in case something goes wrong
            d = self.kwds_w.copy()
            for w_key in space.unpackiterable(w_starstararg):
                try:
                    key = space.str_w(w_key)
                except OperationError, e:
                    if not e.match(space, space.w_TypeError):
                        raise
                    raise OperationError(space.w_TypeError,
                                         space.wrap("keywords must be strings"))
                if key in d:
                    raise OperationError(self.space.w_TypeError,
                                         self.space.wrap("got multiple values "
                                                         "for keyword argument "
                                                         "'%s'" % key))
                d[key] = space.getitem(w_starstararg, w_key)
            self.kwds_w = d
            self.w_starstararg = None
    def has_keywords(self):
        # True if there are explicit keywords or a non-empty **kwarg.
        return bool(self.kwds_w) or (self.w_starstararg is not None and
                                     self.space.is_true(self.w_starstararg))
    def fixedunpack(self, argcount):
        """The simplest argument parsing: get the 'argcount' arguments,
        or raise a real ValueError if the length is wrong."""
        if self.has_keywords():
            raise ValueError, "no keyword arguments expected"
        if len(self.arguments_w) > argcount:
            raise ValueError, "too many arguments (%d expected)" % argcount
        if self.w_stararg is not None:
            # unpackiterable checks that exactly this many items remain.
            self.arguments_w += self.space.unpackiterable(self.w_stararg,
                                              argcount - len(self.arguments_w))
            self.w_stararg = None
        elif len(self.arguments_w) < argcount:
            raise ValueError, "not enough arguments (%d expected)" % argcount
        return self.arguments_w
    def firstarg(self):
        "Return the first argument for inspection."
        if self.arguments_w:
            return self.arguments_w[0]
        if self.w_stararg is None:
            return None
        # Peek at the first item of the not-yet-unpacked *arg.
        w_iter = self.space.iter(self.w_stararg)
        try:
            return self.space.next(w_iter)
        except OperationError, e:
            if not e.match(self.space, self.space.w_StopIteration):
                raise
            return None
    ### Parsing for function calls ###
    def _match_signature(self, scope_w, argnames, has_vararg=False,
                         has_kwarg=False, defaults_w=[], blindargs=0,
                         extravarargs=None):
        """Parse args and kwargs according to the signature of a code object,
        or raise an ArgErr in case of failure.
        Return the number of arguments filled in.
        """
        #
        #   args_w = list of the normal actual parameters, wrapped
        #   kwds_w = real dictionary {'keyword': wrapped parameter}
        #   argnames = list of formal parameter names
        #   scope_w = resulting list of wrapped values
        #
        co_argcount = len(argnames) # expected formal arguments, without */**
        if self.w_stararg is not None:
            # There is a case where we don't have to unpack() a w_stararg:
            # if it matches exactly a *arg in the signature.
            if (len(self.arguments_w) + blindargs == co_argcount and
                has_vararg and
                self.space.is_w(self.space.type(self.w_stararg),
                                self.space.w_tuple)):
                pass
            else:
                self._unpack()   # sets self.w_stararg to None
        # always unpack the ** arguments
        if self.w_starstararg is not None:
            self._unpack()
        args_w = self.arguments_w
        kwds_w = self.kwds_w
        num_kwds = 0
        if kwds_w is not None:
            num_kwds = len(kwds_w)
        # put as many positional input arguments into place as available
        if blindargs >= co_argcount:
            input_argcount = co_argcount
        elif len(args_w) + blindargs > co_argcount:
            for i in range(co_argcount - blindargs):
                scope_w[i + blindargs] = args_w[i]
            input_argcount = co_argcount
            next_arg = co_argcount - blindargs
        else:
            for i in range(len(args_w)):
                scope_w[i + blindargs] = args_w[i]
            input_argcount = len(args_w) + blindargs
        # check that no keyword argument conflicts with these
        # note that for this purpose we ignore the first blindargs,
        # which were put into place by prepend().  This way, keywords do
        # not conflict with the hidden extra argument bound by methods.
        if kwds_w and input_argcount > blindargs:
            for name in argnames[blindargs:input_argcount]:
                if name in kwds_w:
                    raise ArgErrMultipleValues(name)
        remainingkwds_w = self.kwds_w
        missing = 0
        if input_argcount < co_argcount:
            if remainingkwds_w is None:
                remainingkwds_w = {}
            else:
                remainingkwds_w = remainingkwds_w.copy()
            # not enough args, fill in kwargs or defaults if exists
            def_first = co_argcount - len(defaults_w)
            for i in range(input_argcount, co_argcount):
                name = argnames[i]
                if name in remainingkwds_w:
                    scope_w[i] = remainingkwds_w[name]
                    del remainingkwds_w[name]
                elif i >= def_first:
                    scope_w[i] = defaults_w[i-def_first]
                else:
                    # error: not enough arguments.  Don't signal it immediately
                    # because it might be related to a problem with */** or
                    # keyword arguments, which will be checked for below.
                    missing += 1
        # collect extra positional arguments into the *vararg
        if has_vararg:
            if self.w_stararg is None:   # common case
                args_left = co_argcount - blindargs
                if args_left < 0:  # check required by rpython
                    starargs_w = extravarargs
                    if len(args_w):
                        starargs_w.extend(args_w)
                elif len(args_w) > args_left:
                    starargs_w = args_w[args_left:]
                else:
                    starargs_w = []
                scope_w[co_argcount] = self.space.newtuple(starargs_w)
            else:      # shortcut for the non-unpack() case above
                scope_w[co_argcount] = self.w_stararg
        elif len(args_w) + blindargs > co_argcount:
            raise ArgErrCount(len(args_w) + blindargs, num_kwds,
                              (co_argcount, has_vararg, has_kwarg),
                              defaults_w, 0)
        # collect extra keyword arguments into the **kwarg
        if has_kwarg:
            w_kwds = self.space.newdict()
            if remainingkwds_w:
                for key, w_value in remainingkwds_w.items():
                    self.space.setitem(w_kwds, self.space.wrap(key), w_value)
            # bool arithmetic: the **kwarg slot comes after the *arg slot.
            scope_w[co_argcount + has_vararg] = w_kwds
        elif remainingkwds_w:
            raise ArgErrUnknownKwds(remainingkwds_w)
        if missing:
            raise ArgErrCount(len(args_w) + blindargs, num_kwds,
                              (co_argcount, has_vararg, has_kwarg),
                              defaults_w, missing)
        return co_argcount + has_vararg + has_kwarg
    ### Argument <-> list of w_objects together with "shape" information
    def _rawshape(self, nextra=0):
        shape_cnt  = len(self.arguments_w)+nextra        # Number of positional args
        if self.kwds_w:
            shape_keys = self.kwds_w.keys()              # List of keywords (strings)
        else:
            shape_keys = []
        shape_star = self.w_stararg is not None   # Flag: presence of *arg
        shape_stst = self.w_starstararg is not None # Flag: presence of **kwds
        shape_keys.sort()
        return shape_cnt, tuple(shape_keys), shape_star, shape_stst # shape_keys are sorted
    def flatten(self):
        # Inverse of AbstractArguments.fromshape().
        shape_cnt, shape_keys, shape_star, shape_stst = self._rawshape()
        data_w = self.arguments_w + [self.kwds_w[key] for key in shape_keys]
        if shape_star:
            data_w.append(self.w_stararg)
        if shape_stst:
            data_w.append(self.w_starstararg)
        return (shape_cnt, shape_keys, shape_star, shape_stst), data_w
def rawshape(args, nextra=0):
    """Module-level convenience wrapper around args._rawshape()."""
    return args._rawshape(nextra)
#
# ArgErr family of exceptions raised in case of argument mismatch.
# We try to give error messages following CPython's, which are very informative.
#
class ArgErr(Exception):
    """Base class for argument-matching errors.  Subclasses build a
    CPython-style error message via getmsg()."""

    def getmsg(self, fnname):
        # Subclass responsibility.
        raise NotImplementedError
class ArgErrCount(ArgErr):
    """Wrong number of arguments.

    Builds the CPython-style message family
    "f() takes exactly/at least/at most N (non-keyword) argument(s) (M given)".
    """
    def __init__(self, nargs, nkwds, signature, defaults_w, missing_args):
        self.signature = signature          # (num_args, has_vararg, has_kwarg)
        self.num_defaults = len(defaults_w)
        self.missing_args = missing_args    # how many formals went unfilled
        self.num_args = nargs               # positional args actually given
        self.num_kwds = nkwds               # keyword args actually given
    def getmsg(self, fnname):
        # Note: removed an unused local ('args = None') and a dead
        # commented-out line that referenced it.
        num_args, has_vararg, has_kwarg = self.signature
        if has_kwarg or (self.num_kwds and self.num_defaults):
            # Count only non-keyword arguments in the message.
            msg2 = "non-keyword "
            if self.missing_args:
                required_args = num_args - self.num_defaults
                nargs = required_args - self.missing_args
            else:
                nargs = self.num_args
        else:
            msg2 = ""
            nargs = self.num_args + self.num_kwds
        n = num_args
        if n == 0:
            msg = "%s() takes no %sargument (%d given)" % (
                fnname,
                msg2,
                nargs)
        else:
            defcount = self.num_defaults
            if defcount == 0 and not has_vararg:
                msg1 = "exactly"
            elif not self.missing_args:
                msg1 = "at most"
            else:
                msg1 = "at least"
            n -= defcount
            if not self.num_kwds:  # msg "f() takes at least X non-keyword args"
                msg2 = ""          # is confusing if no kwd arg actually provided
            if n == 1:
                plural = ""
            else:
                plural = "s"
            msg = "%s() takes %s %d %sargument%s (%d given)" % (
                fnname,
                msg1,
                n,
                msg2,
                plural,
                nargs)
        return msg
class ArgErrMultipleValues(ArgErr):
    """A formal parameter received both a positional and a keyword value."""

    def __init__(self, argname):
        self.argname = argname

    def getmsg(self, fnname):
        return "%s() got multiple values for keyword argument '%s'" % (
            fnname,
            self.argname)
class ArgErrUnknownKwds(ArgErr):
    """One or more keyword arguments matched no formal parameter."""

    def __init__(self, kwds_w):
        self.kwd_name = ''
        self.num_kwds = len(kwds_w)
        if self.num_kwds == 1:
            # Remember the single offending name for a precise message.
            self.kwd_name = kwds_w.keys()[0]

    def getmsg(self, fnname):
        if self.num_kwds != 1:
            return "%s() got %d unexpected keyword arguments" % (
                fnname,
                self.num_kwds)
        return "%s() got an unexpected keyword argument '%s'" % (
            fnname,
            self.kwd_name)
| Python |
import os, sys
from pypy.rlib.objectmodel import we_are_translated
AUTO_DEBUG = os.getenv('PYPY_DEBUG')
RECORD_INTERPLEVEL_TRACEBACK = True
class OperationError(Exception):
"""Interpreter-level exception that signals an exception that should be
sent to the application level.
OperationError instances have three public attributes (and no .args),
w_type, w_value and application_traceback, which contain the wrapped
type and value describing the exception, and a chained list of
PyTraceback objects making the application-level traceback.
"""
def __init__(self, w_type, w_value, tb=None):
if w_type is None:
from pypy.tool.error import FlowingError
raise FlowingError(w_value)
self.w_type = w_type
self.w_value = w_value
self.application_traceback = tb
if not we_are_translated():
self.debug_excs = []
def clear(self, space):
# for sys.exc_clear()
self.w_type = space.w_None
self.w_value = space.w_None
self.application_traceback = None
if not we_are_translated():
del self.debug_excs[:]
def match(self, space, w_check_class):
"Check if this application-level exception matches 'w_check_class'."
return space.exception_match(self.w_type, w_check_class)
def async(self, space):
"Check if this is an exception that should better not be caught."
return (self.match(space, space.w_SystemExit) or
self.match(space, space.w_KeyboardInterrupt))
def __str__(self):
"NOT_RPYTHON: Convenience for tracebacks."
return '[%s: %s]' % (self.w_type, self.w_value)
def errorstr(self, space):
"The exception class and value, as a string."
if space is None:
# this part NOT_RPYTHON
exc_typename = str(self.w_type)
exc_value = str(self.w_value)
else:
w = space.wrap
if space.is_w(space.type(self.w_type), space.w_str):
exc_typename = space.str_w(self.w_type)
else:
exc_typename = space.str_w(
space.getattr(self.w_type, w('__name__')))
if space.is_w(self.w_value, space.w_None):
exc_value = ""
else:
try:
exc_value = space.str_w(space.str(self.w_value))
except OperationError:
# oups, cannot __str__ the exception object
exc_value = "<oups, exception object itself cannot be str'd>"
if not exc_value:
return exc_typename
else:
return '%s: %s' % (exc_typename, exc_value)
def getframe(self):
"The frame this exception was raised in, or None."
if self.application_traceback:
return self.application_traceback.frame
else:
return None
def record_interpreter_traceback(self):
"""Records the current traceback inside the interpreter.
This traceback is only useful to debug the interpreter, not the
application."""
if not we_are_translated():
if RECORD_INTERPLEVEL_TRACEBACK:
self.debug_excs.append(sys.exc_info())
def print_application_traceback(self, space, file=None):
"NOT_RPYTHON: Dump a standard application-level traceback."
if file is None: file = sys.stderr
self.print_app_tb_only(file)
print >> file, self.errorstr(space)
def print_app_tb_only(self, file):
"NOT_RPYTHON"
tb = self.application_traceback
if tb:
import linecache
print >> file, "Traceback (application-level):"
while tb is not None:
co = tb.frame.pycode
lineno = tb.lineno
fname = co.co_filename
if fname.startswith('<inline>\n'):
lines = fname.split('\n')
fname = lines[0].strip()
try:
l = lines[lineno]
except IndexError:
l = ''
else:
l = linecache.getline(fname, lineno)
print >> file, " File \"%s\"," % fname,
print >> file, "line", lineno, "in", co.co_name
if l:
if l.endswith('\n'):
l = l[:-1]
l = " " + l.lstrip()
print >> file, l
tb = tb.next
def print_detailed_traceback(self, space=None, file=None):
"""NOT_RPYTHON: Dump a nice detailed interpreter- and
application-level traceback, useful to debug the interpreter."""
import traceback, cStringIO
if file is None: file = sys.stderr
f = cStringIO.StringIO()
for i in range(len(self.debug_excs)-1, -1, -1):
print >> f, "Traceback (interpreter-level):"
traceback.print_tb(self.debug_excs[i][2], file=f)
f.seek(0)
debug_print(''.join(['|| ' + line for line in f.readlines()]), file)
if self.debug_excs:
from pypy.tool import tb_server
tb_server.publish_exc(self.debug_excs[-1])
self.print_app_tb_only(file)
print >> file, '(application-level)', self.errorstr(space)
if AUTO_DEBUG:
import debug
debug.fire(self)
def normalize_exception(self, space):
"""Normalize the OperationError. In other words, fix w_type and/or
w_value to make sure that the __class__ of w_value is exactly w_type.
"""
w_type = self.w_type
w_value = self.w_value
if space.full_exceptions:
while space.is_true(space.isinstance(w_type, space.w_tuple)):
w_type = space.getitem(w_type, space.wrap(0))
if space.is_true(space.abstract_isclass(w_type)):
if space.is_w(w_value, space.w_None):
# raise Type: we assume we have to instantiate Type
w_value = space.call_function(w_type)
w_type = space.abstract_getclass(w_value)
else:
w_valuetype = space.abstract_getclass(w_value)
if space.is_true(space.abstract_issubclass(w_valuetype,
w_type)):
# raise Type, Instance: let etype be the exact type of value
w_type = w_valuetype
else:
if space.full_exceptions and space.is_true(
space.isinstance(w_value, space.w_tuple)):
# raise Type, tuple: assume the tuple contains the
# constructor args
w_value = space.call(w_type, w_value)
else:
# raise Type, X: assume X is the constructor argument
w_value = space.call_function(w_type, w_value)
w_type = space.abstract_getclass(w_value)
elif space.full_exceptions and space.is_w(space.type(w_type),
space.w_str):
# XXX warn -- deprecated
pass
else:
# raise X: we assume that X is an already-built instance
if not space.is_w(w_value, space.w_None):
raise OperationError(space.w_TypeError,
space.wrap("instance exception may not "
"have a separate value"))
w_value = w_type
w_type = space.abstract_getclass(w_value)
if space.full_exceptions:
# for the sake of language consistency we should not allow
# things like 'raise 1', but it is probably fine (i.e.
# not ambiguous) to allow them in the explicit form
# 'raise int, 1'
if (space.findattr(w_value, space.wrap('__dict__')) is None and
space.findattr(w_value, space.wrap('__slots__')) is None):
msg = ("raising built-in objects can be ambiguous, "
"use 'raise type, value' instead")
raise OperationError(space.w_TypeError, space.wrap(msg))
self.w_type = w_type
self.w_value = w_value
def write_unraisable(self, space, where, w_object=None):
    """Report an exception that cannot be propagated: format a short
    one-line message and best-effort write it to app-level sys.stderr."""
    objrepr = ''
    if w_object is not None:
        try:
            objrepr = space.str_w(space.repr(w_object))
        except OperationError:
            objrepr = '?'
    msg = 'Exception "%s" in %s%s ignored\n' % (self.errorstr(space),
                                                where, objrepr)
    try:
        space.call_method(space.sys.get('stderr'), 'write', space.wrap(msg))
    except OperationError:
        pass  # reporting must never raise; ignored
# Utilities
from pypy.tool.ansi_print import ansi_print
def debug_print(text, file=None, newline=True):
    """Print *text* through ansi_print using ANSI escape 31 (red)."""
    red = "31"  # ANSI color code "red"
    ansi_print(text, esc=red, file=file, newline=newline)
def wrap_oserror(space, e):
    """Turn a host-level OSError *e* into an OperationError wrapping an
    app-level OSError with the same errno and message."""
    assert isinstance(e, OSError)
    errno = e.errno
    try:
        text = os.strerror(errno)
    except ValueError:
        # strerror rejects out-of-range errno values on some platforms
        text = 'error %d' % errno
    w_error = space.call_function(space.w_OSError,
                                  space.wrap(errno),
                                  space.wrap(text))
    return OperationError(space.w_OSError, w_error)
### installing the excepthook for OperationErrors
##def operr_excepthook(exctype, value, traceback):
## if issubclass(exctype, OperationError):
## value.debug_excs.append((exctype, value, traceback))
## value.print_detailed_traceback()
## else:
## old_excepthook(exctype, value, traceback)
## from pypy.tool import tb_server
## tb_server.publish_exc((exctype, value, traceback))
##old_excepthook = sys.excepthook
##sys.excepthook = operr_excepthook
| Python |
"""Parser for future statements
"""
from pypy.interpreter.stablecompiler import ast, walk
def is_future(stmt):
    """Return true if statement is a well-formed future statement"""
    if isinstance(stmt, ast.From) and stmt.modname == "__future__":
        return 1
    return 0
class FutureParser:
    """Walker collecting the __future__ features enabled by the leading
    future statements of a module."""

    # the features this compiler knows how to enable
    features = ("nested_scopes", "generators", "division")

    def __init__(self):
        self.found = {}  # feature name -> 1, used as a set

    def visitModule(self, node):
        # scan statements from the top; stop at the first non-future one
        for child in node.node.nodes:
            if not self.check_stmt(child):
                break

    def check_stmt(self, stmt):
        """Record *stmt*'s features; return 1 iff it was a future import."""
        if not is_future(stmt):
            return 0
        for name, asname in stmt.names:
            if name in self.features:
                self.found[name] = 1
            elif name == "*":
                raise SyntaxError(
                    "future statement does not support import *",
                    (stmt.filename, stmt.lineno, 0, ""))
            else:
                raise SyntaxError(
                    "future feature %s is not defined" % name,
                    (stmt.filename, stmt.lineno, 0, ""))
        # mark it so BadFutureParser will not complain about it later
        stmt.valid_future = 1
        return 1

    def get_features(self):
        """Return list of features enabled by future statements"""
        return self.found.keys()
class BadFutureParser:
    """Check for invalid future statements
    Those not marked valid are appearing after other statements
    """

    def visitFrom(self, node):
        # nodes already validated by FutureParser carry the flag; other
        # imports are none of our business
        already_checked = hasattr(node, 'valid_future')
        if already_checked or node.modname != "__future__":
            return
        raise SyntaxError(
            "from __future__ imports must occur at the beginning of the file",
            (node.filename, node.lineno, 0, ""))
def find_futures(node):
    """Walk *node* with both future-statement checkers and return the
    names of the enabled features."""
    collector = FutureParser()
    checker = BadFutureParser()
    walk(node, collector)
    walk(node, checker)
    return collector.get_features()
if __name__ == "__main__":
    # Ad-hoc command-line driver: for each file given on the command
    # line, parse it and print the future features found.
    # (Python 2 'print' statements.)
    import sys
    from pypy.interpreter.stablecompiler import parseFile, walk
    for file in sys.argv[1:]:
        print file
        tree = parseFile(file)
        v = FutureParser()
        walk(tree, v)
        print v.found
        print
| Python |
"""Parse tree transformation module.
Transforms Python source code into an abstract syntax tree (AST)
defined in the ast module.
The simplest ways to invoke this module are via parse and parseFile.
parse(buf) -> AST
parseFile(path) -> AST
"""
# Original version written by Greg Stein (gstein@lyra.org)
# and Bill Tutt (rassilon@lima.mudlib.org)
# February 1997.
#
# Modifications and improvements for Python 2.0 by Jeremy Hylton and
# Mark Hammond
#
# Some fixes to try to have correct line number on almost all nodes
# (except Module, Discard and Stmt) added by Sylvain Thenault
#
# Portions of this file are:
# Copyright (C) 1997-1998 Greg Stein. All Rights Reserved.
#
# This module is provided under a BSD-ish license. See
# http://www.opensource.org/licenses/bsd-license.html
# and replace OWNER, ORGANIZATION, and YEAR as appropriate.
# make sure we import the parser with the correct grammar
from pypy.interpreter.pyparser.pythonparse import make_pyparser
from pypy.interpreter.stablecompiler.ast import *
import parser
import pypy.interpreter.pyparser.pytoken as token
import sys
# Create parser from Grammar_stable, not current grammar.
# stable_grammar, _ = pythonparse.get_grammar_file("stable")
# stable_parser = pythonparse.python_grammar(stable_grammar)
# Build the parser for the "stable" grammar once at import time.
stable_parser = make_pyparser('stable')

class symbol:
    # Filled in by the loop below with one attribute per grammar symbol,
    # mirroring the stdlib 'symbol' module but for the stable grammar.
    pass

# reverse mapping: symbol number -> symbol name
sym_name = {}
for name, value in stable_parser.symbols.items():
    sym_name[value] = name
    setattr(symbol, name, value)

# transforming is requiring a lot of recursion depth so make sure we have enough
if sys.getrecursionlimit()<2000:
    sys.setrecursionlimit(2000)
class WalkerError(StandardError):
    # Internal error raised when the transformer meets a parse-tree
    # shape it does not expect.  (StandardError is Python 2 only.)
    pass
from consts import CO_VARARGS, CO_VARKEYWORDS
from consts import OP_ASSIGN, OP_DELETE, OP_APPLY
def parseFile(path):
    """Parse the Python source file at *path* and return its AST.

    Fix: the file object was leaked if read() raised; it is now closed
    in a finally block.
    """
    f = open(path, "U")
    try:
        # XXX The parser API tolerates files without a trailing newline,
        # but not strings without a trailing newline. Always add an extra
        # newline to the file contents, since we're going through the string
        # version of the API.
        src = f.read() + "\n"
    finally:
        f.close()
    return parse(src)
# added a filename keyword argument to improve SyntaxErrors' messages
def parse(buf, mode="exec", filename=''):
if mode == "exec" or mode == "single":
return Transformer(filename).parsesuite(buf)
elif mode == "eval":
return Transformer(filename).parseexpr(buf)
else:
raise ValueError("compile() arg 3 must be"
" 'exec' or 'eval' or 'single'")
def asList(nodes):
    """Recursively convert *nodes* to plain lists/tuples, calling each
    item's own asList() method when it has one."""
    out = []
    for entry in nodes:
        if hasattr(entry, "asList"):
            out.append(entry.asList())
        elif type(entry) is tuple:
            out.append(tuple(asList(entry)))
        elif type(entry) is list:
            out.append(asList(entry))
        else:
            out.append(entry)
    return out
def extractLineNo(ast):
    """Return the first line number found in the parse tuple *ast*, or
    None when no terminal in it carries one."""
    if not isinstance(ast[1], tuple):
        # terminal node: (token, string, lineno)
        return ast[2]
    for child in ast[1:]:
        if not isinstance(child, tuple):
            continue
        found = extractLineNo(child)
        if found is not None:
            return found
    return None
def Node(*args):
    # Generic AST node factory: args[0] names the node kind, the rest
    # are its constructor arguments.
    # NOTE(review): 'nodes' is a module-level mapping (kind -> class)
    # defined elsewhere in this file -- not visible in this chunk.
    kind = args[0]
    if nodes.has_key(kind):
        try:
            return nodes[kind](*args[1:])
        except TypeError:
            # debugging aid: show which constructor rejected the args
            print nodes[kind], len(args), args
            raise
    else:
        raise WalkerError, "Can't find appropriate Node type: %s" % str(args)
    #return apply(ast.Node, args)
#return apply(ast.Node, args)
class Transformer:
"""Utility object for transforming Python parse trees.
Exposes the following methods:
tree = transform(ast_tree)
tree = parsesuite(text)
tree = parseexpr(text)
tree = parsefile(fileob | filename)
"""
def __init__(self, filename=''):
    """Build the dispatch tables mapping grammar symbol/token numbers
    to the handler methods of this class."""
    self._dispatch = {}
    self.filename = filename
    # every method named after a grammar symbol becomes its handler
    for value, name in sym_name.items():
        if hasattr(self, name):
            self._dispatch[value] = getattr(self, name)
    self._dispatch[stable_parser.tokens['NEWLINE']] = self.com_NEWLINE
    # 'atom' nodes are further dispatched on their first token
    self._atom_dispatch = {stable_parser.tokens['LPAR']: self.atom_lpar,
                           stable_parser.tokens['LSQB']: self.atom_lsqb,
                           stable_parser.tokens['LBRACE']: self.atom_lbrace,
                           stable_parser.tokens['BACKQUOTE']: self.atom_backquote,
                           stable_parser.tokens['NUMBER']: self.atom_number,
                           stable_parser.tokens['STRING']: self.atom_string,
                           stable_parser.tokens['NAME']: self.atom_name,
                           }
    # source encoding; set by compile_node when an encoding_decl is seen
    self.encoding = None
def syntaxerror(self, msg, node):
    """Raise a SyntaxError for *msg*, located at *node* in the file
    currently being transformed."""
    lineno = extractLineNo(node)
    # column offset and source text are not tracked at this level
    raise SyntaxError(msg, (self.filename, lineno, 0, ""))

def none_assignment_error(self, assigning, node):
    """Complain about assigning to or deleting the name None."""
    if assigning == OP_DELETE:
        what = "deleting None"
    else:
        what = "assignment to None"
    self.syntaxerror(what, node)
def transform(self, tree):
    """Transform an AST into a modified parse tree."""
    if isinstance(tree, (tuple, list)):
        return self.compile_node(tree)
    # a raw parser AST object: flatten it to nested tuples first
    return self.compile_node(parser.ast2tuple(tree, line_info=1))

def parsesuite(self, text):
    """Return a modified parse tree for the given suite text."""
    return self.transform(parser.suite(text))

def parseexpr(self, text):
    """Return a modified parse tree for the given expression text."""
    return self.transform(parser.expr(text))
def parsefile(self, file):
    """Return a modified parse tree for the contents of the given file.

    *file* may be a file-like object or a path.  Fix: when we open the
    file ourselves (path argument), close it afterwards; a caller's
    file object is left open, as before.
    """
    if type(file) == type(''):
        f = open(file)
        try:
            return self.parsesuite(f.read())
        finally:
            f.close()
    return self.parsesuite(file.read())
# --------------------------------------------------------------
#
# PRIVATE METHODS
#
def compile_node(self, node):
    """Dispatch a toplevel parse tuple to the handler for its start
    symbol (single_input, file_input, eval_input, ...)."""
    ### emit a line-number node?
    n = node[0]
    # an encoding declaration wraps the real toplevel node; record the
    # encoding and unwrap
    if n == symbol.encoding_decl:
        self.encoding = node[2]
        node = node[1]
        n = node[0]
    if n == symbol.single_input:
        return self.single_input(node[1:])
    if n == symbol.file_input:
        return self.file_input(node[1:])
    if n == symbol.eval_input:
        return self.eval_input(node[1:])
    if n == symbol.lambdef:
        return self.lambdef(node[1:])
    if n == symbol.funcdef:
        return self.funcdef(node[1:])
    if n == symbol.classdef:
        return self.classdef(node[1:])
    raise WalkerError, ('unexpected node type', n)
def single_input(self, node):
    # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
    first = node[0][0]
    if first == stable_parser.tokens['NEWLINE']:
        # an empty line at the interactive prompt
        body = Pass()
    else:
        body = self.com_stmt(node[0])
    return Module(None, body)
def file_input(self, nodelist):
    """Transform a whole module body into Module(doc, Stmt(...))."""
    doc = self.get_docstring(nodelist, symbol.file_input)
    stmts = []
    for node in nodelist:
        if node[0] != stable_parser.tokens['ENDMARKER'] and node[0] != stable_parser.tokens['NEWLINE']:
            self.com_append_stmt(stmts, node)
    if doc is not None:
        # the docstring was also collected as the first statement;
        # drop that duplicate Discard(Const(...))
        assert isinstance(stmts[0], Discard)
        assert isinstance(stmts[0].expr, Const)
        del stmts[0]
    return Module(doc, Stmt(stmts))
def eval_input(self, nodelist):
    # eval_input is what eval()/input() parse: a bare testlist
    ### is this sufficient?
    return Expression(self.com_node(nodelist[0]))

def decorator_name(self, nodelist):
    """Turn the children of a dotted_name into a Name possibly wrapped
    in nested Getattr nodes (a.b.c)."""
    listlen = len(nodelist)
    assert listlen >= 1 and listlen % 2 == 1
    item = self.atom_name(nodelist)
    for i in range(1, listlen, 2):
        assert nodelist[i][0] == stable_parser.tokens['DOT']
        assert nodelist[i + 1][0] == stable_parser.tokens['NAME']
        item = Getattr(item, nodelist[i + 1][1])
    return item
def decorator(self, nodelist):
    # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
    assert len(nodelist) in (3, 5, 6)
    assert nodelist[0][0] == stable_parser.tokens['AT']
    assert nodelist[-1][0] == stable_parser.tokens['NEWLINE']
    assert nodelist[1][0] == symbol.dotted_name
    funcname = self.decorator_name(nodelist[1][1:])
    if len(nodelist) <= 3:
        # bare decorator, no call parentheses
        return funcname
    assert nodelist[2][0] == stable_parser.tokens['LPAR']
    return self.com_call_function(funcname, nodelist[3])

def decorators(self, nodelist):
    # decorators: decorator ([NEWLINE] decorator)* NEWLINE
    items = []
    for dec in nodelist:
        assert dec[0] == symbol.decorator
        items.append(self.decorator(dec[1:]))
    return Decorators(items)
def funcdef(self, nodelist):
    #                     -6    -5   -4         -3  -2  -1
    # funcdef: [decorators] 'def' NAME parameters ':' suite
    # parameters: '(' [varargslist] ')'
    # Negative indices are used so the same code handles both the
    # decorated (6 children) and undecorated (5 children) forms.
    if len(nodelist) == 6:
        assert nodelist[0][0] == symbol.decorators
        decorators = self.decorators(nodelist[0][1:])
    else:
        assert len(nodelist) == 5
        decorators = None
    lineno = nodelist[-4][2]
    name = nodelist[-4][1]
    args = nodelist[-3][2]
    if args[0] == symbol.varargslist:
        names, defaults, flags = self.com_arglist(args[1:])
    else:
        # empty parameter list
        names = []
        defaults = []
        flags = 0
    doc = self.get_docstring(nodelist[-1])
    # code for function
    code = self.com_node(nodelist[-1])
    if doc is not None:
        # drop the duplicate docstring statement from the body
        assert isinstance(code, Stmt)
        assert isinstance(code.nodes[0], Discard)
        del code.nodes[0]
    if name == "None":
        self.none_assignment_error( OP_ASSIGN, nodelist[-4] )
    return Function(decorators, name, names, defaults, flags, doc, code,
                    lineno=lineno)
def lambdef(self, nodelist):
    # lambdef: 'lambda' [varargslist] ':' test
    if nodelist[2][0] == symbol.varargslist:
        names, defaults, flags = self.com_arglist(nodelist[2][1:])
    else:
        # no parameters
        names = []
        defaults = []
        flags = 0
    # code for lambda
    code = self.com_node(nodelist[-1])
    return Lambda(names, defaults, flags, code, lineno=nodelist[1][2])

# (This is like lambdef but it uses the old_test instead.)
# old_lambdef: 'lambda' [varargslist] ':' old_test
old_lambdef = lambdef
def classdef(self, nodelist):
    # classdef: 'class' NAME ['(' testlist ')'] ':' suite
    name = nodelist[1][1]
    doc = self.get_docstring(nodelist[-1])
    if nodelist[2][0] == stable_parser.tokens['COLON']:
        # no parenthesized base list
        bases = []
    else:
        bases = self.com_bases(nodelist[3])
    # code for class
    code = self.com_node(nodelist[-1])
    if doc is not None:
        # drop the duplicate docstring statement from the body
        assert isinstance(code, Stmt)
        assert isinstance(code.nodes[0], Discard)
        del code.nodes[0]
    if name == "None":
        self.none_assignment_error(OP_ASSIGN, nodelist[1])
    return Class(name, bases, doc, code, lineno=nodelist[1][2])
def stmt(self, nodelist):
    """A plain wrapper node: transform its only child."""
    return self.com_stmt(nodelist[0])
small_stmt = stmt
flow_stmt = stmt
compound_stmt = stmt

def simple_stmt(self, nodelist):
    # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
    collected = []
    for idx in range(0, len(nodelist), 2):  # step over ';' separators
        self.com_append_stmt(collected, nodelist[idx])
    return Stmt(collected)
# The following grammar nodes are always consumed inline by their
# parent's handler and must never reach the dispatcher directly;
# hitting one of these is a walker bug.
def parameters(self, nodelist):
    raise WalkerError

def varargslist(self, nodelist):
    raise WalkerError

def fpdef(self, nodelist):
    raise WalkerError

def fplist(self, nodelist):
    raise WalkerError

def dotted_name(self, nodelist):
    raise WalkerError

def comp_op(self, nodelist):
    raise WalkerError

def trailer(self, nodelist):
    raise WalkerError

def sliceop(self, nodelist):
    raise WalkerError

def argument(self, nodelist):
    raise WalkerError
# --------------------------------------------------------------
#
# STATEMENT NODES (invoked by com_node())
#
def expr_stmt(self, nodelist):
    """Transform an expression statement: a bare expression, a chain of
    assignments, or an augmented assignment.

    expr_stmt: augassign testlist | testlist ('=' testlist)*
    """
    en = nodelist[-1]
    exprNode = self.lookup_node(en)(en[1:])
    if len(nodelist) == 1:
        # bare expression: its value is discarded
        return Discard(exprNode, lineno=exprNode.lineno)
    if nodelist[1][0] == stable_parser.tokens['EQUAL']:
        # a = b = c = expr : every testlist except the last is a target
        nodesl = []
        for i in range(0, len(nodelist) - 2, 2):
            nodesl.append(self.com_assign(nodelist[i], OP_ASSIGN))
        return Assign(nodesl, exprNode, lineno=nodelist[1][2])
    else:
        # augmented assignment: lhs op= rhs
        lval = self.com_augassign(nodelist[0])
        op = self.com_augassign_op(nodelist[1])
        return AugAssign(lval, op[1], exprNode, lineno=op[2])
    # NOTE: the original ended with an unreachable
    # 'raise WalkerError, "can't get here"' -- all paths above return;
    # removed as dead code.
def print_stmt(self, nodelist):
    # print_stmt: 'print' ([ test (',' test)* [','] ]
    #                      | '>>' test [ (',' test)+ [','] ])
    items = []
    if len(nodelist) == 1:
        # bare 'print'
        start = 1
        dest = None
    elif nodelist[1][0] == stable_parser.tokens['RIGHTSHIFT']:
        # 'print >> dest, ...' form
        assert len(nodelist) == 3 \
               or nodelist[3][0] == stable_parser.tokens['COMMA']
        dest = self.com_node(nodelist[2])
        start = 4
    else:
        dest = None
        start = 1
    for i in range(start, len(nodelist), 2):
        items.append(self.com_node(nodelist[i]))
    if nodelist[-1][0] == stable_parser.tokens['COMMA']:
        # trailing comma: suppress the final newline
        return Print(items, dest, lineno=nodelist[0][2])
    return Printnl(items, dest, lineno=nodelist[0][2])
def del_stmt(self, nodelist):
    # del_stmt: 'del' exprlist -- a deletion is an "assignment" with
    # the OP_DELETE flavor
    return self.com_assign(nodelist[1], OP_DELETE)

def pass_stmt(self, nodelist):
    return Pass(lineno=nodelist[0][2])

def break_stmt(self, nodelist):
    return Break(lineno=nodelist[0][2])

def continue_stmt(self, nodelist):
    return Continue(lineno=nodelist[0][2])

def return_stmt(self, nodelist):
    # return_stmt: 'return' [testlist]
    if len(nodelist) < 2:
        # bare 'return' returns Const(None)
        return Return(Const(None), lineno=nodelist[0][2])
    return Return(self.com_node(nodelist[1]), lineno=nodelist[0][2])

def yield_stmt(self, nodelist):
    # yield_stmt: 'yield' testlist
    return Yield(self.com_node(nodelist[1]), lineno=nodelist[0][2])
def raise_stmt(self, nodelist):
    # raise_stmt: 'raise' [test [',' test [',' test]]]
    # transform in the original order: traceback, value, then type
    exprs = [None, None, None]
    for slot, idx in ((2, 5), (1, 3), (0, 1)):
        if len(nodelist) > idx:
            exprs[slot] = self.com_node(nodelist[idx])
    return Raise(exprs[0], exprs[1], exprs[2], lineno=nodelist[0][2])
def import_stmt(self, nodelist):
    # import_stmt: import_name | import_from
    assert len(nodelist) == 1
    return self.com_node(nodelist[0])

def import_name(self, nodelist):
    # import_name: 'import' dotted_as_names
    return Import(self.com_dotted_as_names(nodelist[1]),
                  lineno=nodelist[0][2])

def import_from(self, nodelist):
    # import_from: 'from' dotted_name 'import' ('*' |
    #    '(' import_as_names ')' | import_as_names)
    assert nodelist[0][1] == 'from'
    assert nodelist[1][0] == symbol.dotted_name
    assert nodelist[2][1] == 'import'
    fromname = self.com_dotted_name(nodelist[1])
    if nodelist[3][0] == stable_parser.tokens['STAR']:
        # 'from x import *' is encoded as the single name '*'
        return From(fromname, [('*', None)],
                    lineno=nodelist[0][2])
    else:
        if nodelist[3][0] == stable_parser.tokens['LPAR']:
            # parenthesized name list
            node = nodelist[4]
        else:
            node = nodelist[3]
        # a trailing comma is legal only inside parentheses
        if node[-1][0] == stable_parser.tokens['COMMA']:
            self.syntaxerror("trailing comma not allowed without surrounding parentheses", node)
        return From(fromname, self.com_import_as_names(node),
                    lineno=nodelist[0][2])
def global_stmt(self, nodelist):
    # global_stmt: 'global' NAME (',' NAME)*
    names = [nodelist[i][1] for i in range(1, len(nodelist), 2)]
    return Global(names, lineno=nodelist[0][2])

def exec_stmt(self, nodelist):
    # exec_stmt: 'exec' expr ['in' expr [',' expr]]
    expr1 = self.com_node(nodelist[1])
    expr2 = expr3 = None
    if len(nodelist) >= 4:
        expr2 = self.com_node(nodelist[3])
        if len(nodelist) >= 6:
            expr3 = self.com_node(nodelist[5])
    return Exec(expr1, expr2, expr3, lineno=nodelist[0][2])

def assert_stmt(self, nodelist):
    # assert_stmt: 'assert' test [',' test]
    expr1 = self.com_node(nodelist[1])
    expr2 = None
    if len(nodelist) == 4:
        expr2 = self.com_node(nodelist[3])
    return Assert(expr1, expr2, lineno=nodelist[0][2])
def if_stmt(self, nodelist):
    # if_stmt: 'if' test ':' suite ('elif' test ':' suite)*
    #          ['else' ':' suite]
    # every (test, suite) pair -- 'if' and each 'elif' -- spans 4
    # children, hence the stride of 4
    tests = []
    for i in range(0, len(nodelist) - 3, 4):
        testNode = self.com_node(nodelist[i + 1])
        suiteNode = self.com_node(nodelist[i + 3])
        tests.append((testNode, suiteNode))
    if len(nodelist) % 4 == 3:
        # the trailing 'else' ':' suite
        elseNode = self.com_node(nodelist[-1])
##      elseNode.lineno = nodelist[-1][1][2]
    else:
        elseNode = None
    return If(tests, elseNode, lineno=nodelist[0][2])
def while_stmt(self, nodelist):
    # while_stmt: 'while' test ':' suite ['else' ':' suite]
    cond = self.com_node(nodelist[1])
    body = self.com_node(nodelist[3])
    otherwise = None
    if len(nodelist) > 4:
        otherwise = self.com_node(nodelist[6])
    return While(cond, body, otherwise, lineno=nodelist[0][2])

def for_stmt(self, nodelist):
    # for_stmt: 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
    target = self.com_assign(nodelist[1], OP_ASSIGN)
    iterable = self.com_node(nodelist[3])
    body = self.com_node(nodelist[5])
    otherwise = None
    if len(nodelist) > 8:
        otherwise = self.com_node(nodelist[8])
    return For(target, iterable, body, otherwise,
               lineno=nodelist[0][2])

def try_stmt(self, nodelist):
    # 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
    # | 'try' ':' suite 'finally' ':' suite
    if nodelist[3][0] == symbol.except_clause:
        return self.com_try_except(nodelist)
    return self.com_try_finally(nodelist)

def suite(self, nodelist):
    # suite: simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
    if len(nodelist) == 1:
        return self.com_stmt(nodelist[0])
    collected = []
    for child in nodelist:
        if child[0] == symbol.stmt:
            self.com_append_stmt(collected, child)
    return Stmt(collected)
# --------------------------------------------------------------
#
# EXPRESSION NODES (invoked by com_node())
#
def testlist(self, nodelist):
    # testlist / testlist_safe / exprlist: expr (',' expr)* [',']
    # -- all become a Tuple (or collapse to the single expression)
    return self.com_binary(Tuple, nodelist)
testlist_safe = testlist # XXX
testlist1 = testlist
exprlist = testlist

def testlist_gexp(self, nodelist):
    # 'test gen_for' is a generator expression, anything else a testlist
    if len(nodelist) == 2 and nodelist[1][0] == symbol.gen_for:
        seed = self.com_node(nodelist[0])
        return self.com_generator_expression(seed, nodelist[1])
    return self.testlist(nodelist)
def test(self, nodelist):
    # test: or_test ['if' or_test 'else' test] | lambdef
    if len(nodelist) == 1:
        if nodelist[0][0] == symbol.lambdef:
            return self.lambdef(nodelist[0])
        else:
            # Normal or-expression
            return self.com_node(nodelist[0])
    elif len(nodelist) == 5 and nodelist[1][1] == 'if':
        # Conditional expression "A if COND else B".
        # Bug fixes vs. the original:
        #  * 'delist' was a NameError typo for 'nodelist'
        #  * the 'if' keyword is the token *text* nodelist[1][1], not
        #    the token number nodelist[1][0], so the branch never fired
        #  * operands must be transformed with com_node, and a raw
        #    parse tuple has no .lineno attribute -- use nodelist[1][2]
        # XXX: CPython's nodename is IfExp, not CondExpr
        return CondExpr(self.com_node(nodelist[2]),
                        self.com_node(nodelist[0]),
                        self.com_node(nodelist[4]),
                        nodelist[1][2])
    else:
        return self.com_binary(Or, nodelist)
def and_test(self, nodelist):
    # and_test: not_test ('and' not_test)*
    return self.com_binary(And, nodelist)

def old_test(self, nodelist):
    # old_test: or_test | old_lambdef
    only = nodelist[0]
    if len(nodelist) == 1 and only[0] == symbol.lambdef:
        return self.lambdef(only)
    assert len(nodelist) == 1
    return self.com_node(only)
# XXX
# test = old_test

def or_test(self, nodelist):
    # or_test: and_test ('or' and_test)*
    return self.com_binary(Or, nodelist)

def not_test(self, nodelist):
    # not_test: 'not' not_test | comparison
    inner = self.com_node(nodelist[-1])
    if len(nodelist) == 2:
        return Not(inner, lineno=nodelist[0][2])
    return inner
def comparison(self, nodelist):
    # comparison: expr (comp_op expr)*
    node = self.com_node(nodelist[0])
    if len(nodelist) == 1:
        return node
    # chained comparisons collect (op, operand) pairs
    results = []
    for i in range(2, len(nodelist), 2):
        nl = nodelist[i-1]
        # comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
        #          | 'in' | 'not' 'in' | 'is' | 'is' 'not'
        n = nl[1]
        if n[0] == stable_parser.tokens['NAME']:
            # keyword operator; two-token forms are 'not in' / 'is not'
            type = n[1]
            if len(nl) == 3:
                if type == 'not':
                    type = 'not in'
                else:
                    type = 'is not'
        else:
            type = _cmp_types[n[0]]
        # lineno keeps the value from the last operator processed
        lineno = nl[1][2]
        results.append((type, self.com_node(nodelist[i])))
    # we need a special "compare" node so that we can distinguish
    #   3 < x < 5   from    (3 < x) < 5
    # the two have very different semantics and results (note that the
    # latter form is always true)
    return Compare(node, results, lineno=lineno)
def expr(self, nodelist):
    # expr: xor_expr ('|' xor_expr)*
    return self.com_binary(Bitor, nodelist)

def xor_expr(self, nodelist):
    # xor_expr: and_expr ('^' and_expr)*
    # (the original comment wrongly said "xor_expr ('^' xor_expr)*")
    return self.com_binary(Bitxor, nodelist)

def and_expr(self, nodelist):
    # and_expr: shift_expr ('&' shift_expr)*
    # (the original comment wrongly said "xor_expr ('&' xor_expr)*")
    return self.com_binary(Bitand, nodelist)
def shift_expr(self, nodelist):
    # shift_expr: arith_expr (('<<'|'>>') arith_expr)*
    # left-associative: fold pairwise into nested nodes
    result = self.com_node(nodelist[0])
    for i in range(2, len(nodelist), 2):
        rhs = self.com_node(nodelist[i])
        tok = nodelist[i-1][0]
        if tok == stable_parser.tokens['LEFTSHIFT']:
            result = LeftShift([result, rhs], lineno=nodelist[1][2])
        elif tok == stable_parser.tokens['RIGHTSHIFT']:
            result = RightShift([result, rhs], lineno=nodelist[1][2])
        else:
            raise ValueError("unexpected token: %s" % tok)
    return result

def arith_expr(self, nodelist):
    # arith_expr: term (('+'|'-') term)*
    result = self.com_node(nodelist[0])
    for i in range(2, len(nodelist), 2):
        rhs = self.com_node(nodelist[i])
        tok = nodelist[i-1][0]
        if tok == stable_parser.tokens['PLUS']:
            result = Add([result, rhs], lineno=nodelist[1][2])
        elif tok == stable_parser.tokens['MINUS']:
            result = Sub([result, rhs], lineno=nodelist[1][2])
        else:
            raise ValueError("unexpected token: %s" % tok)
    return result
def term(self, nodelist):
    # term: factor (('*'|'/'|'%'|'//') factor)*
    node = self.com_node(nodelist[0])
    for i in range(2, len(nodelist), 2):
        right = self.com_node(nodelist[i])
        t = nodelist[i-1][0]
        if t == stable_parser.tokens['STAR']:
            node = Mul([node, right])
        elif t == stable_parser.tokens['SLASH']:
            node = Div([node, right])
        elif t == stable_parser.tokens['PERCENT']:
            node = Mod([node, right])
        elif t == stable_parser.tokens['DOUBLESLASH']:
            node = FloorDiv([node, right])
        else:
            raise ValueError, "unexpected token: %s" % t
        # every folded node gets the lineno of the first operand
        node.lineno = nodelist[1][2]
    return node
def factor(self, nodelist):
    # factor: ('+'|'-'|'~') factor | power
    elt = nodelist[0]
    t = elt[0]
    node = self.lookup_node(nodelist[-1])(nodelist[-1][1:])
    # need to handle (unary op)constant here...
    if t == stable_parser.tokens['PLUS']:
        return UnaryAdd(node, lineno=elt[2])
    elif t == stable_parser.tokens['MINUS']:
        return UnarySub(node, lineno=elt[2])
    elif t == stable_parser.tokens['TILDE']:
        # note the fall-through: Invert reassigns node and reaches the
        # shared return below, unlike the two cases above
        node = Invert(node, lineno=elt[2])
    # no unary operator: the transformed power/atom itself
    return node
def power(self, nodelist):
    # power: atom trailer* ('**' factor)*
    node = self.com_node(nodelist[0])
    for i in range(1, len(nodelist)):
        elt = nodelist[i]
        if elt[0] == stable_parser.tokens['DOUBLESTAR']:
            # '**' ends the trailer chain; the exponent is the next node
            return Power([node, self.com_node(nodelist[i+1])],
                         lineno=elt[2])
        # apply call/subscript/attribute trailers left to right
        node = self.com_apply_trailer(node, elt)
    return node
def atom(self, nodelist):
    """Dispatch an 'atom' node to the handler for its first token
    (see _atom_dispatch built in __init__).

    Fix: the original had two unreachable statements after this return
    ('n.lineno = nodelist[0][2]; return n'); removed as dead code.
    """
    return self._atom_dispatch[nodelist[0][0]](nodelist)
def atom_lpar(self, nodelist):
    # '(' [testlist_gexp] ')'
    if nodelist[1][0] == stable_parser.tokens['RPAR']:
        # empty parentheses: the empty tuple (no lineno recorded)
        return Tuple(())
    return self.com_node(nodelist[1])

def atom_lsqb(self, nodelist):
    # '[' [listmaker] ']'
    if nodelist[1][0] == stable_parser.tokens['RSQB']:
        return List([], lineno=nodelist[0][2])
    return self.com_list_constructor(nodelist[1], nodelist[0][2])

def atom_lbrace(self, nodelist):
    # '{' [dictmaker] '}'
    if nodelist[1][0] == stable_parser.tokens['RBRACE']:
        # empty dict literal (no lineno recorded)
        return Dict(())
    return self.com_dictmaker(nodelist[1])

def atom_backquote(self, nodelist):
    # '`' testlist '`' -- the repr() shorthand
    return Backquote(self.com_node(nodelist[1]))
def atom_number(self, nodelist):
    ### need to verify this matches compile.c
    # the token text comes from the tokenizer, so eval() only ever sees
    # a numeric literal here
    k = eval(nodelist[0][1])
    return Const(k, lineno=nodelist[0][2])

def decode_literal(self, lit):
    """Evaluate a string-literal token, honoring the module's source
    encoding when one was declared."""
    if self.encoding:
        # this is particularly fragile & a bit of a
        # hack... changes in compile.c:parsestr and
        # tokenizer.c must be reflected here.
        if self.encoding not in ['utf-8', 'iso-8859-1']:
            lit = unicode(lit, 'utf-8').encode(self.encoding)
        return eval("# coding: %s\n%s" % (self.encoding, lit))
    else:
        return eval(lit)
def atom_string(self, nodelist):
    # adjacent string literals are concatenated at parse time
    text = ''.join([self.decode_literal(n[1]) for n in nodelist])
    return Const(text, lineno=nodelist[0][2])

def atom_name(self, nodelist):
    tok = nodelist[0]
    return Name(tok[1], lineno=tok[2])
# --------------------------------------------------------------
#
# INTERNAL PARSING UTILITIES
#
# The use of com_node() introduces a lot of extra stack frames,
# enough to cause a stack overflow compiling test.test_parser with
# the standard interpreter recursionlimit. The com_node() is a
# convenience function that hides the dispatch details, but comes
# at a very high cost. It is more efficient to dispatch directly
# in the callers. In these cases, use lookup_node() and call the
# dispatched node directly.
def lookup_node(self, node):
    """Return the handler method for the node's symbol/token number."""
    return self._dispatch[node[0]]

# NOTE(review): appears unused within this chunk; possibly leftover
# instrumentation -- confirm before removing.
_callers = {}

def com_node(self, node):
    # Note: compile.c has handling in com_node for del_stmt, pass_stmt,
    # break_stmt, stmt, small_stmt, flow_stmt, simple_stmt,
    # and compound_stmt.
    # We'll just dispatch them.
    return self._dispatch[node[0]](node[1:])

def com_NEWLINE(self, *args):
    # A ';' at the end of a line can make a NEWLINE token appear
    # here, Render it harmless. (genc discards ('discard',
    # ('const', xxxx)) Nodes)
    return Discard(Const(None))
def com_arglist(self, nodelist):
    """Collect (names, defaults, flags) from a varargslist.

    varargslist:
        (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME)
      | fpdef ['=' test] (',' fpdef ['=' test])* [',']
    fpdef: NAME | '(' fplist ')'
    fplist: fpdef (',' fpdef)* [',']
    """
    names = []
    defaults = []
    flags = 0
    i = 0
    while i < len(nodelist):
        node = nodelist[i]
        if node[0] == stable_parser.tokens['STAR'] or node[0] == stable_parser.tokens['DOUBLESTAR']:
            # the *args / **kwargs tail: handled here, then 'break'
            if node[0] == stable_parser.tokens['STAR']:
                node = nodelist[i+1]
                if node[0] == stable_parser.tokens['NAME']:
                    name = node[1]
                    if name in names:
                        self.syntaxerror("duplicate argument '%s' in function definition" %
                                         name, node)
                    names.append(name)
                    flags = flags | CO_VARARGS
                    i = i + 3
            if i < len(nodelist):
                # should be DOUBLESTAR
                t = nodelist[i][0]
                if t == stable_parser.tokens['DOUBLESTAR']:
                    node = nodelist[i+1]
                else:
                    raise ValueError, "unexpected token: %s" % t
                name = node[1]
                if name in names:
                    self.syntaxerror("duplicate argument '%s' in function definition" %
                                     name, node)
                names.append(name)
                flags = flags | CO_VARKEYWORDS
            break
        # fpdef: NAME | '(' fplist ')'
        name = self.com_fpdef(node)
        if name in names:
            self.syntaxerror("duplicate argument '%s' in function definition" %
                             name, node)
        names.append(name)
        i = i + 1
        if i >= len(nodelist):
            # last parameter without default: valid only if no earlier
            # parameter had one
            if len(defaults):
                self.syntaxerror("non-default argument follows default argument",node)
            break
        if nodelist[i][0] == stable_parser.tokens['EQUAL']:
            defaults.append(self.com_node(nodelist[i + 1]))
            i = i + 2
        elif len(defaults):
            self.syntaxerror("non-default argument follows default argument",node)
        # skip the trailing comma
        i = i + 1
    if "None" in names:
        self.syntaxerror( "Invalid syntax. Assignment to None.", node)
    return names, defaults, flags
def com_fpdef(self, node):
    # fpdef: NAME | '(' fplist ')'
    if node[1][0] == stable_parser.tokens['LPAR']:
        # parenthesized tuple-unpacking parameter
        return self.com_fplist(node[2])
    return node[1][1]

def com_fplist(self, node):
    # fplist: fpdef (',' fpdef)* [',']
    if len(node) == 2:
        # a single fpdef, not a tuple pattern
        return self.com_fpdef(node[1])
    return tuple([self.com_fpdef(node[i])
                  for i in range(1, len(node), 2)])
def com_dotted_name(self, node):
    """Join the NAME children of *node* with dots ('a.b.c')."""
    parts = []
    for child in node:
        if type(child) == type(()) and child[0] == stable_parser.tokens['NAME']:
            parts.append(child[1])
    return ".".join(parts)
def com_dotted_as_name(self, node):
    """Return (dotted_name, alias-or-None) for a dotted_as_name node."""
    assert node[0] == symbol.dotted_as_name
    node = node[1:]
    dot = self.com_dotted_name(node[0][1:])
    if len(node) == 1:
        # no 'as' clause
        return dot, None
    assert node[1][1] == 'as'
    assert node[2][0] == stable_parser.tokens['NAME']
    return dot, node[2][1]

def com_dotted_as_names(self, node):
    """Collect every dotted_as_name child, skipping the commas."""
    assert node[0] == symbol.dotted_as_names
    node = node[1:]
    names = [self.com_dotted_as_name(node[0])]
    for i in range(2, len(node), 2):
        names.append(self.com_dotted_as_name(node[i]))
    return names

def com_import_as_name(self, node):
    """Return (name, alias-or-None) for an import_as_name node."""
    assert node[0] == symbol.import_as_name
    node = node[1:]
    assert node[0][0] == stable_parser.tokens['NAME']
    if len(node) == 1:
        # no 'as' clause
        return node[0][1], None
    assert node[1][1] == 'as', node
    assert node[2][0] == stable_parser.tokens['NAME']
    return node[0][1], node[2][1]

def com_import_as_names(self, node):
    """Collect every import_as_name child, skipping the commas."""
    assert node[0] == symbol.import_as_names
    node = node[1:]
    names = [self.com_import_as_name(node[0])]
    for i in range(2, len(node), 2):
        names.append(self.com_import_as_name(node[i]))
    return names
def com_bases(self, node):
    # a testlist of base-class expressions, separated by commas
    return [self.com_node(node[i]) for i in range(1, len(node), 2)]

def com_try_finally(self, nodelist):
    # try_fin_stmt: "try" ":" suite "finally" ":" suite
    body = self.com_node(nodelist[2])
    final = self.com_node(nodelist[5])
    return TryFinally(body, final, lineno=nodelist[0][2])
def com_try_except(self, nodelist):
    # try_except: 'try' ':' suite (except_clause ':' suite)* ['else' suite]
    # Produces TryExcept(body, [(expr1, expr2, handler), ...], else)
    stmt = self.com_node(nodelist[2])  # transform the try-body once
    clauses = []
    elseNode = None
    for i in range(3, len(nodelist), 3):
        node = nodelist[i]
        if node[0] == symbol.except_clause:
            # except_clause: 'except' [expr [',' expr]] */
            if len(node) > 2:
                expr1 = self.com_node(node[2])
                if len(node) > 4:
                    # the 'except E, target' binding is an assignment
                    expr2 = self.com_assign(node[4], OP_ASSIGN)
                else:
                    expr2 = None
            else:
                expr1 = expr2 = None
            clauses.append((expr1, expr2, self.com_node(nodelist[i+2])))
        if node[0] == stable_parser.tokens['NAME']:
            # the 'else' keyword shows up as a NAME token
            elseNode = self.com_node(nodelist[i+2])
    # Fix: reuse 'stmt' instead of transforming nodelist[2] a second
    # time -- the original computed it at the top and then discarded it.
    return TryExcept(stmt, clauses, elseNode,
                     lineno=nodelist[0][2])
def com_augassign_op(self, node):
    # return the operator token tuple ('+=' etc.) of an augassign node
    assert node[0] == symbol.augassign
    return node[1]

def com_augassign(self, node):
    """Return node suitable for lvalue of augmented assignment

    Names, slices, and attributes are the only allowable nodes.
    """
    l = self.com_node(node)
    if isinstance(l, Name):
        if l.name == "__debug__":
            self.syntaxerror( "can not assign to __debug__", node )
        if l.name == "None":
            self.none_assignment_error( OP_ASSIGN, node )
    if l.__class__ in (Name, Slice, Subscript, Getattr):
        return l
    self.syntaxerror( "can't assign to %s" % l.__class__.__name__, node)
def com_assign(self, node, assigning):
    """Return a node suitable for use as an "lvalue"; *assigning* is
    OP_ASSIGN or OP_DELETE."""
    # loop to avoid trivial recursion: peel single-child wrapper nodes
    while 1:
        t = node[0]
        if t == symbol.exprlist or t == symbol.testlist or t == symbol.testlist_gexp:
            if len(node) > 2:
                # comma-separated targets: a, b = ...
                return self.com_assign_tuple(node, assigning)
            node = node[1]
        elif t in _assign_types:
            # operator nodes may only wrap a single child on the lhs
            if len(node) > 2:
                self.syntaxerror( "can't assign to operator", node)
            node = node[1]
        elif t == symbol.power:
            # attribute / subscript targets: obj.attr = ..., obj[i] = ...
            if node[1][0] != symbol.atom:
                self.syntaxerror( "can't assign to operator", node)
            if len(node) > 2:
                primary = self.com_node(node[1])
                # apply all trailers but the last; the last one is the
                # actual assignment target
                for i in range(2, len(node)-1):
                    ch = node[i]
                    if ch[0] == stable_parser.tokens['DOUBLESTAR']:
                        self.syntaxerror( "can't assign to operator", node)
                    primary = self.com_apply_trailer(primary, ch)
                return self.com_assign_trailer(primary, node[-1],
                                               assigning)
            node = node[1]
        elif t == symbol.atom:
            t = node[1][0]
            if t == stable_parser.tokens['LPAR']:
                node = node[2]
                if node[0] == stable_parser.tokens['RPAR']:
                    self.syntaxerror( "can't assign to ()", node)
            elif t == stable_parser.tokens['LSQB']:
                node = node[2]
                if node[0] == stable_parser.tokens['RSQB']:
                    self.syntaxerror( "can't assign to []", node)
                return self.com_assign_list(node, assigning)
            elif t == stable_parser.tokens['NAME']:
                if node[1][1] == "__debug__":
                    self.syntaxerror( "can not assign to __debug__", node )
                if node[1][1] == "None":
                    self.none_assignment_error(assigning, node)
                return self.com_assign_name(node[1], assigning)
            else:
                # number, string or backquote atom
                self.syntaxerror( "can't assign to literal", node)
        else:
            self.syntaxerror( "bad assignment", node)
def com_assign_tuple(self, node, assigning):
assigns = []
if len(node)>=3:
if node[2][0] == symbol.gen_for:
self.syntaxerror("assign to generator expression not possible", node)
for i in range(1, len(node), 2):
assigns.append(self.com_assign(node[i], assigning))
return AssTuple(assigns, lineno=extractLineNo(node))
    def com_assign_list(self, node, assigning):
        """Build an AssList target from the contents of a [...] target."""
        assigns = []
        for i in range(1, len(node), 2):
            if i + 1 < len(node):
                # a list comprehension cannot be an assignment target
                if node[i + 1][0] == symbol.list_for:
                    self.syntaxerror( "can't assign to list comprehension", node)
                assert node[i + 1][0] == stable_parser.tokens['COMMA'], node[i + 1]
            assigns.append(self.com_assign(node[i], assigning))
        return AssList(assigns, lineno=extractLineNo(node))
    def com_assign_name(self, node, assigning):
        """Build an AssName target from a NAME terminal node."""
        # node is (NAME, name-string, lineno)
        return AssName(node[1], assigning, lineno=node[2])
    def com_assign_trailer(self, primary, node, assigning):
        """Dispatch on a trailer ('.name', '[...]', '(...)') used as a target."""
        t = node[1][0]
        if t == stable_parser.tokens['DOT']:
            return self.com_assign_attr(primary, node[2], assigning)
        if t == stable_parser.tokens['LSQB']:
            return self.com_subscriptlist(primary, node[2], assigning)
        if t == stable_parser.tokens['LPAR']:
            # a call can never be an assignment or deletion target
            if assigning==OP_DELETE:
                self.syntaxerror( "can't delete function call", node)
            else:
                self.syntaxerror( "can't assign to function call", node)
        self.syntaxerror( "unknown trailer type: %s" % t, node)
    def com_assign_attr(self, primary, node, assigning):
        """Build an AssAttr target for 'primary.<name>'."""
        if node[1]=="None":
            self.none_assignment_error(assigning, node)
        return AssAttr(primary, node[1], assigning, lineno=node[-1])
    def com_binary(self, constructor, nodelist):
        "Compile 'NODE (OP NODE)*' into (type, [ node1, ..., nodeN ])."
        l = len(nodelist)
        if l == 1:
            # single operand: no operator node is built at all
            n = nodelist[0]
            return self.lookup_node(n)(n[1:])
        items = []
        for i in range(0, l, 2):
            # operands sit at even indices; the operator tokens between
            # them are implied by `constructor` and skipped here
            n = nodelist[i]
            items.append(self.lookup_node(n)(n[1:]))
        return constructor(items, lineno=extractLineNo(nodelist))
def com_stmt(self, node):
result = self.lookup_node(node)(node[1:])
assert result is not None
if isinstance(result, Stmt):
return result
return Stmt([result])
def com_append_stmt(self, stmts, node):
result = self.lookup_node(node)(node[1:])
assert result is not None
if isinstance(result, Stmt):
stmts.extend(result.nodes)
else:
stmts.append(result)
    if hasattr(symbol, 'list_for'):
        # grammar with list comprehensions: listmaker may contain list_for
        def com_list_constructor(self, nodelist, lineno):
            # listmaker: test ( list_for | (',' test)* [','] )
            values = []
            for i in range(1, len(nodelist)):
                if nodelist[i][0] == symbol.list_for:
                    # a comprehension consumes the rest of the listmaker
                    assert len(nodelist[i:]) == 1
                    return self.com_list_comprehension(values[0],
                                                       nodelist[i])
                elif nodelist[i][0] == stable_parser.tokens['COMMA']:
                    continue
                values.append(self.com_node(nodelist[i]))
            return List(values, lineno=lineno)
        def com_list_comprehension(self, expr, node):
            # list_iter: list_for | list_if
            # list_for: 'for' exprlist 'in' testlist [list_iter]
            # list_if: 'if' test [list_iter]
            # XXX should raise SyntaxError for assignment
            lineno = node[1][2]
            fors = []
            while node:
                t = node[1][1]
                if t == 'for':
                    assignNode = self.com_assign(node[2], OP_ASSIGN)
                    listNode = self.com_node(node[4])
                    newfor = ListCompFor(assignNode, listNode, [])
                    newfor.lineno = node[1][2]
                    fors.append(newfor)
                    if len(node) == 5:
                        node = None
                    else:
                        node = self.com_list_iter(node[5])
                elif t == 'if':
                    test = self.com_node(node[2])
                    newif = ListCompIf(test, lineno=node[1][2])
                    # attach the 'if' to the most recent 'for' (the grammar
                    # guarantees a list_for precedes any list_if)
                    newfor.ifs.append(newif)
                    if len(node) == 3:
                        node = None
                    else:
                        node = self.com_list_iter(node[3])
                else:
                    self.syntaxerror(
                        "unexpected list comprehension element: %s %d"
                        % (node, lineno), node)
            return ListComp(expr, fors, lineno=lineno)
        def com_list_iter(self, node):
            # unwrap the list_iter wrapper to its single child
            assert node[0] == symbol.list_iter
            return node[1]
    else:
        # older grammar without list comprehensions: plain element list
        def com_list_constructor(self, nodelist, lineno):
            values = []
            for i in range(1, len(nodelist), 2):
                values.append(self.com_node(nodelist[i]))
            return List(values, lineno)
    if hasattr(symbol, 'gen_for'):
        # grammar with generator expressions
        def com_generator_expression(self, expr, node):
            # gen_iter: gen_for | gen_if
            # gen_for: 'for' exprlist 'in' test [gen_iter]
            # gen_if: 'if' test [gen_iter]
            lineno = node[1][2]
            fors = []
            while node:
                t = node[1][1]
                if t == 'for':
                    assignNode = self.com_assign(node[2], OP_ASSIGN)
                    genNode = self.com_node(node[4])
                    newfor = GenExprFor(assignNode, genNode, [],
                                        lineno=node[1][2])
                    fors.append(newfor)
                    if (len(node)) == 5:
                        node = None
                    else:
                        node = self.com_gen_iter(node[5])
                elif t == 'if':
                    test = self.com_node(node[2])
                    newif = GenExprIf(test, lineno=node[1][2])
                    # attach the 'if' to the most recent 'for' (the grammar
                    # guarantees a gen_for comes first)
                    newfor.ifs.append(newif)
                    if len(node) == 3:
                        node = None
                    else:
                        node = self.com_gen_iter(node[3])
                else:
                    self.syntaxerror(
                        "unexpected generator expression element: %s %d"
                        % (node, lineno), node)
            # the first 'for' iterates over an already-evaluated iterable
            fors[0].is_outmost = True
            return GenExpr(GenExprInner(expr, fors), lineno=lineno)
        def com_gen_iter(self, node):
            # unwrap the gen_iter wrapper to its single child
            assert node[0] == symbol.gen_iter
            return node[1]
def com_dictmaker(self, nodelist):
# dictmaker: test ':' test (',' test ':' value)* [',']
items = []
for i in range(1, len(nodelist), 4):
items.append((self.com_node(nodelist[i]),
self.com_node(nodelist[i+2])))
return Dict(items)
    def com_apply_trailer(self, primaryNode, nodelist):
        """Apply one trailer -- a call, attribute access, or subscript."""
        t = nodelist[1][0]
        if t == stable_parser.tokens['LPAR']:
            return self.com_call_function(primaryNode, nodelist[2])
        if t == stable_parser.tokens['DOT']:
            return self.com_select_member(primaryNode, nodelist[2])
        if t == stable_parser.tokens['LSQB']:
            return self.com_subscriptlist(primaryNode, nodelist[2], OP_APPLY)
        self.syntaxerror( 'unknown node type: %s' % t, nodelist[1])
    def com_select_member(self, primaryNode, nodelist):
        """Build a Getattr node for 'primary.<name>'."""
        if nodelist[0] != stable_parser.tokens['NAME']:
            self.syntaxerror( "member must be a name", nodelist[0])
        return Getattr(primaryNode, nodelist[1], lineno=nodelist[2])
    def com_call_function(self, primaryNode, nodelist):
        """Build a CallFunc node from an arglist parse tree.

        Handles positional args, keyword args, a bare generator
        expression argument, and trailing *args / **kwargs.
        """
        if nodelist[0] == stable_parser.tokens['RPAR']:
            # empty argument list: f()
            return CallFunc(primaryNode, [], lineno=extractLineNo(nodelist))
        args = []
        kw = 0
        len_nodelist = len(nodelist)
        for i in range(1, len_nodelist, 2):
            node = nodelist[i]
            if node[0] == stable_parser.tokens['STAR'] or node[0] == stable_parser.tokens['DOUBLESTAR']:
                # *args/**kwargs section starts here; handled below
                break
            kw, result = self.com_argument(node, kw)
            if len_nodelist != 2 and isinstance(result, GenExpr) \
               and len(node) == 3 and node[2][0] == symbol.gen_for:
                # allow f(x for x in y), but reject f(x for x in y, 1)
                # should use f((x for x in y), 1) instead of f(x for x in y, 1)
                self.syntaxerror( 'generator expression needs parenthesis', node)
            args.append(result)
        else:
            # Not broken out of by a star arg, so skip past the last one
            # we processed (this is the for-loop's else clause).
            i = i + 1
        if i < len_nodelist and nodelist[i][0] == stable_parser.tokens['COMMA']:
            # need to accept an application that looks like "f(a, b,)"
            i = i + 1
        star_node = dstar_node = None
        while i < len_nodelist:
            tok = nodelist[i]
            ch = nodelist[i+1]
            i = i + 3
            if tok[0]==stable_parser.tokens['STAR']:
                if star_node is not None:
                    self.syntaxerror( 'already have the varargs indentifier', tok )
                star_node = self.com_node(ch)
            elif tok[0]==stable_parser.tokens['DOUBLESTAR']:
                if dstar_node is not None:
                    self.syntaxerror( 'already have the kwargs indentifier', tok )
                dstar_node = self.com_node(ch)
            else:
                self.syntaxerror( 'unknown node type: %s' % tok, tok )
        return CallFunc(primaryNode, args, star_node, dstar_node,
                        lineno=extractLineNo(nodelist))
    def com_argument(self, nodelist, kw):
        """Compile one call argument.

        Returns (kw_flag, node): kw_flag becomes 1 once a keyword
        argument has been seen so later positional args are rejected.
        """
        if len(nodelist) == 3 and nodelist[2][0] == symbol.gen_for:
            # bare generator expression argument: f(x for x in y)
            test = self.com_node(nodelist[1])
            return 0, self.com_generator_expression(test, nodelist[2])
        if len(nodelist) == 2:
            # plain positional argument
            if kw:
                self.syntaxerror( "non-keyword arg after keyword arg", nodelist )
            return 0, self.com_node(nodelist[1])
        # keyword argument: name '=' value
        result = self.com_node(nodelist[3])
        n = nodelist[1]
        # unwrap single-child wrapper nodes down to the NAME terminal
        while len(n) == 2 and n[0] != stable_parser.tokens['NAME']:
            n = n[1]
        if n[0] != stable_parser.tokens['NAME']:
            self.syntaxerror( "keyword can't be an expression (%s)"%n[0], n)
        node = Keyword(n[1], result, lineno=n[2])
        return 1, node
    def com_subscriptlist(self, primary, nodelist, assigning):
        """Compile the contents of '[...]' into Subscript or Slice."""
        # slicing: simple_slicing | extended_slicing
        # simple_slicing: primary "[" short_slice "]"
        # extended_slicing: primary "[" slice_list "]"
        # slice_list: slice_item ("," slice_item)* [","]

        # backwards compat slice for '[i:j]'
        if len(nodelist) == 2:
            sub = nodelist[1]
            # a single short slice (no sliceop stride) compiles to the
            # old-style Slice node instead of a Subscript+Sliceobj
            if (sub[1][0] == stable_parser.tokens['COLON'] or \
                            (len(sub) > 2 and sub[2][0] == stable_parser.tokens['COLON'])) and \
                            sub[-1][0] != symbol.sliceop:
                return self.com_slice(primary, sub, assigning)
        subscripts = []
        for i in range(1, len(nodelist), 2):
            subscripts.append(self.com_subscript(nodelist[i]))
        return Subscript(primary, assigning, subscripts,
                         lineno=extractLineNo(nodelist))
    def com_subscript(self, node):
        """Compile one slice_item into an expression, slice object, or Ellipsis."""
        # slice_item: expression | proper_slice | ellipsis
        ch = node[1]
        t = ch[0]
        # two leading DOT tokens indicate the '...' ellipsis
        if t == stable_parser.tokens['DOT'] and node[2][0] == stable_parser.tokens['DOT']:
            return Ellipsis()
        if t == stable_parser.tokens['COLON'] or len(node) > 2:
            return self.com_sliceobj(node)
        return self.com_node(ch)
    def com_sliceobj(self, node):
        """Compile a proper_slice into a Sliceobj node (extended slicing)."""
        # proper_slice: short_slice | long_slice
        # short_slice: [lower_bound] ":" [upper_bound]
        # long_slice: short_slice ":" [stride]
        # lower_bound: expression
        # upper_bound: expression
        # stride: expression
        #
        # Note: a stride may be further slicing...
        items = []
        if node[1][0] == stable_parser.tokens['COLON']:
            # lower bound omitted
            items.append(Const(None))
            i = 2
        else:
            items.append(self.com_node(node[1]))
            # i == 2 is a COLON
            i = 3
        if i < len(node) and node[i][0] == symbol.test:
            items.append(self.com_node(node[i]))
            i = i + 1
        else:
            # upper bound omitted
            items.append(Const(None))
        # a short_slice has been built. look for long_slice now by looking
        # for strides...
        for j in range(i, len(node)):
            ch = node[j]
            if len(ch) == 2:
                # bare ':' -- stride omitted
                items.append(Const(None))
            else:
                items.append(self.com_node(ch[2]))
        return Sliceobj(items, lineno=extractLineNo(node))
    def com_slice(self, primary, node, assigning):
        """Compile a simple '[i:j]' slice into an old-style Slice node."""
        # short_slice: [lower_bound] ":" [upper_bound]
        lower = upper = None
        if len(node) == 3:
            # exactly one bound present; the COLON position tells which
            if node[1][0] == stable_parser.tokens['COLON']:
                upper = self.com_node(node[2])
            else:
                lower = self.com_node(node[1])
        elif len(node) == 4:
            lower = self.com_node(node[1])
            upper = self.com_node(node[3])
        return Slice(primary, assigning, lower, upper,
                     lineno=extractLineNo(node))
    def get_docstring(self, node, n=None):
        """Return the docstring of a parse (sub)tree, or None.

        Walks down the statement wrapper nodes until an atom is
        reached; a leading STRING atom is evaluated to produce the
        docstring text.
        """
        if n is None:
            n = node[0]
            node = node[1:]
        if n == symbol.suite:
            if len(node) == 1:
                return self.get_docstring(node[0])
            for sub in node:
                if sub[0] == symbol.stmt:
                    return self.get_docstring(sub)
            return None
        if n == symbol.file_input:
            for sub in node:
                if sub[0] == symbol.stmt:
                    return self.get_docstring(sub)
            return None
        if n == symbol.atom:
            if node[0][0] == stable_parser.tokens['STRING']:
                s = ''
                # NOTE(review): eval is used to decode the string literal
                # (quoting/escapes); input comes from the parser's STRING
                # tokens rather than arbitrary text, but it is still eval.
                for t in node:
                    s = s + eval(t[1])
                return s
            return None
        if n == symbol.stmt or n == symbol.simple_stmt \
           or n == symbol.small_stmt:
            return self.get_docstring(node[0])
        if n in _doc_nodes and len(node) == 1:
            return self.get_docstring(node[0])
        return None
_doc_nodes = [
symbol.expr_stmt,
symbol.testlist,
symbol.testlist_safe,
symbol.test,
symbol.old_test,
symbol.or_test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
symbol.power,
]
# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
# | 'in' | 'not' 'in' | 'is' | 'is' 'not'
_cmp_types = {
stable_parser.tokens['LESS'] : '<',
stable_parser.tokens['GREATER'] : '>',
stable_parser.tokens['EQEQUAL'] : '==',
stable_parser.tokens['EQUAL'] : '==',
stable_parser.tokens['LESSEQUAL'] : '<=',
stable_parser.tokens['GREATEREQUAL'] : '>=',
stable_parser.tokens['NOTEQUAL'] : '!=',
}
_assign_types = [
symbol.test,
symbol.old_test,
symbol.or_test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
]
# import types
# _names = {}
# for k, v in sym_name.items():
# _names[k] = v
# for k, v in token.tok_name.items():
# _names[k] = v
#
# def debug_tree(tree):
# l = []
# for elt in tree:
# if type(elt) == types.IntType:
# l.append(_names.get(elt, elt))
# elif type(elt) == types.StringType:
# l.append(elt)
# else:
# l.append(debug_tree(elt))
# return l
| Python |
# operation flags
# (how a target expression is being used: assignment, deletion, or
#  ordinary application/loading)
OP_ASSIGN = 0 # 'OP_ASSIGN'
OP_DELETE = 1 # 'OP_DELETE'
OP_APPLY = 2 # 'OP_APPLY'
# symbol-table scope classifications for a name
SC_LOCAL = 1
SC_GLOBAL = 2
SC_FREE = 3
SC_CELL = 4
SC_UNKNOWN = 5
SC_DEFAULT = 6
# code object flag bits (stored in the code object's co_flags)
CO_OPTIMIZED = 0x0001
CO_NEWLOCALS = 0x0002
CO_VARARGS = 0x0004
CO_VARKEYWORDS = 0x0008
CO_NESTED = 0x0010
CO_GENERATOR = 0x0020
CO_GENERATOR_ALLOWED = 0x1000
CO_FUTURE_DIVISION = 0x2000
import types
def flatten(tup):
    """Recursively flatten nested tuples into a flat list of leaves."""
    elts = []
    for elt in tup:
        if type(elt) == types.TupleType:
            # descend into nested tuples; everything else is a leaf
            elts = elts + flatten(elt)
        else:
            elts.append(elt)
    return elts
class Set:
    """Minimal set type implemented on a dict (predates the builtin set)."""
    def __init__(self):
        # maps each element to itself; only the keys matter
        self.elts = {}
    def __len__(self):
        return len(self.elts)
    def __contains__(self, elt):
        return self.elts.has_key(elt)
    def add(self, elt):
        self.elts[elt] = elt
    def elements(self):
        # order is the dict's iteration order, i.e. arbitrary
        return self.elts.keys()
    def has_elt(self, elt):
        return self.elts.has_key(elt)
    def remove(self, elt):
        # raises KeyError if elt is not present
        del self.elts[elt]
    def copy(self):
        # shallow copy: the elements themselves are shared
        c = Set()
        c.elts.update(self.elts)
        return c
class Stack:
    """Simple LIFO stack; supports len(), indexing, push/top, and pop."""
    def __init__(self):
        self.stack = []
        # expose the underlying list's pop directly as the stack's pop
        self.pop = self.stack.pop
    def __len__(self):
        return len(self.stack)
    def push(self, elt):
        self.stack.append(elt)
    def top(self):
        # peek at the most recently pushed element without removing it
        return self.stack[-1]
    def __getitem__(self, index): # needed by visitContinue()
        return self.stack[index]
MANGLE_LEN = 256 # magic constant from compile.c

def mangle(name, klass):
    """Return the private-name mangling of `name` inside class `klass`.

    '__spam' in class 'Ham' becomes '_Ham__spam'.  Names that are not
    private-looking, dunder names, over-long names, and all-underscore
    class names are returned unchanged, matching compile.c.
    """
    if not name.startswith('__'):
        return name
    if len(name) + 2 >= MANGLE_LEN:
        return name
    if name.endswith('__'):
        return name
    # strip the class name's leading underscores; if nothing remains
    # (class name was empty or all underscores) no mangling happens
    stripped = klass.lstrip('_')
    if not stripped:
        return name
    total = len(stripped) + len(name)
    if total > MANGLE_LEN:
        # truncate the class part so the result fits in MANGLE_LEN
        stripped = stripped[:MANGLE_LEN - total]
    return "_%s%s" % (stripped, name)
def set_filename(filename, tree):
    """Stamp `filename` onto every node reachable from `tree`.

    Walks the node graph breadth-first and assigns each visited node's
    `filename` attribute.
    """
    queue = [tree]
    while queue:
        node = queue.pop(0)
        node.filename = filename
        queue.extend(node.getChildNodes())
| Python |
"""A flow graph representation for Python bytecode"""
import dis
import new
import sys
import types
from pypy.interpreter.stablecompiler import misc
from pypy.interpreter.stablecompiler.consts \
import CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS
class FlowGraph:
    """A control-flow graph of Block objects under construction.

    Maintains a distinguished entry block, a single exit block, and a
    "current" block that emit() appends instructions to.
    """
    def __init__(self):
        self.current = self.entry = Block()
        self.exit = Block("exit")
        self.blocks = misc.Set()
        self.blocks.add(self.entry)
        self.blocks.add(self.exit)
    def startBlock(self, block):
        """Make `block` the current block (with optional debug tracing)."""
        if self._debug:
            if self.current:
                print "end", repr(self.current)
                print "    next", self.current.next
                print "   ", self.current.get_children()
            print repr(block)
        self.current = block
    def nextBlock(self, block=None):
        """Start a new block reached by fall-through from the current one."""
        # XXX think we need to specify when there is implicit transfer
        # from one block to the next.  might be better to represent this
        # with explicit JUMP_ABSOLUTE instructions that are optimized
        # out when they are unnecessary.
        #
        # I think this strategy works: each block has a child
        # designated as "next" which is returned as the last of the
        # children.  because the nodes in a graph are emitted in
        # reverse post order, the "next" block will always be emitted
        # immediately after its parent.
        # Worry: maintaining this invariant could be tricky
        if block is None:
            block = self.newBlock()
        # Note: If the current block ends with an unconditional
        # control transfer, then it is incorrect to add an implicit
        # transfer to the block graph.  The current code requires
        # these edges to get the blocks emitted in the right order,
        # however. :-(  If a client needs to remove these edges, call
        # pruneEdges().
        self.current.addNext(block)
        self.startBlock(block)
    def newBlock(self):
        """Create and register a fresh, empty block."""
        b = Block()
        self.blocks.add(b)
        return b
    def startExitBlock(self):
        self.startBlock(self.exit)
    # debug tracing flag, toggled by the two helpers below
    _debug = 0
    def _enable_debug(self):
        self._debug = 1
    def _disable_debug(self):
        self._debug = 0
    def emit(self, *inst):
        """Append an instruction tuple to the current block, adding edges."""
        if self._debug:
            print "\t", inst
        # returning/yielding transfers control toward the exit block
        if inst[0] in ['RETURN_VALUE', 'YIELD_VALUE']:
            self.current.addOutEdge(self.exit)
        # an instruction whose operand is a Block is a branch to it
        if len(inst) == 2 and isinstance(inst[1], Block):
            self.current.addOutEdge(inst[1])
        self.current.emit(inst)
    def getBlocksInOrder(self):
        """Return the blocks in reverse postorder

        i.e. each node appears before all of its successors
        """
        # TODO: What we need here is a topological sort that
        # XXX make sure every node that doesn't have an explicit next
        # is set so that next points to exit
        for b in self.blocks.elements():
            if b is self.exit:
                continue
            if not b.next:
                b.addNext(self.exit)
        order = dfs_postorder(self.entry, {})
        order.reverse()
        self.fixupOrder(order, self.exit)
        # hack alert
        if not self.exit in order:
            order.append(self.exit)
        return order
    def fixupOrder(self, blocks, default_next):
        """Fixup bad order introduced by DFS."""
        # XXX This is a total mess.  There must be a better way to get
        # the code blocks in the right order.
        self.fixupOrderHonorNext(blocks, default_next)
        self.fixupOrderForward(blocks, default_next)
    def fixupOrderHonorNext(self, blocks, default_next):
        """Fix one problem with DFS.

        The DFS uses child block, but doesn't know about the special
        "next" block.  As a result, the DFS can order blocks so that a
        block isn't next to the right block for implicit control
        transfers.
        """
        index = {}
        for i in range(len(blocks)):
            index[blocks[i]] = i
        for i in range(0, len(blocks) - 1):
            b = blocks[i]
            n = blocks[i + 1]
            if not b.next or b.next[0] == default_next or b.next[0] == n:
                continue
            # The blocks are in the wrong order.  Find the chain of
            # blocks to insert where they belong.
            cur = b
            chain = []
            elt = cur
            while elt.next and elt.next[0] != default_next:
                chain.append(elt.next[0])
                elt = elt.next[0]
            # Now remove the blocks in the chain from the current
            # block list, so that they can be re-inserted.
            l = []
            for b in chain:
                assert index[b] > i
                l.append((index[b], b))
            l.sort()
            l.reverse()
            for j, b in l:
                del blocks[index[b]]
            # Insert the chain in the proper location
            blocks[i:i + 1] = [cur] + chain
            # Finally, re-compute the block indexes
            for i in range(len(blocks)):
                index[blocks[i]] = i
    def fixupOrderForward(self, blocks, default_next):
        """Make sure all JUMP_FORWARDs jump forward"""
        index = {}
        chains = []
        cur = []
        # partition the blocks into chains ending at a fall-through to
        # default_next; a chain is moved as a unit
        for b in blocks:
            index[b] = len(chains)
            cur.append(b)
            if b.next and b.next[0] == default_next:
                chains.append(cur)
                cur = []
        chains.append(cur)
        while 1:
            # collect (target_chain, jumping_chain) pairs where a
            # JUMP_FORWARD currently points backwards
            constraints = []
            for i in range(len(chains)):
                l = chains[i]
                for b in l:
                    for c in b.get_children():
                        if index[c] < i:
                            forward_p = 0
                            for inst in b.insts:
                                if inst[0] == 'JUMP_FORWARD':
                                    if inst[1] == c:
                                        forward_p = 1
                            if not forward_p:
                                continue
                            constraints.append((index[c], i))
            if not constraints:
                break
            # XXX just do one for now
            # do swaps to get things in the right order
            goes_before, a_chain = constraints[0]
            assert a_chain > goes_before
            c = chains[a_chain]
            chains.remove(c)
            chains.insert(goes_before, c)
        # rebuild `blocks` in place from the reordered chains
        del blocks[:]
        for c in chains:
            for b in c:
                blocks.append(b)
    def getBlocks(self):
        return self.blocks.elements()
    def getRoot(self):
        """Return nodes appropriate for use with dominator"""
        return self.entry
    def getContainedGraphs(self):
        # graphs referenced from instruction operands (nested functions)
        l = []
        for b in self.getBlocks():
            l.extend(b.getContainedGraphs())
        return l
def dfs_postorder(b, seen):
    """Depth-first search of tree rooted at b, return in postorder"""
    # `seen` doubles as the visited-set shared across recursive calls
    order = []
    seen[b] = b
    for c in b.get_children():
        if seen.has_key(c):
            continue
        order = order + dfs_postorder(c, seen)
    order.append(b)
    return order
class Block:
    """A basic block: a straight-line run of instructions.

    Blocks are linked by explicit out-edges (jump targets) plus an
    implicit single "next" edge for fall-through control transfer.
    """
    # class-wide counter used to give each block a unique id
    _count = 0
    def __init__(self, label=''):
        self.insts = []
        self.inEdges = misc.Set()
        self.outEdges = misc.Set()
        self.label = label
        self.bid = Block._count
        self.next = []
        Block._count = Block._count + 1
    def __repr__(self):
        if self.label:
            return "<block %s id=%d>" % (self.label, self.bid)
        else:
            return "<block id=%d>" % (self.bid)
    def __str__(self):
        insts = map(str, self.insts)
        return "<block %s %d:\n%s>" % (self.label, self.bid,
                                       '\n'.join(insts))
    def emit(self, inst):
        op = inst[0]
        # any JUMP* instruction's operand (a Block) becomes an out-edge
        if op[:4] == 'JUMP':
            self.outEdges.add(inst[1])
        self.insts.append( list(inst) )
    def getInstructions(self):
        return self.insts
    def addInEdge(self, block):
        self.inEdges.add(block)
    def addOutEdge(self, block):
        self.outEdges.add(block)
    def addNext(self, block):
        # a block may have at most one fall-through successor
        self.next.append(block)
        assert len(self.next) == 1, map(str, self.next)
    # opcodes after which control never falls through to the next block
    _uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS', 'YIELD_VALUE',
                        'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP')
    def pruneNext(self):
        """Remove bogus edge for unconditional transfers

        Each block has a next edge that accounts for implicit control
        transfers, e.g. from a JUMP_IF_FALSE to the block that will be
        executed if the test is true.

        These edges must remain for the current assembler code to
        work.  If they are removed, the dfs_postorder gets things in
        weird orders.  However, they shouldn't be there for other
        purposes, e.g. conversion to SSA form.  This method will
        remove the next edge when it follows an unconditional control
        transfer.
        """
        try:
            op, arg = self.insts[-1]
        except (IndexError, ValueError):
            # empty block, or last instruction has no argument
            return
        if op in self._uncond_transfer:
            self.next = []
    def get_children(self):
        # report the fall-through block last; drop it from outEdges so
        # it is not returned twice
        if self.next and self.next[0] in self.outEdges:
            self.outEdges.remove(self.next[0])
        return self.outEdges.elements() + self.next
    def getContainedGraphs(self):
        """Return all graphs contained within this block.

        For example, a MAKE_FUNCTION block will contain a reference to
        the graph for the function body.
        """
        contained = []
        for inst in self.insts:
            if len(inst) == 1:
                continue
            op = inst[1]
            if hasattr(op, 'graph'):
                contained.append(op.graph)
        return contained
# flags for code objects
# the FlowGraph is transformed in place; it exists in one of these states
# (progression: RAW -> CONV -> FLAT -> DONE, driven by PyFlowGraph.getCode)
RAW = "RAW"
FLAT = "FLAT"
CONV = "CONV"
DONE = "DONE"
class PyFlowGraph(FlowGraph):
    """FlowGraph specialized for assembling a Python code object.

    The graph is transformed in place through the stages RAW -> CONV ->
    FLAT -> DONE; getCode() drives the pipeline and returns the final
    code object built with new.code().
    """
    super_init = FlowGraph.__init__
    def __init__(self, name, filename, args=(), optimized=0,
                 klass=None, newlocals=0):
        self.super_init()
        self.name = name
        self.filename = filename
        self.docstring = None
        self.args = args # XXX
        self.argcount = getArgCount(args)
        self.klass = klass
        self.flags = 0
        if optimized:
            self.flags |= CO_OPTIMIZED
        if newlocals:
            self.flags |= CO_NEWLOCALS
        self.consts = []
        self.names = []
        # Free variables found by the symbol table scan, including
        # variables used only in nested scopes, are included here.
        self.freevars = []
        self.cellvars = []
        # The closure list is used to track the order of cell
        # variables and free variables in the resulting code object.
        # The offsets used by LOAD_CLOSURE/LOAD_DEREF refer to both
        # kinds of variables.
        self.closure = []
        self.varnames = list(args) or []
        # replace TupleArg placeholders with their synthetic '.N' names
        for i in range(len(self.varnames)):
            var = self.varnames[i]
            if isinstance(var, TupleArg):
                self.varnames[i] = var.getName()
        self.stage = RAW
        self.orderedblocks = []
    def setDocstring(self, doc):
        self.docstring = doc
    def setFlag(self, flag):
        self.flags = self.flags | flag
        if flag == CO_VARARGS:
            # *args occupies a varname slot but is not a counted argument
            self.argcount = self.argcount - 1
    def checkFlag(self, flag):
        # returns 1 if the flag is set, None (falsy) otherwise
        if self.flags & flag:
            return 1
    def setFreeVars(self, names):
        self.freevars = list(names)
    def setCellVars(self, names):
        self.cellvars = names
    def getCode(self):
        """Get a Python code object"""
        # drive the staged, in-place transformation to completion
        if self.stage == RAW:
            self.computeStackDepth()
            self.convertArgs()
        if self.stage == CONV:
            self.flattenGraph()
        if self.stage == FLAT:
            self.makeByteCode()
        if self.stage == DONE:
            return self.newCodeObject()
        raise RuntimeError, "inconsistent PyFlowGraph state"
    def dump(self, io=None):
        """Debugging helper: print the flattened instruction stream."""
        if io:
            save = sys.stdout
            sys.stdout = io
        pc = 0
        for t in self.insts:
            opname = t[0]
            if opname == "SET_LINENO":
                print
            if len(t) == 1:
                print "\t", "%3d" % pc, opname
                pc = pc + 1
            else:
                print "\t", "%3d" % pc, opname, t[1]
                pc = pc + 3
        if io:
            sys.stdout = save
    def computeStackDepth(self):
        """Compute the max stack depth.

        Approach is to compute the stack effect of each basic block.
        Then find the path through the code with the largest total
        effect.
        """
        depth = {}
        exit = None
        for b in self.getBlocks():
            depth[b] = findDepth(b.getInstructions())
        seen = {}
        def max_depth(b, d):
            if seen.has_key(b):
                return d
            seen[b] = 1
            d = d + depth[b]
            children = b.get_children()
            if children:
                return max([max_depth(c, d) for c in children])
            else:
                # a childless non-exit block still flows to the exit
                if not b.label == "exit":
                    return max_depth(self.exit, d)
                else:
                    return d
        self.stacksize = max_depth(self.entry, 0)
    def flattenGraph(self):
        """Arrange the blocks in order and resolve jumps"""
        assert self.stage == CONV
        self.insts = insts = []
        pc = 0
        begin = {}
        end = {}
        forward_refs = []
        for b in self.orderedblocks:
            begin[b] = pc
            for inst in b.getInstructions():
                if len(inst) == 1:
                    insts.append(inst)
                    pc = pc + 1
                elif inst[0] != "SET_LINENO":
                    opname, arg = inst
                    if self.hasjrel.has_elt(opname):
                        # relative jump - no extended arg
                        forward_refs.append( (arg, inst, pc ) )
                        insts.append(inst)
                        pc = pc + 3
                    elif self.hasjabs.has_elt(opname):
                        # absolute jump - can be extended if backward
                        if arg in begin:
                            # can only extend argument if backward
                            offset = begin[arg]
                            hi, lo = divmod(offset,65536)
                            if hi>0:
                                # extended argument
                                insts.append( ["EXTENDED_ARG", hi ] )
                                pc = pc + 3
                            inst[1] = lo
                        else:
                            forward_refs.append( (arg, inst, pc ) )
                        insts.append(inst)
                        pc = pc + 3
                    else:
                        # numerical arg
                        assert type(arg)==int
                        hi,lo = divmod(arg,65536)
                        if hi>0:
                            # extended argument
                            insts.append( ["EXTENDED_ARG", hi ] )
                            inst[1] = lo
                            pc = pc + 3
                        insts.append(inst)
                        pc = pc + 3
                else:
                    # SET_LINENO pseudo-instructions take no bytecode space
                    insts.append(inst)
            end[b] = pc
        # NOTE(review): this assignment is immediately shadowed by the
        # loop variable below and is effectively dead
        pc = 0
        for arg, inst, pc in forward_refs:
            opname, block = inst
            abspos = begin[block]
            if self.hasjrel.has_elt(opname):
                # relative jumps are encoded as offset past the jump itself
                offset = abspos - pc - 3
                inst[1] = offset
            else:
                inst[1] = abspos
        self.stage = FLAT
    # class-level sets of opnames with relative/absolute jump operands
    hasjrel = misc.Set()
    for i in dis.hasjrel:
        hasjrel.add(dis.opname[i])
    hasjabs = misc.Set()
    for i in dis.hasjabs:
        hasjabs.add(dis.opname[i])
    def convertArgs(self):
        """Convert arguments from symbolic to concrete form"""
        assert self.stage == RAW
        self.orderedblocks = self.getBlocksInOrder()
        # the docstring always occupies const slot 0
        self.consts.insert(0, self.docstring)
        self.sort_cellvars()
        for b in self.orderedblocks:
            for inst in b.getInstructions():
                if len(inst) == 2:
                    opname, oparg = inst
                    conv = self._converters.get(opname, None)
                    if conv:
                        inst[1] = conv(self, oparg)
        self.stage = CONV
    def sort_cellvars(self):
        """Sort cellvars in the order of varnames and prune from freevars.
        """
        cells = {}
        for name in self.cellvars:
            cells[name] = 1
        self.cellvars = [name for name in self.varnames
                         if cells.has_key(name)]
        for name in self.cellvars:
            del cells[name]
        self.cellvars = self.cellvars + cells.keys()
        self.closure = self.cellvars + self.freevars
    def _lookupName(self, name, list):
        """Return index of name in list, appending if necessary

        This routine uses a list instead of a dictionary, because a
        dictionary can't store two different keys if the keys have the
        same value but different types, e.g. 2 and 2L.  The compiler
        must treat these two separately, so it does an explicit type
        comparison before comparing the values.
        """
        t = type(name)
        for i in range(len(list)):
            if t == type(list[i]) and list[i] == name:
                return i
        end = len(list)
        list.append(name)
        return end
    # opname -> converter method mapping, filled in by the loop below
    _converters = {}
    def _convert_LOAD_CONST(self, arg):
        # nested code: replace the PyFlowGraph with its finished code object
        if hasattr(arg, 'getCode'):
            arg = arg.getCode()
        return self._lookupName(arg, self.consts)
    def _convert_LOAD_FAST(self, arg):
        self._lookupName(arg, self.names)
        return self._lookupName(arg, self.varnames)
    _convert_STORE_FAST = _convert_LOAD_FAST
    _convert_DELETE_FAST = _convert_LOAD_FAST
    def _convert_LOAD_NAME(self, arg):
        return self._lookupName(arg, self.names)
    def _convert_NAME(self, arg):
        return self._lookupName(arg, self.names)
    _convert_STORE_NAME = _convert_NAME
    _convert_DELETE_NAME = _convert_NAME
    _convert_IMPORT_NAME = _convert_NAME
    _convert_IMPORT_FROM = _convert_NAME
    _convert_STORE_ATTR = _convert_NAME
    _convert_LOAD_ATTR = _convert_NAME
    _convert_DELETE_ATTR = _convert_NAME
    _convert_LOAD_GLOBAL = _convert_NAME
    _convert_STORE_GLOBAL = _convert_NAME
    _convert_DELETE_GLOBAL = _convert_NAME
    def _convert_DEREF(self, arg):
        self._lookupName(arg, self.names)
        return self._lookupName(arg, self.closure)
    _convert_LOAD_DEREF = _convert_DEREF
    _convert_STORE_DEREF = _convert_DEREF
    def _convert_LOAD_CLOSURE(self, arg):
        return self._lookupName(arg, self.closure)
    _cmp = list(dis.cmp_op)
    def _convert_COMPARE_OP(self, arg):
        return self._cmp.index(arg)
    # similarly for other opcodes...
    # (register every _convert_XXX method under key 'XXX')
    for name, obj in locals().items():
        if name[:9] == "_convert_":
            opname = name[9:]
            _converters[opname] = obj
    del name, obj, opname
    def makeByteCode(self):
        """Encode the flat instruction list into raw bytecode + lnotab."""
        assert self.stage == FLAT
        self.lnotab = lnotab = LineAddrTable()
        for t in self.insts:
            opname = t[0]
            if self._debug:
                if len(t)==1:
                    print "x",opname
                else:
                    print "x",opname, t[1]
            if len(t) == 1:
                lnotab.addCode(self.opnum[opname])
            else:
                oparg = t[1]
                if opname == "SET_LINENO":
                    # line-number pseudo-op: recorded in lnotab only
                    lnotab.nextLine(oparg)
                    continue
                hi, lo = twobyte(oparg)
                try:
                    lnotab.addCode(self.opnum[opname], lo, hi)
                except ValueError:
                    print opname, oparg
                    print self.opnum[opname], lo, hi
                    raise
        self.stage = DONE
    # opname -> opcode number mapping, derived from dis
    opnum = {}
    for num in range(len(dis.opname)):
        opnum[dis.opname[num]] = num
    del num
    def newCodeObject(self):
        """Build the final code object from the assembled pieces."""
        assert self.stage == DONE
        if (self.flags & CO_NEWLOCALS) == 0:
            nlocals = 0
        else:
            nlocals = len(self.varnames)
        argcount = self.argcount
        if self.flags & CO_VARKEYWORDS:
            # **kwargs occupies a varname slot but is not counted
            argcount = argcount - 1
        return new.code(argcount, nlocals, self.stacksize, self.flags,
                        self.lnotab.getCode(), self.getConsts(),
                        tuple(self.names), tuple(self.varnames),
                        self.filename, self.name, self.lnotab.firstline,
                        self.lnotab.getTable(), tuple(self.freevars),
                        tuple(self.cellvars))
    def getConsts(self):
        """Return a tuple for the const slot of the code object

        Must convert references to code (MAKE_FUNCTION) to code
        objects recursively.
        """
        l = []
        for elt in self.consts:
            if isinstance(elt, PyFlowGraph):
                elt = elt.getCode()
            l.append(elt)
        return tuple(l)
def isJump(opname):
    """Return 1 when `opname` names a jump instruction, else None."""
    if opname.startswith('JUMP'):
        return 1
    return None
class TupleArg:
    """Helper for marking func defs with nested tuples in arglist"""
    def __init__(self, count, names):
        # count: position used to synthesize the hidden '.N' argument name
        # names: the (possibly nested) tuple of real argument names
        self.count = count
        self.names = names
    def __repr__(self):
        return "TupleArg(%s, %s)" % (self.count, self.names)
    def getName(self):
        # the synthetic name of the implicit tuple parameter
        return ".%d" % self.count
def getArgCount(args):
    """Return the argument count with TupleArg placeholders discounted.

    Each TupleArg stands for one real slot, so the names it unpacks to
    are subtracted from the raw length.
    """
    count = len(args)
    for entry in args:
        if isinstance(entry, TupleArg):
            count = count - len(misc.flatten(entry.names))
    return count
def twobyte(val):
    """Convert an int argument into high and low bytes"""
    assert type(val) == types.IntType
    # divmod(val, 256) yields (high byte, low byte) for 0 <= val < 65536
    return divmod(val, 256)
class LineAddrTable:
    """lnotab

    This class builds the lnotab, which is documented in compile.c.
    Here's a brief recap:

    For each SET_LINENO instruction after the first one, two bytes are
    added to lnotab.  (In some cases, multiple two-byte entries are
    added.)  The first byte is the distance in bytes between the
    instruction for the last SET_LINENO and the current SET_LINENO.
    The second byte is offset in line numbers.  If either offset is
    greater than 255, multiple two-byte entries are added -- see
    compile.c for the delicate details.
    """
    def __init__(self):
        self.code = []           # raw bytecode bytes, as 1-char strings
        self.codeOffset = 0      # current bytecode offset
        self.firstline = 0       # first line number seen (co_firstlineno)
        self.lastline = 0        # line of the previous SET_LINENO
        self.lastoff = 0         # code offset at the previous SET_LINENO
        self.lnotab = []         # list of delta-byte ints
    def addCode(self, *args):
        # append raw bytecode bytes (each arg must fit in one byte)
        for arg in args:
            self.code.append(chr(arg))
        self.codeOffset = self.codeOffset + len(args)
    def nextLine(self, lineno):
        if self.firstline == 0:
            self.firstline = lineno
            self.lastline = lineno
        else:
            # compute deltas
            addr = self.codeOffset - self.lastoff
            line = lineno - self.lastline
            # Python assumes that lineno always increases with
            # increasing bytecode address (lnotab is unsigned char).
            # Depending on when SET_LINENO instructions are emitted
            # this is not always true.  Consider the code:
            #     a = (1,
            #          b)
            # In the bytecode stream, the assignment to "a" occurs
            # after the loading of "b".  This works with the C Python
            # compiler because it only generates a SET_LINENO instruction
            # for the assignment.
            if line >= 0:
                push = self.lnotab.append
                # split deltas larger than 255 over multiple entries
                while addr > 255:
                    push(255); push(0)
                    addr -= 255
                while line > 255:
                    push(addr); push(255)
                    line -= 255
                    addr = 0
                if addr > 0 or line > 0:
                    push(addr); push(line)
                self.lastline = lineno
                self.lastoff = self.codeOffset
    def getCode(self):
        return ''.join(self.code)
    def getTable(self):
        return ''.join(map(chr, self.lnotab))
class StackDepthTracker:
    # XXX 1. need to keep track of stack depth on jumps
    # XXX 2. at least partly as a result, this code is broken
    def findDepth(self, insts, debug=0):
        """Estimate the maximum stack depth over an instruction list.

        Per-opcode effects come from the `effect` table, then the prefix
        `patterns`, then a same-named method computing the effect from
        the oparg; unknown opcodes leave the depth unchanged.
        """
        depth = 0
        maxDepth = 0
        for i in insts:
            opname = i[0]
            if debug:
                print i,
            delta = self.effect.get(opname, None)
            if delta is not None:
                depth = depth + delta
            else:
                # now check patterns
                for pat, pat_delta in self.patterns:
                    if opname[:len(pat)] == pat:
                        delta = pat_delta
                        depth = depth + delta
                        break
                # if we still haven't found a match
                if delta is None:
                    meth = getattr(self, opname, None)
                    if meth is not None:
                        depth = depth + meth(i[1])
            if depth > maxDepth:
                maxDepth = depth
            if debug:
                print depth, maxDepth
        return maxDepth
    # fixed per-opcode stack effects
    effect = {
        'POP_TOP': -1,
        'DUP_TOP': 1,
        'SLICE+1': -1,
        'SLICE+2': -1,
        'SLICE+3': -2,
        'STORE_SLICE+0': -1,
        'STORE_SLICE+1': -2,
        'STORE_SLICE+2': -2,
        'STORE_SLICE+3': -3,
        'DELETE_SLICE+0': -1,
        'DELETE_SLICE+1': -2,
        'DELETE_SLICE+2': -2,
        'DELETE_SLICE+3': -3,
        'STORE_SUBSCR': -3,
        'DELETE_SUBSCR': -2,
        # PRINT_EXPR?
        'PRINT_ITEM': -1,
        'RETURN_VALUE': -1,
        'YIELD_VALUE': -1,
        'EXEC_STMT': -3,
        'BUILD_CLASS': -2,
        'STORE_NAME': -1,
        'STORE_ATTR': -2,
        'DELETE_ATTR': -1,
        'STORE_GLOBAL': -1,
        'BUILD_MAP': 1,
        'COMPARE_OP': -1,
        'STORE_FAST': -1,
        'IMPORT_STAR': -1,
        'IMPORT_NAME': 0,
        'IMPORT_FROM': 1,
        'LOAD_ATTR': 0, # unlike other loads
        # close enough...
        'SETUP_EXCEPT': 3,
        'SETUP_FINALLY': 3,
        'FOR_ITER': 1,
        }
    # use pattern match
    patterns = [
        ('BINARY_', -1),
        ('LOAD_', 1),
        ]
    # The methods below compute an opcode's effect from its argument.
    def UNPACK_SEQUENCE(self, count):
        return count-1
    def BUILD_TUPLE(self, count):
        return -count+1
    def BUILD_LIST(self, count):
        return -count+1
    def CALL_FUNCTION(self, argc):
        # low byte = positional args, high byte = keyword (name+value) pairs
        hi, lo = divmod(argc, 256)
        return -(lo + hi * 2)
    def CALL_FUNCTION_VAR(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_KW(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_VAR_KW(self, argc):
        return self.CALL_FUNCTION(argc)-2
    def MAKE_FUNCTION(self, argc):
        return -argc
    def MAKE_CLOSURE(self, argc):
        # XXX need to account for free variables too!
        return -argc
    def BUILD_SLICE(self, argc):
        if argc == 2:
            return -1
        elif argc == 3:
            return -2
    def DUP_TOPX(self, argc):
        return argc
findDepth = StackDepthTracker().findDepth
| Python |
"""Package for parsing and compiling Python source code
There are several functions defined at the top level that are imported
from modules contained in the package.
parse(buf, mode="exec") -> AST
Converts a string containing Python source code to an abstract
syntax tree (AST). The AST is defined in compiler.ast.
parseFile(path) -> AST
The same as parse(open(path))
walk(ast, visitor, verbose=None)
Does a pre-order walk over the ast using the visitor instance.
See compiler.visitor for details.
compile(source, filename, mode, flags=None, dont_inherit=None)
Returns a code object. A replacement for the builtin compile() function.
compileFile(filename)
Generates a .pyc file by compiling filename.
"""
from transformer import parse, parseFile
from visitor import walk
from pycodegen import compile, compileFile
| Python |
"""Check for errors in the AST.
The Python parser does not catch all syntax errors. Others, like
assignments with invalid targets, are caught in the code generation
phase.
The compiler package catches some errors in the transformer module.
But it seems clearer to write checkers that use the AST to detect
errors.
"""
from pypy.interpreter.stablecompiler import ast, walk
def check(tree, multi=None):
    """Walk tree with a SyntaxErrorChecker and return the error count.

    If multi is not None, errors are printed instead of raised; see
    SyntaxErrorChecker for details.
    """
    checker = SyntaxErrorChecker(multi)
    walk(tree, checker)
    return checker.errors
class SyntaxErrorChecker:
    """A visitor to find syntax errors in the AST."""
    def __init__(self, multi=None):
        """Create new visitor object.
        If optional argument multi is not None, then print messages
        for each error rather than raising a SyntaxError for the
        first.
        """
        self.multi = multi
        self.errors = 0  # number of errors seen so far
    def error(self, node, msg):
        # Record one error; report or raise depending on self.multi.
        self.errors = self.errors + 1
        if self.multi is not None:
            print "%s:%s: %s" % (node.filename, node.lineno, msg)
        else:
            raise SyntaxError, "%s (%s:%s)" % (msg, node.filename, node.lineno)
    def visitAssign(self, node):
        # the transformer module handles many of these
        # (the loop body below is intentionally disabled -- see the
        # commented-out list-comprehension-target check)
        for target in node.nodes:
            pass
##            if isinstance(target, ast.AssList):
##                if target.lineno is None:
##                    target.lineno = node.lineno
##                self.error(target, "can't assign to list comprehension")
| Python |
"""Module symbol-table generator"""
from pypy.interpreter.stablecompiler import ast
from pypy.interpreter.stablecompiler.consts import SC_LOCAL, SC_GLOBAL, \
SC_FREE, SC_CELL, SC_UNKNOWN, SC_DEFAULT
from pypy.interpreter.stablecompiler.misc import mangle
import types
import sys
MANGLE_LEN = 256
class Scope:
    """Records name binding/usage information for one lexical scope.

    Names are tracked in several dicts used as sets (value always 1):
    defs, uses, globals, params, frees, cells.  check_name() classifies
    a name into one of the SC_* constants based on these sets.
    """
    # False once a bare exec or import * makes the local namespace
    # unknowable at compile time (see SymbolVisitor.visitExec/visitFrom).
    localsfullyknown = True
    # XXX how much information do I need about each name?
    def __init__(self, name, module, klass=None):
        # name   -- scope name (function/class name, or "global")
        # module -- the enclosing ModuleScope
        # klass  -- enclosing class name, used for private-name mangling
        self.name = name
        self.module = module
        self.defs = {}
        self.uses = {}
        self.globals = {}
        self.params = {}
        self.frees = {}
        self.hasbeenfree = {}
        self.cells = {}
        self.children = []
        # nested is true if the class could contain free variables,
        # i.e. if it is nested within another function.
        self.nested = None
        self.generator = None
        self.klass = None
        if klass is not None:
            # Strip leading underscores, mirroring CPython's mangling rule.
            for i in range(len(klass)):
                if klass[i] != '_':
                    self.klass = klass[i:]
                    break
    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.name)
    def mangle(self, name):
        # Apply private-name mangling when inside a class body.
        if self.klass is None:
            return name
        return mangle(name, self.klass)
    def add_def(self, name):
        # Record a binding occurrence of name.
        self.defs[self.mangle(name)] = 1
    def add_use(self, name):
        # Record a reference to name.
        self.uses[self.mangle(name)] = 1
    def add_global(self, name):
        # Handle a "global name" declaration in this scope.
        name = self.mangle(name)
        if self.uses.has_key(name) or self.defs.has_key(name):
            pass # XXX warn about global following def/use
        if self.params.has_key(name):
            raise SyntaxError, "%s in %s is global and parameter" % \
                  (name, self.name)
        self.globals[name] = 1
        # A global declaration also makes the name a module-level def.
        self.module.add_def(name)
    def add_param(self, name):
        # Formal parameters are both defs and params.
        name = self.mangle(name)
        self.defs[name] = 1
        self.params[name] = 1
    def get_names(self):
        # All names mentioned in this scope (defs, uses and globals).
        d = {}
        d.update(self.defs)
        d.update(self.uses)
        d.update(self.globals)
        return d.keys()
    def add_child(self, child):
        self.children.append(child)
    def get_children(self):
        return self.children
    def DEBUG(self):
        # Dump this scope's name sets to stderr (debugging aid only).
        print >> sys.stderr, self.name, self.nested and "nested" or ""
        print >> sys.stderr, "\tglobals: ", self.globals
        print >> sys.stderr, "\tcells: ", self.cells
        print >> sys.stderr, "\tdefs: ", self.defs
        print >> sys.stderr, "\tuses: ", self.uses
        print >> sys.stderr, "\tfrees:", self.frees
    def check_name(self, name):
        """Return scope of name.
        The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.
        """
        if self.globals.has_key(name):
            return SC_GLOBAL
        if self.cells.has_key(name):
            return SC_CELL
        if self.defs.has_key(name):
            return SC_LOCAL
        if self.nested and (self.frees.has_key(name) or
                            self.uses.has_key(name)):
            return SC_FREE
        if self.nested:
            return SC_UNKNOWN
        else:
            return SC_DEFAULT
    def get_free_vars(self):
        # Names used here but bound neither locally nor globally are
        # free; only a nested scope can have free variables.
        if not self.nested:
            return ()
        free = {}
        free.update(self.frees)
        for name in self.uses.keys():
            if not (self.defs.has_key(name) or
                    self.globals.has_key(name)):
                free[name] = 1
        self.hasbeenfree.update(free)
        return free.keys()
    def handle_children(self):
        # Resolve each child's free variables against this scope; names
        # that turn out to be global are pushed back down as globals.
        for child in self.children:
            frees = child.get_free_vars()
            globals = self.add_frees(frees)
            for name in globals:
                child.force_global(name)
    def force_global(self, name):
        """Force name to be global in scope.
        Some child of the current node had a free reference to name.
        When the child was processed, it was labelled a free
        variable.  Now that all its enclosing scopes have been
        processed, the name is known to be a global or builtin.  So
        walk back down the child chain and set the name to be global
        rather than free.
        Be careful to stop if a child does not think the name is
        free.
        """
        if name not in self.defs:
            self.globals[name] = 1
        if self.frees.has_key(name):
            del self.frees[name]
        for child in self.children:
            if child.check_name(name) == SC_FREE:
                child.force_global(name)
    def add_frees(self, names):
        """Process list of free vars from nested scope.
        Returns a list of names that are either 1) declared global in the
        parent or 2) undefined in a top-level parent.  In either case,
        the nested scope should treat them as globals.
        """
        child_globals = []
        for name in names:
            sc = self.check_name(name)
            if self.nested:
                if sc == SC_UNKNOWN or sc == SC_FREE \
                   or isinstance(self, ClassScope):
                    # Pass the name further up as a free variable.
                    self.frees[name] = 1
                elif sc == SC_GLOBAL:
                    child_globals.append(name)
                elif isinstance(self, FunctionScope) and sc == SC_LOCAL:
                    # This scope supplies the binding: promote to cell.
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
            else:
                if sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
        return child_globals
    def get_cell_vars(self):
        return self.cells.keys()
class ModuleScope(Scope):
    """Scope for the top level of a module."""

    def __init__(self):
        # The module scope is its own enclosing "module".
        Scope.__init__(self, "global", self)
class FunctionScope(Scope):
    # No behavior beyond the generic Scope; the subclass exists so that
    # isinstance() checks can distinguish function scopes from others.
    pass
class GenExprScope(Scope):
    """Scope for a generator expression (behaves like a nested function)."""

    __super_init = Scope.__init__

    # Serial number used to give each generator expression a unique name.
    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        self.__counter += 1
        self.__super_init("generator expression<%d>" % i, module, klass)
        # The outermost iterable is evaluated in the enclosing scope and
        # passed in through this hidden parameter.
        self.add_param('[outmost-iterable]')

    def get_names(self):
        # Bug fix: Scope.get_names is an instance method and must be
        # called with self -- the old unbound call Scope.get_names()
        # raised a TypeError.
        keys = Scope.get_names(self)
        return keys
class LambdaScope(FunctionScope):
    """Function-like scope for a lambda expression."""

    # Serial number used to give each lambda a unique scope name.
    __counter = 1

    def __init__(self, module, klass=None):
        serial = self.__counter
        self.__counter += 1
        Scope.__init__(self, "lambda.%d" % serial, module, klass)
class ClassScope(Scope):
    """Scope for a class body; the class name doubles as the mangling key."""

    def __init__(self, name, module):
        Scope.__init__(self, name, module, name)
class SymbolVisitor:
    """Walk an AST and build a Scope object for every scope-creating node.

    After the walk, self.scopes maps each Module, Function, Class,
    Lambda and GenExpr node to its Scope.
    """
    def __init__(self):
        self.scopes = {}
        self.klass = None  # name of enclosing class, for name mangling
    # nodes that define new scopes
    def visitModule(self, node):
        scope = self.module = self.scopes[node] = ModuleScope()
        self.visit(node.node, scope)
    visitExpression = visitModule
    def visitFunction(self, node, parent):
        if node.decorators:
            self.visit(node.decorators, parent)
        # The function name binds in the enclosing scope; default
        # expressions are evaluated there too.
        parent.add_def(node.name)
        for n in node.defaults:
            self.visit(n, parent)
        scope = FunctionScope(node.name, self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)
    def visitExec(self, node, parent):
        if not (node.globals or node.locals):
            parent.localsfullyknown = False # bare exec statement
        for child in node.getChildNodes():
            self.visit(child, parent)
    def visitGenExpr(self, node, parent):
        scope = GenExprScope(self.module, self.klass);
        if parent.nested or isinstance(parent, FunctionScope) \
                or isinstance(parent, GenExprScope):
            scope.nested = 1
        self.scopes[node] = scope
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)
    def visitGenExprInner(self, node, scope):
        for genfor in node.quals:
            self.visit(genfor, scope)
        self.visit(node.expr, scope)
    def visitGenExprFor(self, node, scope):
        # The assignment target is a binding occurrence.
        self.visit(node.assign, scope, 1)
        self.visit(node.iter, scope)
        for if_ in node.ifs:
            self.visit(if_, scope)
    def visitGenExprIf(self, node, scope):
        self.visit(node.test, scope)
    def visitLambda(self, node, parent, assign=0):
        # Lambda is an expression, so it could appear in an expression
        # context where assign is passed.  The transformer should catch
        # any code that has a lambda on the left-hand side.
        assert not assign
        for n in node.defaults:
            self.visit(n, parent)
        scope = LambdaScope(self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)
    def _do_args(self, scope, args):
        # Register formal parameters, flattening tuple arguments.
        for name in args:
            if type(name) == types.TupleType:
                self._do_args(scope, name)
            else:
                scope.add_param(name)
    def handle_free_vars(self, scope, parent):
        parent.add_child(scope)
        scope.handle_children()
    def visitClass(self, node, parent):
        # The class name binds in the enclosing scope; base-class
        # expressions are evaluated there as well.
        parent.add_def(node.name)
        for n in node.bases:
            self.visit(n, parent)
        scope = ClassScope(node.name, self.module)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        if node.doc is not None:
            scope.add_def('__doc__')
        scope.add_def('__module__')
        self.scopes[node] = scope
        # Track the class name while visiting the body, for mangling.
        prev = self.klass
        self.klass = node.name
        self.visit(node.code, scope)
        self.klass = prev
        self.handle_free_vars(scope, parent)
    # name can be a def or a use
    # XXX a few calls and nodes expect a third "assign" arg that is
    # true if the name is being used as an assignment.  only
    # expressions contained within statements may have the assign arg.
    def visitName(self, node, scope, assign=0):
        if assign:
            scope.add_def(node.name)
        else:
            scope.add_use(node.name)
    # operations that bind new names
    def visitFor(self, node, scope):
        self.visit(node.assign, scope, 1)
        self.visit(node.list, scope)
        self.visit(node.body, scope)
        if node.else_:
            self.visit(node.else_, scope)
    def visitFrom(self, node, scope):
        for name, asname in node.names:
            if name == "*":
                # import * makes the local namespace unknowable.
                scope.localsfullyknown = False
                continue
            scope.add_def(asname or name)
    def visitImport(self, node, scope):
        for name, asname in node.names:
            # "import a.b" binds only the top-level package name.
            i = name.find(".")
            if i > -1:
                name = name[:i]
            scope.add_def(asname or name)
    def visitGlobal(self, node, scope):
        for name in node.names:
            scope.add_global(name)
    def visitAssign(self, node, scope):
        """Propagate assignment flag down to child nodes.
        The Assign node doesn't itself contains the variables being
        assigned to.  Instead, the children in node.nodes are visited
        with the assign flag set to true.  When the names occur in
        those nodes, they are marked as defs.
        Some names that occur in an assignment target are not bound by
        the assignment, e.g. a name occurring inside a slice.  The
        visitor handles these nodes specially; they do not propagate
        the assign flag to their children.
        """
        for n in node.nodes:
            self.visit(n, scope, 1)
        self.visit(node.expr, scope)
    def visitAssName(self, node, scope, assign=1):
        scope.add_def(node.name)
    def visitAssAttr(self, node, scope, assign=0):
        # Attribute assignment binds no name; the base expression is a use.
        self.visit(node.expr, scope, 0)
    def visitSubscript(self, node, scope, assign=0):
        # Subscript assignment binds no name either.
        self.visit(node.expr, scope, 0)
        for n in node.subs:
            self.visit(n, scope, 0)
    def visitSlice(self, node, scope, assign=0):
        self.visit(node.expr, scope, 0)
        if node.lower:
            self.visit(node.lower, scope, 0)
        if node.upper:
            self.visit(node.upper, scope, 0)
    def visitAugAssign(self, node, scope):
        # If the LHS is a name, then this counts as assignment.
        # Otherwise, it's just use.
        self.visit(node.node, scope)
        if isinstance(node.node, ast.Name):
            self.visit(node.node, scope, 1) # XXX worry about this
        self.visit(node.expr, scope)
    # prune if statements if tests are false
    _const_types = types.StringType, types.IntType, types.FloatType
    def visitIf(self, node, scope):
        for test, body in node.tests:
            if isinstance(test, ast.Const):
                if type(test.value) in self._const_types:
                    if not test.value:
                        # Constant-false branch: skip it entirely.
                        continue
            self.visit(test, scope)
            self.visit(body, scope)
        if node.else_:
            self.visit(node.else_, scope)
    # a yield statement signals a generator
    def visitYield(self, node, scope):
        scope.generator = 1
        self.visit(node.value, scope)
def sort(l):
    """Return a sorted copy of l, leaving the original untouched."""
    copy = l[:]
    copy.sort()
    return copy
def list_eq(l1, l2):
    """True when l1 and l2 hold the same elements, ignoring order."""
    lhs = sort(l1)
    rhs = sort(l2)
    return lhs == rhs
if __name__ == "__main__":
    # Self-test: compare this module's scope analysis against the
    # stdlib symtable module for each file given on the command line.
    import sys
    from pypy.interpreter.stablecompiler import parseFile, walk
    import symtable
    def get_names(syms):
        # Names from a symtable namespace, minus compiler-generated
        # ones (list-comp temporaries '_[n]' and '.'-prefixed args).
        return [s for s in [s.get_name() for s in syms.get_symbols()]
                if not (s.startswith('_[') or s.startswith('.'))]
    for file in sys.argv[1:]:
        print file
        f = open(file)
        buf = f.read()
        f.close()
        syms = symtable.symtable(buf, file, "exec")
        mod_names = get_names(syms)
        tree = parseFile(file)
        s = SymbolVisitor()
        walk(tree, s)
        # compare module-level symbols
        names2 = s.scopes[tree].get_names()
        if not list_eq(mod_names, names2):
            print
            print "oops", file
            print sort(mod_names)
            print sort(names2)
            sys.exit(-1)
        # Now compare each nested namespace against the matching Scope.
        d = {}
        d.update(s.scopes)
        del d[tree]
        scopes = d.values()
        del d
        for s in syms.get_symbols():
            if s.is_namespace():
                l = [sc for sc in scopes
                     if sc.name == s.get_name()]
                if len(l) > 1:
                    # Ambiguous: several scopes share this name.
                    print "skipping", s.get_name()
                else:
                    if not list_eq(get_names(s.get_namespace()),
                                   l[0].get_names()):
                        print s.get_name()
                        print sort(get_names(s.get_namespace()))
                        print sort(l[0].get_names())
                        sys.exit(-1)
| Python |
import imp
import os
import marshal
import struct
import sys
import types
from cStringIO import StringIO
from pypy.interpreter.stablecompiler import ast, parse, walk, syntax
from pypy.interpreter.stablecompiler import pyassem, misc, future, symbols
from pypy.interpreter.stablecompiler.consts import SC_LOCAL, SC_GLOBAL, \
SC_FREE, SC_CELL, SC_DEFAULT, OP_APPLY, OP_DELETE, OP_ASSIGN
from pypy.interpreter.stablecompiler.consts import CO_VARARGS, CO_VARKEYWORDS, \
CO_NEWLOCALS, CO_NESTED, CO_GENERATOR, CO_GENERATOR_ALLOWED, CO_FUTURE_DIVISION
from pypy.interpreter.stablecompiler.pyassem import TupleArg
# XXX The version-specific code can go, since this code only works with 2.x.
# Do we have Python 1.x or Python 2.x?
try:
    VERSION = sys.version_info[0]
except AttributeError:
    # sys.version_info was added in Python 2.0.
    VERSION = 1
# Maps (has *args, has **kwargs) to the opcode emitted for the call.
callfunc_opcode_info = {
    # (Have *args, Have **args) : opcode
    (0,0) : "CALL_FUNCTION",
    (1,0) : "CALL_FUNCTION_VAR",
    (0,1) : "CALL_FUNCTION_KW",
    (1,1) : "CALL_FUNCTION_VAR_KW",
}
# Kinds of blocks pushed on CodeGenerator.setups (see visitContinue).
LOOP = 1
EXCEPT = 2
TRY_FINALLY = 3
END_FINALLY = 4
def compileFile(filename, display=0):
    """Compile filename and write the result to filename + "c".

    Universal-newline mode is used when reading the source.  If display
    is true, the parsed tree is pretty-printed during compilation.
    """
    source_file = open(filename, 'U')
    buf = source_file.read()
    source_file.close()
    mod = Module(buf, filename)
    try:
        mod.compile(display)
    except SyntaxError:
        raise
    else:
        out = open(filename + "c", "wb")
        mod.dump(out)
        out.close()
def compile(source, filename, mode, flags=None, dont_inherit=None):
    """Replacement for builtin compile() function"""
    if flags is not None or dont_inherit is not None:
        raise RuntimeError("not implemented yet")
    # Dispatch on mode to the matching code-generation driver.
    factories = {"single": Interactive, "exec": Module, "eval": Expression}
    try:
        factory = factories[mode]
    except KeyError:
        raise ValueError("compile() 3rd arg must be 'exec' or "
                         "'eval' or 'single'")
    gen = factory(source, filename)
    gen.compile()
    return gen.code
class AbstractCompileMode:
    """Shared driver for the three compile modes (exec/eval/single).

    Subclasses set the class attribute `mode` and override compile()
    to produce a code object, storing it in self.code.
    """

    mode = None  # defined by subclass

    def __init__(self, source, filename):
        self.filename = filename
        self.source = source
        self.code = None

    def _get_tree(self):
        # Parse, tag every node with the filename, then run the
        # post-parse syntax checks before handing back the tree.
        tree = parse(self.source, self.mode, self.filename)
        misc.set_filename(self.filename, tree)
        syntax.check(tree)
        return tree

    def compile(self):
        pass  # implemented by subclass

    def getCode(self):
        return self.code
class Expression(AbstractCompileMode):
    """Compile mode for eval(): a single expression."""

    mode = "eval"

    def compile(self):
        generator = ExpressionCodeGenerator(self._get_tree())
        self.code = generator.getCode()
class Interactive(AbstractCompileMode):
    """Compile mode for a single interactive statement."""

    mode = "single"

    def compile(self):
        generator = InteractiveCodeGenerator(self._get_tree())
        self.code = generator.getCode()
class Module(AbstractCompileMode):
    """Compile mode for exec: a whole module, dumpable as a .pyc."""

    mode = "exec"

    def compile(self, display=0):
        tree = self._get_tree()
        gen = ModuleCodeGenerator(tree)
        if display:
            import pprint
            # Bug fix: pprint.pprint() prints and returns None, so the
            # old "print pprint.pprint(tree)" emitted a stray "None".
            pprint.pprint(tree)
        self.code = gen.getCode()

    def dump(self, f):
        """Write a .pyc image (magic + mtime header, marshalled code) to f."""
        f.write(self.getPycHeader())
        marshal.dump(self.code, f)

    MAGIC = imp.get_magic()

    def getPycHeader(self):
        # compile.c uses marshal to write a long directly, with
        # calling the interface that would also generate a 1-byte code
        # to indicate the type of the value.  simplest way to get the
        # same effect is to call marshal and then skip the code.
        mtime = os.path.getmtime(self.filename)
        # getmtime() may return a float; struct's 'i' wants a real int.
        mtime = struct.pack('<i', int(mtime))
        return self.MAGIC + mtime
class LocalNameFinder:
    """Visitor that collects the names bound locally in one scope."""

    def __init__(self, names=()):
        self.names = misc.Set()
        self.globals = misc.Set()
        for name in names:
            self.names.add(name)

    # XXX list comprehensions and for loops

    def getLocals(self):
        # A global declaration trumps any local binding of the same name.
        for name in self.globals.elements():
            if self.names.has_elt(name):
                self.names.remove(name)
        return self.names

    def visitDict(self, node):
        # Dict displays bind nothing; do not descend.
        pass

    def visitGlobal(self, node):
        for name in node.names:
            self.globals.add(name)

    def visitFunction(self, node):
        # A def binds the function name; its body is a separate scope.
        self.names.add(node.name)

    def visitLambda(self, node):
        # A lambda body is a separate scope; nothing binds here.
        pass

    def visitImport(self, node):
        for name, alias in node.names:
            self.names.add(alias or name)

    def visitFrom(self, node):
        for name, alias in node.names:
            self.names.add(alias or name)

    def visitClass(self, node):
        # A class statement binds the class name only.
        self.names.add(node.name)

    def visitAssName(self, node):
        self.names.add(node.name)
def is_constant_false(node):
    """Return 1 if node is a Const whose value is false, else 0."""
    if isinstance(node, ast.Const) and not node.value:
        return 1
    return 0
class CodeGenerator:
"""Defines basic code generator for Python bytecode
This class is an abstract base class. Concrete subclasses must
define an __init__() that defines self.graph and then calls the
__init__() defined in this class.
The concrete class must also define the class attributes
NameFinder, FunctionGen, and ClassGen. These attributes can be
defined in the initClass() method, which is a hook for
initializing these methods after all the classes have been
defined.
"""
scopeambiguity = False
parentscopeambiguity = False
optimized = 0 # is namespace access optimized?
__initialized = None
class_name = None # provide default for instance variable
def __init__(self):
if self.__initialized is None:
self.initClass()
self.__class__.__initialized = 1
self.checkClass()
self.locals = misc.Stack()
self.setups = misc.Stack()
self.last_lineno = None
self._setupGraphDelegation()
self._div_op = "BINARY_DIVIDE"
# XXX set flags based on future features
futures = self.get_module().futures
for feature in futures:
if feature == "division":
self.graph.setFlag(CO_FUTURE_DIVISION)
self._div_op = "BINARY_TRUE_DIVIDE"
elif feature == "generators":
self.graph.setFlag(CO_GENERATOR_ALLOWED)
def initClass(self):
"""This method is called once for each class"""
def checkClass(self):
"""Verify that class is constructed correctly"""
try:
assert hasattr(self, 'graph')
assert getattr(self, 'NameFinder')
assert getattr(self, 'FunctionGen')
assert getattr(self, 'ClassGen')
except AssertionError, msg:
intro = "Bad class construction for %s" % self.__class__.__name__
raise AssertionError, intro
def _setupGraphDelegation(self):
self.emit = self.graph.emit
self.newBlock = self.graph.newBlock
self.startBlock = self.graph.startBlock
self.nextBlock = self.graph.nextBlock
self.setDocstring = self.graph.setDocstring
def getCode(self):
"""Return a code object"""
return self.graph.getCode()
def mangle(self, name):
if self.class_name is not None:
return misc.mangle(name, self.class_name)
else:
return name
def parseSymbols(self, tree):
s = symbols.SymbolVisitor()
walk(tree, s)
return s.scopes
def get_module(self):
raise RuntimeError, "should be implemented by subclasses"
# Next five methods handle name access
def isLocalName(self, name):
return self.locals.top().has_elt(name)
def storeName(self, name):
self._nameOp('STORE', name)
def loadName(self, name):
if (self.scope.nested and self.scopeambiguity and
name in self.scope.hasbeenfree):
raise SyntaxError("cannot reference variable '%s' because "
"of ambiguity between "
"scopes" % name)
self._nameOp('LOAD', name)
def delName(self, name):
scope = self.scope.check_name(name)
if scope == SC_CELL:
raise SyntaxError("can not delete variable '%s' "
"referenced in nested scope" % name)
self._nameOp('DELETE', name)
def _nameOp(self, prefix, name):
name = self.mangle(name)
scope = self.scope.check_name(name)
if scope == SC_LOCAL:
if not self.optimized:
self.emit(prefix + '_NAME', name)
else:
self.emit(prefix + '_FAST', name)
elif scope == SC_GLOBAL:
self.emit(prefix + '_GLOBAL', name)
elif scope == SC_FREE or scope == SC_CELL:
self.emit(prefix + '_DEREF', name)
elif scope == SC_DEFAULT:
if self.optimized and self.localsfullyknown:
self.emit(prefix + '_GLOBAL', name)
else:
self.emit(prefix + '_NAME', name)
else:
raise RuntimeError, "unsupported scope for var %s: %d" % \
(name, scope)
def _implicitNameOp(self, prefix, name):
"""Emit name ops for names generated implicitly by for loops
The interpreter generates names that start with a period or
dollar sign. The symbol table ignores these names because
they aren't present in the program text.
"""
if self.optimized:
self.emit(prefix + '_FAST', name)
else:
self.emit(prefix + '_NAME', name)
# The set_lineno() function and the explicit emit() calls for
# SET_LINENO below are only used to generate the line number table.
# As of Python 2.3, the interpreter does not have a SET_LINENO
# instruction. pyassem treats SET_LINENO opcodes as a special case.
def set_lineno(self, node, force=False):
"""Emit SET_LINENO if necessary.
The instruction is considered necessary if the node has a
lineno attribute and it is different than the last lineno
emitted.
Returns true if SET_LINENO was emitted.
There are no rules for when an AST node should have a lineno
attribute. The transformer and AST code need to be reviewed
and a consistent policy implemented and documented. Until
then, this method works around missing line numbers.
"""
lineno = getattr(node, 'lineno', None)
if lineno is not None and (lineno != self.last_lineno
or force):
self.emit('SET_LINENO', lineno)
self.last_lineno = lineno
return True
return False
    # The first few visitor methods handle nodes that generate new
# code objects. They use class attributes to determine what
# specialized code generators to use.
NameFinder = LocalNameFinder
FunctionGen = None
ClassGen = None
def visitModule(self, node):
self.scopes = self.parseSymbols(node)
self.scope = self.scopes[node]
self.emit('SET_LINENO', 0)
if node.doc:
self.emit('LOAD_CONST', node.doc)
self.storeName('__doc__')
lnf = walk(node.node, self.NameFinder(), verbose=0)
self.locals.push(lnf.getLocals())
self.visit(node.node)
self.emit('LOAD_CONST', None)
self.emit('RETURN_VALUE')
def visitExpression(self, node):
self.set_lineno(node)
self.scopes = self.parseSymbols(node)
self.scope = self.scopes[node]
self.visit(node.node)
self.emit('RETURN_VALUE')
def visitFunction(self, node):
self._visitFuncOrLambda(node, isLambda=0)
if node.doc:
self.setDocstring(node.doc)
self.storeName(node.name)
def visitLambda(self, node):
self._visitFuncOrLambda(node, isLambda=1)
def _visitFuncOrLambda(self, node, isLambda=0):
if not isLambda and node.decorators:
for decorator in node.decorators.nodes:
self.visit(decorator)
ndecorators = len(node.decorators.nodes)
else:
ndecorators = 0
gen = self.FunctionGen(node, self.scopes, isLambda,
self.class_name, self.get_module(),
parentscopeambiguity = self.scopeambiguity or self.parentscopeambiguity)
walk(node.code, gen)
gen.finish()
self.set_lineno(node)
for default in node.defaults:
self.visit(default)
frees = gen.scope.get_free_vars()
if frees:
for name in frees:
self.emit('LOAD_CLOSURE', name)
self.emit('LOAD_CONST', gen)
self.emit('MAKE_CLOSURE', len(node.defaults))
else:
self.emit('LOAD_CONST', gen)
self.emit('MAKE_FUNCTION', len(node.defaults))
for i in range(ndecorators):
self.emit('CALL_FUNCTION', 1)
def visitClass(self, node):
gen = self.ClassGen(node, self.scopes,
self.get_module(),
parentscopeambiguity = self.scopeambiguity or self.parentscopeambiguity)
walk(node.code, gen)
gen.finish()
self.set_lineno(node)
self.emit('LOAD_CONST', node.name)
for base in node.bases:
self.visit(base)
self.emit('BUILD_TUPLE', len(node.bases))
frees = gen.scope.get_free_vars()
for name in frees:
self.emit('LOAD_CLOSURE', name)
self.emit('LOAD_CONST', gen)
if frees:
self.emit('MAKE_CLOSURE', 0)
else:
self.emit('MAKE_FUNCTION', 0)
self.emit('CALL_FUNCTION', 0)
self.emit('BUILD_CLASS')
self.storeName(node.name)
# The rest are standard visitor methods
# The next few implement control-flow statements
def visitIf(self, node):
end = self.newBlock()
numtests = len(node.tests)
for i in range(numtests):
test, suite = node.tests[i]
if is_constant_false(test):
# XXX will need to check generator stuff here
continue
self.set_lineno(test)
self.visit(test)
nextTest = self.newBlock()
self.emit('JUMP_IF_FALSE', nextTest)
self.nextBlock()
self.emit('POP_TOP')
self.visit(suite)
self.emit('JUMP_FORWARD', end)
self.startBlock(nextTest)
self.emit('POP_TOP')
if node.else_:
self.visit(node.else_)
self.nextBlock(end)
def visitWhile(self, node):
self.set_lineno(node)
loop = self.newBlock()
else_ = self.newBlock()
after = self.newBlock()
self.emit('SETUP_LOOP', after)
self.nextBlock(loop)
self.setups.push((LOOP, loop))
self.set_lineno(node, force=True)
self.visit(node.test)
self.emit('JUMP_IF_FALSE', else_ or after)
self.nextBlock()
self.emit('POP_TOP')
self.visit(node.body)
self.emit('JUMP_ABSOLUTE', loop)
self.startBlock(else_) # or just the POPs if not else clause
self.emit('POP_TOP')
self.emit('POP_BLOCK')
self.setups.pop()
if node.else_:
self.visit(node.else_)
self.nextBlock(after)
def visitFor(self, node):
start = self.newBlock()
anchor = self.newBlock()
after = self.newBlock()
self.setups.push((LOOP, start))
self.set_lineno(node)
self.emit('SETUP_LOOP', after)
self.visit(node.list)
self.emit('GET_ITER')
self.nextBlock(start)
self.set_lineno(node, force=1)
self.emit('FOR_ITER', anchor)
self.visit(node.assign)
self.visit(node.body)
self.emit('JUMP_ABSOLUTE', start)
self.nextBlock(anchor)
self.emit('POP_BLOCK')
self.setups.pop()
if node.else_:
self.visit(node.else_)
self.nextBlock(after)
def visitBreak(self, node):
if not self.setups:
raise SyntaxError, "'break' outside loop (%s, %d)" % \
(node.filename, node.lineno)
self.set_lineno(node)
self.emit('BREAK_LOOP')
def visitContinue(self, node):
if not self.setups:
raise SyntaxError, "'continue' not properly in loop" # (%s, %d)" % (node.filename, node.lineno)
kind, block = self.setups.top()
if kind == LOOP:
self.set_lineno(node)
self.emit('JUMP_ABSOLUTE', block)
self.nextBlock()
elif kind == EXCEPT or kind == TRY_FINALLY:
self.set_lineno(node)
# find the block that starts the loop
top = len(self.setups)
while top > 0:
top = top - 1
kind, loop_block = self.setups[top]
if kind == LOOP:
break
if kind != LOOP:
raise SyntaxError, "'continue' not properly in loop" # (%s, %d)" % (node.filename, node.lineno)
self.emit('CONTINUE_LOOP', loop_block)
self.nextBlock()
elif kind == END_FINALLY:
msg = "'continue' not supported inside 'finally' clause" # " (%s, %d)"
raise SyntaxError, msg # % (node.filename, node.lineno)
def visitTest(self, node, jump):
end = self.newBlock()
for child in node.nodes[:-1]:
self.visit(child)
self.emit(jump, end)
self.nextBlock()
self.emit('POP_TOP')
self.visit(node.nodes[-1])
self.nextBlock(end)
def visitAnd(self, node):
self.visitTest(node, 'JUMP_IF_FALSE')
    def visitOr(self, node):
        # Short-circuit 'or': jump past the remaining operands on truth.
        self.visitTest(node, 'JUMP_IF_TRUE')

    def visitCompare(self, node):
        # Chained comparison (a < b < c): each intermediate operand is
        # duplicated and compared; a false result jumps to 'cleanup',
        # which pops the leftover operand off the stack.
        self.visit(node.expr)
        cleanup = self.newBlock()
        for op, code in node.ops[:-1]:
            self.visit(code)
            self.emit('DUP_TOP')
            self.emit('ROT_THREE')
            self.emit('COMPARE_OP', op)
            self.emit('JUMP_IF_FALSE', cleanup)
            self.nextBlock()
            self.emit('POP_TOP')
        # now do the last comparison
        if node.ops:
            op, code = node.ops[-1]
            self.visit(code)
            self.emit('COMPARE_OP', op)
        if len(node.ops) > 1:
            end = self.newBlock()
            self.emit('JUMP_FORWARD', end)
            self.startBlock(cleanup)
            self.emit('ROT_TWO')
            self.emit('POP_TOP')
            self.nextBlock(end)

    # list comprehensions
    __list_count = 0  # suffix for the hidden $append name, so nested comps don't clash

    def visitListComp(self, node):
        self.set_lineno(node)
        # setup list: build [], stash its bound append method in a hidden local
        append = "$append%d" % self.__list_count
        self.__list_count = self.__list_count + 1
        self.emit('BUILD_LIST', 0)
        self.emit('DUP_TOP')
        self.emit('LOAD_ATTR', 'append')
        self._implicitNameOp('STORE', append)

        # open one loop per qualifier; remember (start, cont, anchor) so the
        # loops can be closed innermost-first afterwards
        stack = []
        for i, for_ in zip(range(len(node.quals)), node.quals):
            start, anchor = self.visit(for_)
            cont = None
            for if_ in for_.ifs:
                if cont is None:
                    cont = self.newBlock()
                self.visit(if_, cont)
            stack.insert(0, (start, cont, anchor))

        self._implicitNameOp('LOAD', append)
        self.visit(node.expr)
        self.emit('CALL_FUNCTION', 1)
        self.emit('POP_TOP')

        for start, cont, anchor in stack:
            if cont:
                # 'cont' is where a failed 'if' lands; pop the test value
                skip_one = self.newBlock()
                self.emit('JUMP_FORWARD', skip_one)
                self.startBlock(cont)
                self.emit('POP_TOP')
                self.nextBlock(skip_one)
            self.emit('JUMP_ABSOLUTE', start)
            self.startBlock(anchor)
        self._implicitNameOp('DELETE', append)

        self.__list_count = self.__list_count - 1

    def visitListCompFor(self, node):
        # One 'for' qualifier: returns the loop-head and loop-exit blocks.
        start = self.newBlock()
        anchor = self.newBlock()

        self.visit(node.list)
        self.emit('GET_ITER')
        self.nextBlock(start)
        self.set_lineno(node, force=True)
        self.emit('FOR_ITER', anchor)
        self.nextBlock()
        self.visit(node.assign)
        return start, anchor

    def visitListCompIf(self, node, branch):
        # One 'if' qualifier: jump to 'branch' when the test is false.
        self.set_lineno(node, force=True)
        self.visit(node.test)
        self.emit('JUMP_IF_FALSE', branch)
        self.newBlock()
        self.emit('POP_TOP')

    def visitGenExpr(self, node):
        # Compile the generator body as a nested code object, then call it
        # with the precomputed outermost iterable as its sole argument.
        gen = GenExprCodeGenerator(node, self.scopes, self.class_name,
                                   self.get_module(),
                                   parentscopeambiguity=self.scopeambiguity or self.parentscopeambiguity)
        walk(node.code, gen)
        gen.finish()
        self.set_lineno(node)
        frees = gen.scope.get_free_vars()
        if frees:
            # free variables: build a closure over the enclosing cells
            for name in frees:
                self.emit('LOAD_CLOSURE', name)
            self.emit('LOAD_CONST', gen)
            self.emit('MAKE_CLOSURE', 0)
        else:
            self.emit('LOAD_CONST', gen)
            self.emit('MAKE_FUNCTION', 0)

        # precomputation of outmost iterable
        self.visit(node.code.quals[0].iter)
        self.emit('GET_ITER')
        self.emit('CALL_FUNCTION', 1)

    def visitGenExprInner(self, node):
        # Body of the generator: same loop nesting as a list comprehension,
        # but yields each value instead of appending it.
        self.set_lineno(node)
        # setup list
        stack = []
        for i, for_ in zip(range(len(node.quals)), node.quals):
            start, anchor = self.visit(for_)
            cont = None
            for if_ in for_.ifs:
                if cont is None:
                    cont = self.newBlock()
                self.visit(if_, cont)
            stack.insert(0, (start, cont, anchor))

        self.visit(node.expr)
        self.emit('YIELD_VALUE')

        for start, cont, anchor in stack:
            if cont:
                skip_one = self.newBlock()
                self.emit('JUMP_FORWARD', skip_one)
                self.startBlock(cont)
                self.emit('POP_TOP')
                self.nextBlock(skip_one)
            self.emit('JUMP_ABSOLUTE', start)
            self.startBlock(anchor)
        # implicit 'return None' at the end of the generator
        self.emit('LOAD_CONST', None)

    def visitGenExprFor(self, node):
        start = self.newBlock()
        anchor = self.newBlock()

        if node.is_outmost:
            # the outermost iterable was evaluated at definition time and
            # passed in as the hidden argument
            self.loadName('[outmost-iterable]')
        else:
            self.visit(node.iter)
            self.emit('GET_ITER')

        self.nextBlock(start)
        self.set_lineno(node, force=True)
        self.emit('FOR_ITER', anchor)
        self.nextBlock()
        self.visit(node.assign)
        return start, anchor

    def visitGenExprIf(self, node, branch):
        self.set_lineno(node, force=True)
        self.visit(node.test)
        self.emit('JUMP_IF_FALSE', branch)
        self.newBlock()
        self.emit('POP_TOP')
    # exception related

    def visitAssert(self, node):
        # XXX would be interesting to implement this via a
        # transformation of the AST before this stage
        if __debug__:
            end = self.newBlock()
            self.set_lineno(node)
            # XXX AssertionError appears to be special case -- it is always
            # loaded as a global even if there is a local name.  I guess this
            # is a sort of renaming op.
            self.nextBlock()
            self.visit(node.test)
            self.emit('JUMP_IF_TRUE', end)
            self.nextBlock()
            self.emit('POP_TOP')
            self.emit('LOAD_GLOBAL', 'AssertionError')
            if node.fail:
                # assert test, msg  ->  raise AssertionError, msg
                self.visit(node.fail)
                self.emit('RAISE_VARARGS', 2)
            else:
                self.emit('RAISE_VARARGS', 1)
            self.nextBlock(end)
            self.emit('POP_TOP')

    def visitRaise(self, node):
        # n counts how many of (type, value, traceback) were supplied
        self.set_lineno(node)
        n = 0
        if node.expr1:
            self.visit(node.expr1)
            n = n + 1
        if node.expr2:
            self.visit(node.expr2)
            n = n + 1
        if node.expr3:
            self.visit(node.expr3)
            n = n + 1
        self.emit('RAISE_VARARGS', n)

    def visitTryExcept(self, node):
        body = self.newBlock()
        handlers = self.newBlock()
        end = self.newBlock()
        if node.else_:
            lElse = self.newBlock()
        else:
            lElse = end
        self.set_lineno(node)
        self.emit('SETUP_EXCEPT', handlers)
        self.nextBlock(body)
        self.setups.push((EXCEPT, body))
        self.visit(node.body)
        self.emit('POP_BLOCK')
        self.setups.pop()
        self.emit('JUMP_FORWARD', lElse)
        self.startBlock(handlers)

        # each handler sees (type, value, traceback) on the stack
        last = len(node.handlers) - 1
        for i in range(len(node.handlers)):
            expr, target, body = node.handlers[i]
            self.set_lineno(expr)
            if expr:
                # 'except SomeType:' -- test the exception type
                self.emit('DUP_TOP')
                self.visit(expr)
                self.emit('COMPARE_OP', 'exception match')
                next = self.newBlock()
                self.emit('JUMP_IF_FALSE', next)
                self.nextBlock()
                self.emit('POP_TOP')
            self.emit('POP_TOP')
            if target:
                # bind the exception value to the 'as' target
                self.visit(target)
            else:
                self.emit('POP_TOP')
            self.emit('POP_TOP')
            self.visit(body)
            self.emit('JUMP_FORWARD', end)
            if expr:
                self.nextBlock(next)
            else:
                self.nextBlock()
            if expr: # XXX
                self.emit('POP_TOP')
        # no handler matched: re-raise
        self.emit('END_FINALLY')
        if node.else_:
            self.nextBlock(lElse)
            self.visit(node.else_)
        self.nextBlock(end)

    def visitTryFinally(self, node):
        body = self.newBlock()
        final = self.newBlock()
        self.set_lineno(node)
        self.emit('SETUP_FINALLY', final)
        self.nextBlock(body)
        self.setups.push((TRY_FINALLY, body))
        self.visit(node.body)
        self.emit('POP_BLOCK')
        self.setups.pop()
        # LOAD_CONST None marks normal completion for END_FINALLY
        self.emit('LOAD_CONST', None)
        self.nextBlock(final)
        self.setups.push((END_FINALLY, final))
        self.visit(node.final)
        self.emit('END_FINALLY')
        self.setups.pop()

    # misc

    def visitDiscard(self, node):
        # Important: this function is overridden in InteractiveCodeGenerator,
        # which also has the effect that the following test only occurs in
        # non-'single' modes.
        if isinstance(node.expr, ast.Const):
            return    # skip LOAD_CONST/POP_TOP pairs (for e.g. docstrings)
        self.set_lineno(node)
        self.visit(node.expr)
        self.emit('POP_TOP')

    def visitConst(self, node):
        self.emit('LOAD_CONST', node.value)

    def visitKeyword(self, node):
        # keyword argument: push the name (as const) then the value
        self.emit('LOAD_CONST', node.name)
        self.visit(node.expr)

    def visitGlobal(self, node):
        # no code to generate
        pass

    def visitName(self, node):
        self.set_lineno(node)
        self.loadName(node.name)

    def visitPass(self, node):
        self.set_lineno(node)

    def visitImport(self, node):
        self.set_lineno(node)
        for name, alias in node.names:
            if VERSION > 1:
                # no fromlist for a plain 'import'
                self.emit('LOAD_CONST', None)
            self.emit('IMPORT_NAME', name)
            mod = name.split(".")[0]
            if alias:
                # 'import a.b as c' stores the innermost module
                self._resolveDots(name)
                self.storeName(alias)
            else:
                # 'import a.b' binds only the top-level package 'a'
                self.storeName(mod)
def visitFrom(self, node):
self.set_lineno(node)
fromlist = map(lambda (name, alias): name, node.names)
if VERSION > 1:
self.emit('LOAD_CONST', tuple(fromlist))
self.emit('IMPORT_NAME', node.modname)
for name, alias in node.names:
if VERSION > 1:
if name == '*':
self.namespace = 0
self.emit('IMPORT_STAR')
# There can only be one name w/ from ... import *
assert len(node.names) == 1
return
else:
self.emit('IMPORT_FROM', name)
self._resolveDots(name)
self.storeName(alias or name)
else:
self.emit('IMPORT_FROM', name)
self.emit('POP_TOP')
def _resolveDots(self, name):
elts = name.split(".")
if len(elts) == 1:
return
for elt in elts[1:]:
self.emit('LOAD_ATTR', elt)
    def visitGetattr(self, node):
        self.visit(node.expr)
        self.emit('LOAD_ATTR', self.mangle(node.attrname))

    # next five implement assignments

    def visitAssign(self, node):
        self.set_lineno(node)
        self.visit(node.expr)
        # a = b = c: duplicate the value for every target but the last
        dups = len(node.nodes) - 1
        for i in range(len(node.nodes)):
            elt = node.nodes[i]
            if i < dups:
                self.emit('DUP_TOP')
            if isinstance(elt, ast.Node):
                self.visit(elt)

    def visitAssName(self, node):
        if node.flags == OP_ASSIGN:
            self.storeName(node.name)
        elif node.flags == OP_DELETE:
            self.set_lineno(node)
            self.delName(node.name)
        else:
            print "oops", node.flags

    def visitAssAttr(self, node):
        self.visit(node.expr)
        if node.flags == OP_ASSIGN:
            self.emit('STORE_ATTR', self.mangle(node.attrname))
        elif node.flags == OP_DELETE:
            self.emit('DELETE_ATTR', self.mangle(node.attrname))
        else:
            print "warning: unexpected flags:", node.flags
            print node

    def _visitAssSequence(self, node, op='UNPACK_SEQUENCE'):
        # deletion needs no unpack opcode; just visit the children
        if findOp(node) != OP_DELETE:
            self.emit(op, len(node.nodes))
        for child in node.nodes:
            self.visit(child)

    if VERSION > 1:
        visitAssTuple = _visitAssSequence
        visitAssList = _visitAssSequence
    else:
        def visitAssTuple(self, node):
            self._visitAssSequence(node, 'UNPACK_TUPLE')

        def visitAssList(self, node):
            self._visitAssSequence(node, 'UNPACK_LIST')

    # augmented assignment

    def visitAugAssign(self, node):
        # the target is visited twice (load then store) via Delegator wrappers
        self.set_lineno(node)
        aug_node = wrap_aug(node.node)
        self.visit(aug_node, "load")
        self.visit(node.expr)
        self.emit(self._augmented_opcode[node.op])
        self.visit(aug_node, "store")

    # maps the augmented-assignment operator to its INPLACE_* opcode
    _augmented_opcode = {
        '+=' : 'INPLACE_ADD',
        '-=' : 'INPLACE_SUBTRACT',
        '*=' : 'INPLACE_MULTIPLY',
        '/=' : 'INPLACE_DIVIDE',
        '//=': 'INPLACE_FLOOR_DIVIDE',
        '%=' : 'INPLACE_MODULO',
        '**=': 'INPLACE_POWER',
        '>>=': 'INPLACE_RSHIFT',
        '<<=': 'INPLACE_LSHIFT',
        '&=' : 'INPLACE_AND',
        '^=' : 'INPLACE_XOR',
        '|=' : 'INPLACE_OR',
        }

    def visitAugName(self, node, mode):
        if mode == "load":
            self.loadName(node.name)
        elif mode == "store":
            self.storeName(node.name)

    def visitAugGetattr(self, node, mode):
        if mode == "load":
            # keep the object around (DUP_TOP) for the later STORE_ATTR
            self.visit(node.expr)
            self.emit('DUP_TOP')
            self.emit('LOAD_ATTR', self.mangle(node.attrname))
        elif mode == "store":
            self.emit('ROT_TWO')
            self.emit('STORE_ATTR', self.mangle(node.attrname))

    def visitAugSlice(self, node, mode):
        if mode == "load":
            self.visitSlice(node, 1)
        elif mode == "store":
            # 'slice' encodes which bounds are present: bit 1 = lower, bit 2 = upper
            slice = 0
            if node.lower:
                slice = slice | 1
            if node.upper:
                slice = slice | 2
            if slice == 0:
                self.emit('ROT_TWO')
            elif slice == 3:
                self.emit('ROT_FOUR')
            else:
                self.emit('ROT_THREE')
            self.emit('STORE_SLICE+%d' % slice)

    def visitAugSubscript(self, node, mode):
        if len(node.subs) > 1:
            raise SyntaxError, "augmented assignment to tuple is not possible"
        if mode == "load":
            self.visitSubscript(node, 1)
        elif mode == "store":
            self.emit('ROT_THREE')
            self.emit('STORE_SUBSCR')

    def visitExec(self, node):
        # exec expr [in globals [, locals]]
        self.visit(node.expr)
        if node.locals is None:
            self.emit('LOAD_CONST', None)
        else:
            self.visit(node.locals)
        if node.globals is None:
            # missing locals: reuse the globals dict
            self.emit('DUP_TOP')
        else:
            self.visit(node.globals)
        self.emit('EXEC_STMT')

    def visitCallFunc(self, node):
        pos = 0
        kw = 0
        self.set_lineno(node)
        self.visit(node.node)
        for arg in node.args:
            self.visit(arg)
            if isinstance(arg, ast.Keyword):
                kw = kw + 1
            else:
                pos = pos + 1
        if node.star_args is not None:
            self.visit(node.star_args)
        if node.dstar_args is not None:
            self.visit(node.dstar_args)
        have_star = node.star_args is not None
        have_dstar = node.dstar_args is not None
        opcode = callfunc_opcode_info[have_star, have_dstar]
        # the oparg packs kwarg count in the high byte, positional in the low
        self.emit(opcode, kw << 8 | pos)

    def visitPrint(self, node, newline=0):
        self.set_lineno(node)
        if node.dest:
            self.visit(node.dest)
        for child in node.nodes:
            if node.dest:
                # keep the destination file on the stack for each item
                self.emit('DUP_TOP')
            self.visit(child)
            if node.dest:
                self.emit('ROT_TWO')
                self.emit('PRINT_ITEM_TO')
            else:
                self.emit('PRINT_ITEM')
        if node.dest and not newline:
            # visitPrintnl still needs the dest for PRINT_NEWLINE_TO
            self.emit('POP_TOP')

    def visitPrintnl(self, node):
        self.visitPrint(node, newline=1)
        if node.dest:
            self.emit('PRINT_NEWLINE_TO')
        else:
            self.emit('PRINT_NEWLINE')

    def visitReturn(self, node):
        self.set_lineno(node)
        self.visit(node.value)
        self.emit('RETURN_VALUE')

    def visitYield(self, node):
        self.set_lineno(node)
        self.visit(node.value)
        self.emit('YIELD_VALUE')
# slice and subscript stuff
def visitSlice(self, node, aug_flag=None):
# aug_flag is used by visitAugSlice
self.visit(node.expr)
slice = 0
if node.lower:
self.visit(node.lower)
slice = slice | 1
if node.upper:
self.visit(node.upper)
slice = slice | 2
if aug_flag:
if slice == 0:
self.emit('DUP_TOP')
elif slice == 3:
self.emit('DUP_TOPX', 3)
else:
self.emit('DUP_TOPX', 2)
if node.flags == OP_APPLY:
self.emit('SLICE+%d' % slice)
elif node.flags == OP_ASSIGN:
self.emit('STORE_SLICE+%d' % slice)
elif node.flags == OP_DELETE:
self.emit('DELETE_SLICE+%d' % slice)
else:
print "weird slice", node.flags
raise
    def visitSubscript(self, node, aug_flag=None):
        self.visit(node.expr)
        for sub in node.subs:
            self.visit(sub)
        if aug_flag:
            # keep (obj, index) around for the later STORE_SUBSCR
            self.emit('DUP_TOPX', 2)
        if len(node.subs) > 1:
            # multiple subscripts form a tuple index: x[a, b]
            self.emit('BUILD_TUPLE', len(node.subs))
        if node.flags == OP_APPLY:
            self.emit('BINARY_SUBSCR')
        elif node.flags == OP_ASSIGN:
            self.emit('STORE_SUBSCR')
        elif node.flags == OP_DELETE:
            self.emit('DELETE_SUBSCR')

    # binary ops

    def binaryOp(self, node, op):
        # common helper: evaluate both operands, then emit the opcode
        self.visit(node.left)
        self.visit(node.right)
        self.emit(op)

    def visitAdd(self, node):
        return self.binaryOp(node, 'BINARY_ADD')

    def visitSub(self, node):
        return self.binaryOp(node, 'BINARY_SUBTRACT')

    def visitMul(self, node):
        return self.binaryOp(node, 'BINARY_MULTIPLY')

    def visitDiv(self, node):
        # _div_op depends on whether 'from __future__ import division' is active
        return self.binaryOp(node, self._div_op)

    def visitFloorDiv(self, node):
        return self.binaryOp(node, 'BINARY_FLOOR_DIVIDE')

    def visitMod(self, node):
        return self.binaryOp(node, 'BINARY_MODULO')

    def visitPower(self, node):
        return self.binaryOp(node, 'BINARY_POWER')

    def visitLeftShift(self, node):
        return self.binaryOp(node, 'BINARY_LSHIFT')

    def visitRightShift(self, node):
        return self.binaryOp(node, 'BINARY_RSHIFT')

    # unary ops

    def unaryOp(self, node, op):
        self.visit(node.expr)
        self.emit(op)

    def visitInvert(self, node):
        return self.unaryOp(node, 'UNARY_INVERT')

    def visitUnarySub(self, node):
        return self.unaryOp(node, 'UNARY_NEGATIVE')

    def visitUnaryAdd(self, node):
        return self.unaryOp(node, 'UNARY_POSITIVE')

    def visitUnaryInvert(self, node):
        return self.unaryOp(node, 'UNARY_INVERT')

    def visitNot(self, node):
        return self.unaryOp(node, 'UNARY_NOT')

    def visitBackquote(self, node):
        # `x` (repr) expression
        return self.unaryOp(node, 'UNARY_CONVERT')

    # bit ops

    def bitOp(self, nodes, op):
        # n-ary: a & b & c emits the opcode once per extra operand
        self.visit(nodes[0])
        for node in nodes[1:]:
            self.visit(node)
            self.emit(op)

    def visitBitand(self, node):
        return self.bitOp(node.nodes, 'BINARY_AND')

    def visitBitor(self, node):
        return self.bitOp(node.nodes, 'BINARY_OR')

    def visitBitxor(self, node):
        return self.bitOp(node.nodes, 'BINARY_XOR')

    # object constructors

    def visitEllipsis(self, node):
        self.emit('LOAD_CONST', Ellipsis)

    def visitTuple(self, node):
        self.set_lineno(node)
        for elt in node.nodes:
            self.visit(elt)
        self.emit('BUILD_TUPLE', len(node.nodes))

    def visitList(self, node):
        self.set_lineno(node)
        for elt in node.nodes:
            self.visit(elt)
        self.emit('BUILD_LIST', len(node.nodes))

    def visitSliceobj(self, node):
        # extended slice: slice(lower, upper[, step])
        for child in node.nodes:
            self.visit(child)
        self.emit('BUILD_SLICE', len(node.nodes))

    def visitDict(self, node):
        self.set_lineno(node)
        self.emit('BUILD_MAP', 0)
        for k, v in node.items:
            # dict stays on the stack; each entry is stored via STORE_SUBSCR
            self.emit('DUP_TOP')
            self.visit(k)
            self.visit(v)
            self.emit('ROT_THREE')
            self.emit('STORE_SUBSCR')

class NestedScopeMixin:
    """Defines initClass() for nested scoping (Python 2.2-compatible)"""
    def initClass(self):
        self.__class__.NameFinder = LocalNameFinder
        self.__class__.FunctionGen = FunctionCodeGenerator
        self.__class__.ClassGen = ClassCodeGenerator
class ModuleCodeGenerator(NestedScopeMixin, CodeGenerator):
    """Generates the top-level "<module>" code object for a parsed module."""
    __super_init = CodeGenerator.__init__

    scopes = None

    def __init__(self, tree, futures=()):
        # NOTE: the default used to be the mutable list []; an immutable
        # tuple avoids any risk of a shared default being mutated.
        self.graph = pyassem.PyFlowGraph("<module>", tree.filename)
        # future-statements found in the module, plus any extras forced
        # by the caller (deduplicated)
        self.futures = future.find_futures(tree)
        for f in futures:
            if f not in self.futures:
                self.futures.append(f)
        self.__super_init()
        walk(tree, self)

    def get_module(self):
        return self
class ExpressionCodeGenerator(NestedScopeMixin, CodeGenerator):
    """Generates the code object for a single expression ("eval" mode)."""
    __super_init = CodeGenerator.__init__

    scopes = None

    def __init__(self, tree, futures=()):
        # NOTE: the default used to be the mutable list []; an immutable
        # tuple avoids any risk of a shared default being mutated.
        self.graph = pyassem.PyFlowGraph("<expression>", tree.filename)
        self.futures = list(futures)   # private copy, as before (futures[:])
        self.__super_init()
        walk(tree, self)

    def get_module(self):
        return self
class InteractiveCodeGenerator(NestedScopeMixin, CodeGenerator):
    """Generates code for interactive ("single") input.

    Unlike the module generator, expression statements are compiled to
    PRINT_EXPR so their value is displayed (see visitDiscard below).
    """
    __super_init = CodeGenerator.__init__

    scopes = None

    def __init__(self, tree, futures=()):
        # NOTE: the default used to be the mutable list []; an immutable
        # tuple avoids any risk of a shared default being mutated.
        self.graph = pyassem.PyFlowGraph("<interactive>", tree.filename)
        self.futures = future.find_futures(tree)
        for f in futures:
            if f not in self.futures:
                self.futures.append(f)
        self.__super_init()
        self.set_lineno(tree)
        walk(tree, self)
        self.emit('RETURN_VALUE')

    def get_module(self):
        return self

    def visitDiscard(self, node):
        # XXX Discard means it's an expression.  Perhaps this is a bad name.
        self.visit(node.expr)
        self.emit('PRINT_EXPR')
class AbstractFunctionCode:
    """Shared setup for function and lambda code generators.

    Builds the flow graph for the function's code object, records the
    local names, and sets the CO_VARARGS/CO_VARKEYWORDS flags.  Concrete
    subclasses provide super_init, NameFinder and localsfullyknown.
    """
    optimized = 1

    def __init__(self, func, scopes, isLambda, class_name, mod):
        self.class_name = class_name
        self.module = mod
        # NOTE: removed a dead assignment ('klass = FunctionCodeGenerator')
        # that the original made in the lambda branch and never used.
        if isLambda:
            name = "<lambda>"
        else:
            name = func.name
        args, hasTupleArg = generateArgList(func.argnames)
        self.graph = pyassem.PyFlowGraph(name, func.filename, args,
                                         optimized=self.localsfullyknown,
                                         newlocals=1)
        self.isLambda = isLambda
        self.super_init()

        if not isLambda and func.doc:
            self.setDocstring(func.doc)

        lnf = walk(func.code, self.NameFinder(args), verbose=0)
        self.locals.push(lnf.getLocals())
        if func.varargs:
            self.graph.setFlag(CO_VARARGS)
        if func.kwargs:
            self.graph.setFlag(CO_VARKEYWORDS)
        self.set_lineno(func)
        if hasTupleArg:
            self.generateArgUnpack(func.argnames)

    def get_module(self):
        return self.module

    def finish(self):
        self.graph.startExitBlock()
        if not self.isLambda:
            # implicit 'return None' at the end of a function body
            self.emit('LOAD_CONST', None)
        self.emit('RETURN_VALUE')

    def generateArgUnpack(self, args):
        # tuple parameters arrive in hidden locals named '.0', '.2', ...
        for i in range(len(args)):
            arg = args[i]
            if isinstance(arg, tuple):   # was: type(arg) == types.TupleType
                self.emit('LOAD_FAST', '.%d' % (i * 2))
                self.unpackSequence(arg)

    def unpackSequence(self, tup):
        if VERSION > 1:
            self.emit('UNPACK_SEQUENCE', len(tup))
        else:
            self.emit('UNPACK_TUPLE', len(tup))
        for elt in tup:
            if isinstance(elt, tuple):   # nested tuple parameter
                self.unpackSequence(elt)
            else:
                self._nameOp('STORE', elt)

    unpackTuple = unpackSequence
class FunctionCodeGenerator(NestedScopeMixin, AbstractFunctionCode,
                            CodeGenerator):
    super_init = CodeGenerator.__init__ # called by AbstractFunctionCode.__init__

    scopes = None

    __super_init = AbstractFunctionCode.__init__

    def __init__(self, func, scopes, isLambda, class_name, mod, parentscopeambiguity):
        self.scopes = scopes
        self.scope = scopes[func]
        self.localsfullyknown = self.scope.localsfullyknown
        self.parentscopeambiguity = parentscopeambiguity
        # ambiguity (e.g. from 'exec' or 'import *') propagates to children
        self.scopeambiguity = (not self.localsfullyknown or parentscopeambiguity)

        self.__super_init(func, scopes, isLambda, class_name, mod)
        self.graph.setFreeVars(self.scope.get_free_vars())
        self.graph.setCellVars(self.scope.get_cell_vars())
        if self.scope.generator is not None:
            self.graph.setFlag(CO_GENERATOR)

class GenExprCodeGenerator(NestedScopeMixin, AbstractFunctionCode,
                           CodeGenerator):
    super_init = CodeGenerator.__init__ # called by AbstractFunctionCode.__init__

    scopes = None

    __super_init = AbstractFunctionCode.__init__

    def __init__(self, gexp, scopes, class_name, mod, parentscopeambiguity):
        self.scopes = scopes
        self.scope = scopes[gexp]
        self.localsfullyknown = self.scope.localsfullyknown
        self.parentscopeambiguity = parentscopeambiguity
        self.scopeambiguity = (not self.localsfullyknown or parentscopeambiguity)

        # a genexpr compiles like a lambda (isLambda=1) and is always a generator
        self.__super_init(gexp, scopes, 1, class_name, mod)
        self.graph.setFreeVars(self.scope.get_free_vars())
        self.graph.setCellVars(self.scope.get_cell_vars())
        self.graph.setFlag(CO_GENERATOR)

class AbstractClassCode:
    # Shared setup for class-body code objects: unoptimized namespace,
    # CO_NEWLOCALS, and a LOAD_LOCALS/RETURN_VALUE epilogue.

    def __init__(self, klass, scopes, module):
        self.class_name = klass.name
        self.module = module
        self.graph = pyassem.PyFlowGraph(klass.name, klass.filename,
                                           optimized=0, klass=1)
        self.super_init()
        lnf = walk(klass.code, self.NameFinder(), verbose=0)
        self.locals.push(lnf.getLocals())
        self.graph.setFlag(CO_NEWLOCALS)
        if klass.doc:
            self.setDocstring(klass.doc)

    def get_module(self):
        return self.module

    def finish(self):
        self.graph.startExitBlock()
        # the class body returns its namespace dict
        self.emit('LOAD_LOCALS')
        self.emit('RETURN_VALUE')

class ClassCodeGenerator(NestedScopeMixin, AbstractClassCode, CodeGenerator):
    super_init = CodeGenerator.__init__

    scopes = None

    __super_init = AbstractClassCode.__init__

    def __init__(self, klass, scopes, module, parentscopeambiguity):
        self.scopes = scopes
        self.scope = scopes[klass]
        self.parentscopeambiguity = parentscopeambiguity
        self.scopeambiguity = parentscopeambiguity
        self.__super_init(klass, scopes, module)
        self.graph.setFreeVars(self.scope.get_free_vars())
        self.graph.setCellVars(self.scope.get_cell_vars())
        self.set_lineno(klass)
        # set __module__ and (if present) __doc__ in the class namespace
        self.emit("LOAD_GLOBAL", "__name__")
        self.storeName("__module__")
        if klass.doc:
            self.emit("LOAD_CONST", klass.doc)
            self.storeName('__doc__')
def generateArgList(arglist):
    """Generate an arg list marking TupleArgs.

    Returns (names, count) where 'names' is the argument name list with
    each tuple parameter replaced by a TupleArg placeholder (followed by
    the flattened component names) and 'count' is the number of tuple
    parameters found.
    """
    args = []
    extra = []
    count = 0
    for i, elt in enumerate(arglist):
        # isinstance(..., str/tuple) instead of comparing against
        # types.StringType/TupleType -- same semantics, clearer and portable
        if isinstance(elt, str):
            args.append(elt)
        elif isinstance(elt, tuple):
            args.append(TupleArg(i * 2, elt))
            extra.extend(misc.flatten(elt))
            count = count + 1
        else:
            # BUG FIX: the original used 'raise ValueError, "msg", elt',
            # which passes 'elt' as the *traceback* argument of the raise
            # statement and therefore fails with a TypeError.
            raise ValueError("unexpected argument type: %r" % (elt,))
    return args + extra, count
def findOp(node):
    """Find the op (DELETE, LOAD, STORE) in an AssTuple tree"""
    # OpFinder raises if the subtree mixes different ops
    v = OpFinder()
    walk(node, v, verbose=0)
    return v.op
class OpFinder:
    """Collects the single op flag used throughout an assignment-target
    subtree; raises ValueError if the subtree mixes different ops."""

    def __init__(self):
        self.op = None   # the op seen so far, or None before the first node

    def visitAssName(self, node):
        if self.op is None:
            self.op = node.flags
        elif self.op != node.flags:
            # Use the call form of raise: 'raise ValueError, "..."' is
            # Python-2-only syntax; this form behaves identically on 2.x
            # and stays parseable everywhere.
            raise ValueError("mixed ops in stmt")

    visitAssAttr = visitAssName
    visitSubscript = visitAssName
class Delegator:
    """Base class to support delegation for augmented assignment nodes.

    To generate code for augmented assignments, the left-hand expression
    node is visited twice: once with the node's normal visitor method,
    and once with a variant method that emits the assignment code.  A
    Delegator wraps the original AST node so dispatch happens on the
    wrapper's class while every attribute access falls through to the
    wrapped node.
    """

    def __init__(self, obj):
        self.obj = obj

    def __getattr__(self, name):
        # anything not found on the wrapper is looked up on the wrapped node
        return getattr(self.obj, name)
# concrete Delegator subclasses: the class name selects the visitAugXXX method

class AugGetattr(Delegator):
    pass

class AugName(Delegator):
    pass

class AugSlice(Delegator):
    pass

class AugSubscript(Delegator):
    pass

# maps an assignable AST node class to its Aug* wrapper class
wrapper = {
    ast.Getattr: AugGetattr,
    ast.Name: AugName,
    ast.Slice: AugSlice,
    ast.Subscript: AugSubscript,
    }

def wrap_aug(node):
    # wrap an assignment target for the two-pass augmented-assign visit
    return wrapper[node.__class__](node)

if __name__ == "__main__":
    # compile every file given on the command line
    for file in sys.argv[1:]:
        compileFile(file)
| Python |
from pypy.interpreter.stablecompiler import ast
# XXX should probably rename ASTVisitor to ASTWalker
# XXX can it be made even more generic?
class ASTVisitor:
    """Performs a depth-first walk of the AST

    The ASTVisitor will walk the AST, performing either a preorder or
    postorder traversal depending on which method is called.

    methods:
    preorder(tree, visitor)
    postorder(tree, visitor)
        tree: an instance of ast.Node
        visitor: an instance with visitXXX methods

    The ASTVisitor is responsible for walking over the tree in the
    correct order.  For each node, it checks the visitor argument for
    a method named 'visitNodeType' where NodeType is the name of the
    node's class, e.g. Class.  If the method exists, it is called
    with the node as its sole argument.

    The visitor method for a particular node type can control how
    child nodes are visited during a preorder walk.  (It can't control
    the order during a postorder walk, because it is called _after_
    the walk has occurred.)  The ASTVisitor modifies the visitor
    argument by adding a visit method to the visitor; this method can
    be used to visit a child node of arbitrary type.
    """

    VERBOSE = 0

    def __init__(self):
        self.node = None
        self._cache = {}   # node class -> resolved handler

    def default(self, node, *args):
        # fallback handler: simply recurse into the children
        for child in node.getChildNodes():
            self.dispatch(child, *args)

    def dispatch(self, node, *args):
        self.node = node
        klass = node.__class__
        handler = self._cache.get(klass)
        if handler is None:
            # resolve visit<ClassName> on the visitor once, then cache it
            handler = getattr(self.visitor,
                              'visit' + klass.__name__, self.default)
            self._cache[klass] = handler
        return handler(node, *args)

    def preorder(self, tree, visitor, *args):
        """Do preorder walk of tree using visitor"""
        self.visitor = visitor
        # give the visitor a way to recurse into arbitrary children
        visitor.visit = self.dispatch
        self.dispatch(tree, *args) # XXX *args make sense?
class ExampleASTVisitor(ASTVisitor):
    """Prints examples of the nodes that aren't visited

    This visitor-driver is only useful for development, when it's
    helpful to develop a visitor incrementally, and get feedback on what
    you still have to do.
    """
    examples = {}   # node classes already reported, to print each only once

    def dispatch(self, node, *args):
        self.node = node
        meth = self._cache.get(node.__class__, None)
        className = node.__class__.__name__
        if meth is None:
            meth = getattr(self.visitor, 'visit' + className, 0)
            self._cache[node.__class__] = meth
        if self.VERBOSE > 1:
            print "dispatch", className, (meth and meth.__name__ or '')
        if meth:
            meth(node, *args)
        elif self.VERBOSE > 0:
            # no visitor method: dump one example of this node class
            klass = node.__class__
            if not self.examples.has_key(klass):
                self.examples[klass] = klass
                print
                print self.visitor
                print klass
                for attr in dir(node):
                    if attr[0] != '_':
                        print "\t", "%-12.12s" % attr, getattr(node, attr)
                print
            return self.default(node, *args)

# XXX this is an API change
# default walker class, replaceable via the 'walker' argument below

_walker = ASTVisitor
def walk(tree, visitor, walker=None, verbose=None):
    # Walk 'tree' with 'visitor' and return the visitor for chaining.
    if walker is None:
        walker = _walker()
    if verbose is not None:
        walker.VERBOSE = verbose
    walker.preorder(tree, visitor)
    return walker.visitor

def dumpNode(node):
    # debugging helper: print a node's class and public attributes
    print node.__class__
    for attr in dir(node):
        if attr[0] != '_':
            print "\t", "%-10.10s" % attr, getattr(node, attr)
| Python |
from pypy.interpreter import baseobjspace
class PyTraceback(baseobjspace.Wrappable):
    """Traceback object

    Public fields:
     * 'tb_frame'
     * 'tb_lasti'
     * 'tb_lineno'
     * 'tb_next'
    """

    def __init__(self, space, frame, lasti, lineno, next):
        self.space = space
        self.frame = frame
        self.lasti = lasti
        self.lineno = lineno
        self.next = next

    def descr__reduce__(self, space):
        # pickling support: rebuild through _pickle_support.traceback_new
        from pypy.interpreter.mixedmodule import MixedModule
        w_mod    = space.getbuiltinmodule('_pickle_support')
        mod      = space.interp_w(MixedModule, w_mod)
        new_inst = mod.get('traceback_new')
        w        = space.wrap

        tup_base = []
        tup_state = [
            w(self.frame),
            w(self.lasti),
            w(self.lineno),
            w(self.next),
            ]
        nt = space.newtuple
        return nt([new_inst, nt(tup_base), nt(tup_state)])

    def descr__setstate__(self, space, w_args):
        # inverse of descr__reduce__: restore the four fields
        from pypy.interpreter.pyframe import PyFrame
        args_w = space.unpackiterable(w_args)
        w_frame, w_lasti, w_lineno, w_next = args_w
        self.frame = space.interp_w(PyFrame, w_frame)
        self.lasti = space.int_w(w_lasti)
        self.lineno = space.int_w(w_lineno)
        self.next = space.interp_w(PyTraceback, w_next, can_be_None=True)

def record_application_traceback(space, operror, frame, last_instruction):
    # Prepend a traceback entry for 'frame' to the operror's chain,
    # unless the frame belongs to hidden app-level code.
    if frame.pycode.hidden_applevel:
        return
    lineno = offset2lineno(frame.pycode, last_instruction)
    tb = operror.application_traceback
    tb = PyTraceback(space, frame, last_instruction, lineno, tb)
    operror.application_traceback = tb
def offset2lineno(c, stopat):
    """Map the bytecode offset 'stopat' to a source line number, by
    walking the (addr-delta, line-delta) byte pairs of co_lnotab."""
    lnotab = c.co_lnotab
    lineno = c.co_firstlineno
    offset = 0
    for pos in range(0, len(lnotab), 2):
        offset += ord(lnotab[pos])
        if offset > stopat:
            break
        lineno += ord(lnotab[pos + 1])
    return lineno
| Python |
"""
Python-style code objects.
PyCode instances have the same co_xxx arguments as CPython code objects.
The bytecode interpreter itself is implemented by the PyFrame class.
"""
import dis, imp, struct, types
from pypy.interpreter import eval
from pypy.interpreter.error import OperationError
from pypy.interpreter.gateway import NoneNotWrapped
from pypy.interpreter.baseobjspace import ObjSpace, W_Root
from pypy.rlib.rarithmetic import intmask
# helper
def unpack_str_tuple(space, w_str_tuple):
    """Unwrap a wrapped tuple of strings into a plain list of strings."""
    return [space.str_w(w_item)
            for w_item in space.unpackiterable(w_str_tuple)]
# code object constants, for co_flags below
CO_OPTIMIZED = 0x0001      # frame uses fast locals (no dict lookup)
CO_NEWLOCALS = 0x0002      # a fresh locals dict is created for the frame
CO_VARARGS = 0x0004        # function accepts *args
CO_VARKEYWORDS = 0x0008    # function accepts **kwargs
CO_NESTED = 0x0010         # nested function (legacy flag)
CO_GENERATOR = 0x0020      # code object is a generator

# cpython_code_signature helper
def cpython_code_signature(code):
    "([list-of-arg-names], vararg-name-or-None, kwarg-name-or-None)."
    argcount = code.co_argcount
    assert argcount >= 0     # annotator hint
    argnames = list(code.co_varnames[:argcount])
    # the *args / **kwargs names follow the regular args in co_varnames
    if code.co_flags & CO_VARARGS:
        varargname = code.co_varnames[argcount]
        argcount += 1
    else:
        varargname = None
    if code.co_flags & CO_VARKEYWORDS:
        kwargname = code.co_varnames[argcount]
        argcount += 1
    else:
        kwargname = None
    return argnames, varargname, kwargname

# the .pyc magic number of the CPython that is hosting the translation
cpython_magic, = struct.unpack("<i", imp.get_magic())
class PyCode(eval.Code):
"CPython-style code objects."
    def __init__(self, space, argcount, nlocals, stacksize, flags,
                 code, consts, names, varnames, filename,
                 name, firstlineno, lnotab, freevars, cellvars,
                 hidden_applevel=False, magic = 62061 | 0x0a0d0000): # value for Python 2.4.1
        """Initialize a new code object from parameters given by
        the pypy compiler"""
        self.space = space
        eval.Code.__init__(self, name)
        self.co_argcount = argcount
        self.co_nlocals = nlocals
        self.co_stacksize = stacksize
        self.co_flags = flags
        self.co_code = code
        self.co_consts_w = consts          # list of wrapped constants
        # names are interned so equal names share an object (fast lookups)
        self.co_names_w = [space.new_interned_str(aname) for aname in names]
        self.co_varnames = varnames
        self.co_freevars = freevars
        self.co_cellvars = cellvars
        self.co_filename = filename
        self.co_name = name
        self.co_firstlineno = firstlineno
        self.co_lnotab = lnotab
        self.hidden_applevel = hidden_applevel
        self.magic = magic
        self._compute_fastcall()
        self._signature = cpython_code_signature(self)
        # Precompute what arguments need to be copied into cellvars
        self._args_as_cellvars = []

        if self.co_cellvars:
            argcount = self.co_argcount
            assert argcount >= 0     # annotator hint
            if self.co_flags & CO_VARARGS:
                argcount += 1
            if self.co_flags & CO_VARKEYWORDS:
                argcount += 1
            # the first few cell vars could shadow already-set arguments,
            # in the same order as they appear in co_varnames
            argvars  = self.co_varnames
            cellvars = self.co_cellvars
            next     = 0
            nextname = cellvars[0]
            for i in range(argcount):
                if argvars[i] == nextname:
                    # argument i has the same name as the next cell var
                    self._args_as_cellvars.append(i)
                    next += 1
                    try:
                        nextname = cellvars[next]
                    except IndexError:
                        break   # all cell vars initialized this way

    co_names = property(lambda self: [self.space.unwrap(w_name) for w_name in self.co_names_w]) # for trace

    def signature(self):
        # cached (argnames, varargname, kwargname) triple
        return self._signature
    def _from_code(space, code, hidden_applevel=False):
        """ Initialize the code object from a real (CPython) one.
            This is just a hack, until we have our own compile.
            At the moment, we just fake this.
            This method is called by our compile builtin function.
        """
        assert isinstance(code, types.CodeType)
        newconsts_w = []
        for const in code.co_consts:
            if isinstance(const, types.CodeType): # from stable compiler
                # recursively convert nested code objects
                const = PyCode._from_code(space, const, hidden_applevel=hidden_applevel)
            newconsts_w.append(space.wrap(const))
        # stick the underlying CPython magic value, if the code object
        # comes from there
        return PyCode(space, code.co_argcount,
                      code.co_nlocals,
                      code.co_stacksize,
                      code.co_flags,
                      code.co_code,
                      newconsts_w,
                      list(code.co_names),
                      list(code.co_varnames),
                      code.co_filename,
                      code.co_name,
                      code.co_firstlineno,
                      code.co_lnotab,
                      list(code.co_freevars),
                      list(code.co_cellvars),
                      hidden_applevel, cpython_magic)
    _from_code = staticmethod(_from_code)

    def _code_new_w(space, argcount, nlocals, stacksize, flags,
                    code, consts, names, varnames, filename,
                    name, firstlineno, lnotab, freevars, cellvars,
                    hidden_applevel=False):
        """Initialize a new code objects from parameters given by
        the pypy compiler"""
        return PyCode(space, argcount, nlocals, stacksize, flags, code, consts,
                      names, varnames, filename, name, firstlineno, lnotab,
                      freevars, cellvars, hidden_applevel)
    _code_new_w = staticmethod(_code_new_w)

    def _compute_fastcall(self):
        # Speed hack!  do_fastcall is the arg count (0..4) when the
        # specialized fastcall_N entry points may be used, else -1.
        self.do_fastcall = -1
        if not (0 <= self.co_argcount <= 4):
            return
        if self.co_flags & (CO_VARARGS | CO_VARKEYWORDS):
            return
        if self.co_cellvars:
            # an argument shadowed by a cell var needs the slow path
            first_cellvar = self.co_cellvars[0]
            for i in range(self.co_argcount):
                if first_cellvar == self.co_varnames[i]:
                    return

        self.do_fastcall = self.co_argcount
    # fastcall_N: specialized call paths that write the N arguments
    # straight into the frame's fastlocals, skipping argument parsing.
    # Each returns None when this code object doesn't qualify (caller
    # then falls back to the generic path).

    def fastcall_0(self, space, w_func):
        if self.do_fastcall == 0:
            frame = space.createframe(self, w_func.w_func_globals,
                                      w_func.closure)
            return frame.run()
        return None

    def fastcall_1(self, space, w_func, w_arg):
        if self.do_fastcall == 1:
            frame = space.createframe(self, w_func.w_func_globals,
                                      w_func.closure)
            frame.fastlocals_w[0] = w_arg # frame.setfastscope([w_arg])
            return frame.run()
        return None

    def fastcall_2(self, space, w_func, w_arg1, w_arg2):
        if self.do_fastcall == 2:
            frame = space.createframe(self, w_func.w_func_globals,
                                      w_func.closure)
            frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg])
            frame.fastlocals_w[1] = w_arg2
            return frame.run()
        return None

    def fastcall_3(self, space, w_func, w_arg1, w_arg2, w_arg3):
        if self.do_fastcall == 3:
            frame = space.createframe(self, w_func.w_func_globals,
                                      w_func.closure)
            frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg])
            frame.fastlocals_w[1] = w_arg2
            frame.fastlocals_w[2] = w_arg3
            return frame.run()
        return None

    def fastcall_4(self, space, w_func, w_arg1, w_arg2, w_arg3, w_arg4):
        if self.do_fastcall == 4:
            frame = space.createframe(self, w_func.w_func_globals,
                                      w_func.closure)
            frame.fastlocals_w[0] = w_arg1 # frame.setfastscope([w_arg])
            frame.fastlocals_w[1] = w_arg2
            frame.fastlocals_w[2] = w_arg3
            frame.fastlocals_w[3] = w_arg4
            return frame.run()
        return None

    def funcrun(self, func, args):
        # generic call path: parse 'args' into the frame's fastlocals
        frame = self.space.createframe(self, func.w_func_globals,
                                  func.closure)
        sig = self._signature
        # speed hack
        args_matched = args.parse_into_scope(frame.fastlocals_w, func.name,
                                             sig, func.defs_w)
        frame.init_cells()
        return frame.run()

    def getvarnames(self):
        return self.co_varnames

    def getdocstring(self, space):
        if self.co_consts_w:   # it is probably never empty
            # by convention the docstring is the first constant
            return self.co_consts_w[0]
        else:
            return space.w_None

    def getjoinpoints(self):
        """Compute the bytecode positions that are potential join points
        (for FlowObjSpace)"""
        # first approximation
        return dis.findlabels(self.co_code)
def fget_co_consts(space, self):
return space.newtuple(self.co_consts_w)
def fget_co_names(space, self):
    """Getter for the app-level ``co_names`` attribute."""
    names_w = self.co_names_w
    return space.newtuple(names_w)
def fget_co_varnames(space, self):
    """Getter for the app-level ``co_varnames`` attribute: wraps each
    interp-level name and returns them as a wrapped tuple."""
    names_w = []
    for varname in self.co_varnames:
        names_w.append(space.wrap(varname))
    return space.newtuple(names_w)
def fget_co_cellvars(space, self):
    """Getter for the app-level ``co_cellvars`` attribute: wraps each
    interp-level name and returns them as a wrapped tuple."""
    names_w = []
    for cellname in self.co_cellvars:
        names_w.append(space.wrap(cellname))
    return space.newtuple(names_w)
def fget_co_freevars(space, self):
    """Getter for the app-level ``co_freevars`` attribute: wraps each
    interp-level name and returns them as a wrapped tuple."""
    names_w = []
    for freename in self.co_freevars:
        names_w.append(space.wrap(freename))
    return space.newtuple(names_w)
def descr_code__eq__(self, w_other):
    """Implement app-level ``code == other``.

    Returns w_False unless 'w_other' unwraps to a PyCode whose
    unwrapped scalar fields all match; the wrapped consts and names
    are then compared element-wise with space.eq_w().  Returns a
    wrapped bool (w_True/w_False).
    """
    space = self.space
    other = space.interpclass_w(w_other)
    if not isinstance(other, PyCode):
        return space.w_False
    # cheap unwrapped comparisons first; for the wrapped lists only
    # the lengths are checked here — their contents are compared below
    areEqual = (self.co_name == other.co_name and
                self.co_argcount == other.co_argcount and
                self.co_nlocals == other.co_nlocals and
                self.co_flags == other.co_flags and
                self.co_firstlineno == other.co_firstlineno and
                self.co_code == other.co_code and
                len(self.co_consts_w) == len(other.co_consts_w) and
                len(self.co_names_w) == len(other.co_names_w) and
                self.co_varnames == other.co_varnames and
                self.co_freevars == other.co_freevars and
                self.co_cellvars == other.co_cellvars)
    if not areEqual:
        return space.w_False
    # element-wise comparison of the wrapped names and consts
    for i in range(len(self.co_names_w)):
        if not space.eq_w(self.co_names_w[i], other.co_names_w[i]):
            return space.w_False
    for i in range(len(self.co_consts_w)):
        if not space.eq_w(self.co_consts_w[i], other.co_consts_w[i]):
            return space.w_False
    return space.w_True
def descr_code__hash__(self):
    """Implement app-level ``hash(code)``.

    XORs the hashes of the unwrapped fields into an RPython int
    (masked with intmask before wrapping), then XORs in the
    space-level hashes of the wrapped names and consts.  Returns a
    wrapped int.
    """
    space = self.space
    result = hash(self.co_name)
    result ^= self.co_argcount
    result ^= self.co_nlocals
    result ^= self.co_flags
    result ^= self.co_firstlineno
    result ^= hash(self.co_code)
    for name in self.co_varnames: result ^= hash(name)
    for name in self.co_freevars: result ^= hash(name)
    for name in self.co_cellvars: result ^= hash(name)
    # switch to wrapped arithmetic for the wrapped items, so that
    # their app-level __hash__ implementations are honoured
    w_result = space.wrap(intmask(result))
    for w_name in self.co_names_w:
        w_result = space.xor(w_result, space.hash(w_name))
    for w_const in self.co_consts_w:
        w_result = space.xor(w_result, space.hash(w_const))
    return w_result
# gateway unwrap specification for descr_code__new__ below; the entries
# correspond positionally to the function's parameters and are attached
# to it after the definition
unwrap_spec = [ObjSpace, W_Root,
               int, int, int, int,
               str, W_Root, W_Root,
               W_Root, str, str, int,
               str, W_Root,
               W_Root]
def descr_code__new__(space, w_subtype,
                      argcount, nlocals, stacksize, flags,
                      codestring, w_constants, w_names,
                      w_varnames, filename, name, firstlineno,
                      lnotab, w_freevars=NoneNotWrapped,
                      w_cellvars=NoneNotWrapped):
    """Implement app-level ``code.__new__``: validate the counts,
    unwrap the tuple arguments, and build a (possibly subtyped)
    PyCode instance via space.allocate_instance()."""
    if argcount < 0:
        raise OperationError(space.w_ValueError,
                             space.wrap("code: argcount must not be negative"))
    if nlocals < 0:
        raise OperationError(space.w_ValueError,
                             space.wrap("code: nlocals must not be negative"))
    consts_w = space.unpacktuple(w_constants)
    names = unpack_str_tuple(space, w_names)
    varnames = unpack_str_tuple(space, w_varnames)
    # freevars/cellvars are optional; NoneNotWrapped makes an omitted
    # argument arrive here as interp-level None
    if w_freevars is not None:
        freevars = unpack_str_tuple(space, w_freevars)
    else:
        freevars = []
    if w_cellvars is not None:
        cellvars = unpack_str_tuple(space, w_cellvars)
    else:
        cellvars = []
    code = space.allocate_instance(PyCode, w_subtype)
    PyCode.__init__(code, space, argcount, nlocals, stacksize, flags, codestring, consts_w, names,
                    varnames, filename, name, firstlineno, lnotab, freevars, cellvars)
    return space.wrap(code)
descr_code__new__.unwrap_spec = unwrap_spec
def descr__reduce__(self, space):
    """Pickle support: return (code_new, args_tuple) so that
    unpickling calls _pickle_support.code_new(*args_tuple) to rebuild
    an equivalent code object."""
    from pypy.interpreter.mixedmodule import MixedModule
    w_mod = space.getbuiltinmodule('_pickle_support')
    mod = space.interp_w(MixedModule, w_mod)
    new_inst = mod.get('code_new')
    w = space.wrap
    # the argument order below mirrors the constructor's signature
    tup = [
        w(self.co_argcount),
        w(self.co_nlocals),
        w(self.co_stacksize),
        w(self.co_flags),
        w(self.co_code),
        space.newtuple(self.co_consts_w),
        space.newtuple(self.co_names_w),
        space.newtuple([w(v) for v in self.co_varnames]),
        w(self.co_filename),
        w(self.co_name),
        w(self.co_firstlineno),
        w(self.co_lnotab),
        space.newtuple([w(v) for v in self.co_freevars]),
        space.newtuple([w(v) for v in self.co_cellvars]),
        #hidden_applevel=False, magic = 62061 | 0x0a0d0000
    ]
    return space.newtuple([new_inst, space.newtuple(tup)])
| Python |
from pypy.interpreter.executioncontext import ExecutionContext
from pypy.interpreter.error import OperationError
from pypy.interpreter.argument import Arguments, ArgumentsFromValuestack
from pypy.interpreter.pycompiler import CPythonCompiler, PythonAstCompiler
from pypy.interpreter.miscutils import ThreadLocals
from pypy.rlib.jit import hint
from pypy.tool.cache import Cache
from pypy.tool.uid import HUGEVAL_BYTES
import os, sys
__all__ = ['ObjSpace', 'OperationError', 'Wrappable', 'W_Root']
class W_Root(object):
    """This is the abstract root class of all wrapped objects that live
    in a 'normal' object space like StdObjSpace."""
    __slots__ = ()
    _settled_ = True

    def getdict(self):
        # Return the object's wrapped __dict__, or None when the object
        # has no dict; this base implementation has none.
        return None

    def getdictvalue_w(self, space, attr):
        # Like getdictvalue(), but takes an unwrapped attribute name.
        return self.getdictvalue(space, space.wrap(attr))

    def getdictvalue(self, space, w_attr):
        # Look w_attr up in the object's dict; None if absent or no dict.
        w_dict = self.getdict()
        if w_dict is not None:
            return space.finditem(w_dict, w_attr)
        return None

    def getdictvalue_attr_is_in_class(self, space, w_attr):
        # Default: same as getdictvalue(); presumably a hook that
        # subclasses override for the attr-also-on-class case — confirm
        # against overriding spaces.
        return self.getdictvalue(space, w_attr)

    def setdictvalue(self, space, w_attr, w_value, shadows_type=True):
        # Store w_attr -> w_value in the object's dict if it has one;
        # returns True on success, False if there is no dict.
        w_dict = self.getdict()
        if w_dict is not None:
            space.set_str_keyed_item(w_dict, w_attr, w_value, shadows_type)
            return True
        return False

    def deldictvalue(self, space, w_name):
        # Delete w_name from the object's dict; True on success, False
        # when there is no dict or the key was missing.
        w_dict = self.getdict()
        if w_dict is not None:
            try:
                space.delitem(w_dict, w_name)
                return True
            except OperationError, ex:
                if not ex.match(space, space.w_KeyError):
                    raise
        return False

    def setdict(self, space, w_dict):
        # Base objects have no writable __dict__.
        typename = space.type(self).getname(space, '?')
        raise OperationError(space.w_TypeError,
                             space.wrap("attribute '__dict__' of %s objects "
                                        "is not writable" % typename))

    # to be used directly only by space.type implementations
    def getclass(self, space):
        return space.gettypeobject(self.typedef)

    def setclass(self, space, w_subtype):
        # __class__ assignment is refused at this level.
        raise OperationError(space.w_TypeError,
                             space.wrap("__class__ assignment: only for heap types"))

    def getname(self, space, default):
        # Best-effort lookup of the app-level __name__, falling back to
        # 'default' on TypeError/AttributeError.
        try:
            return space.str_w(space.getattr(self, space.wrap('__name__')))
        except OperationError, e:
            if e.match(space, space.w_TypeError) or e.match(space, space.w_AttributeError):
                return default
            raise

    def getrepr(self, space, info):
        # Build "<info at 0xADDR>" where ADDR is space.id(self) rendered
        # in lowercase hex, one nibble per loop iteration, into a
        # fixed-width buffer of 2*HUGEVAL_BYTES characters.
        # XXX slowish
        w_id = space.id(self)
        w_4 = space.wrap(4)
        w_0x0F = space.wrap(0x0F)
        i = 2 * HUGEVAL_BYTES
        addrstring = [' '] * i
        while True:
            n = space.int_w(space.and_(w_id, w_0x0F))
            n += ord('0')
            if n > ord('9'):
                n += (ord('a') - ord('9') - 1)   # shift 10..15 into 'a'..'f'
            i -= 1
            addrstring[i] = chr(n)
            if i == 0:
                break
            w_id = space.rshift(w_id, w_4)
        return space.wrap("<%s at 0x%s>" % (info, ''.join(addrstring)))

    def getslotvalue(self, index):
        # Slot access is only meaningful on subclasses that define it.
        raise NotImplementedError

    def setslotvalue(self, index, w_val):
        raise NotImplementedError

    def descr_call_mismatch(self, space, opname, RequiredClass, args):
        # Raise the standard TypeError used when a descriptor is applied
        # to an object of the wrong interp-level class.
        msg = "'%s' object expected, got '%s' instead" % (
            RequiredClass.typedef.name,
            self.getclass(space).getname(space, '?'))
        raise OperationError(space.w_TypeError, space.wrap(msg))

    # used by _weakref implemenation
    def getweakref(self):
        # No weakref support at this level.
        return None

    def setweakref(self, space, weakreflifeline):
        typename = space.type(self).getname(space, '?')
        raise OperationError(space.w_TypeError, space.wrap(
            "cannot create weak reference to '%s' object" % typename))
class Wrappable(W_Root):
    """A subclass of Wrappable is an internal, interpreter-level class
    that can nevertheless be exposed at application-level by space.wrap()."""
    __slots__ = ()
    _settled_ = True

    def __spacebind__(self, space):
        # Default bind-to-space hook: the object itself.
        return self
class InternalSpaceCache(Cache):
    """A generic cache for an object space.  Arbitrary information can
    be attached to the space by defining a function or class 'f' which
    can be called as 'f(space)'.  Its result is stored in this
    ObjSpaceCache.
    """
    def __init__(self, space):
        Cache.__init__(self)
        self.space = space
    def _build(self, builder):
        # 'builder' was previously named 'callable', shadowing the
        # builtin of the same name; the Cache base class invokes
        # _build() positionally, so the rename is interface-safe.
        return builder(self.space)
class SpaceCache(Cache):
    """A base class for all our concrete caches."""
    def __init__(self, space):
        Cache.__init__(self)
        self.space = space
    def _build(self, key):
        # Build a missing entry, bracketed by the space's cache-building
        # hooks (see ObjSpace.enter/leave_cache_building_mode).
        val = self.space.enter_cache_building_mode()
        try:
            return self.build(key)
        finally:
            self.space.leave_cache_building_mode(val)
    def _ready(self, result):
        # Post-build hook, bracketed the same way as _build().
        val = self.space.enter_cache_building_mode()
        try:
            return self.ready(result)
        finally:
            self.space.leave_cache_building_mode(val)
    def ready(self, result):
        # Overridden by subclasses that need a post-build step.
        pass
class UnpackValueError(ValueError):
    """Raised when unpacking an iterable yields the wrong number of items.

    The message is kept in ``self.msg`` (as before); it is now also
    passed to ValueError.__init__ so that ``e.args`` is populated like
    a normal ValueError — the original left ``args`` empty.
    """
    def __init__(self, msg):
        ValueError.__init__(self, msg)
        self.msg = msg
    def __str__(self):
        return self.msg
class DescrMismatch(Exception):
    """Raised by ObjSpace.descr_self_interp_w() when the unwrapped
    object is not an instance of the required interp-level class."""
    pass
class ObjSpace(object):
    """Base class for the interpreter-level implementations of object spaces.
    http://codespeak.net/pypy/dist/pypy/doc/objspace.html"""

    full_exceptions = True  # full support for exceptions (normalization & more)

    def __init__(self, config=None, **kw):
        "NOT_RPYTHON: Basic initialization of objects."
        self.fromcache = InternalSpaceCache(self).getorbuild
        self.threadlocals = ThreadLocals()
        # set recursion limit
        # sets all the internal descriptors
        if config is None:
            from pypy.config.pypyoption import get_pypy_config
            config = get_pypy_config(translating=False)
        self.config = config

        # import extra modules for side-effects, possibly based on config
        import pypy.interpreter.nestedscope     # register *_DEREF bytecodes
        if self.config.objspace.opcodes.CALL_METHOD:
            import pypy.interpreter.callmethod  # register *_METHOD bytecodes

        self.interned_strings = {}
        self.pending_actions = []
        self.setoptions(**kw)
        # if self.config.objspace.logbytecodes:
        #     self.bytecodecounts = {}
        self.initialize()

    def setoptions(self):
        # override this in subclasses for extra-options
        pass

    def startup(self):
        # To be called before using the space
        # Initialize all builtin modules
        from pypy.interpreter.module import Module
        for w_modname in self.unpackiterable(
            self.sys.get('builtin_module_names')):
            modname = self.str_w(w_modname)
            mod = self.getbuiltinmodule(modname)
            if isinstance(mod, Module):
                mod.startup(self)

    def finish(self):
        # Shutdown: run sys.exitfunc and the registered PyPy exit
        # handlers, then emit the optional statistics reports that the
        # configuration enables.
        w_exitfunc = self.sys.getdictvalue_w(self, 'exitfunc')
        if w_exitfunc is not None:
            self.call_function(w_exitfunc)
        w_exithandlers = self.sys.getdictvalue_w(self, 'pypy__exithandlers__')
        if w_exithandlers is not None:
            # popitem until the handlers dict is empty; each value is
            # called as a no-argument function
            while self.is_true(w_exithandlers):
                w_key_value = self.call_method(w_exithandlers, 'popitem')
                w_key, w_value = self.unpacktuple(w_key_value, 2)
                self.call_function(w_value)
        if self.config.objspace.std.withdictmeasurement:
            from pypy.objspace.std.dictmultiobject import report
            report()
        if self.config.objspace.logbytecodes:
            self.reportbytecodecounts()
        if self.config.objspace.std.logspaceoptypes:
            for s in self.FrameClass._space_op_types:
                print s

    def reportbytecodecounts(self):
        # Dump self.bytecodecounts to 'bytecode.txt', reporting progress
        # on file descriptor 2 (stderr) with raw os.write calls.
        os.write(2, "Starting bytecode report.\n")
        fd = os.open('bytecode.txt', os.O_CREAT|os.O_WRONLY|os.O_TRUNC, 0644)
        for opcode, count in self.bytecodecounts.items():
            os.write(fd, str(opcode) + ", " + str(count) + "\n")
        os.close(fd)
        os.write(2, "Reporting done.\n")

    def __repr__(self):
        try:
            return self._this_space_repr_
        except AttributeError:
            return self.__class__.__name__

    def setbuiltinmodule(self, importname):
        """NOT_RPYTHON. load a lazy pypy/module and put it into sys.modules"""
        import sys
        fullname = "pypy.module.%s" % importname
        Module = __import__(fullname,
                            None, None, ["Module"]).Module
        # a module may expose itself under a different app-level name
        if Module.applevel_name is not None:
            name = Module.applevel_name
        else:
            name = importname
        w_name = self.wrap(name)
        w_mod = self.wrap(Module(self, w_name))
        w_modules = self.sys.get('modules')
        self.setitem(w_modules, w_name, w_mod)
        return name

    def getbuiltinmodule(self, name):
        # Fetch the module from sys.modules (self.getitem propagates the
        # app-level KeyError when it is missing).
        w_name = self.wrap(name)
        w_modules = self.sys.get('modules')
        return self.getitem(w_modules, w_name)

    def get_builtinmodule_to_install(self):
        """NOT_RPYTHON"""
        try:
            return self._builtinmodule_list   # memoized on first call
        except AttributeError:
            pass
        modules = []

        # You can enable more modules by specifying --usemodules=xxx,yyy
        for name, value in self.config.objspace.usemodules:
            if value and name not in modules:
                modules.append(name)

        # a bit of custom logic: time2 or rctime take precedence over time
        # XXX this could probably be done as a "requires" in the config
        if ('time2' in modules or 'rctime' in modules) and 'time' in modules:
            modules.remove('time')

        import pypy
        if not self.config.objspace.nofaking:
            # fake (steal from CPython) any known builtin module that has
            # no app-level replacement in pypy/lib/
            for modname in self.ALL_BUILTIN_MODULES:
                if not (os.path.exists(
                        os.path.join(os.path.dirname(pypy.__file__),
                                     'lib', modname+'.py'))):
                    modules.append('faked+'+modname)

        self._builtinmodule_list = modules
        return self._builtinmodule_list

    ALL_BUILTIN_MODULES = [
        'posix', 'nt', 'os2', 'mac', 'ce', 'riscos',
        'math', 'array', 'select',
        '_random', '_sre', 'time', '_socket', 'errno',
        'unicodedata',
        'parser', 'fcntl', '_codecs', 'binascii'
    ]

    def make_builtins(self):
        "NOT_RPYTHON: only for initializing the space."

        from pypy.module.sys import Module
        w_name = self.wrap('sys')
        self.sys = Module(self, w_name)
        w_modules = self.sys.get('modules')
        self.setitem(w_modules, w_name, self.wrap(self.sys))

        from pypy.module.__builtin__ import Module
        w_name = self.wrap('__builtin__')
        self.builtin = Module(self, w_name)
        w_builtin = self.wrap(self.builtin)
        self.setitem(w_modules, w_name, w_builtin)
        self.setitem(self.builtin.w_dict, self.wrap('__builtins__'), w_builtin)

        bootstrap_modules = ['sys', '__builtin__', 'exceptions']
        installed_builtin_modules = bootstrap_modules[:]

        # initialize with "bootstrap types" from objspace  (e.g. w_None)
        for name, value in self.__dict__.items():
            if name.startswith('w_') and not name.endswith('Type'):
                name = name[2:]
                #print "setitem: space instance %-20s into builtins" % name
                self.setitem(self.builtin.w_dict, self.wrap(name), value)

        # install mixed and faked modules and set builtin_module_names on sys
        for mixedname in self.get_builtinmodule_to_install():
            if (mixedname not in bootstrap_modules
                and not mixedname.startswith('faked+')):
                self.install_mixedmodule(mixedname, installed_builtin_modules)
        for mixedname in self.get_builtinmodule_to_install():
            if mixedname.startswith('faked+'):
                modname = mixedname[6:]
                self.install_faked_module(modname, installed_builtin_modules)

        installed_builtin_modules.sort()
        w_builtin_module_names = self.newtuple(
            [self.wrap(fn) for fn in installed_builtin_modules])

        # force this value into the dict without unlazyfying everything
        self.setitem(self.sys.w_dict, self.wrap('builtin_module_names'),
                     w_builtin_module_names)

    def install_mixedmodule(self, mixedname, installed_builtin_modules):
        """NOT_RPYTHON"""
        modname = self.setbuiltinmodule(mixedname)
        if modname:
            assert modname not in installed_builtin_modules, (
                "duplicate interp-level module enabled for the "
                "app-level module %r" % (modname,))
            installed_builtin_modules.append(modname)

    def load_cpython_module(self, modname):
        "NOT_RPYTHON. Steal a module from CPython."
        cpy_module = __import__(modname, {}, {}, ['*'])
        return cpy_module

    def install_faked_module(self, modname, installed_builtin_modules):
        """NOT_RPYTHON"""
        if modname in installed_builtin_modules:
            return
        try:
            module = self.load_cpython_module(modname)
        except ImportError:
            return   # silently skip modules CPython cannot provide either
        else:
            w_modules = self.sys.get('modules')
            self.setitem(w_modules, self.wrap(modname), self.wrap(module))
            installed_builtin_modules.append(modname)

    def setup_builtin_modules(self):
        "NOT_RPYTHON: only for initializing the space."
        from pypy.interpreter.module import Module
        for w_modname in self.unpackiterable(self.sys.get('builtin_module_names')):
            modname = self.unwrap(w_modname)
            mod = self.getbuiltinmodule(modname)
            if isinstance(mod, Module):
                mod.setup_after_space_initialization()

    def initialize(self):
        """NOT_RPYTHON: Abstract method that should put some minimal
        content into the w_builtins."""

    def enter_cache_building_mode(self):
        "hook for the flow object space"

    def leave_cache_building_mode(self, val):
        "hook for the flow object space"

    def getexecutioncontext(self):
        "Return what we consider to be the active execution context."
        ec = self.threadlocals.getvalue()
        if ec is None:
            # lazily create one per thread and remember it
            ec = self.createexecutioncontext()
            self.threadlocals.setvalue(ec)
        return ec

    def _freeze_(self):
        # Important: the annotator must not see a prebuilt ExecutionContext
        # for reasons related to the specialization of the framestack attribute
        # so we make sure there is no executioncontext at freeze-time
        self.threadlocals.setvalue(None)
        return True

    def createexecutioncontext(self):
        "Factory function for execution contexts."
        return ExecutionContext(self)

    def createcompiler(self):
        "Factory function creating a compiler object."
        # XXX simple selection logic for now
        try:
            return self.default_compiler   # memoized on first call
        except AttributeError:
            if self.config.objspace.compiler == 'cpython':
                compiler = CPythonCompiler(self)
            elif self.config.objspace.compiler == 'ast':
                compiler = PythonAstCompiler(self)
            else:
                raise ValueError('unknown --compiler option value: %r' % (
                    self.config.objspace.compiler,))
            self.default_compiler = compiler
            return compiler

    def createframe(self, code, w_globals, closure=None):
        "Create an empty PyFrame suitable for this code object."
        from pypy.interpreter import pyframe
        return pyframe.PyFrame(self, code, w_globals, closure)

    # Following is a friendly interface to common object space operations
    # that can be defined in term of more primitive ones.  Subclasses
    # may also override specific functions for performance.

    #def is_(self, w_x, w_y):   -- not really useful.  Must be subclassed
    #    "'x is y'."
    #    w_id_x = self.id(w_x)
    #    w_id_y = self.id(w_y)
    #    return self.eq(w_id_x, w_id_y)

    def not_(self, w_obj):
        # boolean negation, returning a wrapped bool
        return self.wrap(not self.is_true(w_obj))

    def eq_w(self, w_obj1, w_obj2):
        """shortcut for space.is_true(space.eq(w_obj1, w_obj2))"""
        return self.is_w(w_obj1, w_obj2) or self.is_true(self.eq(w_obj1, w_obj2))

    def is_w(self, w_obj1, w_obj2):
        """shortcut for space.is_true(space.is_(w_obj1, w_obj2))"""
        return self.is_true(self.is_(w_obj1, w_obj2))

    def hash_w(self, w_obj):
        """shortcut for space.int_w(space.hash(w_obj))"""
        return self.int_w(self.hash(w_obj))

    def set_str_keyed_item(self, w_obj, w_key, w_value, shadows_type=True):
        # default implementation ignores 'shadows_type'; presumably an
        # optimization hint honoured by subclasses — confirm there
        return self.setitem(w_obj, w_key, w_value)

    def finditem(self, w_obj, w_key):
        # like getitem(), but returns None instead of raising the
        # app-level KeyError
        try:
            return self.getitem(w_obj, w_key)
        except OperationError, e:
            if e.match(self, self.w_KeyError):
                return None
            raise

    def findattr(self, w_object, w_name):
        # like getattr(), but returns None when the attribute is missing
        try:
            return self.getattr(w_object, w_name)
        except OperationError, e:
            # a PyPy extension: let SystemExit and KeyboardInterrupt go through
            if e.async(self):
                raise
            return None

    def newbool(self, b):
        # wrap an interp-level bool
        if b:
            return self.w_True
        else:
            return self.w_False

    def new_interned_w_str(self, w_s):
        # intern an already-wrapped string, returning the canonical
        # wrapped string for its unwrapped value
        s = self.str_w(w_s)
        try:
            return self.interned_strings[s]
        except KeyError:
            pass
        self.interned_strings[s] = w_s
        return w_s

    def new_interned_str(self, s):
        # intern an interp-level string, wrapping it on first sight
        try:
            return self.interned_strings[s]
        except KeyError:
            pass
        w_s = self.interned_strings[s] = self.wrap(s)
        return w_s

    # support for the deprecated __getslice__, __setslice__, __delslice__
    def getslice(self, w_obj, w_start, w_stop):
        w_slice = self.newslice(w_start, w_stop, self.w_None)
        return self.getitem(w_obj, w_slice)
    def setslice(self, w_obj, w_start, w_stop, w_sequence):
        w_slice = self.newslice(w_start, w_stop, self.w_None)
        self.setitem(w_obj, w_slice, w_sequence)
    def delslice(self, w_obj, w_start, w_stop):
        w_slice = self.newslice(w_start, w_stop, self.w_None)
        self.delitem(w_obj, w_slice)

    def interpclass_w(space, w_obj):
        """
        If w_obj is a wrapped internal interpreter class instance unwrap to it,
        otherwise return None.  (Can be overridden in specific spaces; you
        should generally use the helper space.interp_w() instead.)
        """
        if isinstance(w_obj, Wrappable):
            return w_obj
        return None

    def descr_self_interp_w(self, RequiredClass, w_obj):
        # like interp_w(), but raising DescrMismatch instead of building
        # an app-level TypeError
        obj = self.interpclass_w(w_obj)
        if not isinstance(obj, RequiredClass):
            raise DescrMismatch()
        return obj
    descr_self_interp_w._annspecialcase_ = 'specialize:arg(1)'

    def interp_w(self, RequiredClass, w_obj, can_be_None=False):
        """
        Unwrap w_obj, checking that it is an instance of the required internal
        interpreter class (a subclass of Wrappable).
        """
        if can_be_None and self.is_w(w_obj, self.w_None):
            return None
        obj = self.interpclass_w(w_obj)
        if not isinstance(obj, RequiredClass):   # or obj is None
            msg = "'%s' object expected, got '%s' instead" % (
                RequiredClass.typedef.name,
                w_obj.getclass(self).getname(self, '?'))
            raise OperationError(self.w_TypeError, self.wrap(msg))
        return obj
    interp_w._annspecialcase_ = 'specialize:arg(1)'

    def unpackiterable(self, w_iterable, expected_length=-1):
        """Unpack an iterable object into a real (interpreter-level) list.
        Raise a real (subclass of) ValueError if the length is wrong."""
        w_iterator = self.iter(w_iterable)
        items = []
        while True:
            try:
                w_item = self.next(w_iterator)
            except OperationError, e:
                if not e.match(self, self.w_StopIteration):
                    raise
                break  # done
            if expected_length != -1 and len(items) == expected_length:
                raise UnpackValueError("too many values to unpack")
            items.append(w_item)
        if expected_length != -1 and len(items) < expected_length:
            i = len(items)
            if i == 1:
                plural = ""
            else:
                plural = "s"
            raise UnpackValueError("need more than %d value%s to unpack" %
                                   (i, plural))
        return items

    def unpacktuple(self, w_tuple, expected_length=-1):
        """Same as unpackiterable(), but only for tuples.
        Only use for bootstrapping or performance reasons."""
        tuple_length = self.int_w(self.len(w_tuple))
        if expected_length != -1 and tuple_length != expected_length:
            raise UnpackValueError("got a tuple of length %d instead of %d" % (
                tuple_length, expected_length))
        items = [
            self.getitem(w_tuple, self.wrap(i)) for i in range(tuple_length)]
        return items

    def exception_match(self, w_exc_type, w_check_class):
        """Checks if the given exception type matches 'w_check_class'."""
        # identity, then (abstract) subclassing, then recursively each
        # member of a tuple of exception classes
        if self.is_w(w_exc_type, w_check_class):
            return True
        if self.is_true(self.abstract_issubclass(w_exc_type, w_check_class)):
            return True
        if self.is_true(self.isinstance(w_check_class, self.w_tuple)):
            exclst_w = self.unpacktuple(w_check_class)
            for w_e in exclst_w:
                if self.exception_match(w_exc_type, w_e):
                    return True
        return False

    def call(self, w_callable, w_args, w_kwds=None):
        # generic call taking a wrapped args tuple and kwds dict
        args = Arguments.frompacked(self, w_args, w_kwds)
        return self.call_args(w_callable, args)

    def call_function(self, w_func, *args_w):
        # XXX start of hack for performance
        # shortcut plain Functions and bound/unbound Methods to avoid
        # building a full Arguments object
        from pypy.interpreter.function import Function, Method
        if isinstance(w_func, Method):
            w_inst = w_func.w_instance
            if w_inst is not None:
                func = w_func.w_function
                if isinstance(func, Function):
                    return func.funccall(w_inst, *args_w)
            elif args_w and self.is_true(
                    self.abstract_isinstance(args_w[0], w_func.w_class)):
                # unbound method called with a valid first argument:
                # fall through to the plain-function fast path
                w_func = w_func.w_function

        if isinstance(w_func, Function):
            return w_func.funccall(*args_w)
        # XXX end of hack for performance

        args = Arguments(self, list(args_w))
        return self.call_args(w_func, args)

    def call_valuestack(self, w_func, nargs, frame):
        # XXX start of hack for performance
        # same shortcuts as call_function(), but the arguments are the
        # top 'nargs' items of the frame's value stack
        from pypy.interpreter.function import Function, Method
        hint(w_func.__class__, promote=True)
        if isinstance(w_func, Method):
            w_inst = w_func.w_instance
            if w_inst is not None:
                func = w_func.w_function
                if isinstance(func, Function):
                    return func.funccall_obj_valuestack(w_inst, nargs, frame)
            elif nargs > 0 and self.is_true(
                    self.abstract_isinstance(frame.peekvalue(nargs-1),   # :-(
                                             w_func.w_class)):
                w_func = w_func.w_function

        if isinstance(w_func, Function):
            return w_func.funccall_valuestack(nargs, frame)
        # XXX end of hack for performance

        args = frame.make_arguments(nargs)
        try:
            return self.call_args(w_func, args)
        finally:
            # detach the frame so the Arguments object does not keep it
            # alive past the call
            if isinstance(args, ArgumentsFromValuestack):
                args.frame = None

    def call_method(self, w_obj, methname, *arg_w):
        # look the method up on the object and call it
        w_meth = self.getattr(w_obj, self.wrap(methname))
        return self.call_function(w_meth, *arg_w)

    def lookup(self, w_obj, name):
        # find 'name' along the type's __mro__ (never in the instance)
        w_type = self.type(w_obj)
        w_mro = self.getattr(w_type, self.wrap("__mro__"))
        for w_supertype in self.unpackiterable(w_mro):
            w_value = w_supertype.getdictvalue_w(self, name)
            if w_value is not None:
                return w_value
        return None

    def callable(self, w_obj):
        # wrapped-bool version of the callable() check
        if self.lookup(w_obj, "__call__") is not None:
            w_is_oldstyle = self.isinstance(w_obj, self.w_instance)
            if self.is_true(w_is_oldstyle):
                # ugly old style class special treatment, but well ...
                try:
                    self.getattr(w_obj, self.wrap("__call__"))
                    return self.w_True
                except OperationError, e:
                    if not e.match(self, self.w_AttributeError):
                        raise
                    return self.w_False
            else:
                return self.w_True
        return self.w_False

    def isinstance(self, w_obj, w_type):
        # strict, type-based isinstance (no __class__/__bases__ tricks;
        # see abstract_isinstance for those)
        w_objtype = self.type(w_obj)
        return self.issubtype(w_objtype, w_type)

    def abstract_issubclass(self, w_obj, w_cls, failhard=False):
        # issubclass() that also copes with non-types exposing
        # __bases__ (e.g. old-style classes)
        try:
            return self.issubtype(w_obj, w_cls)
        except OperationError, e:
            if not e.match(self, self.w_TypeError):
                raise
            try:
                self.getattr(w_cls, self.wrap('__bases__'))   # type sanity check
                return self.recursive_issubclass(w_obj, w_cls)
            except OperationError, e:
                if failhard or not (e.match(self, self.w_TypeError) or
                                    e.match(self, self.w_AttributeError)):
                    raise
                else:
                    return self.w_False

    def recursive_issubclass(self, w_obj, w_cls):
        # walk __bases__ recursively, comparing classes by identity
        if self.is_w(w_obj, w_cls):
            return self.w_True
        for w_base in self.unpackiterable(self.getattr(w_obj,
                                                       self.wrap('__bases__'))):
            if self.is_true(self.recursive_issubclass(w_base, w_cls)):
                return self.w_True
        return self.w_False

    def abstract_isinstance(self, w_obj, w_cls):
        # isinstance() that falls back to checking obj.__class__ when
        # the strict type-based check raises TypeError
        try:
            return self.isinstance(w_obj, w_cls)
        except OperationError, e:
            if not e.match(self, self.w_TypeError):
                raise
            try:
                w_objcls = self.getattr(w_obj, self.wrap('__class__'))
                return self.abstract_issubclass(w_objcls, w_cls)
            except OperationError, e:
                if not (e.match(self, self.w_TypeError) or
                        e.match(self, self.w_AttributeError)):
                    raise
                return self.w_False

    def abstract_isclass(self, w_obj):
        # "is this a class?": a real type, or anything with __bases__
        if self.is_true(self.isinstance(w_obj, self.w_type)):
            return self.w_True
        if self.findattr(w_obj, self.wrap('__bases__')) is not None:
            return self.w_True
        else:
            return self.w_False

    def abstract_getclass(self, w_obj):
        # obj.__class__, falling back to type(obj)
        try:
            return self.getattr(w_obj, self.wrap('__class__'))
        except OperationError, e:
            if e.match(self, self.w_TypeError) or e.match(self, self.w_AttributeError):
                return self.type(w_obj)
            raise

    def eval(self, expression, w_globals, w_locals):
        "NOT_RPYTHON: For internal debugging."
        import types
        from pypy.interpreter.pycode import PyCode
        # accept a source string, a CPython code object, or a PyCode
        if isinstance(expression, str):
            expression = compile(expression, '?', 'eval')
        if isinstance(expression, types.CodeType):
            expression = PyCode._from_code(self, expression)
        if not isinstance(expression, PyCode):
            raise TypeError, 'space.eval(): expected a string, code or PyCode object'
        return expression.exec_code(self, w_globals, w_locals)

    def exec_(self, statement, w_globals, w_locals, hidden_applevel=False):
        "NOT_RPYTHON: For internal debugging."
        import types
        from pypy.interpreter.pycode import PyCode
        # accept a source string, a CPython code object, or a PyCode
        if isinstance(statement, str):
            statement = compile(statement, '?', 'exec')
        if isinstance(statement, types.CodeType):
            statement = PyCode._from_code(self, statement,
                                          hidden_applevel=hidden_applevel)
        if not isinstance(statement, PyCode):
            raise TypeError, 'space.exec_(): expected a string, code or PyCode object'
        # make sure the globals have a __builtins__ entry, like exec does
        w_key = self.wrap('__builtins__')
        if not self.is_true(self.contains(w_globals, w_key)):
            self.setitem(w_globals, w_key, self.wrap(self.builtin))
        return statement.exec_code(self, w_globals, w_locals)

    def appexec(self, posargs_w, source):
        """ return value from executing given source at applevel.
            EXPERIMENTAL.  The source must look like
               '''(x, y):
                      do_stuff...
                      return result
               '''
        """
        # the compiled app-level function is cached per source string
        w_func = self.fromcache(AppExecCache).getorbuild(source)
        args = Arguments(self, list(posargs_w))
        return self.call_args(w_func, args)

    def decode_index(self, w_index_or_slice, seqlength):
        """Helper for custom sequence implementations
             -> (index, 0, 0) or
                (start, stop, step)
        """
        if self.is_true(self.isinstance(w_index_or_slice, self.w_slice)):
            # delegate clipping to slice.indices(seqlength)
            w_indices = self.call_method(w_index_or_slice, "indices",
                                         self.wrap(seqlength))
            w_start, w_stop, w_step = self.unpackiterable(w_indices, 3)
            start = self.int_w(w_start)
            stop = self.int_w(w_stop)
            step = self.int_w(w_step)
            if step == 0:
                raise OperationError(self.w_ValueError,
                                     self.wrap("slice step cannot be zero"))
        else:
            # plain index: normalize negatives and bounds-check
            start = self.int_w(w_index_or_slice)
            if start < 0:
                start += seqlength
            if not (0 <= start < seqlength):
                raise OperationError(self.w_IndexError,
                                     self.wrap("index out of range"))
            stop = 0
            step = 0
        return start, stop, step

    def getindex_w(self, w_obj, w_exception, objdescr=None):
        """Return w_obj.__index__() as an RPython int.
        If w_exception is None, silently clamp in case of overflow;
        else raise w_exception.
        """
        # shortcut for int objects
        if self.is_w(self.type(w_obj), self.w_int):
            return self.int_w(w_obj)
        try:
            w_index = self.index(w_obj)
        except OperationError, err:
            if objdescr is None or not err.match(self, self.w_TypeError):
                raise
            # no __index__: report using the caller-supplied description
            msg = "%s must be an integer, not %s" % (
                objdescr, self.type(w_obj).getname(self, '?'))
            raise OperationError(self.w_TypeError, self.wrap(msg))
        try:
            index = self.int_w(w_index)
        except OperationError, err:
            if not err.match(self, self.w_OverflowError):
                raise
            if not w_exception:
                # w_index should be a long object, but can't be sure of that
                if self.is_true(self.lt(w_index, self.wrap(0))):
                    return -sys.maxint-1
                else:
                    return sys.maxint
            else:
                raise OperationError(
                    w_exception, self.wrap(
                    "cannot fit '%s' into an index-sized "
                    "integer" % self.type(w_obj).getname(self, '?')))
        else:
            return index
class AppExecCache(SpaceCache):
    """Cache of the app-level functions compiled for ObjSpace.appexec(),
    keyed by their source string."""
    def build(cache, source):
        """ NOT_RPYTHON """
        space = cache.space
        # XXX will change once we have our own compiler
        import py
        source = source.lstrip()
        assert source.startswith('('), "incorrect header in:\n%s" % (source,)
        # wrap the "(args): body" snippet into a def and exec it,
        # then fish the resulting function out of the globals
        source = py.code.Source("def anonymous%s\n" % source)
        w_glob = space.newdict()
        space.exec_(source.compile(), w_glob, w_glob)
        return space.getitem(w_glob, space.wrap('anonymous'))
## Table describing the regular part of the interface of object spaces,
## namely all methods which only take w_ arguments and return a w_ result
## (if any). Note: keep in sync with pypy.objspace.flow.operation.Table.
ObjSpace.MethodTable = [
# method name # symbol # number of arguments # special method name(s)
('is_', 'is', 2, []),
('id', 'id', 1, []),
('type', 'type', 1, []),
('issubtype', 'issubtype', 2, []), # not for old-style classes
('repr', 'repr', 1, ['__repr__']),
('str', 'str', 1, ['__str__']),
('len', 'len', 1, ['__len__']),
('hash', 'hash', 1, ['__hash__']),
('getattr', 'getattr', 2, ['__getattribute__']),
('setattr', 'setattr', 3, ['__setattr__']),
('delattr', 'delattr', 2, ['__delattr__']),
('getitem', 'getitem', 2, ['__getitem__']),
('setitem', 'setitem', 3, ['__setitem__']),
('delitem', 'delitem', 2, ['__delitem__']),
('pos', 'pos', 1, ['__pos__']),
('neg', 'neg', 1, ['__neg__']),
('nonzero', 'truth', 1, ['__nonzero__']),
('abs' , 'abs', 1, ['__abs__']),
('hex', 'hex', 1, ['__hex__']),
('oct', 'oct', 1, ['__oct__']),
('ord', 'ord', 1, []),
('invert', '~', 1, ['__invert__']),
('add', '+', 2, ['__add__', '__radd__']),
('sub', '-', 2, ['__sub__', '__rsub__']),
('mul', '*', 2, ['__mul__', '__rmul__']),
('truediv', '/', 2, ['__truediv__', '__rtruediv__']),
('floordiv', '//', 2, ['__floordiv__', '__rfloordiv__']),
('div', 'div', 2, ['__div__', '__rdiv__']),
('mod', '%', 2, ['__mod__', '__rmod__']),
('divmod', 'divmod', 2, ['__divmod__', '__rdivmod__']),
('pow', '**', 3, ['__pow__', '__rpow__']),
('lshift', '<<', 2, ['__lshift__', '__rlshift__']),
('rshift', '>>', 2, ['__rshift__', '__rrshift__']),
('and_', '&', 2, ['__and__', '__rand__']),
('or_', '|', 2, ['__or__', '__ror__']),
('xor', '^', 2, ['__xor__', '__rxor__']),
('int', 'int', 1, ['__int__']),
('index', 'index', 1, ['__index__']),
('float', 'float', 1, ['__float__']),
('long', 'long', 1, ['__long__']),
('inplace_add', '+=', 2, ['__iadd__']),
('inplace_sub', '-=', 2, ['__isub__']),
('inplace_mul', '*=', 2, ['__imul__']),
('inplace_truediv', '/=', 2, ['__itruediv__']),
('inplace_floordiv','//=', 2, ['__ifloordiv__']),
('inplace_div', 'div=', 2, ['__idiv__']),
('inplace_mod', '%=', 2, ['__imod__']),
('inplace_pow', '**=', 2, ['__ipow__']),
('inplace_lshift', '<<=', 2, ['__ilshift__']),
('inplace_rshift', '>>=', 2, ['__irshift__']),
('inplace_and', '&=', 2, ['__iand__']),
('inplace_or', '|=', 2, ['__ior__']),
('inplace_xor', '^=', 2, ['__ixor__']),
('lt', '<', 2, ['__lt__', '__gt__']),
('le', '<=', 2, ['__le__', '__ge__']),
('eq', '==', 2, ['__eq__', '__eq__']),
('ne', '!=', 2, ['__ne__', '__ne__']),
('gt', '>', 2, ['__gt__', '__lt__']),
('ge', '>=', 2, ['__ge__', '__le__']),
('cmp', 'cmp', 2, ['__cmp__']), # rich cmps preferred
('coerce', 'coerce', 2, ['__coerce__', '__coerce__']),
('contains', 'contains', 2, ['__contains__']),
('iter', 'iter', 1, ['__iter__']),
('next', 'next', 1, ['next']),
# ('call', 'call', 3, ['__call__']),
('get', 'get', 3, ['__get__']),
('set', 'set', 3, ['__set__']),
('delete', 'delete', 2, ['__delete__']),
('userdel', 'del', 1, ['__del__']),
]
ObjSpace.BuiltinModuleTable = [
'__builtin__',
'sys',
]
ObjSpace.ConstantTable = [
'None',
'False',
'True',
'Ellipsis',
'NotImplemented',
]
ObjSpace.ExceptionTable = [
'ArithmeticError',
'AssertionError',
'AttributeError',
'EOFError',
'EnvironmentError',
'Exception',
'FloatingPointError',
'IOError',
'ImportError',
'IndentationError',
'IndexError',
'KeyError',
'KeyboardInterrupt',
'LookupError',
'MemoryError',
'NameError',
'NotImplementedError',
'OSError',
'OverflowError',
'ReferenceError',
'RuntimeError',
'StandardError',
'StopIteration',
'SyntaxError',
'SystemError',
'SystemExit',
'TabError',
'TypeError',
'UnboundLocalError',
'UnicodeError',
'ValueError',
'ZeroDivisionError',
]
## Irregular part of the interface:
#
# wrap(x) -> w_x
# str_w(w_str) -> str
# int_w(w_ival or w_long_ival) -> ival
# float_w(w_floatval) -> floatval
# uint_w(w_ival or w_long_ival) -> r_uint_val (unsigned int value)
# bigint_w(w_ival or w_long_ival) -> rbigint
# interpclass_w(w_interpclass_inst or w_obj) -> interpclass_inst|w_obj
# unwrap(w_x) -> x
# is_true(w_x) -> True or False
# newtuple([w_1, w_2,...]) -> w_tuple
# newlist([w_1, w_2,...]) -> w_list
# newstring([w_1, w_2,...]) -> w_string from ascii numbers (bytes)
# newunicode([i1, i2,...]) -> w_unicode from integers
# newdict() -> empty w_dict
# newslice(w_start,w_stop,w_step) -> w_slice
# call_args(w_obj,Arguments()) -> w_result
# Operations whose signatures do not fit the regular MethodTable scheme;
# see the comment block above for the signature of each entry.
ObjSpace.IrregularOpTable = [
    'wrap',
    'str_w',
    'int_w',
    'float_w',
    'uint_w',
    'bigint_w',
    'unichars_w',
    'interpclass_w',
    'unwrap',
    'is_true',
    'is_w',
    'newtuple',
    'newlist',
    'newstring',
    'newunicode',
    'newdict',
    'newslice',
    'call_args',
    'marshal_w',
    ]
| Python |
"""
Implementation of a part of the standard Python opcodes.
The rest, dealing with variables in optimized ways, is in
pyfastscope.py and pynestedscope.py.
"""
import sys
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import UnpackValueError, Wrappable
from pypy.interpreter import gateway, function, eval
from pypy.interpreter import pyframe, pytraceback
from pypy.interpreter.argument import Arguments
from pypy.interpreter.pycode import PyCode
from pypy.tool.sourcetools import func_with_new_name
from pypy.rlib.objectmodel import we_are_translated
from pypy.rlib.jit import hint, we_are_jitted
from pypy.rlib.rarithmetic import r_uint, intmask
from pypy.tool.stdlib_opcode import opcodedesc, HAVE_ARGUMENT
from pypy.tool.stdlib_opcode import unrolling_opcode_descs
from pypy.tool.stdlib_opcode import opcode_method_names
from pypy.rlib import rstack # for resume points
def unaryoperation(operationname):
    """NOT_RPYTHON"""
    # Factory: build an opcode method that pops one value, applies the
    # space operation named 'operationname', and pushes the result.
    def opimpl(f, *ignored):
        space_op = getattr(f.space, operationname)
        f.pushvalue(space_op(f.popvalue()))
    # tag the implementation so tools can recover the operation name
    opimpl.unaryop = operationname
    return func_with_new_name(opimpl, "opcode_impl_for_%s" % operationname)
def binaryoperation(operationname):
    """NOT_RPYTHON"""
    # Factory: build an opcode method that pops two values (right-hand
    # operand on top), applies the binary space operation, and pushes
    # the result.
    def opimpl(f, *ignored):
        space_op = getattr(f.space, operationname)
        w_rhs = f.popvalue()
        w_lhs = f.popvalue()
        f.pushvalue(space_op(w_lhs, w_rhs))
    # tag the implementation so tools can recover the operation name
    opimpl.binop = operationname
    return func_with_new_name(opimpl, "opcode_impl_for_%s" % operationname)
class __extend__(pyframe.PyFrame):
"""A PyFrame that knows about interpretation of standard Python opcodes
minus the ones related to nested scopes."""
### opcode dispatch ###
    def dispatch(self, pycode, next_instr, ec):
        """Main interpretation loop: run bytecodes of 'pycode' starting
        at 'next_instr' until the frame exits, then return the value
        left on top of the value stack."""
        # For the sequel, force 'next_instr' to be unsigned for performance
        next_instr = r_uint(next_instr)
        co_code = pycode.co_code
        try:
            while True:
                next_instr = self.handle_bytecode(co_code, next_instr, ec)
                rstack.resume_point("dispatch", self, co_code, ec,
                                    returns=next_instr)
        except ExitFrame:
            # Return and Yield both derive from ExitFrame; the resulting
            # value has been pushed onto the value stack.
            return self.popvalue()
    def handle_bytecode(self, co_code, next_instr, ec):
        """Execute bytecodes, funnelling every kind of error into an
        OperationError handled by the frame's block stack; returns the
        next instruction offset to execute."""
        try:
            next_instr = self.dispatch_bytecode(co_code, next_instr, ec)
            rstack.resume_point("handle_bytecode", self, co_code, ec,
                                returns=next_instr)
        except OperationError, operr:
            next_instr = self.handle_operation_error(ec, operr)
        except Reraise:
            # re-raise the last application-level exception without
            # growing a new traceback entry
            operr = self.last_exception
            next_instr = self.handle_operation_error(ec, operr,
                                                     attach_tb=False)
        except KeyboardInterrupt:
            next_instr = self.handle_asynchronous_error(ec,
                self.space.w_KeyboardInterrupt)
        except MemoryError:
            next_instr = self.handle_asynchronous_error(ec,
                self.space.w_MemoryError)
        except RuntimeError, e:
            if we_are_translated():
                # stack overflows should be the only kind of RuntimeErrors
                # in translated PyPy
                msg = "internal error (stack overflow?)"
            else:
                msg = str(e)
            next_instr = self.handle_asynchronous_error(ec,
                self.space.w_RuntimeError,
                self.space.wrap(msg))
        return next_instr
    def handle_asynchronous_error(self, ec, w_type, w_value=None):
        # catch asynchronous exceptions and turn them
        # into OperationErrors
        if w_value is None:
            w_value = self.space.w_None
        operr = OperationError(w_type, w_value)
        return self.handle_operation_error(ec, operr)
    def handle_operation_error(self, ec, operr, attach_tb=True):
        """Record 'operr' on the frame, unroll the block stack to the
        nearest except/finally handler and return the handler's bytecode
        position; re-raise if no block handles application exceptions.
        'attach_tb=False' skips growing the application traceback (used
        for bare re-raise, see Reraise)."""
        self.last_exception = operr
        if attach_tb:
            pytraceback.record_application_traceback(
                self.space, operr, self, self.last_instr)
        if not we_are_jitted():
            ec.exception_trace(self, operr)
        block = self.unrollstack(SApplicationException.kind)
        if block is None:
            # no handler found for the OperationError
            if we_are_translated():
                raise operr
            else:
                # try to preserve the CPython-level traceback
                import sys
                tb = sys.exc_info()[2]
                raise OperationError, operr, tb
        else:
            unroller = SApplicationException(operr)
            next_instr = block.handle(self, unroller)
            return next_instr
    def dispatch_bytecode(self, co_code, next_instr, ec):
        """Inner fetch-decode-execute loop.  Decodes one opcode (and its
        argument, including EXTENDED_ARG prefixes) per iteration and
        dispatches to the corresponding method on the frame."""
        space = self.space
        while True:
            self.last_instr = intmask(next_instr)
            if not we_are_jitted():
                ec.bytecode_trace(self)
                # the trace hook may have changed the position to resume at
                next_instr = r_uint(self.last_instr)
            opcode = ord(co_code[next_instr])
            next_instr += 1
            if space.config.objspace.logbytecodes:
                space.bytecodecounts[opcode] = space.bytecodecounts.get(opcode, 0) + 1
            if opcode >= HAVE_ARGUMENT:
                # two-byte little-endian argument follows the opcode
                lo = ord(co_code[next_instr])
                hi = ord(co_code[next_instr+1])
                next_instr += 2
                oparg = (hi << 8) | lo
            else:
                oparg = 0
            hint(opcode, concrete=True)
            hint(oparg, concrete=True)
            # fold any EXTENDED_ARG prefixes into 'oparg'
            while opcode == opcodedesc.EXTENDED_ARG.index:
                opcode = ord(co_code[next_instr])
                if opcode < HAVE_ARGUMENT:
                    raise BytecodeCorruption
                lo = ord(co_code[next_instr+1])
                hi = ord(co_code[next_instr+2])
                next_instr += 3
                oparg = (oparg << 16) | (hi << 8) | lo
                hint(opcode, concrete=True)
                hint(oparg, concrete=True)
            # a few opcodes that interact with the block stack are
            # implemented inline here rather than as methods
            if opcode == opcodedesc.RETURN_VALUE.index:
                w_returnvalue = self.popvalue()
                block = self.unrollstack(SReturnValue.kind)
                if block is None:
                    self.pushvalue(w_returnvalue)   # XXX ping pong
                    raise Return
                else:
                    unroller = SReturnValue(w_returnvalue)
                    next_instr = block.handle(self, unroller)
                    return next_instr    # now inside a 'finally' block
            if opcode == opcodedesc.YIELD_VALUE.index:
                #self.last_instr = intmask(next_instr - 1) XXX clean up!
                raise Yield
            if opcode == opcodedesc.END_FINALLY.index:
                unroller = self.end_finally()
                if isinstance(unroller, SuspendedUnroller):
                    # go on unrolling the stack
                    block = self.unrollstack(unroller.kind)
                    if block is None:
                        w_result = unroller.nomoreblocks()
                        self.pushvalue(w_result)
                        raise Return
                    else:
                        next_instr = block.handle(self, unroller)
                        return next_instr
            if we_are_translated():
                # unrolled dispatch over all known opcodes
                for opdesc in unrolling_opcode_descs:
                    # static checks to skip this whole case if necessary
                    if not opdesc.is_enabled(space):
                        continue
                    if not hasattr(pyframe.PyFrame, opdesc.methodname):
                        continue   # e.g. for JUMP_FORWARD, implemented above
                    if opcode == opdesc.index:
                        # dispatch to the opcode method
                        meth = getattr(self, opdesc.methodname)
                        res = meth(oparg, next_instr)
                        if opdesc.index == opcodedesc.CALL_FUNCTION.index:
                            rstack.resume_point("dispatch_call", self, co_code, next_instr, ec)
                        # !! warning, for the annotator the next line is not
                        # comparing an int and None - you can't do that.
                        # Instead, it's constant-folded to either True or False
                        if res is not None:
                            next_instr = res
                        break
                else:
                    self.MISSING_OPCODE(oparg, next_instr)
            else:  # when we are not translated, a list lookup is much faster
                methodname = opcode_method_names[opcode]
                res = getattr(self, methodname)(oparg, next_instr)
                if res is not None:
                    next_instr = res
            if we_are_jitted():
                return next_instr
    def unrollstack(self, unroller_kind):
        """Pop frame blocks until one is found whose handling_mask
        matches 'unroller_kind'; return it, or None if the block stack
        is exhausted (which also marks the frame as finished)."""
        n = len(self.blockstack)
        n = hint(n, promote=True)
        while n > 0:
            block = self.blockstack.pop()
            n -= 1
            hint(n, concrete=True)
            if (block.handling_mask & unroller_kind) != 0:
                return block
            block.cleanupstack(self)
        self.frame_finished_execution = True  # for generators
        return None
    def unrollstack_and_jump(self, unroller):
        """Unroll to the block matching 'unroller' and jump into its
        handler; it is a bytecode bug for no block to match."""
        block = self.unrollstack(unroller.kind)
        if block is None:
            raise BytecodeCorruption("misplaced bytecode - should not return")
        return block.handle(self, unroller)
### accessor functions ###
    def getlocalvarname(self, index):
        # name of the local variable in fastlocals slot 'index'
        return self.getcode().co_varnames[index]
    def getconstant_w(self, index):
        # wrapped constant from the code object's constant pool
        return self.getcode().co_consts_w[index]
    def getname_u(self, index):
        # unwrapped (interp-level string) name from co_names
        return self.space.str_w(self.getcode().co_names_w[index])
    def getname_w(self, index):
        # wrapped name from co_names
        return self.getcode().co_names_w[index]
################################################################
## Implementation of the "operational" opcodes
## See also pyfastscope.py and pynestedscope.py for the rest.
##
# the 'self' argument of opcode implementations is called 'f'
# for historical reasons
    def NOP(f, *ignored):
        # no operation
        pass
    def LOAD_FAST(f, varindex, *ignored):
        # access a local variable directly
        w_value = f.fastlocals_w[varindex]
        if w_value is None:
            # unfilled slot: the variable was never assigned (or deleted)
            varname = f.getlocalvarname(varindex)
            message = "local variable '%s' referenced before assignment" % varname
            raise OperationError(f.space.w_UnboundLocalError, f.space.wrap(message))
        f.pushvalue(w_value)
    def LOAD_CONST(f, constindex, *ignored):
        # push a constant from the code object's constant pool
        w_const = f.getconstant_w(constindex)
        f.pushvalue(w_const)
    def STORE_FAST(f, varindex, *ignored):
        # pop the top of the value stack into a local variable slot
        w_newvalue = f.popvalue()
        assert w_newvalue is not None
        f.fastlocals_w[varindex] = w_newvalue
        #except:
        #    print "exception: got index error"
        #    print " varindex:", varindex
        #    print " len(locals_w)", len(f.locals_w)
        #    import dis
        #    print dis.dis(f.pycode)
        #    print "co_varnames", f.pycode.co_varnames
        #    print "co_nlocals", f.pycode.co_nlocals
        #    raise
    def POP_TOP(f, *ignored):
        # discard the top of the value stack
        f.popvalue()
    def ROT_TWO(f, *ignored):
        # swap the two topmost stack values
        w_1 = f.popvalue()
        w_2 = f.popvalue()
        f.pushvalue(w_1)
        f.pushvalue(w_2)
    def ROT_THREE(f, *ignored):
        # lift the second and third stack items up one position,
        # moving the top down to third place
        w_1 = f.popvalue()
        w_2 = f.popvalue()
        w_3 = f.popvalue()
        f.pushvalue(w_1)
        f.pushvalue(w_3)
        f.pushvalue(w_2)
    def ROT_FOUR(f, *ignored):
        # lift items two, three and four up, moving the top to fourth place
        w_1 = f.popvalue()
        w_2 = f.popvalue()
        w_3 = f.popvalue()
        w_4 = f.popvalue()
        f.pushvalue(w_1)
        f.pushvalue(w_4)
        f.pushvalue(w_3)
        f.pushvalue(w_2)
    def DUP_TOP(f, *ignored):
        # duplicate the top of the value stack
        w_1 = f.peekvalue()
        f.pushvalue(w_1)
    def DUP_TOPX(f, itemcount, *ignored):
        # duplicate the top 'itemcount' stack values, preserving order
        assert 1 <= itemcount <= 5, "limitation of the current interpreter"
        f.dupvalues(itemcount)
UNARY_POSITIVE = unaryoperation("pos")
UNARY_NEGATIVE = unaryoperation("neg")
UNARY_NOT = unaryoperation("not_")
UNARY_CONVERT = unaryoperation("repr")
UNARY_INVERT = unaryoperation("invert")
def BINARY_POWER(f, *ignored):
w_2 = f.popvalue()
w_1 = f.popvalue()
w_result = f.space.pow(w_1, w_2, f.space.w_None)
f.pushvalue(w_result)
BINARY_MULTIPLY = binaryoperation("mul")
BINARY_TRUE_DIVIDE = binaryoperation("truediv")
BINARY_FLOOR_DIVIDE = binaryoperation("floordiv")
BINARY_DIVIDE = binaryoperation("div")
# XXX BINARY_DIVIDE must fall back to BINARY_TRUE_DIVIDE with -Qnew
BINARY_MODULO = binaryoperation("mod")
BINARY_ADD = binaryoperation("add")
BINARY_SUBTRACT = binaryoperation("sub")
BINARY_SUBSCR = binaryoperation("getitem")
BINARY_LSHIFT = binaryoperation("lshift")
BINARY_RSHIFT = binaryoperation("rshift")
BINARY_AND = binaryoperation("and_")
BINARY_XOR = binaryoperation("xor")
BINARY_OR = binaryoperation("or_")
def INPLACE_POWER(f, *ignored):
w_2 = f.popvalue()
w_1 = f.popvalue()
w_result = f.space.inplace_pow(w_1, w_2)
f.pushvalue(w_result)
INPLACE_MULTIPLY = binaryoperation("inplace_mul")
INPLACE_TRUE_DIVIDE = binaryoperation("inplace_truediv")
INPLACE_FLOOR_DIVIDE = binaryoperation("inplace_floordiv")
INPLACE_DIVIDE = binaryoperation("inplace_div")
# XXX INPLACE_DIVIDE must fall back to INPLACE_TRUE_DIVIDE with -Qnew
INPLACE_MODULO = binaryoperation("inplace_mod")
INPLACE_ADD = binaryoperation("inplace_add")
INPLACE_SUBTRACT = binaryoperation("inplace_sub")
INPLACE_LSHIFT = binaryoperation("inplace_lshift")
INPLACE_RSHIFT = binaryoperation("inplace_rshift")
INPLACE_AND = binaryoperation("inplace_and")
INPLACE_XOR = binaryoperation("inplace_xor")
INPLACE_OR = binaryoperation("inplace_or")
def slice(f, w_start, w_end):
w_obj = f.popvalue()
w_result = f.space.getslice(w_obj, w_start, w_end)
f.pushvalue(w_result)
def SLICE_0(f, *ignored):
f.slice(f.space.w_None, f.space.w_None)
def SLICE_1(f, *ignored):
w_start = f.popvalue()
f.slice(w_start, f.space.w_None)
def SLICE_2(f, *ignored):
w_end = f.popvalue()
f.slice(f.space.w_None, w_end)
def SLICE_3(f, *ignored):
w_end = f.popvalue()
w_start = f.popvalue()
f.slice(w_start, w_end)
def storeslice(f, w_start, w_end):
w_obj = f.popvalue()
w_newvalue = f.popvalue()
f.space.setslice(w_obj, w_start, w_end, w_newvalue)
def STORE_SLICE_0(f, *ignored):
f.storeslice(f.space.w_None, f.space.w_None)
def STORE_SLICE_1(f, *ignored):
w_start = f.popvalue()
f.storeslice(w_start, f.space.w_None)
def STORE_SLICE_2(f, *ignored):
w_end = f.popvalue()
f.storeslice(f.space.w_None, w_end)
def STORE_SLICE_3(f, *ignored):
w_end = f.popvalue()
w_start = f.popvalue()
f.storeslice(w_start, w_end)
def deleteslice(f, w_start, w_end):
w_obj = f.popvalue()
f.space.delslice(w_obj, w_start, w_end)
def DELETE_SLICE_0(f, *ignored):
f.deleteslice(f.space.w_None, f.space.w_None)
def DELETE_SLICE_1(f, *ignored):
w_start = f.popvalue()
f.deleteslice(w_start, f.space.w_None)
def DELETE_SLICE_2(f, *ignored):
w_end = f.popvalue()
f.deleteslice(f.space.w_None, w_end)
def DELETE_SLICE_3(f, *ignored):
w_end = f.popvalue()
w_start = f.popvalue()
f.deleteslice(w_start, w_end)
def STORE_SUBSCR(f, *ignored):
"obj[subscr] = newvalue"
w_subscr = f.popvalue()
w_obj = f.popvalue()
w_newvalue = f.popvalue()
f.space.setitem(w_obj, w_subscr, w_newvalue)
def DELETE_SUBSCR(f, *ignored):
"del obj[subscr]"
w_subscr = f.popvalue()
w_obj = f.popvalue()
f.space.delitem(w_obj, w_subscr)
def PRINT_EXPR(f, *ignored):
w_expr = f.popvalue()
print_expr(f.space, w_expr)
def PRINT_ITEM_TO(f, *ignored):
w_stream = f.popvalue()
w_item = f.popvalue()
if f.space.is_w(w_stream, f.space.w_None):
w_stream = sys_stdout(f.space) # grumble grumble special cases
print_item_to(f.space, w_item, w_stream)
def PRINT_ITEM(f, *ignored):
w_item = f.popvalue()
print_item(f.space, w_item)
def PRINT_NEWLINE_TO(f, *ignored):
w_stream = f.popvalue()
if f.space.is_w(w_stream, f.space.w_None):
w_stream = sys_stdout(f.space) # grumble grumble special cases
print_newline_to(f.space, w_stream)
def PRINT_NEWLINE(f, *ignored):
print_newline(f.space)
def BREAK_LOOP(f, *ignored):
next_instr = f.unrollstack_and_jump(SBreakLoop.singleton)
return next_instr
def CONTINUE_LOOP(f, startofloop, *ignored):
unroller = SContinueLoop(startofloop)
next_instr = f.unrollstack_and_jump(unroller)
return next_instr
    def RAISE_VARARGS(f, nbargs, *ignored):
        """Implements 'raise' with 0 to 3 arguments popped from the
        value stack (traceback on top, then value, then type)."""
        space = f.space
        if nbargs == 0:
            # bare 'raise': re-raise the currently-handled exception
            operror = space.getexecutioncontext().sys_exc_info()
            if operror is None:
                raise OperationError(space.w_TypeError,
                    space.wrap("raise: no active exception to re-raise"))
            # re-raise, no new traceback obj will be attached
            f.last_exception = operror
            raise Reraise
        w_value = w_traceback = space.w_None
        if nbargs >= 3: w_traceback = f.popvalue()
        if nbargs >= 2: w_value     = f.popvalue()
        if 1:           w_type      = f.popvalue()
        operror = OperationError(w_type, w_value)
        operror.normalize_exception(space)
        if not space.full_exceptions or space.is_w(w_traceback, space.w_None):
            # common case
            raise operror
        else:
            # explicit traceback given: validate it, then re-raise with it
            tb = space.interpclass_w(w_traceback)
            if tb is None or not space.is_true(space.isinstance(tb, 
                space.gettypeobject(pytraceback.PyTraceback.typedef))):
                raise OperationError(space.w_TypeError,
                      space.wrap("raise: arg 3 must be a traceback or None"))
            operror.application_traceback = tb
            # re-raise, no new traceback obj will be attached
            f.last_exception = operror
            raise Reraise
def LOAD_LOCALS(f, *ignored):
f.pushvalue(f.w_locals)
def EXEC_STMT(f, *ignored):
w_locals = f.popvalue()
w_globals = f.popvalue()
w_prog = f.popvalue()
flags = f.space.getexecutioncontext().compiler.getcodeflags(f.pycode)
w_compile_flags = f.space.wrap(flags)
w_resulttuple = prepare_exec(f.space, f.space.wrap(f), w_prog,
w_globals, w_locals,
w_compile_flags,
f.space.wrap(f.get_builtin()),
f.space.gettypeobject(PyCode.typedef))
w_prog, w_globals, w_locals = f.space.unpacktuple(w_resulttuple, 3)
plain = f.w_locals is not None and f.space.is_w(w_locals, f.w_locals)
if plain:
w_locals = f.getdictscope()
co = f.space.interp_w(eval.Code, w_prog)
co.exec_code(f.space, w_globals, w_locals)
if plain:
f.setdictscope(w_locals)
def POP_BLOCK(f, *ignored):
block = f.blockstack.pop()
block.cleanup(f) # the block knows how to clean up the value stack
def end_finally(f):
# unlike CPython, when we reach this opcode the value stack has
# always been set up as follows (topmost first):
# [exception type or None]
# [exception value or None]
# [wrapped stack unroller ]
f.popvalue() # ignore the exception type
f.popvalue() # ignore the exception value
w_unroller = f.popvalue()
unroller = f.space.interpclass_w(w_unroller)
return unroller
def BUILD_CLASS(f, *ignored):
w_methodsdict = f.popvalue()
w_bases = f.popvalue()
w_name = f.popvalue()
w_metaclass = find_metaclass(f.space, w_bases,
w_methodsdict, f.w_globals,
f.space.wrap(f.get_builtin()))
w_newclass = f.space.call_function(w_metaclass, w_name,
w_bases, w_methodsdict)
f.pushvalue(w_newclass)
def STORE_NAME(f, varindex, *ignored):
w_varname = f.getname_w(varindex)
w_newvalue = f.popvalue()
f.space.set_str_keyed_item(f.w_locals, w_varname, w_newvalue)
def DELETE_NAME(f, varindex, *ignored):
w_varname = f.getname_w(varindex)
try:
f.space.delitem(f.w_locals, w_varname)
except OperationError, e:
# catch KeyErrors and turn them into NameErrors
if not e.match(f.space, f.space.w_KeyError):
raise
message = "name '%s' is not defined" % f.space.str_w(w_varname)
raise OperationError(f.space.w_NameError, f.space.wrap(message))
def UNPACK_SEQUENCE(f, itemcount, *ignored):
w_iterable = f.popvalue()
try:
items = f.space.unpackiterable(w_iterable, itemcount)
except UnpackValueError, e:
raise OperationError(f.space.w_ValueError, f.space.wrap(e.msg))
f.pushrevvalues(itemcount, items)
def STORE_ATTR(f, nameindex, *ignored):
"obj.attributename = newvalue"
w_attributename = f.getname_w(nameindex)
w_obj = f.popvalue()
w_newvalue = f.popvalue()
f.space.setattr(w_obj, w_attributename, w_newvalue)
def DELETE_ATTR(f, nameindex, *ignored):
"del obj.attributename"
w_attributename = f.getname_w(nameindex)
w_obj = f.popvalue()
f.space.delattr(w_obj, w_attributename)
def STORE_GLOBAL(f, nameindex, *ignored):
w_varname = f.getname_w(nameindex)
w_newvalue = f.popvalue()
f.space.set_str_keyed_item(f.w_globals, w_varname, w_newvalue)
def DELETE_GLOBAL(f, nameindex, *ignored):
w_varname = f.getname_w(nameindex)
f.space.delitem(f.w_globals, w_varname)
def LOAD_NAME(f, nameindex, *ignored):
if f.w_locals is not f.w_globals:
w_varname = f.getname_w(nameindex)
w_value = f.space.finditem(f.w_locals, w_varname)
if w_value is not None:
f.pushvalue(w_value)
return
f.LOAD_GLOBAL(nameindex) # fall-back
def _load_global(f, w_varname):
w_value = f.space.finditem(f.w_globals, w_varname)
if w_value is None:
# not in the globals, now look in the built-ins
w_value = f.get_builtin().getdictvalue(f.space, w_varname)
if w_value is None:
varname = f.space.str_w(w_varname)
message = "global name '%s' is not defined" % varname
raise OperationError(f.space.w_NameError,
f.space.wrap(message))
return w_value
def LOAD_GLOBAL(f, nameindex, *ignored):
f.pushvalue(f._load_global(f.getname_w(nameindex)))
def DELETE_FAST(f, varindex, *ignored):
if f.fastlocals_w[varindex] is None:
varname = f.getlocalvarname(varindex)
message = "local variable '%s' referenced before assignment" % varname
raise OperationError(f.space.w_UnboundLocalError, f.space.wrap(message))
f.fastlocals_w[varindex] = None
def BUILD_TUPLE(f, itemcount, *ignored):
items = f.popvalues(itemcount)
w_tuple = f.space.newtuple(items)
f.pushvalue(w_tuple)
def BUILD_LIST(f, itemcount, *ignored):
items = f.popvalues(itemcount)
w_list = f.space.newlist(items)
f.pushvalue(w_list)
def BUILD_MAP(f, zero, *ignored):
if zero != 0:
raise BytecodeCorruption
w_dict = f.space.newdict()
f.pushvalue(w_dict)
def LOAD_ATTR(f, nameindex, *ignored):
"obj.attributename"
w_attributename = f.getname_w(nameindex)
w_obj = f.popvalue()
w_value = f.space.getattr(w_obj, w_attributename)
f.pushvalue(w_value)
    # helpers for COMPARE_OP, indexed by the opcode's oparg
    def cmp_lt(f, w_1, w_2): return f.space.lt(w_1, w_2)
    def cmp_le(f, w_1, w_2): return f.space.le(w_1, w_2)
    def cmp_eq(f, w_1, w_2): return f.space.eq(w_1, w_2)
    def cmp_ne(f, w_1, w_2): return f.space.ne(w_1, w_2)
    def cmp_gt(f, w_1, w_2): return f.space.gt(w_1, w_2)
    def cmp_ge(f, w_1, w_2): return f.space.ge(w_1, w_2)
    def cmp_in(f, w_1, w_2):
        return f.space.contains(w_2, w_1)
    def cmp_not_in(f, w_1, w_2):
        return f.space.not_(f.space.contains(w_2, w_1))
    def cmp_is(f, w_1, w_2):
        return f.space.is_(w_1, w_2)
    def cmp_is_not(f, w_1, w_2):
        return f.space.not_(f.space.is_(w_1, w_2))
    def cmp_exc_match(f, w_1, w_2):
        return f.space.newbool(f.space.exception_match(w_1, w_2))
    # order must match CPython's COMPARE_OP argument values (see dis.cmp_op)
    compare_dispatch_table = [
        cmp_lt,   # "<"
        cmp_le,   # "<="
        cmp_eq,   # "=="
        cmp_ne,   # "!="
        cmp_gt,   # ">"
        cmp_ge,   # ">="
        cmp_in,       # "in"
        cmp_not_in,   # "not in"
        cmp_is,       # "is"
        cmp_is_not,   # "is not"
        cmp_exc_match,   # "exception match"
        ]
    def COMPARE_OP(f, testnum, *ignored):
        # pop two operands and push the result of comparison 'testnum'
        w_2 = f.popvalue()
        w_1 = f.popvalue()
        table = hint(f.compare_dispatch_table, deepfreeze=True)
        try:
            testfn = table[testnum]
        except IndexError:
            raise BytecodeCorruption, "bad COMPARE_OP oparg"
        w_result = testfn(f, w_1, w_2)
        f.pushvalue(w_result)
def IMPORT_NAME(f, nameindex, *ignored):
space = f.space
w_modulename = f.getname_w(nameindex)
modulename = f.space.str_w(w_modulename)
w_fromlist = f.popvalue()
# CPython 2.5 adds an obscure extra flag consumed by this opcode
if f.pycode.magic >= 0xa0df294:
w_flag = f.popvalue()
try:
if space.int_w(w_flag) == -1:
w_flag = None # don't provide the extra flag if == -1
except OperationError, e:
# let SystemExit and KeyboardInterrupt go through
if e.async(space):
raise
# ignore other exceptions
else:
w_flag = None
w_import = f.get_builtin().getdictvalue_w(f.space, '__import__')
if w_import is None:
raise OperationError(space.w_ImportError,
space.wrap("__import__ not found"))
w_locals = f.w_locals
if w_locals is None: # CPython does this
w_locals = space.w_None
w_modulename = space.wrap(modulename)
w_globals = f.w_globals
if w_flag is None:
w_obj = space.call_function(w_import, w_modulename, w_globals,
w_locals, w_fromlist)
else:
w_obj = space.call_function(w_import, w_modulename, w_globals,
w_locals, w_fromlist, w_flag)
f.pushvalue(w_obj)
def IMPORT_STAR(f, *ignored):
w_module = f.popvalue()
w_locals = f.getdictscope()
import_all_from(f.space, w_module, w_locals)
f.setdictscope(w_locals)
def IMPORT_FROM(f, nameindex, *ignored):
w_name = f.getname_w(nameindex)
w_module = f.peekvalue()
try:
w_obj = f.space.getattr(w_module, w_name)
except OperationError, e:
if not e.match(f.space, f.space.w_AttributeError):
raise
raise OperationError(f.space.w_ImportError,
f.space.wrap("cannot import name '%s'" % f.space.str_w(w_name) ))
f.pushvalue(w_obj)
def JUMP_FORWARD(f, jumpby, next_instr, *ignored):
next_instr += jumpby
return next_instr
def JUMP_IF_FALSE(f, stepby, next_instr, *ignored):
w_cond = f.peekvalue()
if not f.space.is_true(w_cond):
next_instr += stepby
return next_instr
def JUMP_IF_TRUE(f, stepby, next_instr, *ignored):
w_cond = f.peekvalue()
if f.space.is_true(w_cond):
next_instr += stepby
return next_instr
def JUMP_ABSOLUTE(f, jumpto, next_instr, *ignored):
return jumpto
def GET_ITER(f, *ignored):
w_iterable = f.popvalue()
w_iterator = f.space.iter(w_iterable)
f.pushvalue(w_iterator)
def FOR_ITER(f, jumpby, next_instr, *ignored):
w_iterator = f.peekvalue()
try:
w_nextitem = f.space.next(w_iterator)
except OperationError, e:
if not e.match(f.space, f.space.w_StopIteration):
raise
# iterator exhausted
f.popvalue()
next_instr += jumpby
else:
f.pushvalue(w_nextitem)
return next_instr
def FOR_LOOP(f, oparg, *ignored):
raise BytecodeCorruption, "old opcode, no longer in use"
def SETUP_LOOP(f, offsettoend, next_instr, *ignored):
block = LoopBlock(f, next_instr + offsettoend)
f.blockstack.append(block)
def SETUP_EXCEPT(f, offsettoend, next_instr, *ignored):
block = ExceptBlock(f, next_instr + offsettoend)
f.blockstack.append(block)
def SETUP_FINALLY(f, offsettoend, next_instr, *ignored):
block = FinallyBlock(f, next_instr + offsettoend)
f.blockstack.append(block)
def WITH_CLEANUP(f, *ignored):
# see comment in END_FINALLY for stack state
w_exitfunc = f.popvalue()
w_unroller = f.peekvalue(2)
unroller = f.space.interpclass_w(w_unroller)
if isinstance(unroller, SApplicationException):
operr = unroller.operr
w_result = f.space.call_function(w_exitfunc,
operr.w_type,
operr.w_value,
operr.application_traceback)
if f.space.is_true(w_result):
# __exit__() returned True -> Swallow the exception.
f.settopvalue(f.space.w_None, 2)
else:
f.space.call_function(w_exitfunc,
f.space.w_None,
f.space.w_None,
f.space.w_None)
    def call_function(f, oparg, w_star=None, w_starstar=None):
        """General call helper: 'oparg' encodes the argument counts
        (low byte = positional, next byte = keyword arguments); '*' and
        '**' arguments, if any, are passed in wrapped."""
        n_arguments = oparg & 0xff
        n_keywords = (oparg>>8) & 0xff
        keywords = None
        if n_keywords:
            keywords = f.popstrdictvalues(n_keywords)
        arguments = f.popvalues(n_arguments)
        args = Arguments(f.space, arguments, keywords, w_star, w_starstar)
        w_function  = f.popvalue()
        w_result = f.space.call_args(w_function, args)
        rstack.resume_point("call_function", f, returns=w_result)
        f.pushvalue(w_result)
    def CALL_FUNCTION(f, oparg, *ignored):
        # XXX start of hack for performance
        if (oparg >> 8) & 0xff == 0:
            # Only positional arguments
            # fast path: call directly on the value stack, no Arguments object
            nargs = oparg & 0xff
            w_function = f.peekvalue(nargs)
            try:
                w_result = f.space.call_valuestack(w_function, nargs, f)
                rstack.resume_point("CALL_FUNCTION", f, nargs, returns=w_result)
            finally:
                f.dropvalues(nargs + 1)
            f.pushvalue(w_result)
        # XXX end of hack for performance
        else:
            # general case
            f.call_function(oparg)
    def CALL_FUNCTION_VAR(f, oparg, *ignored):
        # call with a '*args' sequence on top of the stack
        w_varargs = f.popvalue()
        f.call_function(oparg, w_varargs)
    def CALL_FUNCTION_KW(f, oparg, *ignored):
        # call with a '**kwargs' mapping on top of the stack
        w_varkw = f.popvalue()
        f.call_function(oparg, None, w_varkw)
    def CALL_FUNCTION_VAR_KW(f, oparg, *ignored):
        # call with both '**kwargs' (topmost) and '*args' on the stack
        w_varkw = f.popvalue()
        w_varargs = f.popvalue()
        f.call_function(oparg, w_varargs, w_varkw)
def MAKE_FUNCTION(f, numdefaults, *ignored):
w_codeobj = f.popvalue()
codeobj = f.space.interp_w(PyCode, w_codeobj)
defaultarguments = f.popvalues(numdefaults)
fn = function.Function(f.space, codeobj, f.w_globals, defaultarguments)
f.pushvalue(f.space.wrap(fn))
def BUILD_SLICE(f, numargs, *ignored):
if numargs == 3:
w_step = f.popvalue()
elif numargs == 2:
w_step = f.space.w_None
else:
raise BytecodeCorruption
w_end = f.popvalue()
w_start = f.popvalue()
w_slice = f.space.newslice(w_start, w_end, w_step)
f.pushvalue(w_slice)
def LIST_APPEND(f, *ignored):
w = f.popvalue()
v = f.popvalue()
f.space.call_method(v, 'append', w)
def SET_LINENO(f, lineno, *ignored):
pass
def CALL_LIKELY_BUILTIN(f, oparg, *ignored):
# overridden by faster version in the standard object space.
from pypy.module.__builtin__ import OPTIMIZED_BUILTINS
w_varname = f.space.wrap(OPTIMIZED_BUILTINS[oparg >> 8])
w_function = f._load_global(w_varname)
nargs = oparg&0xFF
try:
w_result = f.space.call_valuestack(w_function, nargs, f)
finally:
f.dropvalues(nargs)
f.pushvalue(w_result)
## def EXTENDED_ARG(f, oparg, *ignored):
## opcode = f.nextop()
## oparg = oparg<<16 | f.nextarg()
## fn = f.dispatch_table_w_arg[opcode]
## if fn is None:
## raise BytecodeCorruption
## fn(f, oparg)
    def MISSING_OPCODE(f, oparg, next_instr, *ignored):
        # fallback for byte values with no opcode implementation
        ofs = next_instr - 1
        c = f.pycode.co_code[ofs]
        name = f.pycode.co_name
        raise BytecodeCorruption("unknown opcode, ofs=%d, code=%d, name=%s" %
                                 (ofs, ord(c), name) )
    STOP_CODE = MISSING_OPCODE
### ____________________________________________________________ ###
class Reraise(Exception):
    """Signal an application-level OperationError that should not grow
    a new traceback entry nor trigger the trace hook."""

class ExitFrame(Exception):
    # base class of the interp-level exceptions that leave dispatch()
    pass

class Return(ExitFrame):
    """Obscure."""

class Yield(ExitFrame):
    """Obscure."""

class BytecodeCorruption(Exception):
    """Detected bytecode corruption.  Never caught; it's an error."""
### Frame Blocks ###
class SuspendedUnroller(Wrappable):
    """Abstract base class for interpreter-level objects that
    instruct the interpreter to change the control flow and the
    block stack.

    The concrete subclasses correspond to the various values WHY_XXX
    values of the why_code enumeration in ceval.c:

        WHY_NOT,        OK, not this one :-)
        WHY_EXCEPTION,  SApplicationException
        WHY_RERAISE,    implemented differently, see Reraise
        WHY_RETURN,     SReturnValue
        WHY_BREAK,      SBreakLoop
        WHY_CONTINUE,   SContinueLoop
        WHY_YIELD       not needed
    """
    def nomoreblocks(self):
        # called when the block stack is exhausted without a handler
        raise BytecodeCorruption("misplaced bytecode - should not return")
    # NB. for the flow object space, the state_(un)pack_variables methods
    # give a way to "pickle" and "unpickle" the SuspendedUnroller by
    # enumerating the Variables it contains.
class SReturnValue(SuspendedUnroller):
    """Signals a 'return' statement.
    Argument is the wrapped object to return."""
    kind = 0x01   # bit in FrameBlock.handling_mask
    def __init__(self, w_returnvalue):
        self.w_returnvalue = w_returnvalue
    def nomoreblocks(self):
        # no enclosing block left: the return value escapes the frame
        return self.w_returnvalue
    def state_unpack_variables(self, space):
        return [self.w_returnvalue]
    def state_pack_variables(space, w_returnvalue):
        return SReturnValue(w_returnvalue)
    state_pack_variables = staticmethod(state_pack_variables)
class SApplicationException(SuspendedUnroller):
    """Signals an application-level exception
    (i.e. an OperationException)."""
    kind = 0x02   # bit in FrameBlock.handling_mask
    def __init__(self, operr):
        self.operr = operr
    def nomoreblocks(self):
        # unhandled: propagate the OperationError out of the frame
        raise self.operr
    def state_unpack_variables(self, space):
        return [self.operr.w_type, self.operr.w_value]
    def state_pack_variables(space, w_type, w_value):
        return SApplicationException(OperationError(w_type, w_value))
    state_pack_variables = staticmethod(state_pack_variables)
class SBreakLoop(SuspendedUnroller):
    """Signals a 'break' statement."""
    kind = 0x04   # bit in FrameBlock.handling_mask
    def state_unpack_variables(self, space):
        return []
    def state_pack_variables(space):
        return SBreakLoop.singleton
    state_pack_variables = staticmethod(state_pack_variables)
# 'break' carries no state, so a single shared instance suffices
SBreakLoop.singleton = SBreakLoop()
class SContinueLoop(SuspendedUnroller):
    """Signals a 'continue' statement.
    Argument is the bytecode position of the beginning of the loop."""
    kind = 0x08   # bit in FrameBlock.handling_mask
    def __init__(self, jump_to):
        self.jump_to = jump_to
    def state_unpack_variables(self, space):
        return [space.wrap(self.jump_to)]
    def state_pack_variables(space, w_jump_to):
        return SContinueLoop(space.int_w(w_jump_to))
    state_pack_variables = staticmethod(state_pack_variables)
class FrameBlock:
    """Abstract base class for frame blocks from the blockstack,
    used by the SETUP_XXX and POP_BLOCK opcodes."""

    def __init__(self, frame, handlerposition):
        # bytecode offset of the handler, and the value-stack depth
        # to restore when the block is left
        self.handlerposition = handlerposition
        self.valuestackdepth = frame.valuestackdepth

    def __eq__(self, other):
        return (self.__class__ is other.__class__ and
                self.handlerposition == other.handlerposition and
                self.valuestackdepth == other.valuestackdepth)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash((self.handlerposition, self.valuestackdepth))

    def cleanupstack(self, frame):
        # drop values pushed since the block was entered
        frame.dropvaluesuntil(self.valuestackdepth)

    def cleanup(self, frame):
        "Clean up a frame when we normally exit the block."
        self.cleanupstack(frame)

    # internal pickling interface, not using the standard protocol
    def _get_state_(self, space):
        w = space.wrap
        return space.newtuple([w(self._opname), w(self.handlerposition),
                               w(self.valuestackdepth)])

    def handle(self, frame, unroller):
        next_instr = self.really_handle(frame, unroller)   # JIT hack
        return hint(next_instr, promote=True)
class LoopBlock(FrameBlock):
    """A loop block.  Stores the end-of-loop pointer in case of 'break'."""

    _opname = 'SETUP_LOOP'
    # handles both 'break' and 'continue'
    handling_mask = SBreakLoop.kind | SContinueLoop.kind

    def really_handle(self, frame, unroller):
        if isinstance(unroller, SContinueLoop):
            # re-push the loop block without cleaning up the value stack,
            # and jump to the beginning of the loop, stored in the
            # exception's argument
            frame.blockstack.append(self)
            return unroller.jump_to
        else:
            # jump to the end of the loop
            self.cleanupstack(frame)
            return self.handlerposition
class ExceptBlock(FrameBlock):
    """An try:except: block.  Stores the position of the exception handler."""

    _opname = 'SETUP_EXCEPT'
    # only handles application-level exceptions
    handling_mask = SApplicationException.kind

    def really_handle(self, frame, unroller):
        # push the exception to the value stack for inspection by the
        # exception handler (the code after the except:)
        self.cleanupstack(frame)
        assert isinstance(unroller, SApplicationException)
        operationerr = unroller.operr
        if frame.space.full_exceptions:
            operationerr.normalize_exception(frame.space)
        # the stack setup is slightly different than in CPython:
        # instead of the traceback, we store the unroller object,
        # wrapped.
        frame.pushvalue(frame.space.wrap(unroller))
        frame.pushvalue(operationerr.w_value)
        frame.pushvalue(operationerr.w_type)
        return self.handlerposition   # jump to the handler
class FinallyBlock(FrameBlock):
    """A try:finally: block.  Stores the position of the exception handler."""

    _opname = 'SETUP_FINALLY'
    handling_mask = -1     # handles every kind of SuspendedUnroller

    def cleanup(self, frame):
        """Normal (non-exceptional) entry into the finally: part."""
        # upon normal entry into the finally: part, the standard Python
        # bytecode pushes a single None for END_FINALLY.  In our case we
        # always push three values into the stack: the wrapped ctlflowexc,
        # the exception value and the exception type (which are all None
        # here).
        self.cleanupstack(frame)
        # one None already pushed by the bytecode
        frame.pushvalue(frame.space.w_None)
        frame.pushvalue(frame.space.w_None)

    def really_handle(self, frame, unroller):
        """Abnormal entry into the finally: part (exception, break,
        continue or return in flight)."""
        # any abnormal reason for unrolling a finally: triggers the end of
        # the block unrolling and the entering the finally: handler.
        # see comments in cleanup().
        self.cleanupstack(frame)
        frame.pushvalue(frame.space.wrap(unroller))
        frame.pushvalue(frame.space.w_None)
        frame.pushvalue(frame.space.w_None)
        return self.handlerposition   # jump to the handler
# map each SETUP_* opcode name to the FrameBlock subclass it creates;
# the keys are derived from the classes' own _opname attributes
block_classes = {
    LoopBlock._opname:    LoopBlock,
    ExceptBlock._opname:  ExceptBlock,
    FinallyBlock._opname: FinallyBlock,
    }
### helpers written at the application-level ###
# Some of these functions are expected to be generally useful if other
# parts of the code need to do the same thing as a non-trivial opcode,
# like finding out which metaclass a new class should have.
# This is why they are not methods of PyFrame.
# There are also a couple of helpers that are methods, defined in the
# class above.
app = gateway.applevel(r'''
""" applevel implementation of certain system properties, imports
and other helpers"""
import sys
def sys_stdout():
try:
return sys.stdout
except AttributeError:
raise RuntimeError("lost sys.stdout")
def print_expr(obj):
try:
displayhook = sys.displayhook
except AttributeError:
raise RuntimeError("lost sys.displayhook")
displayhook(obj)
def print_item_to(x, stream):
if file_softspace(stream, False):
stream.write(" ")
stream.write(str(x))
# add a softspace unless we just printed a string which ends in a '\t'
# or '\n' -- or more generally any whitespace character but ' '
if isinstance(x, str) and x and x[-1].isspace() and x[-1]!=' ':
return
# XXX add unicode handling
file_softspace(stream, True)
print_item_to._annspecialcase_ = "specialize:argtype(0)"
def print_item(x):
print_item_to(x, sys_stdout())
print_item._annspecialcase_ = "flowspace:print_item"
def print_newline_to(stream):
stream.write("\n")
file_softspace(stream, False)
def print_newline():
print_newline_to(sys_stdout())
print_newline._annspecialcase_ = "flowspace:print_newline"
def file_softspace(file, newflag):
try:
softspace = file.softspace
except AttributeError:
softspace = 0
try:
file.softspace = newflag
except AttributeError:
pass
return softspace
''', filename=__file__)
sys_stdout = app.interphook('sys_stdout')
print_expr = app.interphook('print_expr')
print_item = app.interphook('print_item')
print_item_to = app.interphook('print_item_to')
print_newline = app.interphook('print_newline')
print_newline_to= app.interphook('print_newline_to')
file_softspace = app.interphook('file_softspace')
app = gateway.applevel(r'''
def find_metaclass(bases, namespace, globals, builtin):
if '__metaclass__' in namespace:
return namespace['__metaclass__']
elif len(bases) > 0:
base = bases[0]
if hasattr(base, '__class__'):
return base.__class__
else:
return type(base)
elif '__metaclass__' in globals:
return globals['__metaclass__']
else:
try:
return builtin.__metaclass__
except AttributeError:
return type
''', filename=__file__)
find_metaclass = app.interphook('find_metaclass')
app = gateway.applevel(r'''
def import_all_from(module, into_locals):
try:
all = module.__all__
except AttributeError:
try:
dict = module.__dict__
except AttributeError:
raise ImportError("from-import-* object has no __dict__ "
"and no __all__")
all = dict.keys()
skip_leading_underscores = True
else:
skip_leading_underscores = False
for name in all:
if skip_leading_underscores and name[0]=='_':
continue
into_locals[name] = getattr(module, name)
''', filename=__file__)
import_all_from = app.interphook('import_all_from')
app = gateway.applevel(r'''
def prepare_exec(f, prog, globals, locals, compile_flags, builtin, codetype):
"""Manipulate parameters to exec statement to (codeobject, dict, dict).
"""
if (globals is None and locals is None and
isinstance(prog, tuple) and
(len(prog) == 2 or len(prog) == 3)):
globals = prog[1]
if len(prog) == 3:
locals = prog[2]
prog = prog[0]
if globals is None:
globals = f.f_globals
if locals is None:
locals = f.f_locals
if locals is None:
locals = globals
if not isinstance(globals, dict):
if not hasattr(globals, '__getitem__'):
raise TypeError("exec: arg 2 must be a dictionary or None")
try:
globals['__builtins__']
except KeyError:
globals['__builtins__'] = builtin
if not isinstance(locals, dict):
if not hasattr(locals, '__getitem__'):
raise TypeError("exec: arg 3 must be a dictionary or None")
if not isinstance(prog, codetype):
filename = '<string>'
if not isinstance(prog, str):
if isinstance(prog, basestring):
prog = str(prog)
elif isinstance(prog, file):
filename = prog.name
prog = prog.read()
else:
raise TypeError("exec: arg 1 must be a string, file, "
"or code object")
try:
prog = compile(prog, filename, 'exec', compile_flags, 1)
except SyntaxError, e: # exec SyntaxErrors have filename==None
if len(e.args) == 2:
msg, loc = e.args
loc1 = (None,) + loc[1:]
e.args = msg, loc1
e.filename = None
raise e
return (prog, globals, locals)
''', filename=__file__)
prepare_exec = app.interphook('prepare_exec')
| Python |
from pypy.interpreter import module, eval
from pypy.interpreter.error import OperationError
from pypy.interpreter.pycode import PyCode
import sys, types
def ensure__main__(space):
w_main = space.wrap('__main__')
w_modules = space.sys.get('modules')
try:
return space.getitem(w_modules, w_main)
except OperationError, e:
if not e.match(space, space.w_KeyError):
raise
mainmodule = module.Module(space, w_main)
space.setitem(w_modules, w_main, mainmodule)
return mainmodule
def compilecode(space, source, filename, cmd='exec'):
    """Compile 'source' through the app-level compile() builtin and
    return the resulting interp-level eval.Code object."""
    w = space.wrap
    w_code = space.builtin.call(
        'compile', w(source), w(filename), w(cmd), w(0), w(0))
    return space.interp_w(eval.Code, w_code)
def _run_eval_string(source, filename, space, eval):
    """Compile and run 'source' inside the '__main__' module.

    If 'eval' is true, the source is evaluated as an expression and the
    wrapped result is returned; otherwise it is executed as a suite and
    None is returned.  A default StdObjSpace is created when 'space' is
    None.  OperationErrors get their interpreter-level traceback recorded
    before being re-raised.
    """
    if eval:
        cmd = 'eval'
    else:
        cmd = 'exec'
    try:
        if space is None:
            from pypy.objspace.std import StdObjSpace
            space = StdObjSpace()

        w = space.wrap
        pycode = compilecode(space, source, filename or '<string>', cmd)

        mainmodule = ensure__main__(space)
        w_globals = mainmodule.w_dict

        space.setitem(w_globals, w('__builtins__'), space.builtin)
        if filename is not None:
            space.setitem(w_globals, w('__file__'), w(filename))

        retval = pycode.exec_code(space, w_globals, w_globals)
        if eval:
            return retval
        else:
            return

    except OperationError, operationerr:
        operationerr.record_interpreter_traceback()
        raise
def run_string(source, filename=None, space=None):
    """Execute 'source' as a suite in the '__main__' module (no result)."""
    _run_eval_string(source, filename, space, False)
def eval_string(source, filename=None, space=None):
    """Evaluate 'source' as an expression in '__main__' and return the
    wrapped result."""
    return _run_eval_string(source, filename, space, True)
def run_file(filename, space=None):
if __name__=='__main__':
print "Running %r with %r" % (filename, space)
istring = open(filename).read()
run_string(istring, filename, space)
def run_module(module_name, args, space=None):
    """Implements PEP 338 'Executing modules as scripts', overwriting
    sys.argv[1:] using `args` and executing the module `module_name`.
    sys.argv[0] always is `module_name`.
    Delegates the real work to the runpy module provided as the reference
    implementation.
    """
    if space is None:
        from pypy.objspace.std import StdObjSpace
        space = StdObjSpace()
    w = space.wrap
    argv = [module_name]
    if args is not None:
        argv.extend(args)
    # install the new sys.argv before running the module
    space.setitem(space.sys.w_dict, w('argv'), w(argv))
    # import runpy at app-level and call its run_module() with
    # run_name='__main__' and alter_sys=True
    w_import = space.builtin.get('__import__')
    runpy = space.call_function(w_import, w('runpy'))
    w_run_module = space.getitem(runpy.w_dict, w('run_module'))
    return space.call_function(w_run_module, w(module_name), space.w_None,
                               w('__main__'), space.w_True)
# ____________________________________________________________
def run_toplevel(space, f, verbose=False):
    """Calls f() and handle all OperationErrors.
    Intended use is to run the main program or one interactive statement.
    run_protected() handles details like forwarding exceptions to
    sys.excepthook(), catching SystemExit, printing a newline after
    sys.stdout if needed, etc.

    Returns True on success, False if an exception was reported.
    Raises interp-level SystemExit when the app-level code raised one.
    """
    try:
        # run it
        f()

        # we arrive here if no exception is raised.  stdout cosmetics...
        try:
            w_stdout = space.sys.get('stdout')
            w_softspace = space.getattr(w_stdout, space.wrap('softspace'))
        except OperationError, e:
            if not e.match(space, space.w_AttributeError):
                raise
            # Don't crash if user defined stdout doesn't have softspace
        else:
            if space.is_true(w_softspace):
                # a trailing softspace means 'print x,' was the last
                # output: finish the line
                space.call_method(w_stdout, 'write', space.wrap('\n'))

    except OperationError, operationerr:
        operationerr.normalize_exception(space)
        w_type = operationerr.w_type
        w_value = operationerr.w_value
        w_traceback = space.wrap(operationerr.application_traceback)

        # for debugging convenience we also insert the exception into
        # the interpreter-level sys.last_xxx
        operationerr.record_interpreter_traceback()
        sys.last_type, sys.last_value, sys.last_traceback = sys.exc_info()

        try:
            # exit if we catch a w_SystemExit
            if operationerr.match(space, space.w_SystemExit):
                w_exitcode = space.getattr(operationerr.w_value,
                                           space.wrap('code'))
                if space.is_w(w_exitcode, space.w_None):
                    exitcode = 0
                else:
                    try:
                        exitcode = space.int_w(w_exitcode)
                    except OperationError:
                        # not an integer: print it to stderr
                        msg = space.str_w(space.str(w_exitcode))
                        print >> sys.stderr, msg
                        exitcode = 1
                raise SystemExit(exitcode)

            # set the sys.last_xxx attributes
            space.setitem(space.sys.w_dict, space.wrap('last_type'), w_type)
            space.setitem(space.sys.w_dict, space.wrap('last_value'), w_value)
            space.setitem(space.sys.w_dict, space.wrap('last_traceback'),
                          w_traceback)

            # call sys.excepthook if present
            w_hook = space.sys.getdictvalue_w(space, 'excepthook')
            if w_hook is not None:
                # hack: skip it if it wasn't modified by the user,
                #       to do instead the faster verbose/nonverbose thing below
                w_original = space.sys.getdictvalue_w(space, '__excepthook__')
                if w_original is None or not space.is_w(w_hook, w_original):
                    space.call_function(w_hook, w_type, w_value, w_traceback)
                    return False   # done

        except OperationError, err2:
            # XXX should we go through sys.get('stderr') ?
            print >> sys.stderr, 'Error calling sys.excepthook:'
            err2.print_application_traceback(space)
            print >> sys.stderr
            print >> sys.stderr, 'Original exception was:'

        # we only get here if sys.excepthook didn't do its job
        if verbose:
            operationerr.print_detailed_traceback(space)
        else:
            operationerr.print_application_traceback(space)
        return False

    return True   # success
| Python |
"""
PyPy-oriented interface to pdb.
"""
import pdb
def fire(operationerr):
    """Start a pdb post-mortem session on the most recent interp-level
    traceback recorded in operationerr.debug_excs; no-op if none."""
    debug_excs = operationerr.debug_excs
    if not debug_excs:
        return
    exc, val, tb = debug_excs[-1]
    pdb.post_mortem(tb)
| Python |
"""
"""
import py
from pypy.interpreter.gateway import interp2app
from pypy.interpreter.argument import Arguments
from pypy.interpreter.baseobjspace import Wrappable, W_Root, ObjSpace, \
DescrMismatch
from pypy.interpreter.error import OperationError
from pypy.tool.sourcetools import compile2, func_with_new_name
from pypy.rlib.objectmodel import instantiate
from pypy.rlib.rarithmetic import intmask
class TypeDef:
    """Declarative description of an app-level type: its name, base
    typedef, and the raw dictionary of descriptors/values."""

    def __init__(self, __name, __base=None, **rawdict):
        "NOT_RPYTHON: initialization-time only"
        self.name = __name
        self.base = __base
        # flags derived from the presence of special entries
        self.hasdict = '__dict__' in rawdict
        self.weakrefable = '__weakref__' in rawdict
        self.custom_hash = '__hash__' in rawdict
        if __base is not None:
            # inherit the flags from the base typedef
            self.hasdict = self.hasdict or __base.hasdict
            self.weakrefable = self.weakrefable or __base.weakrefable
            self.custom_hash = self.custom_hash or __base.custom_hash
        # NB. custom_hash is sometimes overridden manually by callers
        self.rawdict = {}
        self.acceptable_as_base_class = True
        self.fakedcpytype = None     # xxx used by faking
        self.add_entries(**rawdict)

    def add_entries(self, **rawdict):
        """Record new entries, fixing the names of the interp2app and
        GetSetProperty ones to match what app-level expects."""
        for key, value in rawdict.items():
            if isinstance(value, (interp2app, GetSetProperty)):
                value.name = key
        self.rawdict.update(rawdict)

    def _freeze_(self):
        # hint for the annotator: track individual constant instances
        # of TypeDef
        return True
# ____________________________________________________________
# Hash support
def get_default_hash_function(cls):
    """Return the default (identity) RPython-level hash function for
    instances of 'cls', or None if no default hash is needed/allowed."""
    # go to the first parent class of 'cls' that has a typedef
    while 'typedef' not in cls.__dict__:
        cls = cls.__bases__[0]
        if cls is object:
            # not found: 'cls' must have been an abstract class,
            # no hash function is needed
            return None
    if cls.typedef.custom_hash:
        return None    # the typedef says that instances have their own
                       # hash, so we don't need a default RPython-level
                       # hash function.
    try:
        hashfunction = _hashfunction_cache[cls]
    except KeyError:
        # build (once per cls) a small identity-hash closure
        def hashfunction(w_obj):
            "Return the identity hash of 'w_obj'."
            assert isinstance(w_obj, cls)
            return hash(w_obj)   # forces a hash_cache only on 'cls' instances
        hashfunction = func_with_new_name(hashfunction,
                                          'hashfunction_for_%s' % (cls.__name__,))
        _hashfunction_cache[cls] = hashfunction
    return hashfunction
get_default_hash_function._annspecialcase_ = 'specialize:memo'
# per-class cache of the closures built above
_hashfunction_cache = {}
def default_identity_hash(space, w_obj):
    """Wrapped identity hash of w_obj; raises app-level TypeError when
    the type's typedef forbids a default hash."""
    hashfunc = get_default_hash_function(w_obj.__class__)
    if hashfunc is None:
        typename = space.type(w_obj).getname(space, '?')
        raise OperationError(
            space.w_TypeError,
            space.wrap("%s objects have no default hash" % (typename,)))
    return space.wrap(intmask(hashfunc(w_obj)))
def descr__hash__unhashable(space, w_obj):
    """__hash__ implementation that unconditionally raises TypeError,
    for app-level types whose instances are unhashable."""
    name = space.type(w_obj).getname(space, '?')
    raise OperationError(space.w_TypeError,
                         space.wrap("%s objects are unhashable" % (name,)))

no_hash_descr = interp2app(descr__hash__unhashable)
# ____________________________________________________________
def get_unique_interplevel_subclass(cls, hasdict, wants_slots, needsdel=False,
                                    weakrefable=False):
    """Normalize the feature flags and return the memoized user subclass
    of 'cls' providing them."""
    if needsdel:
        # a __del__ implies the full feature set
        hasdict = True
        wants_slots = True
        weakrefable = True
    if hasdict:
        weakrefable = True
    else:
        wants_slots = True
    return _get_unique_interplevel_subclass(cls, hasdict, wants_slots,
                                            needsdel, weakrefable)
get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo"
def _get_unique_interplevel_subclass(cls, hasdict, wants_slots, needsdel, weakrefable):
    "NOT_RPYTHON: initialization-time only"
    # drop flags for features the base class's typedef already provides
    typedef = cls.typedef
    if hasdict and typedef.hasdict:
        hasdict = False
    if weakrefable and typedef.weakrefable:
        weakrefable = False

    # memoize on the exact (class, flags) combination
    key = cls, hasdict, wants_slots, needsdel, weakrefable
    try:
        return _subclass_cache[key]
    except KeyError:
        subcls = _buildusercls(cls, hasdict, wants_slots, needsdel, weakrefable)
        _subclass_cache[key] = subcls
        return subcls
# cache of subclasses built by _buildusercls(), keyed by (cls, flags)
_subclass_cache = {}
def _buildusercls(cls, hasdict, wants_slots, wants_del, weakrefable):
    "NOT_RPYTHON: initialization-time only"
    # Build a new subclass of 'cls' adding ONE feature at a time (the
    # outermost requested one); the remaining features are provided by
    # recursively building the superclass.  The class name encodes the
    # requested features.
    name = ['User']
    if not hasdict:
        name.append('NoDict')
    if wants_slots:
        name.append('WithSlots')
    if wants_del:
        name.append('WithDel')
    if weakrefable:
        name.append('Weakrefable')
    name.append(cls.__name__)
    name = ''.join(name)
    if weakrefable:
        # add the weakref lifeline interface on top of everything else
        supercls = _get_unique_interplevel_subclass(cls, hasdict, wants_slots,
                                                    wants_del, False)
        class Proto(object):
            _lifeline_ = None
            def getweakref(self):
                return self._lifeline_
            def setweakref(self, space, weakreflifeline):
                self._lifeline_ = weakreflifeline
    elif wants_del:
        # add a __del__ that calls the app-level destructor, then the
        # parent interp-level one if any
        supercls = _get_unique_interplevel_subclass(cls, hasdict, wants_slots,
                                                    False, False)
        parent_destructor = getattr(cls, '__del__', None)
        class Proto(object):
            def __del__(self):
                try:
                    self.space.userdel(self)
                except OperationError, e:
                    e.write_unraisable(self.space, 'method __del__ of ', self)
                    e.clear(self.space)   # break up reference cycles
                if parent_destructor is not None:
                    parent_destructor(self)
    elif wants_slots:
        # add storage for app-level __slots__ values
        supercls = _get_unique_interplevel_subclass(cls, hasdict, False, False, False)
        class Proto(object):
            slots_w = []
            def user_setup_slots(self, nslots):
                if nslots > 0:
                    self.slots_w = [None] * nslots
            def setslotvalue(self, index, w_value):
                self.slots_w[index] = w_value
            def getslotvalue(self, index):
                return self.slots_w[index]
    elif hasdict:
        # add an app-level __dict__ (implementation depends on the
        # objspace configuration options)
        supercls = _get_unique_interplevel_subclass(cls, False, False, False, False)
        class Proto(object):
            def getdict(self):
                return self.w__dict__
            def setdict(self, space, w_dict):
                if not space.is_true(space.isinstance(w_dict, space.w_dict)):
                    raise OperationError(space.w_TypeError,
                            space.wrap("setting dictionary to a non-dict"))
                if space.config.objspace.std.withmultidict:
                    from pypy.objspace.std import dictmultiobject
                    assert isinstance(w_dict, dictmultiobject.W_DictMultiObject)
                self.w__dict__ = w_dict
            def user_setup(self, space, w_subtype):
                self.space = space
                self.w__class__ = w_subtype
                if space.config.objspace.std.withsharingdict:
                    from pypy.objspace.std import dictmultiobject
                    self.w__dict__ = dictmultiobject.W_DictMultiObject(space,
                                                                       sharing=True)
                elif space.config.objspace.std.withshadowtracking:
                    from pypy.objspace.std import dictmultiobject
                    self.w__dict__ = dictmultiobject.W_DictMultiObject(space)
                    self.w__dict__.implementation = \
                        dictmultiobject.ShadowDetectingDictImplementation(
                                space, w_subtype)
                else:
                    self.w__dict__ = space.newdict()
                self.user_setup_slots(w_subtype.nslots)
            def setclass(self, space, w_subtype):
                # only used by descr_set___class__
                self.w__class__ = w_subtype
                if space.config.objspace.std.withshadowtracking:
                    self.w__dict__.implementation.set_shadows_anything()
            def getdictvalue_attr_is_in_class(self, space, w_name):
                w_dict = self.w__dict__
                if space.config.objspace.std.withshadowtracking:
                    if not w_dict.implementation.shadows_anything():
                        return None
                return space.finditem(w_dict, w_name)
    else:
        # base case: just record the app-level class
        supercls = cls
        class Proto(object):
            def getclass(self, space):
                return self.w__class__
            def setclass(self, space, w_subtype):
                # only used by descr_set___class__
                self.w__class__ = w_subtype
            def user_setup(self, space, w_subtype):
                self.space = space
                self.w__class__ = w_subtype
                self.user_setup_slots(w_subtype.nslots)
            def user_setup_slots(self, nslots):
                assert nslots == 0
    # copy the chosen Proto's methods into a real subclass of supercls
    # (dropping dunder entries except __del__, which must be inherited)
    body = dict([(key, value)
                 for key, value in Proto.__dict__.items()
                 if not key.startswith('__') or key == '__del__'])
    subcls = type(name, (supercls,), body)
    return subcls
def make_descr_typecheck_wrapper(func, extraargs=(), cls=None):
    """Wrap the descriptor implementation 'func' in generated code that
    first typechecks w_obj against 'cls' ('<name>' strings mean the
    space's built-in type of that name).  Returns None/func unchanged
    when there is nothing to wrap."""
    if func is None:
        return None
    if cls is None:
        return func
    if hasattr(func, 'im_func'):
        # unwrap a bound/unbound method into its plain function
        assert func.im_class is cls
        func = func.im_func
    miniglobals = {
         func.__name__: func,
        'OperationError': OperationError
        }
    if isinstance(cls, str):
        #print "<CHECK", func.__module__ or '?', func.__name__
        assert cls.startswith('<'),"pythontype typecheck should begin with <"
        # check against one of the space's built-in w_* types
        source = """
        def descr_typecheck_%(name)s(space, w_obj, %(extra)s):
            if not space.is_true(space.isinstance(w_obj, space.w_%(cls_name)s)):
                # xxx improve msg
                msg = "descriptor is for '%(expected)s'"
                raise OperationError(space.w_TypeError, space.wrap(msg))
            return %(name)s(space, w_obj, %(extra)s)
        """
        cls_name = cls[1:]
        expected = repr(cls_name)
    else:
        # check against an interp-level Wrappable subclass
        cls_name = cls.__name__
        assert issubclass(cls, Wrappable)
        source = """
        def descr_typecheck_%(name)s(space, w_obj, %(extra)s):
            obj = space.descr_self_interp_w(%(cls_name)s, w_obj)
            return %(name)s(space, obj, %(extra)s)
        """
        miniglobals[cls_name] = cls
    name = func.__name__
    extra = ', '.join(extraargs)
    source = py.code.Source(source % locals())
    exec source.compile() in miniglobals
    return miniglobals['descr_typecheck_%s' % func.__name__]
def unknown_objclass_getter(space):
    """Fallback __objclass__ getter used when the property is not tied
    to any particular class: always raises app-level TypeError."""
    msg = "generic property has no __objclass__"
    raise OperationError(space.w_TypeError, space.wrap(msg))
def make_objclass_getter(func, cls, cache={}):
    """Build (and cache per class) a getter returning the wrapped type
    to expose as the property's __objclass__; returns (getter, cls)."""
    if hasattr(func, 'im_func'):
        # derive cls from the method's class when available
        assert not cls or cls is func.im_class
        cls = func.im_class
    if not cls:
        return unknown_objclass_getter, cls
    try:
        return cache[cls]
    except KeyError:
        pass
    miniglobals = {}
    if isinstance(cls, str):
        # '<name>' refers to one of the space's built-in w_* types
        assert cls.startswith('<'),"pythontype typecheck should begin with <"
        cls_name = cls[1:]
        typeexpr = "space.w_%s" % cls_name
    else:
        miniglobals['cls'] = cls
        typeexpr = "space.gettypeobject(cls.typedef)"
    source = """if 1:
        def objclass_getter(space):
            return %s
        \n""" % (typeexpr,)
    exec compile2(source) in miniglobals
    res = miniglobals['objclass_getter'], cls
    cache[cls] = res
    return res
class GetSetProperty(Wrappable):
    """Interp-level property descriptor exposed to app-level, built from
    up to three interp-level callables (fget/fset/fdel)."""

    def __init__(self, fget, fset=None, fdel=None, doc=None, cls=None):
        "NOT_RPYTHON: initialization-time only"
        # wrap each accessor so that it typechecks its w_obj argument
        objclass_getter, cls = make_objclass_getter(fget, cls)
        fget = make_descr_typecheck_wrapper(fget, cls=cls)
        fset = make_descr_typecheck_wrapper(fset, ('w_value',), cls=cls)
        fdel = make_descr_typecheck_wrapper(fdel, cls=cls)
        self.fget = fget
        self.fset = fset
        self.fdel = fdel
        self.doc = doc
        self.reqcls = cls
        self.name = '<generic property>'   # patched by TypeDef.add_entries()
        self.objclass_getter = objclass_getter

    def descr_property_get(space, property, w_obj, w_cls=None):
        """property.__get__(obj[, type]) -> value
        Read the value of the property of the given obj."""
        # XXX HAAAAAAAAAAAACK (but possibly a good one)
        # accessed on the class (w_obj is None): return the descriptor
        # itself, unless we are looking at type(None)'s own attributes
        if (space.is_w(w_obj, space.w_None)
            and not space.is_w(w_cls, space.type(space.w_None))):
            #print property, w_obj, w_cls
            return space.wrap(property)
        else:
            try:
                return property.fget(space, w_obj)
            except DescrMismatch, e:
                # w_obj is of the wrong interp-level class: re-dispatch
                # through the app-level __getattribute__ machinery
                return w_obj.descr_call_mismatch(space, '__getattribute__',\
                    property.reqcls, Arguments(space, [w_obj,
                    space.wrap(property.name)]))

    def descr_property_set(space, property, w_obj, w_value):
        """property.__set__(obj, value)
        Change the value of the property of the given obj."""
        fset = property.fset
        if fset is None:
            raise OperationError(space.w_TypeError,
                                 space.wrap("readonly attribute"))
        try:
            fset(space, w_obj, w_value)
        except DescrMismatch, e:
            w_obj.descr_call_mismatch(space, '__setattr__',\
                property.reqcls, Arguments(space, [w_obj,
                space.wrap(property.name), w_value]))

    def descr_property_del(space, property, w_obj):
        """property.__delete__(obj)
        Delete the value of the property from the given obj."""
        fdel = property.fdel
        if fdel is None:
            raise OperationError(space.w_AttributeError,
                                 space.wrap("cannot delete attribute"))
        try:
            fdel(space, w_obj)
        except DescrMismatch, e:
            w_obj.descr_call_mismatch(space, '__delattr__',\
                property.reqcls, Arguments(space, [w_obj,
                space.wrap(property.name)]))

    def descr_get_objclass(space, property):
        """Expose the class this property belongs to as __objclass__."""
        return property.objclass_getter(space)
def interp_attrproperty(name, cls):
    "NOT_RPYTHON: initialization-time only"
    # read-only property wrapping the plain interp-level attribute 'name'
    def fget(space, obj):
        value = getattr(obj, name)
        return space.wrap(value)
    return GetSetProperty(fget, cls=cls)
def interp_attrproperty_w(name, cls):
    "NOT_RPYTHON: initialization-time only"
    # read-only property for an attribute that already holds a wrapped
    # object (or None, exposed as w_None)
    def fget(space, obj):
        w_value = getattr(obj, name)
        if w_value is not None:
            return w_value
        return space.w_None
    return GetSetProperty(fget, cls=cls)
# app-level view of GetSetProperty: the 'getset_descriptor' type
GetSetProperty.typedef = TypeDef(
    "getset_descriptor",
    __get__ = interp2app(GetSetProperty.descr_property_get.im_func,
                         unwrap_spec = [ObjSpace,
                                        GetSetProperty, W_Root, W_Root]),
    __set__ = interp2app(GetSetProperty.descr_property_set.im_func,
                         unwrap_spec = [ObjSpace,
                                        GetSetProperty, W_Root, W_Root]),
    __delete__ = interp2app(GetSetProperty.descr_property_del.im_func,
                            unwrap_spec = [ObjSpace,
                                           GetSetProperty, W_Root]),
    __name__ = interp_attrproperty('name', cls=GetSetProperty),
    __objclass__ = GetSetProperty(GetSetProperty.descr_get_objclass),
    )
class Member(Wrappable):
    """For slots.  A descriptor reading/writing the slot at 'index'
    of instances of the app-level type 'w_cls'."""

    def __init__(self, index, name, w_cls):
        self.index = index     # position in the instance's slots_w list
        self.name = name       # app-level slot name (for error messages)
        self.w_cls = w_cls     # the app-level class owning the slot

    def typecheck(self, space, w_obj):
        """Raise app-level TypeError unless w_obj is an instance of w_cls."""
        if not space.is_true(space.isinstance(w_obj, self.w_cls)):
            raise OperationError(space.w_TypeError,
                                 space.wrap("descriptor '%s' for '%s'"
                                            " objects doesn't apply to '%s' object" %
                                            (self.name,
                                             self.w_cls.name,
                                             space.type(w_obj).getname(space, '?'))))

    def descr_member_get(space, member, w_obj, w_w_cls=None):
        """member.__get__(obj[, type]) -> value
        Read the slot 'member' of the given 'obj'."""
        if space.is_w(w_obj, space.w_None):
            # accessed on the class: return the descriptor itself
            return space.wrap(member)
        else:
            self = member
            self.typecheck(space, w_obj)
            w_result = w_obj.getslotvalue(self.index)
            if w_result is None:
                # None marks an unset slot
                raise OperationError(space.w_AttributeError,
                                     space.wrap(self.name)) # XXX better message
            return w_result

    def descr_member_set(space, member, w_obj, w_value):
        """member.__set__(obj, value)
        Write into the slot 'member' of the given 'obj'."""
        self = member
        self.typecheck(space, w_obj)
        w_obj.setslotvalue(self.index, w_value)

    def descr_member_del(space, member, w_obj):
        """member.__delete__(obj)
        Delete the value of the slot 'member' from the given 'obj'."""
        self = member
        self.typecheck(space, w_obj)
        w_obj.setslotvalue(self.index, None)
# app-level view of Member: the 'member_descriptor' type
Member.typedef = TypeDef(
    "member_descriptor",
    __get__ = interp2app(Member.descr_member_get.im_func,
                         unwrap_spec = [ObjSpace,
                                        Member, W_Root, W_Root]),
    __set__ = interp2app(Member.descr_member_set.im_func,
                         unwrap_spec = [ObjSpace,
                                        Member, W_Root, W_Root]),
    __delete__ = interp2app(Member.descr_member_del.im_func,
                            unwrap_spec = [ObjSpace,
                                           Member, W_Root]),
    __name__ = interp_attrproperty('name', cls=Member),
    __objclass__ = interp_attrproperty_w('w_cls', cls=Member),
    )
# ____________________________________________________________
#
# Definition of the type's descriptors for all the internal types
from pypy.interpreter.eval import Code, Frame
from pypy.interpreter.pycode import PyCode, CO_VARARGS, CO_VARKEYWORDS
from pypy.interpreter.pyframe import PyFrame
from pypy.interpreter.pyopcode import SuspendedUnroller
from pypy.interpreter.module import Module
from pypy.interpreter.function import Function, Method, StaticMethod
from pypy.interpreter.function import BuiltinFunction, descr_function_get
from pypy.interpreter.pytraceback import PyTraceback
from pypy.interpreter.generator import GeneratorIterator
from pypy.interpreter.nestedscope import Cell
from pypy.interpreter.special import NotImplemented, Ellipsis
def descr_get_dict(space, w_obj):
    """Generic __dict__ getter; raises app-level TypeError when the
    object has no dict."""
    w_dict = w_obj.getdict()
    if w_dict is not None:
        return w_dict
    typename = space.type(w_obj).getname(space, '?')
    raise OperationError(space.w_TypeError,
                         space.wrap("descriptor '__dict__' doesn't apply to"
                                    " '%s' objects" % typename))
def descr_set_dict(space, w_obj, w_dict):
    """Generic __dict__ setter: delegate to the object's setdict()."""
    w_obj.setdict(space, w_dict)
def descr_get_weakref(space, w_obj):
    """Generic __weakref__ getter: a weakref from the object's lifeline,
    or w_None when no weakref was ever taken."""
    lifeline = w_obj.getweakref()
    if lifeline is not None:
        return lifeline.get_any_weakref(space)
    return space.w_None
def generic_ne(space, w_obj1, w_obj2):
    """Generic __ne__: the exact negation of space.eq_w()."""
    if not space.eq_w(w_obj1, w_obj2):
        return space.w_True
    return space.w_False
descr_generic_ne = interp2app(generic_ne)
# co_xxx interface emulation for built-in code objects
def fget_co_varnames(space, code): # unwrapping through unwrap_spec
    """co_varnames for built-in code objects: the variable names as a
    wrapped tuple."""
    names_w = []
    for name in code.getvarnames():
        names_w.append(space.wrap(name))
    return space.newtuple(names_w)
def fget_co_argcount(space, code): # unwrapping through unwrap_spec
    """co_argcount: the number of positional argument names in the
    code object's signature."""
    argnames = code.signature()[0]
    return space.wrap(len(argnames))
def fget_co_flags(space, code): # unwrapping through unwrap_spec
    """co_flags: synthesized from the signature's *args/**kwargs slots."""
    argnames, varargname, kwargname = code.signature()
    flags = 0
    if varargname is not None:
        flags |= CO_VARARGS
    if kwargname is not None:
        flags |= CO_VARKEYWORDS
    return space.wrap(flags)
def fget_co_consts(space, code): # unwrapping through unwrap_spec
    """co_consts for built-in code objects: only the docstring is exposed."""
    return space.newtuple([code.getdocstring(space)])
# single shared __weakref__ descriptor, reused by all weakrefable types
weakref_descr = GetSetProperty(descr_get_weakref)
weakref_descr.name = '__weakref__'

def make_weakref_descr(cls):
    """Patch the weakref lifeline interface (getweakref/setweakref and
    the _lifeline_ slot) into 'cls' and return the shared __weakref__
    descriptor."""
    # force the interface into the given cls
    def getweakref(self):
        return self._lifeline_
    def setweakref(self, space, weakreflifeline):
        self._lifeline_ = weakreflifeline
    cls._lifeline_ = None
    cls.getweakref = getweakref
    cls.setweakref = setweakref
    return weakref_descr
# ____________________________________________________________
# TypeDef declarations for the interpreter's internal types.

Code.typedef = TypeDef('internal-code',
    co_name = interp_attrproperty('co_name', cls=Code),
    co_varnames = GetSetProperty(fget_co_varnames, cls=Code),
    co_argcount = GetSetProperty(fget_co_argcount, cls=Code),
    co_flags = GetSetProperty(fget_co_flags, cls=Code),
    co_consts = GetSetProperty(fget_co_consts, cls=Code),
    )

Frame.typedef = TypeDef('internal-frame',
    f_code = GetSetProperty(Frame.fget_code),
    f_locals = GetSetProperty(Frame.fget_getdictscope),
    f_globals = interp_attrproperty_w('w_globals', cls=Frame),
    )

# app-level 'code' objects, mirroring CPython's co_* attributes
PyCode.typedef = TypeDef('code',
    __new__ = interp2app(PyCode.descr_code__new__.im_func),
    __eq__ = interp2app(PyCode.descr_code__eq__),
    __ne__ = descr_generic_ne,
    __hash__ = interp2app(PyCode.descr_code__hash__),
    __reduce__ = interp2app(PyCode.descr__reduce__,
                            unwrap_spec=['self', ObjSpace]),
    co_argcount = interp_attrproperty('co_argcount', cls=PyCode),
    co_nlocals = interp_attrproperty('co_nlocals', cls=PyCode),
    co_stacksize = interp_attrproperty('co_stacksize', cls=PyCode),
    co_flags = interp_attrproperty('co_flags', cls=PyCode),
    co_code = interp_attrproperty('co_code', cls=PyCode),
    co_consts = GetSetProperty(PyCode.fget_co_consts),
    co_names = GetSetProperty(PyCode.fget_co_names),
    co_varnames = GetSetProperty(PyCode.fget_co_varnames),
    co_freevars = GetSetProperty(PyCode.fget_co_freevars),
    co_cellvars = GetSetProperty(PyCode.fget_co_cellvars),
    co_filename = interp_attrproperty('co_filename', cls=PyCode),
    co_name = interp_attrproperty('co_name', cls=PyCode),
    co_firstlineno = interp_attrproperty('co_firstlineno', cls=PyCode),
    co_lnotab = interp_attrproperty('co_lnotab', cls=PyCode),
    )

# app-level 'frame' objects; inherits the internal-frame entries
PyFrame.typedef = TypeDef('frame',
    __reduce__ = interp2app(PyFrame.descr__reduce__,
                            unwrap_spec=['self', ObjSpace]),
    __setstate__ = interp2app(PyFrame.descr__setstate__,
                              unwrap_spec=['self', ObjSpace, W_Root]),
    f_builtins = GetSetProperty(PyFrame.fget_f_builtins),
    f_lineno = GetSetProperty(PyFrame.fget_f_lineno, PyFrame.fset_f_lineno),
    f_back = GetSetProperty(PyFrame.fget_f_back),
    f_lasti = GetSetProperty(PyFrame.fget_f_lasti),
    f_trace = GetSetProperty(PyFrame.fget_f_trace, PyFrame.fset_f_trace,
                             PyFrame.fdel_f_trace),
    f_exc_type = GetSetProperty(PyFrame.fget_f_exc_type),
    f_exc_value = GetSetProperty(PyFrame.fget_f_exc_value),
    f_exc_traceback = GetSetProperty(PyFrame.fget_f_exc_traceback),
    f_restricted = GetSetProperty(PyFrame.fget_f_restricted),
    **Frame.typedef.rawdict)

Module.typedef = TypeDef("module",
    __new__ = interp2app(Module.descr_module__new__.im_func,
                         unwrap_spec=[ObjSpace, W_Root, Arguments]),
    __init__ = interp2app(Module.descr_module__init__),
    __reduce__ = interp2app(Module.descr__reduce__,
                            unwrap_spec=['self', ObjSpace]),
    __dict__ = GetSetProperty(descr_get_dict, cls=Module), # module dictionaries are readonly attributes
    __doc__ = 'module(name[, doc])\n\nCreate a module object.\nThe name must be a string; the optional doc argument can have any type.'
    )

# shared getset descriptors for the function type
getset_func_doc = GetSetProperty(Function.fget_func_doc,
                                 Function.fset_func_doc,
                                 Function.fdel_func_doc)

# __module__ attribute lazily gets its value from the w_globals
# at the time of first invocation. This is not 100% compatible but
# avoid problems at the time we construct the first functions when
# it's not really possible to do a get or getitem on dictionaries
# (mostly because wrapped exceptions don't exist at that time)
getset___module__ = GetSetProperty(Function.fget___module__,
                                   Function.fset___module__,
                                   Function.fdel___module__)

getset_func_defaults = GetSetProperty(Function.fget_func_defaults,
                                      Function.fset_func_defaults,
                                      Function.fdel_func_defaults)
getset_func_code = GetSetProperty(Function.fget_func_code,
                                  Function.fset_func_code)
getset_func_name = GetSetProperty(Function.fget_func_name,
                                  Function.fset_func_name)

getset_func_dict = GetSetProperty(descr_get_dict, descr_set_dict, cls=Function)

Function.typedef = TypeDef("function",
    __new__ = interp2app(Function.descr_method__new__.im_func),
    __call__ = interp2app(Function.descr_function_call,
                          unwrap_spec=['self', Arguments],
                          descrmismatch='__call__'),
    __get__ = interp2app(descr_function_get),
    __repr__ = interp2app(Function.descr_function_repr, descrmismatch='__repr__'),
    __reduce__ = interp2app(Function.descr_function__reduce__,
                            unwrap_spec=['self', ObjSpace]),
    __setstate__ = interp2app(Function.descr_function__setstate__,
                              unwrap_spec=['self', ObjSpace, W_Root]),
    func_code = getset_func_code,
    func_doc = getset_func_doc,
    func_name = getset_func_name,
    func_dict = getset_func_dict,
    func_defaults = getset_func_defaults,
    func_globals = interp_attrproperty_w('w_func_globals', cls=Function),
    func_closure = GetSetProperty( Function.fget_func_closure ),
    __doc__ = getset_func_doc,
    __name__ = getset_func_name,
    __dict__ = getset_func_dict,
    __module__ = getset___module__,
    __weakref__ = make_weakref_descr(Function),
    )

Method.typedef = TypeDef("method",
    __new__ = interp2app(Method.descr_method__new__.im_func),
    __call__ = interp2app(Method.descr_method_call,
                          unwrap_spec=['self', Arguments]),
    __get__ = interp2app(Method.descr_method_get),
    im_func = interp_attrproperty_w('w_function', cls=Method),
    im_self = interp_attrproperty_w('w_instance', cls=Method),
    im_class = interp_attrproperty_w('w_class', cls=Method),
    __getattribute__ = interp2app(Method.descr_method_getattribute),
    __eq__ = interp2app(Method.descr_method_eq),
    __ne__ = descr_generic_ne,
    __hash__ = interp2app(Method.descr_method_hash),
    __repr__ = interp2app(Method.descr_method_repr),
    __reduce__ = interp2app(Method.descr_method__reduce__,
                            unwrap_spec=['self', ObjSpace]),
    __weakref__ = make_weakref_descr(Method),
    )

StaticMethod.typedef = TypeDef("staticmethod",
    __get__ = interp2app(StaticMethod.descr_staticmethod_get),
    )
def always_none(self, obj):
    """Getter helper: ignore both arguments and yield None.

    Used as the '__self__' property getter of BuiltinFunction, whose
    __self__ is always None.
    """
    result = None
    return result
# BuiltinFunction starts from a copy of the 'function' typedef, then
# overrides __new__/__repr__, adds a read-only __self__ that is always
# None, and removes __get__ (built-in functions do not bind as methods).
BuiltinFunction.typedef = TypeDef("builtin_function",**Function.typedef.rawdict)
BuiltinFunction.typedef.rawdict.update({
    '__new__': interp2app(BuiltinFunction.descr_method__new__.im_func),
    '__self__': GetSetProperty(always_none, cls=BuiltinFunction),
    '__repr__': interp2app(BuiltinFunction.descr_function_repr),
    })
del BuiltinFunction.typedef.rawdict['__get__']
# App-level 'traceback' objects, exposing the usual tb_* attributes plus
# pickling support.
PyTraceback.typedef = TypeDef("traceback",
    __reduce__ = interp2app(PyTraceback.descr__reduce__,
                            unwrap_spec=['self', ObjSpace]),
    __setstate__ = interp2app(PyTraceback.descr__setstate__,
                              unwrap_spec=['self', ObjSpace, W_Root]),
    tb_frame  = interp_attrproperty('frame', cls=PyTraceback),
    tb_lasti  = interp_attrproperty('lasti', cls=PyTraceback),
    tb_lineno = interp_attrproperty('lineno', cls=PyTraceback),
    tb_next   = interp_attrproperty('next', cls=PyTraceback),
    )

# App-level 'generator' objects (iterator protocol + gi_* attributes).
GeneratorIterator.typedef = TypeDef("generator",
    __reduce__ = interp2app(GeneratorIterator.descr__reduce__,
                            unwrap_spec=['self', ObjSpace]),
    next = interp2app(GeneratorIterator.descr_next,
                      descrmismatch='next'),
    __iter__ = interp2app(GeneratorIterator.descr__iter__,
                          descrmismatch='__iter__'),
    gi_running = interp_attrproperty('running', cls=GeneratorIterator),
    gi_frame = interp_attrproperty('frame', cls=GeneratorIterator),
    __weakref__ = make_weakref_descr(GeneratorIterator),
    )

# Closure cells: comparable for equality, unhashable, picklable.
Cell.typedef = TypeDef("cell",
    __eq__ = interp2app(Cell.descr__eq__,
                        unwrap_spec=['self', ObjSpace, W_Root]),
    __ne__ = descr_generic_ne,
    __hash__ = no_hash_descr,
    __reduce__ = interp2app(Cell.descr__reduce__,
                            unwrap_spec=['self', ObjSpace]),
    __setstate__ = interp2app(Cell.descr__setstate__,
                              unwrap_spec=['self', ObjSpace, W_Root]),
    )

Ellipsis.typedef = TypeDef("Ellipsis",
    __repr__ = interp2app(Ellipsis.descr__repr__),
    )

NotImplemented.typedef = TypeDef("NotImplemented",
    __repr__ = interp2app(NotImplemented.descr__repr__),
    )

SuspendedUnroller.typedef = TypeDef("SuspendedUnroller")

# Collect the typedefs of every interp-level class defined in this module.
interptypes = [ val.typedef for name,val in globals().items() if hasattr(val,'__bases__') and hasattr(val,'typedef') ]
| Python |
from pypy.interpreter.module import Module
from pypy.interpreter.function import Function, BuiltinFunction
from pypy.interpreter import gateway
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import W_Root
import os, sys
import inspect
class MixedModule(Module):
    """A module implemented partly at interp-level, partly at app-level.

    Subclasses provide two dicts, 'interpleveldefs' and 'appleveldefs',
    mapping attribute names to spec strings; attribute values are built
    lazily, on first access, by the loader functions derived from those
    specs (see buildloaders()).
    """
    NOT_RPYTHON_ATTRIBUTES = ['loaders']

    applevel_name = None            # optional override for the app-level name
    expose__file__attribute = True  # whether to synthesize a '__file__' entry

    def __init__(self, space, w_name):
        """ NOT_RPYTHON """
        Module.__init__(self, space, w_name)
        self.lazy = True
        self.__class__.buildloaders()

    def get_applevel_name(cls):
        """ NOT_RPYTHON """
        # Default to the last component of the implementing package name.
        if cls.applevel_name is not None:
            return cls.applevel_name
        else:
            pkgroot = cls.__module__
            return pkgroot.split('.')[-1]
    get_applevel_name = classmethod(get_applevel_name)

    def get(self, name):
        # Fetch an attribute, raising app-level AttributeError if missing.
        space = self.space
        w_value = self.getdictvalue_w(space, name)
        if w_value is None:
            raise OperationError(space.w_AttributeError, space.wrap(name))
        return w_value

    def call(self, name, *args_w):
        # Convenience: look up 'name' in this module and call it.
        w_builtin = self.get(name)
        return self.space.call_function(w_builtin, *args_w)

    def getdictvalue(self, space, w_name):
        w_value = space.finditem(self.w_dict, w_name)
        if self.lazy and w_value is None:
            # First access: run the registered loader, then cache the
            # result in the module dictionary under an interned key.
            name = space.str_w(w_name)
            w_name = space.new_interned_w_str(w_name)
            try:
                loader = self.loaders[name]
            except KeyError:
                return None
            else:
                #print "trying to load", name
                w_value = loader(space)
                #print "loaded", w_value
                # obscure
                func = space.interpclass_w(w_value)
                if type(func) is Function:
                    # Wrap a plain interp-level Function as a built-in
                    # function attached to this module; memoized on the
                    # function itself via '_builtinversion_'.
                    try:
                        bltin = func._builtinversion_
                    except AttributeError:
                        bltin = BuiltinFunction(func)
                        bltin.w_module = self.w_name
                        func._builtinversion_ = bltin
                        bltin.name = name
                    w_value = space.wrap(bltin)
            space.setitem(self.w_dict, w_name, w_value)
        return w_value

    def getdict(self):
        # Force every lazy loader, then hand back the populated dict.
        if self.lazy:
            space = self.space
            for name in self.loaders:
                w_value = self.get(name)
                space.setitem(self.w_dict, space.new_interned_str(name), w_value)
            self.lazy = False
        return self.w_dict

    def _freeze_(self):
        self.getdict()
        # hint for the annotator: Modules can hold state, so they are
        # not constant
        return False

    def buildloaders(cls):
        """ NOT_RPYTHON """
        if not hasattr(cls, 'loaders'):
            # build a constant dictionary out of
            # applevel/interplevel definitions
            cls.loaders = loaders = {}
            pkgroot = cls.__module__
            appname = cls.get_applevel_name()
            for name, spec in cls.interpleveldefs.items():
                loaders[name] = getinterpevalloader(pkgroot, spec)
            for name, spec in cls.appleveldefs.items():
                loaders[name] = getappfileloader(pkgroot, appname, spec)
            assert '__file__' not in loaders
            if cls.expose__file__attribute:
                loaders['__file__'] = cls.get__file__
            if '__doc__' not in loaders:
                loaders['__doc__'] = cls.get__doc__
    buildloaders = classmethod(buildloaders)

    def extra_interpdef(self, name, spec):
        # Install one extra interp-level definition after construction.
        cls = self.__class__
        pkgroot = cls.__module__
        loader = getinterpevalloader(pkgroot, spec)
        space = self.space
        w_obj = loader(space)
        space.setattr(space.wrap(self), space.wrap(name), w_obj)

    def get__file__(cls, space):
        """ NOT_RPYTHON.
        return the __file__ attribute of a MixedModule
        which is the root-directory for the various
        applevel and interplevel snippets that make
        up the module.
        """
        try:
            fname = cls._fname    # computed once, cached on the class
        except AttributeError:
            pkgroot = cls.__module__
            mod = __import__(pkgroot, None, None, ['__doc__'])
            fname = mod.__file__
            assert os.path.basename(fname).startswith('__init__.py')
            # make it clear that it's not really the interp-level module
            # at this path that we are seeing, but an app-level version of it
            fname = os.path.join(os.path.dirname(fname), '*.py')
            cls._fname = fname
        return space.wrap(fname)
    get__file__ = classmethod(get__file__)

    def get__doc__(cls, space):
        return space.wrap(cls.__doc__)
    get__doc__ = classmethod(get__doc__)
def getinterpevalloader(pkgroot, spec):
    """ NOT_RPYTHON

    Build a loader that evaluates the interp-level spec string in a
    namespace seeded with 'space', importing any name the eval turns
    out to need (first relative to pkgroot, then absolute).
    """
    def ifileloader(space):
        d = {'space' : space}
        # EVIL HACK (but it works, and this is not RPython :-)
        while 1:
            try:
                value = eval(spec, d)
            except NameError, ex:
                # Pull the missing name out of the NameError message and
                # try to satisfy it by importing, then re-run the eval.
                name = ex.args[0].split("'")[1]  # super-Evil
                if name in d:
                    raise   # propagate the NameError
                try:
                    d[name] = __import__(pkgroot+'.'+name, None, None, [name])
                except ImportError:
                    etype, evalue, etb = sys.exc_info()
                    try:
                        d[name] = __import__(name, None, None, [name])
                    except ImportError:
                        # didn't help, re-raise the original exception for
                        # clarity
                        raise etype, evalue, etb
            else:
                #print spec, "->", value
                # Plain interp-level functions get wrapped as built-ins;
                # W_Root subclasses resolve to their app-level type.
                if hasattr(value, 'func_code'):  # semi-evil
                    return space.wrap(gateway.interp2app(value))
                try:
                    is_type = issubclass(value, W_Root)  # pseudo-evil
                except TypeError:
                    is_type = False
                if is_type:
                    return space.gettypefor(value)
                W_Object = getattr(space, 'W_Object', ())    # for cpyobjspace
                assert isinstance(value, (W_Root, W_Object)), (
                    "interpleveldef %s.%s must return a wrapped object "
                    "(got %r instead)" % (pkgroot, spec, value))
                return value
    return ifileloader
# Cache of compiled app-level snippets, keyed by 'pkgroot.modname', so the
# source file is read and compiled only once per app-level module.
applevelcache = {}

def getappfileloader(pkgroot, appname, spec):
    """ NOT_RPYTHON

    Build a loader that fetches 'attrname' from the app-level module
    named by spec ('modname.attrname'), compiling the module's source
    on first use.
    """
    # hum, it's a bit more involved, because we usually
    # want the import at applevel
    modname, attrname = spec.split('.')
    impbase = pkgroot + '.' + modname
    try:
        app = applevelcache[impbase]
    except KeyError:
        import imp
        pkg = __import__(pkgroot, None, None, ['__doc__'])
        file, fn, (suffix, mode, typ) = imp.find_module(modname, pkg.__path__)
        assert typ == imp.PY_SOURCE
        source = file.read()
        file.close()
        # Report the .py filename even if find_module handed us a .pyc/.pyo.
        if fn.endswith('.pyc') or fn.endswith('.pyo'):
            fn = fn[:-1]
        app = gateway.applevel(source, filename=fn, modname=appname)
        applevelcache[impbase] = app

    def afileloader(space):
        return app.wget(space, attrname)
    return afileloader
# ____________________________________________________________
# Helper to test mixed modules on top of CPython
def testmodule(name, basepath='pypy.module'):
    """Helper to test mixed modules on top of CPython,
    running with the CPy Object Space.  The module should behave
    more or less as if it had been compiled, either with the
    pypy/bin/compilemodule.py tool, or within pypy-c.

    Try:  testmodule('_demo')

    Side effect: registers the resulting module in sys.modules under
    its app-level name.
    """
    import sys, new
    from pypy.objspace.cpy.objspace import CPyObjSpace
    space = CPyObjSpace()
    fullname = "%s.%s" % (basepath, name)
    Module = __import__(fullname,
                        None, None, ["Module"]).Module
    appname = Module.get_applevel_name()
    mod = Module(space, space.wrap(appname))
    # Build a real CPython module object and copy the (unwrapped)
    # contents of the mixed module's dict into it.
    res = new.module(appname)
    sys.modules[appname] = res
    moddict = space.unwrap(mod.getdict())
    res.__dict__.update(moddict)
    return res
def compilemodule(name, interactive=False):
    "Compile a PyPy module for CPython."
    # Thin forwarding wrapper; the real work lives in the rctypes tool.
    from pypy.rpython.rctypes.tool.compilemodule import compilemodule
    return compilemodule(name, interactive=interactive)
| Python |
"""
Function objects.
In PyPy there is no difference between built-in and user-defined function
objects; the difference lies in the code object found in their func_code
attribute.
"""
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import Wrappable
from pypy.interpreter.eval import Code
from pypy.interpreter.argument import Arguments, ArgumentsFromValuestack
class Function(Wrappable):
    """A function is a code object captured with some environment:
    an object space, a dictionary of globals, default arguments,
    and an arbitrary 'closure' passed to the code object."""

    def __init__(self, space, code, w_globals=None, defs_w=[], closure=None, forcename=None):
        # NOTE(review): 'defs_w=[]' is a shared mutable default; all code
        # here rebinds self.defs_w rather than mutating it in place, but
        # confirm no caller mutates the passed-in list.
        self.space = space
        self.name = forcename or code.co_name
        self.w_doc = None   # lazily read from code.getdocstring()
        self.code = code       # Code instance
        self.w_func_globals = w_globals  # the globals dictionary
        self.closure = closure   # normally, list of Cell instances or None
        self.defs_w = defs_w     # list of w_default's
        self.w_func_dict = None  # filled out below if needed
        self.w_module = None

    def __repr__(self):
        # return "function %s.%s" % (self.space, self.name)
        # maybe we want this shorter:
        return "<Function %s>" % self.name

    def call_args(self, args):
        return self.code.funcrun(self, args) # delegate activation to code

    def getcode(self):
        return self.code

    def funccall(self, *args_w): # speed hack
        # Dispatch small argument counts to the code object's specialized
        # fastcall_N entry points; a None result means "not supported,
        # fall back to the generic Arguments path".
        code = self.getcode() # hook for the jit
        if len(args_w) == 0:
            w_res = code.fastcall_0(self.space, self)
            if w_res is not None:
                return w_res
        elif len(args_w) == 1:
            w_res = code.fastcall_1(self.space, self, args_w[0])
            if w_res is not None:
                return w_res
        elif len(args_w) == 2:
            w_res = code.fastcall_2(self.space, self, args_w[0], args_w[1])
            if w_res is not None:
                return w_res
        elif len(args_w) == 3:
            w_res = code.fastcall_3(self.space, self, args_w[0],
                                    args_w[1], args_w[2])
            if w_res is not None:
                return w_res
        elif len(args_w) == 4:
            w_res = code.fastcall_4(self.space, self, args_w[0],
                                    args_w[1], args_w[2], args_w[3])
            if w_res is not None:
                return w_res
        return self.call_args(Arguments(self.space, list(args_w)))

    def funccall_valuestack(self, nargs, frame): # speed hack
        # Same fastcall dispatch as funccall(), but the arguments still
        # live on the frame's value stack (peekvalue(0) is the topmost,
        # i.e. the last argument).
        code = self.getcode() # hook for the jit
        if nargs == 0:
            w_res = code.fastcall_0(self.space, self)
            if w_res is not None:
                return w_res
        elif nargs == 1:
            w_res = code.fastcall_1(self.space, self, frame.peekvalue(0))
            if w_res is not None:
                return w_res
        elif nargs == 2:
            w_res = code.fastcall_2(self.space, self, frame.peekvalue(1),
                                    frame.peekvalue(0))
            if w_res is not None:
                return w_res
        elif nargs == 3:
            w_res = code.fastcall_3(self.space, self, frame.peekvalue(2),
                                    frame.peekvalue(1), frame.peekvalue(0))
            if w_res is not None:
                return w_res
        elif nargs == 4:
            w_res = code.fastcall_4(self.space, self, frame.peekvalue(3),
                                    frame.peekvalue(2), frame.peekvalue(1),
                                    frame.peekvalue(0))
            if w_res is not None:
                return w_res
        args = frame.make_arguments(nargs)
        try:
            return self.call_args(args)
        finally:
            # Break the reference from the Arguments object back to the
            # frame once the call is done.
            if isinstance(args, ArgumentsFromValuestack):
                args.frame = None

    def funccall_obj_valuestack(self, w_obj, nargs, frame): # speed hack
        # Like funccall_valuestack(), with an extra first argument w_obj
        # (the bound instance) prepended before the stack arguments.
        code = self.getcode() # hook for the jit
        if nargs == 0:
            w_res = code.fastcall_1(self.space, self, w_obj)
            if w_res is not None:
                return w_res
        elif nargs == 1:
            w_res = code.fastcall_2(self.space, self, w_obj, frame.peekvalue(0))
            if w_res is not None:
                return w_res
        elif nargs == 2:
            w_res = code.fastcall_3(self.space, self, w_obj, frame.peekvalue(1),
                                    frame.peekvalue(0))
            if w_res is not None:
                return w_res
        elif nargs == 3:
            w_res = code.fastcall_4(self.space, self, w_obj, frame.peekvalue(2),
                                    frame.peekvalue(1), frame.peekvalue(0))
            if w_res is not None:
                return w_res
        stkargs = frame.make_arguments(nargs)
        args = stkargs.prepend(w_obj)
        try:
            return self.call_args(args)
        finally:
            if isinstance(stkargs, ArgumentsFromValuestack):
                stkargs.frame = None

    def getdict(self):
        # func_dict is created on demand.
        if self.w_func_dict is None:
            self.w_func_dict = self.space.newdict()
        return self.w_func_dict

    def setdict(self, space, w_dict):
        if not space.is_true(space.isinstance( w_dict, space.w_dict )):
            raise OperationError( space.w_TypeError, space.wrap("setting function's dictionary to a non-dict") )
        self.w_func_dict = w_dict

    # unwrapping is done through unwrap_specs in typedef.py

    def descr_method__new__(space, w_subtype, w_code, w_globals,
                            w_name=None, w_argdefs=None, w_closure=None):
        # App-level 'function.__new__': validates the pieces and builds a
        # Function.  Closure length must match the code's free variables.
        code = space.interp_w(Code, w_code)
        if not space.is_true(space.isinstance(w_globals, space.w_dict)):
            raise OperationError(space.w_TypeError, space.wrap("expected dict"))
        if not space.is_w(w_name, space.w_None):
            name = space.str_w(w_name)
        else:
            name = None
        if not space.is_w(w_argdefs, space.w_None):
            defs_w = space.unpackiterable(w_argdefs)
        else:
            defs_w = []
        nfreevars = 0
        from pypy.interpreter.pycode import PyCode
        if isinstance(code, PyCode):
            nfreevars = len(code.co_freevars)
        if space.is_w(w_closure, space.w_None) and nfreevars == 0:
            closure = None
        elif not space.is_w(space.type(w_closure), space.w_tuple):
            raise OperationError(space.w_TypeError, space.wrap("invalid closure"))
        else:
            from pypy.interpreter.nestedscope import Cell
            closure_w = space.unpackiterable(w_closure)
            n = len(closure_w)
            if nfreevars == 0:
                raise OperationError(space.w_ValueError, space.wrap("no closure needed"))
            elif nfreevars != n:
                raise OperationError(space.w_ValueError, space.wrap("closure is wrong size"))
            closure = [space.interp_w(Cell, w_cell) for w_cell in closure_w]
        func = space.allocate_instance(Function, w_subtype)
        Function.__init__(func, space, code, w_globals, defs_w, closure, name)
        return space.wrap(func)

    def descr_function_call(self, __args__):
        return self.call_args(__args__)

    def descr_function_repr(self):
        return self.getrepr(self.space, 'function %s' % (self.name,))

    def descr_function__reduce__(self, space):
        # Pickle support: recreate via _pickle_support.func_new from the
        # full (name, doc, code, globals, closure, defaults, dict, module)
        # state tuple.
        from pypy.interpreter.mixedmodule import MixedModule
        w_mod = space.getbuiltinmodule('_pickle_support')
        mod = space.interp_w(MixedModule, w_mod)
        new_inst = mod.get('func_new')
        w = space.wrap
        if self.closure is None:
            w_closure = space.w_None
        else:
            w_closure = space.newtuple([w(cell) for cell in self.closure])
        nt = space.newtuple
        tup_base = []
        tup_state = [
            w(self.name),
            self.w_doc,
            w(self.code),
            self.w_func_globals,
            w_closure,
            nt(self.defs_w),
            self.w_func_dict,
            self.w_module,
        ]
        return nt([new_inst, nt(tup_base), nt(tup_state)])

    def descr_function__setstate__(self, space, w_args):
        # Inverse of __reduce__: restore all attributes from the state tuple.
        from pypy.interpreter.pycode import PyCode
        args_w = space.unpackiterable(w_args)
        (w_name, w_doc, w_code, w_func_globals, w_closure, w_defs_w,
         w_func_dict, w_module) = args_w
        self.space = space
        self.name = space.str_w(w_name)
        self.w_doc = w_doc
        self.code = space.interp_w(PyCode, w_code)
        self.w_func_globals = w_func_globals
        if w_closure is not space.w_None:
            from pypy.interpreter.nestedscope import Cell
            closure_w = space.unpackiterable(w_closure)
            self.closure = [space.interp_w(Cell, w_cell) for w_cell in closure_w]
        else:
            self.closure = None
        self.defs_w = space.unpackiterable(w_defs_w)
        self.w_func_dict = w_func_dict
        self.w_module = w_module

    # ---- getters/setters for the func_* attributes (see typedef) ----

    def fget_func_defaults(space, self):
        values_w = self.defs_w
        if not values_w:
            return space.w_None   # no defaults -> func_defaults is None
        return space.newtuple(values_w)

    def fset_func_defaults(space, self, w_defaults):
        if not space.is_true( space.isinstance( w_defaults, space.w_tuple ) ):
            raise OperationError( space.w_TypeError, space.wrap("func_defaults must be set to a tuple object") )
        self.defs_w = space.unpackiterable( w_defaults )

    def fdel_func_defaults(space, self):
        self.defs_w = []

    def fget_func_doc(space, self):
        if self.w_doc is None:
            self.w_doc = self.code.getdocstring(space)
        return self.w_doc

    def fset_func_doc(space, self, w_doc):
        self.w_doc = w_doc

    def fget_func_name(space, self):
        return space.wrap(self.name)

    def fset_func_name(space, self, w_name):
        try:
            self.name = space.str_w(w_name)
        except OperationError, e:
            # Translate the unwrapping failure into the CPython message.
            if e.match(space, space.w_TypeError):
                raise OperationError(space.w_TypeError,
                                     space.wrap("func_name must be set "
                                                "to a string object"))
            raise

    def fdel_func_doc(space, self):
        self.w_doc = space.w_None

    def fget___module__(space, self):
        # Lazily derived from the globals' '__name__' when not set.
        if self.w_module is None:
            if self.w_func_globals is not None and not space.is_w(self.w_func_globals, space.w_None):
                self.w_module = space.call_method( self.w_func_globals, "get", space.wrap("__name__") )
            else:
                self.w_module = space.w_None
        return self.w_module

    def fset___module__(space, self, w_module):
        self.w_module = w_module

    def fdel___module__(space, self):
        self.w_module = space.w_None

    def fget_func_code(space, self):
        return space.wrap(self.code)

    def fset_func_code(space, self, w_code):
        # The replacement code object must expect the same closure size.
        from pypy.interpreter.pycode import PyCode
        code = space.interp_w(Code, w_code)
        closure_len = 0
        if self.closure:
            closure_len = len(self.closure)
        if isinstance(code, PyCode) and closure_len != len(code.co_freevars):
            raise OperationError(space.w_ValueError, space.wrap("%s() requires a code object with %s free vars, not %s " % (self.name, closure_len, len(code.co_freevars))))
        self.code = code

    def fget_func_closure(space, self):
        if self.closure is not None:
            w_res = space.newtuple( [ space.wrap(i) for i in self.closure ] )
        else:
            w_res = space.w_None
        return w_res
def descr_function_get(space, w_function, w_obj, w_cls=None):
    """functionobject.__get__(obj[, type]) -> method"""
    # This is not defined as a method on Function because it is generally
    # useful logic: w_function can be any callable.  Method reuses it too.
    # An *unbound* method is produced only when no instance is given, a
    # real class is given, and that class is not NoneType.
    wants_unbound = (not space.is_w(w_cls, space.w_None)
                     and space.is_w(w_obj, space.w_None)
                     and not space.is_w(w_cls, space.type(space.w_None)))
    if wants_unbound:
        w_instance = None
    else:
        w_instance = w_obj
    return space.wrap(Method(space, w_function, w_instance, w_cls))
class Method(Wrappable):
    """A method is a function bound to a specific instance or class."""

    def __init__(self, space, w_function, w_instance, w_class):
        self.space = space
        self.w_function = w_function
        self.w_instance = w_instance   # or None
        self.w_class = w_class         # possibly space.w_None

    def descr_method__new__(space, w_subtype, w_function, w_instance, w_class=None):
        # App-level 'method.__new__'; a wrapped None instance means unbound.
        if space.is_w( w_instance, space.w_None ):
            w_instance = None
        method = space.allocate_instance(Method, w_subtype)
        Method.__init__(method, space, w_function, w_instance, w_class)
        return space.wrap(method)

    def __repr__(self):
        if self.w_instance:
            pre = "bound"
        else:
            pre = "unbound"
        return "%s method %s" % (pre, self.w_function.getname(self.space, '?'))

    def call_args(self, args):
        space = self.space
        if self.w_instance is not None:
            # bound method
            args = args.prepend(self.w_instance)
        else:
            # unbound method: the first positional argument must be an
            # instance of self.w_class, else raise the CPython TypeError.
            w_firstarg = args.firstarg()
            if w_firstarg is not None and space.is_true(
                    space.abstract_isinstance(w_firstarg, self.w_class)):
                pass  # ok
            else:
                myname = self.getname(space,"")
                clsdescr = self.w_class.getname(space,"")
                if clsdescr:
                    clsdescr+=" "
                if w_firstarg is None:
                    instdescr = "nothing"
                else:
                    instname = space.abstract_getclass(w_firstarg).getname(space,"")
                    if instname:
                        instname += " "
                    instdescr = "%sinstance" %instname
                msg = ("unbound method %s() must be called with %s"
                       "instance as first argument (got %s instead)") % (myname, clsdescr, instdescr)
                raise OperationError(space.w_TypeError,
                                     space.wrap(msg))
        return space.call_args(self.w_function, args)

    def descr_method_get(self, w_obj, w_cls=None):
        space = self.space
        if self.w_instance is not None:
            return space.wrap(self)    # already bound
        else:
            # only allow binding to a more specific class than before
            if (w_cls is not None and
                    not space.is_w(w_cls, space.w_None) and
                    not space.is_true(space.abstract_issubclass(w_cls, self.w_class))):
                return space.wrap(self)    # subclass test failed
            else:
                return descr_function_get(space, self.w_function, w_obj, w_cls)

    def descr_method_call(self, __args__):
        return self.call_args(__args__)

    def descr_method_repr(self):
        space = self.space
        name = self.w_function.getname(self.space, '?')
        # XXX do we handle all cases sanely here?
        if space.is_w(self.w_class, space.w_None):
            w_class = space.type(self.w_instance)
        else:
            w_class = self.w_class
        typename = w_class.getname(self.space, '?')
        if self.w_instance is None:
            s = "<unbound method %s.%s>" % (typename, name)
            return space.wrap(s)
        else:
            objrepr = space.str_w(space.repr(self.w_instance))
            info = 'bound method %s.%s of %s' % (typename, name, objrepr)
            # info = "method %s of %s object" % (name, typename)
            return self.w_instance.getrepr(self.space, info)

    def descr_method_getattribute(self, w_attr):
        space = self.space
        # '__doc__' is always taken from the underlying function; other
        # attributes are looked up on the method first.
        if space.str_w(w_attr) != '__doc__':
            try:
                return space.call_method(space.w_object, '__getattribute__',
                                         space.wrap(self), w_attr)
            except OperationError, e:
                if not e.match(space, space.w_AttributeError):
                    raise
        # fall-back to the attribute of the underlying 'im_func'
        return space.getattr(self.w_function, w_attr)

    def descr_method_eq(self, w_other):
        # Two methods are equal when their instances are identical (or
        # both absent) and their functions compare equal.
        space = self.space
        other = space.interpclass_w(w_other)
        if not isinstance(other, Method):
            return space.w_False
        if self.w_instance is None:
            if other.w_instance is not None:
                return space.w_False
        else:
            if other.w_instance is None:
                return space.w_False
            if not space.is_w(self.w_instance, other.w_instance):
                return space.w_False
        return space.eq(self.w_function, other.w_function)

    def descr_method_hash(self):
        space = self.space
        w_result = space.hash(self.w_function)
        if self.w_instance is not None:
            w_result = space.xor(w_result, space.hash(self.w_instance))
        return w_result

    def descr_method__reduce__(self, space):
        # Pickle support; built-in methods are reconstructed by name via
        # 'builtin_method_new', others via 'method_new' from their parts.
        from pypy.interpreter.mixedmodule import MixedModule
        from pypy.interpreter.gateway import BuiltinCode
        w_mod = space.getbuiltinmodule('_pickle_support')
        mod = space.interp_w(MixedModule, w_mod)
        new_inst = mod.get('method_new')
        w = space.wrap
        w_instance = self.w_instance or space.w_None
        function = space.interpclass_w(self.w_function)
        if isinstance(function, Function) and isinstance(function.code, BuiltinCode):
            new_inst = mod.get('builtin_method_new')
            if space.is_w(w_instance, space.w_None):
                tup = [self.w_class, space.wrap(function.name)]
            else:
                tup = [w_instance, space.wrap(function.name)]
        elif space.is_w( self.w_class, space.w_None ):
            tup = [self.w_function, w_instance]
        else:
            tup = [self.w_function, w_instance, self.w_class]
        return space.newtuple([new_inst, space.newtuple(tup)])
class StaticMethod(Wrappable):
    """Interp-level static method wrapper.

    Binding via __get__ simply hands back the wrapped callable, ignoring
    the instance and class.  (A separate app-level 'staticmethod' class
    also exists currently; this one only serves __new__ methods.)
    """

    def __init__(self, w_function):
        self.w_function = w_function

    def descr_staticmethod_get(self, w_obj, w_cls=None):
        """staticmethod(x).__get__(obj[, type]) -> x"""
        # Descriptor arguments are deliberately unused.
        return self.w_function
class BuiltinFunction(Function):
    """A Function presented at app-level as a built-in function: same
    state, but a distinct typedef (no __get__, '<built-in function ...>'
    repr)."""

    def __init__(self, func):
        # Copy every piece of state from an existing Function.
        assert isinstance(func, Function)
        Function.__init__(self, func.space, func.code, func.w_func_globals,
                          func.defs_w, func.closure, func.name)
        self.w_doc = func.w_doc
        self.w_func_dict = func.w_func_dict
        self.w_module = func.w_module

    def descr_method__new__(space, w_subtype, w_func):
        # App-level constructor: wrap an existing function object.
        func = space.interp_w(Function, w_func)
        bltin = space.allocate_instance(BuiltinFunction, w_subtype)
        BuiltinFunction.__init__(bltin, func)
        return space.wrap(bltin)

    def descr_function_repr(self):
        return self.space.wrap('<built-in function %s>' % (self.name,))
| Python |
"""
General classes for bytecode compilers.
Compiler instances are stored into 'space.getexecutioncontext().compiler'.
"""
from codeop import PyCF_DONT_IMPLY_DEDENT
from pypy.interpreter.error import OperationError
class AbstractCompiler:
    """Abstract base class for a bytecode compiler."""

    # The idea is to grow more methods here over the time,
    # e.g. to handle .pyc files in various ways if we have multiple compilers.

    def __init__(self, space):
        self.space = space
        self.w_compile_hook = space.w_None   # optional app-level AST hook

    def compile(self, source, filename, mode, flags):
        """Compile and return an pypy.interpreter.eval.Code instance."""
        raise NotImplementedError

    def getcodeflags(self, code):
        """Return the __future__ compiler flags that were used to compile
        the given code object."""
        return 0

    def compile_command(self, source, filename, mode, flags):
        """Same as compile(), but tries to compile a possibly partial
        interactive input.  If more input is needed, it returns None.
        """
        # Hackish default implementation based on the stdlib 'codeop' module.
        # See comments over there.
        space = self.space
        flags |= PyCF_DONT_IMPLY_DEDENT
        # Check for source consisting of only blank lines and comments
        if mode != "eval":
            in_comment = False
            for c in source:
                if c in ' \t\f\v':   # spaces
                    pass
                elif c == '#':
                    in_comment = True
                elif c in '\n\r':
                    in_comment = False
                elif not in_comment:
                    break   # non-whitespace, non-comment character
            else:
                source = "pass"      # Replace it with a 'pass' statement
        # codeop trick: if the source fails but source+'\n' succeeds, or
        # source+'\n' and source+'\n\n' fail *differently*, assume the
        # input is merely incomplete and return None to ask for more.
        try:
            code = self.compile(source, filename, mode, flags)
            return code   # success
        except OperationError, err:
            if not err.match(space, space.w_SyntaxError):
                raise

        try:
            self.compile(source + "\n", filename, mode, flags)
            return None   # expect more
        except OperationError, err1:
            if not err1.match(space, space.w_SyntaxError):
                raise

        try:
            self.compile(source + "\n\n", filename, mode, flags)
            raise     # uh? no error with \n\n.  re-raise the previous error
        except OperationError, err2:
            if not err2.match(space, space.w_SyntaxError):
                raise

        if space.eq_w(err1.w_value, err2.w_value):
            raise     # twice the same error, re-raise
        return None   # two different errors, expect more
# ____________________________________________________________
# faked compiler
import warnings

from pypy.tool import stdlib___future__
# Aggregate every __future__ feature's compiler flag into one mask, and
# remember each feature's individual flag by name.
compiler_flags = 0
compiler_features = {}
for fname in stdlib___future__.all_feature_names:
    flag = getattr(stdlib___future__, fname).compiler_flag
    compiler_flags |= flag
    compiler_features[fname] = flag
# The full set of flags compile() accepts: all features plus the codeop flag.
allowed_flags = compiler_flags | PyCF_DONT_IMPLY_DEDENT
def get_flag_names(space, flags):
    """Map a compiler-flag bitmask to the list of __future__ feature names.

    Raises app-level ValueError if 'flags' contains any bit outside
    'allowed_flags'.
    """
    if flags & ~allowed_flags:
        raise OperationError(space.w_ValueError,
                             space.wrap("compile(): unrecognized flags"))
    return [name for name, value in compiler_features.items()
            if flags & value]
class PyCodeCompiler(AbstractCompiler):
    """Base class for compilers producing PyCode objects."""

    def getcodeflags(self, code):
        # Only the __future__-related bits of co_flags are reported.
        from pypy.interpreter.pycode import PyCode
        if isinstance(code, PyCode):
            return code.co_flags & compiler_flags
        else:
            return 0
class CPythonCompiler(PyCodeCompiler):
    """Faked implementation of a compiler, using the underlying compile()."""

    def compile(self, source, filename, mode, flags):
        flags |= stdlib___future__.generators.compiler_flag   # always on (2.2 compat)
        space = self.space
        try:
            # Temporarily redirect warnings.warn_explicit so that
            # SyntaxWarnings reach the app-level warnings machinery.
            old = self.setup_warn_explicit(warnings)
            try:
                c = compile(source, filename, mode, flags, True)
            finally:
                self.restore_warn_explicit(warnings, old)
        # It would be nice to propagate all exceptions to app level,
        # but here we only propagate the 'usual' ones, until we figure
        # out how to do it generically.
        except SyntaxError,e:
            w_synerr = space.newtuple([space.wrap(e.msg),
                                       space.newtuple([space.wrap(e.filename),
                                                       space.wrap(e.lineno),
                                                       space.wrap(e.offset),
                                                       space.wrap(e.text)])])
            raise OperationError(space.w_SyntaxError, w_synerr)
        except UnicodeDecodeError, e:
            # TODO use a custom UnicodeError
            raise OperationError(space.w_UnicodeDecodeError, space.newtuple([
                space.wrap(e.encoding), space.wrap(e.object),
                space.wrap(e.start),
                space.wrap(e.end), space.wrap(e.reason)]))
        except ValueError, e:
            raise OperationError(space.w_ValueError, space.wrap(str(e)))
        except TypeError, e:
            raise OperationError(space.w_TypeError, space.wrap(str(e)))
        from pypy.interpreter.pycode import PyCode
        return PyCode._from_code(space, c)
    compile._annspecialcase_ = "override:cpy_compile"

    def _warn_explicit(self, message, category, filename, lineno,
                       module=None, registry=None):
        # Replacement for warnings.warn_explicit: forward SyntaxWarnings
        # to the app-level warnings module; if that raises SyntaxWarning
        # (warnings-as-errors), convert it into a SyntaxError.
        if hasattr(category, '__bases__') and \
           issubclass(category, SyntaxWarning):
            assert isinstance(message, str)
            space = self.space
            w_mod = space.sys.getmodule('warnings')
            if w_mod is not None:
                w_dict = w_mod.getdict()
                w_reg = space.call_method(w_dict, 'setdefault',
                                          space.wrap("__warningregistry__"),
                                          space.newdict())
                try:
                    space.call_method(w_mod, 'warn_explicit',
                                      space.wrap(message),
                                      space.w_SyntaxWarning,
                                      space.wrap(filename),
                                      space.wrap(lineno),
                                      space.w_None,
                                      space.w_None)
                except OperationError, e:
                    if e.match(space, space.w_SyntaxWarning):
                        raise OperationError(
                                space.w_SyntaxError,
                                space.wrap(message))
                    raise

    def setup_warn_explicit(self, warnings):
        """
        this is a hack until we have our own parsing/compiling
        in place: we bridge certain warnings to the applevel
        warnings module to let it decide what to do with
        a syntax warning ...
        """
        # there is a hack to make the flow space happy:
        # 'warnings' should not look like a Constant
        old_warn_explicit = warnings.warn_explicit
        warnings.warn_explicit = self._warn_explicit
        return old_warn_explicit

    def restore_warn_explicit(self, warnings, old_warn_explicit):
        warnings.warn_explicit = old_warn_explicit
########
class PythonAstCompiler(PyCodeCompiler):
    """Uses the stdlib's python implementation of compiler

    XXX: This class should override the baseclass implementation of
         compile_command() in order to optimize it, especially in case
         of incomplete inputs (e.g. we shouldn't re-compile from scratch
         the whole source after having only added a new '\n')
    """
    def __init__(self, space):
        from pyparser.pythonparse import PYTHON_PARSER
        PyCodeCompiler.__init__(self, space)
        self.parser = PYTHON_PARSER
        self.additional_rules = {}   # extra grammar rules (insert_grammar_rule)

    def compile(self, source, filename, mode, flags):
        from pyparser.error import SyntaxError
        from pypy.interpreter import astcompiler
        from pypy.interpreter.astcompiler.pycodegen import ModuleCodeGenerator
        from pypy.interpreter.astcompiler.pycodegen import InteractiveCodeGenerator
        from pypy.interpreter.astcompiler.pycodegen import ExpressionCodeGenerator
        from pypy.interpreter.astcompiler.ast import Node
        from pyparser.astbuilder import AstBuilder
        from pypy.interpreter.pycode import PyCode
        from pypy.interpreter.function import Function

        flags |= stdlib___future__.generators.compiler_flag   # always on (2.2 compat)
        space = self.space
        # Phase 1: parse the source into an AST.
        try:
            builder = AstBuilder(self.parser, space=space)
            for rulename, buildfunc in self.additional_rules.iteritems():
                assert isinstance(buildfunc, Function)
                builder.user_build_rules[rulename] = buildfunc
            self.parser.parse_source(source, mode, builder, flags)
            ast_tree = builder.rule_stack[-1]
            encoding = builder.source_encoding
        except SyntaxError, e:
            raise OperationError(space.w_SyntaxError,
                                 e.wrap_info(space, filename))

        # Optional app-level hook: may transform the AST before codegen.
        # The hook is uninstalled if it raises, so a broken hook cannot
        # wedge the compiler.
        if not space.is_w(self.w_compile_hook, space.w_None):
            try:
                w_ast_tree = space.call_function(self.w_compile_hook,
                                                 space.wrap(ast_tree),
                                                 space.wrap(encoding),
                                                 space.wrap(filename))
                ast_tree = space.interp_w(Node, w_ast_tree)
            except OperationError:
                self.w_compile_hook = space.w_None
                raise
        # Phase 2: generate a PyCode object from the AST.
        try:
            astcompiler.misc.set_filename(filename, ast_tree)
            flag_names = get_flag_names(space, flags)
            if mode == 'exec':
                codegenerator = ModuleCodeGenerator(space, ast_tree, flag_names)
            elif mode == 'single':
                codegenerator = InteractiveCodeGenerator(space, ast_tree, flag_names)
            else: # mode == 'eval':
                codegenerator = ExpressionCodeGenerator(space, ast_tree, flag_names)
            c = codegenerator.getCode()
        except SyntaxError, e:
            raise OperationError(space.w_SyntaxError,
                                 e.wrap_info(space, filename))
        except (ValueError, TypeError), e:
            raise OperationError(space.w_SystemError, space.wrap(str(e)))
        assert isinstance(c,PyCode)
        return c
def install_compiler_hook(space, w_callable):
    """Install an app-level hook called as hook(ast, encoding, filename)
    by PythonAstCompiler.compile() before code generation."""
    # if not space.get( w_callable ):
    #     raise OperationError( space.w_TypeError( space.wrap( "must have a callable" ) )
    space.default_compiler.w_compile_hook = w_callable
def insert_grammar_rule(space, w_rule, w_buildfuncs):
    """inserts new grammar rules to the default compiler"""
    from pypy.interpreter import function
    rule = space.str_w(w_rule)
    #buildfuncs_w = w_buildfuncs.content
    buildfuncs = {}
    #for w_name, w_func in buildfuncs_w.iteritems():
    #    buildfuncs[space.str_w(w_name)] = space.unwrap(w_func)
    # Walk the app-level mapping manually via the iteration protocol,
    # collecting rule-name -> interp-level Function, until StopIteration.
    w_iter = space.iter(w_buildfuncs)
    while 1:
        try:
            w_key = space.next(w_iter)
            w_func = space.getitem(w_buildfuncs, w_key)
            buildfuncs[space.str_w(w_key)] = space.interp_w(function.Function, w_func)
        except OperationError, e:
            if not e.match(space, space.w_StopIteration):
                raise
            break
    space.default_compiler.additional_rules = buildfuncs
    space.default_compiler.parser.insert_rule(rule)

# XXX cyclic import
#from pypy.interpreter.baseobjspace import ObjSpace
#insert_grammar_rule.unwrap_spec = [ObjSpace, str, dict]
| Python |
from pypy.interpreter.error import OperationError
from pypy.interpreter.baseobjspace import Wrappable
class GeneratorIterator(Wrappable):
    "An iterator created by a generator."

    def __init__(self, frame):
        self.space = frame.space
        self.frame = frame
        # True while descr_next() is resuming the frame; used to detect
        # re-entrant calls ("generator already executing").
        self.running = False

    def descr__reduce__(self, space):
        """Pickling support: returns (generator_new, (frame, running))."""
        from pypy.interpreter.mixedmodule import MixedModule
        w_mod = space.getbuiltinmodule('_pickle_support')
        mod = space.interp_w(MixedModule, w_mod)
        new_inst = mod.get('generator_new')
        w = space.wrap
        tup = [
            w(self.frame),
            w(self.running),
            ]
        return space.newtuple([new_inst, space.newtuple(tup)])

    def descr__iter__(self):
        """x.__iter__() <==> iter(x)"""
        return self.space.wrap(self)

    def descr_next(self):
        """x.next() -> the next value, or raise StopIteration"""
        space = self.space
        # re-entry guard: the generator must not already be running
        if self.running:
            raise OperationError(space.w_ValueError,
                                 space.wrap('generator already executing'))
        # a generator that already returned keeps raising StopIteration
        if self.frame.frame_finished_execution:
            raise OperationError(space.w_StopIteration, space.w_None)
        self.running = True
        try:
            try:
                w_result = self.frame.execute_generator_frame(space.w_None)
            except OperationError:
                # errors finish a frame
                self.frame.frame_finished_execution = True
                raise
            # if the frame is now marked as finished, it was RETURNed from
            if self.frame.frame_finished_execution:
                raise OperationError(space.w_StopIteration, space.w_None)
            else:
                return w_result # YIELDed
        finally:
            # drop the back-link and clear the guard even on error paths
            self.frame.f_back = None
            self.running = False
| Python |
""" PyFrame class implementation with the interpreter main loop.
"""
from pypy.tool.pairtype import extendabletype
from pypy.interpreter import eval, baseobjspace, pycode
from pypy.interpreter.argument import Arguments, ArgumentsFromValuestack
from pypy.interpreter.error import OperationError
from pypy.interpreter import pytraceback
import opcode
from pypy.rlib.objectmodel import we_are_translated, instantiate
from pypy.rlib.jit import we_are_jitted, hint
from pypy.rlib import rstack # for resume points
# Define some opcodes used
# (copies the numeric values of the named opcodes from the stdlib
# 'opcode' module into this module's globals, so e.g. DUP_TOP can be
# compared against bytecodes directly)
g = globals()
for op in '''DUP_TOP POP_TOP SETUP_LOOP SETUP_EXCEPT SETUP_FINALLY
POP_BLOCK END_FINALLY'''.split():
    g[op] = opcode.opmap[op]
HAVE_ARGUMENT = opcode.HAVE_ARGUMENT
class PyFrame(eval.Frame):
    """Represents a frame for a regular Python function
    that needs to be interpreted.

    See also pyopcode.PyStandardFrame and pynestedscope.PyNestedScopeFrame.

    Public fields:
     * 'space' is the object space this frame is running in
     * 'code' is the PyCode object this frame runs
     * 'w_locals' is the locals dictionary to use
     * 'w_globals' is the attached globals dictionary
     * 'builtin' is the attached built-in module
     * 'valuestack_w', 'blockstack', control the interpretation
    """

    __metaclass__ = extendabletype

    # Class-level defaults; instances override these as execution proceeds.
    frame_finished_execution = False
    last_instr = -1          # offset of the last executed bytecode (-1: none yet)
    last_exception = None    # OperationError currently being handled, or None
    f_back = None            # calling frame, or None
    w_f_trace = None         # per-frame trace function (sys.settrace protocol)
    # For tracing
    instr_lb = 0             # cached [lb, ub) bytecode range of the current line
    instr_ub = -1
    instr_prev = -1          # last_instr value at the previous trace event

    def __init__(self, space, code, w_globals, closure):
        """Set up the value stack, block stack and fast locals for 'code'."""
        self = hint(self, access_directly=True)   # JIT hint
        assert isinstance(code, pycode.PyCode)
        self.pycode = code
        eval.Frame.__init__(self, space, w_globals, code.co_nlocals)
        self.valuestack_w = [None] * code.co_stacksize
        self.valuestackdepth = 0
        self.blockstack = []
        if space.config.objspace.honor__builtins__:
            self.builtin = space.builtin.pick_builtin(w_globals)
        # regular functions always have CO_OPTIMIZED and CO_NEWLOCALS.
        # class bodies only have CO_NEWLOCALS.
        self.initialize_frame_scopes(closure)
        self.fastlocals_w = [None]*self.numlocals
        self.f_lineno = self.pycode.co_firstlineno

    def get_builtin(self):
        # Per-frame builtin module when honor__builtins__ is configured,
        # otherwise the space-wide one.
        if self.space.config.objspace.honor__builtins__:
            return self.builtin
        else:
            return self.space.builtin

    def initialize_frame_scopes(self, closure):
        # regular functions always have CO_OPTIMIZED and CO_NEWLOCALS.
        # class bodies only have CO_NEWLOCALS.
        # CO_NEWLOCALS: make a locals dict unless optimized is also set
        # CO_OPTIMIZED: no locals dict needed at all
        # NB: this method is overridden in nestedscope.py
        flags = self.pycode.co_flags
        if flags & pycode.CO_OPTIMIZED:
            return
        if flags & pycode.CO_NEWLOCALS:
            self.w_locals = self.space.newdict()
        else:
            assert self.w_globals is not None
            self.w_locals = self.w_globals

    def run(self):
        """Start this frame's execution."""
        # Generator code objects return a GeneratorIterator instead of
        # executing immediately.
        if self.pycode.co_flags & pycode.CO_GENERATOR:
            from pypy.interpreter.generator import GeneratorIterator
            return self.space.wrap(GeneratorIterator(self))
        else:
            return self.execute_frame()

    def execute_generator_frame(self, w_inputvalue):
        # opcode semantic change in CPython 2.5: we must pass an input value
        # when resuming a generator, which goes into the value stack.
        # (it's always w_None for now - not implemented in generator.py)
        if self.pycode.magic >= 0xa0df294 and self.last_instr != -1:
            self.pushvalue(w_inputvalue)
        return self.execute_frame()

    def execute_frame(self):
        """Execute this frame. Main entry point to the interpreter."""
        executioncontext = self.space.getexecutioncontext()
        executioncontext.enter(self)
        try:
            executioncontext.call_trace(self)
            # Execution starts just after the last_instr. Initially,
            # last_instr is -1. After a generator suspends it points to
            # the YIELD_VALUE instruction.
            next_instr = self.last_instr + 1
            w_exitvalue = self.dispatch(self.pycode, next_instr,
                                        executioncontext)
            rstack.resume_point("execute_frame", self, executioncontext, returns=w_exitvalue)
            executioncontext.return_trace(self, w_exitvalue)
            # on exit, we try to release self.last_exception -- breaks an
            # obvious reference cycle, so it helps refcounting implementations
            self.last_exception = None
        finally:
            executioncontext.leave(self)
        return w_exitvalue
    execute_frame.insert_stack_check_here = True

    # stack manipulation helpers
    def pushvalue(self, w_object):
        depth = self.valuestackdepth
        self.valuestack_w[depth] = w_object
        self.valuestackdepth = depth + 1

    def popvalue(self):
        depth = self.valuestackdepth - 1
        assert depth >= 0, "pop from empty value stack"
        w_object = self.valuestack_w[depth]
        self.valuestack_w[depth] = None   # release the reference eagerly
        self.valuestackdepth = depth
        return w_object

    def popstrdictvalues(self, n):
        # Pop n (key, value) pairs into an unwrapped-str-keyed dict.
        # The countdown loop shape with hint() keeps n concrete for the JIT.
        dic_w = {}
        while True:
            n -= 1
            if n < 0:
                break
            hint(n, concrete=True)
            w_value = self.popvalue()
            w_key = self.popvalue()
            key = self.space.str_w(w_key)
            dic_w[key] = w_value
        return dic_w

    def popvalues(self, n):
        # Pop the top n values; values_w[0] is the deepest of them.
        values_w = [None] * n
        while True:
            n -= 1
            if n < 0:
                break
            hint(n, concrete=True)
            values_w[n] = self.popvalue()
        return values_w

    def peekvalues(self, n):
        # Like popvalues() but leaves the stack untouched.
        values_w = [None] * n
        base = self.valuestackdepth - n
        assert base >= 0
        while True:
            n -= 1
            if n < 0:
                break
            hint(n, concrete=True)
            values_w[n] = self.valuestack_w[base+n]
        return values_w

    def dropvalues(self, n):
        # Discard the top n values, clearing the slots.
        finaldepth = self.valuestackdepth - n
        assert finaldepth >= 0, "stack underflow in dropvalues()"
        while True:
            n -= 1
            if n < 0:
                break
            hint(n, concrete=True)
            self.valuestack_w[finaldepth+n] = None
        self.valuestackdepth = finaldepth

    def pushrevvalues(self, n, values_w): # n should be len(values_w)
        # Push values_w in reverse order (values_w[0] ends up on top... of
        # the pushed run's bottom: last pushed is values_w[0]).
        while True:
            n -= 1
            if n < 0:
                break
            hint(n, concrete=True)
            self.pushvalue(values_w[n])

    def dupvalues(self, n):
        # Duplicate the top n values in place (stack grows by n).
        delta = n-1
        while True:
            n -= 1
            if n < 0:
                break
            hint(n, concrete=True)
            w_value = self.peekvalue(delta)
            self.pushvalue(w_value)

    def peekvalue(self, index_from_top=0):
        # depth + ~i == depth - i - 1: index 0 is the top of the stack
        index = self.valuestackdepth + ~index_from_top
        assert index >= 0, "peek past the bottom of the stack"
        return self.valuestack_w[index]

    def settopvalue(self, w_object, index_from_top=0):
        index = self.valuestackdepth + ~index_from_top
        assert index >= 0, "settop past the bottom of the stack"
        self.valuestack_w[index] = w_object

    def dropvaluesuntil(self, finaldepth):
        # Clear stack slots down to 'finaldepth'.
        depth = self.valuestackdepth - 1
        while depth >= finaldepth:
            self.valuestack_w[depth] = None
            depth -= 1
        self.valuestackdepth = finaldepth

    def savevaluestack(self):
        # Snapshot of the live part of the value stack.
        return self.valuestack_w[:self.valuestackdepth]

    def restorevaluestack(self, items_w):
        # Inverse of savevaluestack().
        assert None not in items_w
        self.valuestack_w[:len(items_w)] = items_w
        self.dropvaluesuntil(len(items_w))

    def make_arguments(self, nargs):
        # When jitted, materialize the arguments eagerly; otherwise use the
        # lazy view over the value stack.
        if we_are_jitted():
            return Arguments(self.space, self.peekvalues(nargs))
        else:
            return ArgumentsFromValuestack(self.space, self, nargs)

    def descr__reduce__(self, space):
        """Pickling support: capture the full interpreter state of the frame."""
        from pypy.interpreter.mixedmodule import MixedModule
        from pypy.module._pickle_support import maker # helper fns
        w_mod = space.getbuiltinmodule('_pickle_support')
        mod = space.interp_w(MixedModule, w_mod)
        new_inst = mod.get('frame_new')
        w = space.wrap
        nt = space.newtuple
        cells = self._getcells()
        if cells is None:
            w_cells = space.w_None
        else:
            w_cells = space.newlist([space.wrap(cell) for cell in cells])
        if self.w_f_trace is None:
            f_lineno = self.get_last_lineno()
        else:
            f_lineno = self.f_lineno
        values_w = self.valuestack_w[0:self.valuestackdepth]
        w_valuestack = maker.slp_into_tuple_with_nulls(space, values_w)
        w_blockstack = nt([block._get_state_(space) for block in self.blockstack])
        w_fastlocals = maker.slp_into_tuple_with_nulls(space, self.fastlocals_w)
        tup_base = [
            w(self.pycode),
            ]
        if self.last_exception is None:
            w_exc_value = space.w_None
            w_tb = space.w_None
        else:
            w_exc_value = self.last_exception.w_value
            w_tb = w(self.last_exception.application_traceback)
        # NOTE: descr__setstate__ unpacks exactly this 18-element layout.
        tup_state = [
            w(self.f_back),
            w(self.get_builtin()),
            w(self.pycode),
            w_valuestack,
            w_blockstack,
            w_exc_value, # last_exception
            w_tb,        #
            self.w_globals,
            w(self.last_instr),
            w(self.frame_finished_execution),
            w(f_lineno),
            w_fastlocals,
            space.w_None,           #XXX placeholder for f_locals
            #f_restricted requires no additional data!
            space.w_None, ## self.w_f_trace,  ignore for now
            w(self.instr_lb), #do we need these three (that are for tracing)
            w(self.instr_ub),
            w(self.instr_prev),
            w_cells,
            ]
        return nt([new_inst, nt(tup_base), nt(tup_state)])

    def descr__setstate__(self, space, w_args):
        """Unpickling support: rebuild the frame from descr__reduce__'s state."""
        from pypy.module._pickle_support import maker # helper fns
        from pypy.interpreter.pycode import PyCode
        from pypy.interpreter.module import Module
        args_w = space.unpackiterable(w_args)
        w_f_back, w_builtin, w_pycode, w_valuestack, w_blockstack, w_exc_value, w_tb,\
            w_globals, w_last_instr, w_finished, w_f_lineno, w_fastlocals, w_f_locals, \
            w_f_trace, w_instr_lb, w_instr_ub, w_instr_prev, w_cells = args_w
        new_frame = self
        pycode = space.interp_w(PyCode, w_pycode)
        if space.is_w(w_cells, space.w_None):
            closure = None
            cellvars = []
        else:
            # split the pickled cells back into cellvars and the closure
            from pypy.interpreter.nestedscope import Cell
            cells_w = space.unpackiterable(w_cells)
            cells = [space.interp_w(Cell, w_cell) for w_cell in cells_w]
            ncellvars = len(pycode.co_cellvars)
            cellvars = cells[:ncellvars]
            closure = cells[ncellvars:]
        # do not use the instance's __init__ but the base's, because we set
        # everything like cells from here
        PyFrame.__init__(self, space, pycode, w_globals, closure)
        new_frame.f_back = space.interp_w(PyFrame, w_f_back, can_be_None=True)
        new_frame.builtin = space.interp_w(Module, w_builtin)
        new_frame.blockstack = [unpickle_block(space, w_blk)
                                for w_blk in space.unpackiterable(w_blockstack)]
        values_w = maker.slp_from_tuple_with_nulls(space, w_valuestack)
        for w_value in values_w:
            new_frame.pushvalue(w_value)
        if space.is_w(w_exc_value, space.w_None):
            new_frame.last_exception = None
        else:
            from pypy.interpreter.pytraceback import PyTraceback
            tb = space.interp_w(PyTraceback, w_tb)
            new_frame.last_exception = OperationError(space.type(w_exc_value),
                                                      w_exc_value, tb
                                                      )
        new_frame.last_instr = space.int_w(w_last_instr)
        new_frame.frame_finished_execution = space.is_true(w_finished)
        new_frame.f_lineno = space.int_w(w_f_lineno)
        new_frame.fastlocals_w = maker.slp_from_tuple_with_nulls(space, w_fastlocals)
        if space.is_w(w_f_trace, space.w_None):
            new_frame.w_f_trace = None
        else:
            new_frame.w_f_trace = w_f_trace
        new_frame.instr_lb = space.int_w(w_instr_lb)   #the three for tracing
        new_frame.instr_ub = space.int_w(w_instr_ub)
        new_frame.instr_prev = space.int_w(w_instr_prev)
        self._setcellvars(cellvars)

    def hide(self):
        # Hidden frames do not appear on the ExecutionContext framestack.
        return self.pycode.hidden_applevel

    def getcode(self):
        return hint(hint(self.pycode, promote=True), deepfreeze=True)   # JIT hints

    def getfastscope(self):
        "Get the fast locals as a list."
        return self.fastlocals_w

    def setfastscope(self, scope_w):
        """Initialize the fast locals from a list of values,
        where the order is according to self.pycode.signature()."""
        scope_len = len(scope_w)
        if scope_len > len(self.fastlocals_w):
            raise ValueError, "new fastscope is longer than the allocated area"
        self.fastlocals_w[:scope_len] = scope_w
        self.init_cells()

    def init_cells(self):
        """Initialize cellvars from self.fastlocals_w
        This is overridden in nestedscope.py"""
        pass

    def getclosure(self):
        # No closure on plain frames; see nestedscope.py for the override.
        return None

    def _getcells(self):
        return None

    def _setcellvars(self, cellvars):
        pass

    ### line numbers ###

    # for f*_f_* unwrapping through unwrap_spec in typedef.py

    def fget_f_lineno(space, self):
        "Returns the line number of the instruction currently being executed."
        if self.w_f_trace is None:
            return space.wrap(self.get_last_lineno())
        else:
            return space.wrap(self.f_lineno)

    def fset_f_lineno(space, self, w_new_lineno):
        "Returns the line number of the instruction currently being executed."
        # Implements assignment to f_lineno from a trace function, with the
        # same validity checks as CPython: the target line must exist, must
        # not be an 'except' landing site, and the jump must not enter or
        # leave a 'finally' block or the middle of any block.
        try:
            new_lineno = space.int_w(w_new_lineno)
        except OperationError, e:
            raise OperationError(space.w_ValueError,
                                 space.wrap("lineno must be an integer"))
        if self.w_f_trace is None:
            # NOTE(review): "f_lineo" typo is in the original message text;
            # kept byte-identical here.
            raise OperationError(space.w_ValueError,
                                 space.wrap("f_lineo can only be set by a trace function."))
        if new_lineno < self.pycode.co_firstlineno:
            raise OperationError(space.w_ValueError,
                                 space.wrap("line %d comes before the current code." % new_lineno))
        code = self.pycode.co_code
        addr = 0
        line = self.pycode.co_firstlineno
        new_lasti = -1
        offset = 0
        lnotab = self.pycode.co_lnotab
        # walk the (addr-delta, line-delta) pairs of co_lnotab to find the
        # first bytecode offset at or after the requested line
        for offset in xrange(0, len(lnotab), 2):
            addr += ord(lnotab[offset])
            line += ord(lnotab[offset + 1])
            if line >= new_lineno:
                new_lasti = addr
                new_lineno = line
                break
        if new_lasti == -1:
            raise OperationError(space.w_ValueError,
                                 space.wrap("line %d comes after the current code." % new_lineno))
        # Don't jump to a line with an except in it.
        if ord(code[new_lasti]) in (DUP_TOP, POP_TOP):
            raise OperationError(space.w_ValueError,
                                 space.wrap("can't jump to 'except' line as there's no exception"))
        # Don't jump into or out of a finally block.
        # Scan the whole bytecode, simulating the block stack; record which
        # SETUP_FINALLY (if any) encloses the source and target offsets.
        f_lasti_setup_addr = -1
        new_lasti_setup_addr = -1
        blockstack = []          # entries: [setup_addr, reached_POP_BLOCK?]
        addr = 0
        while addr < len(code):
            op = ord(code[addr])
            if op in (SETUP_LOOP, SETUP_EXCEPT, SETUP_FINALLY):
                blockstack.append([addr, False])
            elif op == POP_BLOCK:
                setup_op = ord(code[blockstack[-1][0]])
                if setup_op == SETUP_FINALLY:
                    blockstack[-1][1] = True
                else:
                    blockstack.pop()
            elif op == END_FINALLY:
                if len(blockstack) > 0:
                    setup_op = ord(code[blockstack[-1][0]])
                    if setup_op == SETUP_FINALLY:
                        blockstack.pop()
            if addr == new_lasti or addr == self.last_instr:
                # innermost enclosing finally block, if any (scan outwards)
                for ii in range(len(blockstack)):
                    setup_addr, in_finally = blockstack[~ii]
                    if in_finally:
                        if addr == new_lasti:
                            new_lasti_setup_addr = setup_addr
                        if addr == self.last_instr:
                            f_lasti_setup_addr = setup_addr
                        break
            if op >= HAVE_ARGUMENT:
                addr += 3
            else:
                addr += 1
        assert len(blockstack) == 0
        if new_lasti_setup_addr != f_lasti_setup_addr:
            raise OperationError(space.w_ValueError,
                                 space.wrap("can't jump into or out of a 'finally' block %d -> %d" %
                                            (f_lasti_setup_addr, new_lasti_setup_addr)))
        if new_lasti < self.last_instr:
            min_addr = new_lasti
            max_addr = self.last_instr
        else:
            min_addr = self.last_instr
            max_addr = new_lasti
        # Count the net number of blocks entered/left between the two
        # offsets; min_delta_iblock tracks the deepest net exit.
        delta_iblock = min_delta_iblock = 0
        addr = min_addr
        while addr < max_addr:
            op = ord(code[addr])
            if op in (SETUP_LOOP, SETUP_EXCEPT, SETUP_FINALLY):
                delta_iblock += 1
            elif op == POP_BLOCK:
                delta_iblock -= 1
                if delta_iblock < min_delta_iblock:
                    min_delta_iblock = delta_iblock
            if op >= opcode.HAVE_ARGUMENT:
                addr += 3
            else:
                addr += 1
        f_iblock = len(self.blockstack)
        min_iblock = f_iblock + min_delta_iblock
        if new_lasti > self.last_instr:
            new_iblock = f_iblock + delta_iblock
        else:
            new_iblock = f_iblock - delta_iblock
        if new_iblock > min_iblock:
            raise OperationError(space.w_ValueError,
                                 space.wrap("can't jump into the middle of a block"))
        # Pop (and clean up) the blocks the jump leaves.
        while f_iblock > new_iblock:
            block = self.blockstack.pop()
            block.cleanup(self)
            f_iblock -= 1
        self.f_lineno = new_lineno
        self.last_instr = new_lasti

    def get_last_lineno(self):
        "Returns the line number of the instruction currently being executed."
        return pytraceback.offset2lineno(self.pycode, self.last_instr)

    def fget_f_builtins(space, self):
        return self.get_builtin().getdict()

    def fget_f_back(space, self):
        return self.space.wrap(self.f_back)

    def fget_f_lasti(space, self):
        return self.space.wrap(self.last_instr)

    def fget_f_trace(space, self):
        return self.w_f_trace

    def fset_f_trace(space, self, w_trace):
        if space.is_w(w_trace, space.w_None):
            self.w_f_trace = None
        else:
            self.w_f_trace = w_trace
            self.f_lineno = self.get_last_lineno()

    def fdel_f_trace(space, self):
        self.w_f_trace = None

    def fget_f_exc_type(space, self):
        # Walk the frame chain for the closest frame with a pending exception.
        if self.last_exception is not None:
            f = self.f_back
            while f is not None and f.last_exception is None:
                f = f.f_back
            if f is not None:
                return f.last_exception.w_type
        return space.w_None

    def fget_f_exc_value(space, self):
        if self.last_exception is not None:
            f = self.f_back
            while f is not None and f.last_exception is None:
                f = f.f_back
            if f is not None:
                return f.last_exception.w_value
        return space.w_None

    def fget_f_exc_traceback(space, self):
        if self.last_exception is not None:
            f = self.f_back
            while f is not None and f.last_exception is None:
                f = f.f_back
            if f is not None:
                return space.wrap(f.last_exception.application_traceback)
        return space.w_None

    def fget_f_restricted(space, self):
        # restricted execution: true when the frame's builtins are not the
        # space's standard ones
        if space.config.objspace.honor__builtins__:
            return space.wrap(self.builtin is not space.builtin)
        return space.w_False
# ____________________________________________________________
def get_block_class(opname):
    """Return the frame-block class registered under the given opcode name."""
    # select the appropriate kind of block
    # (local import -- presumably to avoid a pyframe <-> pyopcode cycle;
    #  see the "XXX cyclic import" notes elsewhere in this codebase)
    from pypy.interpreter.pyopcode import block_classes
    return block_classes[opname]
def unpickle_block(space, w_tup):
    # Rebuild a frame block from its pickled
    # (opname, handlerposition, valuestackdepth) triple.
    w_opname, w_handlerposition, w_valuestackdepth = space.unpackiterable(w_tup)
    kind = space.str_w(w_opname)
    handler = space.int_w(w_handlerposition)
    depth = space.int_w(w_valuestackdepth)
    assert depth >= 0
    # instantiate() bypasses __init__; fill the two fields by hand
    block = instantiate(get_block_class(kind))
    block.handlerposition = handler
    block.valuestackdepth = depth
    return block
| Python |
import sys
from pypy.interpreter.miscutils import Stack, Action
from pypy.interpreter.error import OperationError
def new_framestack():
    """Return a fresh, empty Stack used to hold an ExecutionContext's frames."""
    return Stack()
class ExecutionContext:
    """An ExecutionContext holds the state of an execution thread
    in the Python interpreter."""

    def __init__(self, space):
        self.space = space
        self.framestack = new_framestack()
        # sys.settrace()/sys.setprofile() hooks: app-level callables or None
        self.w_tracefunc = None
        self.w_profilefunc = None
        # > 0 while a trace/profile callback runs, to suppress recursion
        self.is_tracing = 0
        # countdown until the next periodic check; see bytecode_trace()
        self.ticker = 0
        self.pending_actions = []
        self.compiler = space.createcompiler()

    def enter(self, frame):
        """Link 'frame' to the current top and push it (unless hidden)."""
        if self.framestack.depth() > self.space.sys.recursionlimit:
            raise OperationError(self.space.w_RuntimeError,
                                 self.space.wrap("maximum recursion depth exceeded"))
        try:
            frame.f_back = self.framestack.top()
        except IndexError:
            frame.f_back = None
        if not frame.hide():
            self.framestack.push(frame)

    def leave(self, frame):
        """Pop 'frame' (unless hidden), notifying the profiler if any."""
        if self.w_profilefunc:
            self._trace(frame, 'leaveframe', None)
        if not frame.hide():
            self.framestack.pop()

    class Subcontext(object):
        # coroutine: subcontext support

        def __init__(self):
            self.framestack = new_framestack()
            self.w_tracefunc = None
            self.w_profilefunc = None
            self.is_tracing = 0

        def enter(self, ec):
            # install this subcontext's saved state into the live context
            ec.framestack = self.framestack
            ec.w_tracefunc = self.w_tracefunc
            ec.w_profilefunc = self.w_profilefunc
            ec.is_tracing = self.is_tracing

        def leave(self, ec):
            # save the live context's state back into this subcontext
            self.framestack = ec.framestack
            self.w_tracefunc = ec.w_tracefunc
            self.w_profilefunc = ec.w_profilefunc
            self.is_tracing = ec.is_tracing

        # the following interface is for pickling and unpickling
        def getstate(self, space):
            # we just save the framestack
            items = [space.wrap(item) for item in self.framestack.items]
            return space.newtuple(items)

        def setstate(self, space, w_state):
            from pypy.interpreter.pyframe import PyFrame
            items = [space.interp_w(PyFrame, w_item)
                     for w_item in space.unpackiterable(w_state)]
            self.framestack.items = items
        # coroutine: I think this is all, folks!

    def get_builtin(self):
        """Return the builtin module of the topmost frame, or the default."""
        try:
            return self.framestack.top().builtin
        except IndexError:
            return self.space.builtin

    # XXX this one should probably be dropped in favor of a module
    def make_standard_w_globals(self):
        "Create a new empty 'globals' dictionary."
        w_key = self.space.wrap("__builtins__")
        w_value = self.space.wrap(self.get_builtin())
        w_globals = self.space.newdict()
        # bug fix: this line used the undefined name 'space'; it must go
        # through self.space like the lines above.
        self.space.setitem(w_globals, w_key, w_value)
        return w_globals

    def call_trace(self, frame):
        "Trace the call of a function"
        self._trace(frame, 'call', self.space.w_None)

    def return_trace(self, frame, w_retval):
        "Trace the return from a function"
        self._trace(frame, 'return', w_retval)

    def bytecode_trace(self, frame):
        "Trace function called before each bytecode."
        # First, call yield_thread() before each Nth bytecode,
        #     as selected by sys.setcheckinterval()
        ticker = self.ticker
        if ticker <= 0:
            Action.perform_actions(self.space.pending_actions)
            Action.perform_actions(self.pending_actions)
            ticker = self.space.sys.checkinterval
        self.ticker = ticker - 1
        if frame.w_f_trace is None or self.is_tracing:
            return
        self._do_bytecode_trace(frame)

    def _do_bytecode_trace(self, frame):
        # Decide whether a 'line' trace event is due, maintaining the
        # cached [instr_lb, instr_ub) range of the current source line.
        code = getattr(frame, 'pycode')
        if frame.instr_lb <= frame.last_instr < frame.instr_ub:
            if frame.last_instr <= frame.instr_prev:
                # We jumped backwards in the same line.
                self._trace(frame, 'line', self.space.w_None)
        else:
            # walk co_lnotab's (addr-delta, line-delta) pairs to locate
            # the line containing last_instr
            size = len(code.co_lnotab) / 2
            addr = 0
            line = code.co_firstlineno
            p = 0
            lineno = code.co_lnotab
            while size > 0:
                c = ord(lineno[p])
                if (addr + c) > frame.last_instr:
                    break
                addr += c
                if c:
                    frame.instr_lb = addr
                line += ord(lineno[p + 1])
                p += 2
                size -= 1
            if size > 0:
                # find the upper bound: next entry with a non-zero line delta
                while True:
                    size -= 1
                    if size < 0:
                        break
                    addr += ord(lineno[p])
                    if ord(lineno[p + 1]):
                        break
                    p += 2
                frame.instr_ub = addr
            else:
                frame.instr_ub = sys.maxint
            if frame.instr_lb == frame.last_instr: # At start of line!
                frame.f_lineno = line
                self._trace(frame, 'line', self.space.w_None)
        frame.instr_prev = frame.last_instr

    def exception_trace(self, frame, operationerr):
        "Trace function called upon OperationError."
        operationerr.record_interpreter_traceback()
        self._trace(frame, 'exception', None, operationerr)
        #operationerr.print_detailed_traceback(self.space)

    def sys_exc_info(self): # attn: the result is not the wrapped sys.exc_info() !!!
        """Implements sys.exc_info().
        Return an OperationError instance or None."""
        # search the frame stack, topmost first, for a pending exception
        for i in range(self.framestack.depth()):
            frame = self.framestack.top(i)
            if frame.last_exception is not None:
                return frame.last_exception
        return None

    def settrace(self, w_func):
        """Set the global trace function."""
        if self.space.is_w(w_func, self.space.w_None):
            self.w_tracefunc = None
        else:
            self.w_tracefunc = w_func

    def setprofile(self, w_func):
        """Set the global trace function."""
        if self.space.is_w(w_func, self.space.w_None):
            self.w_profilefunc = None
        else:
            self.w_profilefunc = w_func

    def call_tracing(self, w_func, w_args):
        """Call w_func(*w_args) with tracing temporarily disabled."""
        is_tracing = self.is_tracing
        self.is_tracing = 0
        try:
            return self.space.call(w_func, w_args)
        finally:
            self.is_tracing = is_tracing

    def _trace(self, frame, event, w_arg, operr=None):
        """Dispatch one trace/profile event to the installed hooks."""
        if self.is_tracing or frame.hide():
            return
        space = self.space
        # Tracing cases
        if event == 'call':
            w_callback = self.w_tracefunc
        else:
            w_callback = frame.w_f_trace
        if w_callback is not None and event != "leaveframe":
            if operr is not None:
                w_arg = space.newtuple([operr.w_type, operr.w_value,
                                        space.wrap(operr.application_traceback)])
            frame.fast2locals()
            self.is_tracing += 1
            try:
                try:
                    w_result = space.call_function(w_callback, space.wrap(frame), space.wrap(event), w_arg)
                    # per the sys.settrace protocol, the callback's return
                    # value replaces the per-frame trace function
                    if space.is_w(w_result, space.w_None):
                        frame.w_f_trace = None
                    else:
                        frame.w_f_trace = w_result
                except:
                    # a failing trace function disables tracing and propagates
                    self.settrace(space.w_None)
                    frame.w_f_trace = None
                    raise
            finally:
                self.is_tracing -= 1
                frame.locals2fast()
        # Profile cases
        if self.w_profilefunc is not None:
            if event not in ['leaveframe', 'call']:
                return
            last_exception = None
            if event == 'leaveframe':
                last_exception = frame.last_exception
                event = 'return'
            assert self.is_tracing == 0
            self.is_tracing += 1
            try:
                try:
                    w_result = space.call_function(self.w_profilefunc,
                                                   space.wrap(frame),
                                                   space.wrap(event), w_arg)
                except:
                    # a failing profile function uninstalls itself
                    self.w_profilefunc = None
                    raise
            finally:
                frame.last_exception = last_exception
                self.is_tracing -= 1

    def add_pending_action(self, action):
        # force the ticker to fire so the action runs at the next check
        self.pending_actions.append(action)
        self.ticker = 0
| Python |
"""
Gateway between app-level and interpreter-level:
* BuiltinCode (call interp-level code from app-level)
* app2interp (embed an app-level function into an interp-level callable)
* interp2app (publish an interp-level object to be visible from app-level)
"""
import types, sys, md5, os
NoneNotWrapped = object()
from pypy.tool.sourcetools import func_with_new_name
from pypy.interpreter.error import OperationError
from pypy.interpreter import eval
from pypy.interpreter.function import Function, Method
from pypy.interpreter.baseobjspace import W_Root, ObjSpace, Wrappable
from pypy.interpreter.baseobjspace import Wrappable, SpaceCache, DescrMismatch
from pypy.interpreter.argument import Arguments, AbstractArguments
from pypy.tool.sourcetools import NiceCompile, compile2
from pypy.rlib.jit import hint
# internal non-translatable parts:
import py
class Signature:
    "NOT_RPYTHON"

    def __init__(self, func=None, argnames=None, varargname=None,
                 kwargname=None, name = None):
        # Prefer the function's own name over the explicit 'name' argument.
        self.func = func
        if func is None:
            self.name = name
        else:
            self.name = func.__name__
        if argnames is None:
            argnames = []
        # keep the caller's list object: append() mutates it in place
        self.argnames = argnames
        self.varargname = varargname
        self.kwargname = kwargname

    def append(self, argname):
        # grow the positional-argument name list
        self.argnames.append(argname)

    def signature(self):
        # (positional names, *-arg name or None, **-arg name or None)
        return self.argnames, self.varargname, self.kwargname
#________________________________________________________________
class UnwrapSpecRecipe:
    "NOT_RPYTHON"

    # most-specific-first order used when dispatching on a class element
    bases_order = [Wrappable, W_Root, ObjSpace, Arguments, object]

    def dispatch(self, el, *args):
        # A string names a visit_<tag> method directly.
        if isinstance(el, str):
            visit = getattr(self, "visit_%s" % (el,))
            visit(el, *args)
            return
        # A tuple is either ('self', cls) or a (function, cls) pair.
        if isinstance(el, tuple):
            if el[0] == 'self':
                self.visit_self(el[1], *args)
            else:
                self.visit_function(el, *args)
            return
        # Otherwise 'el' is a class: pick the first matching known base.
        for typ in self.bases_order:
            if issubclass(el, typ):
                visit = getattr(self, "visit__%s" % (typ.__name__,))
                visit(el, *args)
                break
        else:
            raise Exception("%s: no match for unwrap_spec element %s" % (
                self.__class__.__name__, el))

    def apply_over(self, unwrap_spec, *extra):
        # dispatch every spec element with the same extra arguments
        for el in unwrap_spec:
            self.dispatch(el, *extra)
class UnwrapSpecEmit(UnwrapSpecRecipe):
    """Base for recipes that emit source code: numbers the arguments and
    collects the objects the generated code needs as globals."""

    def __init__(self):
        self.n = 0
        self.miniglobals = {}

    def succ(self):
        # hand out consecutive argument indices: 0, 1, 2, ...
        current = self.n
        self.n = current + 1
        return current

    def use(self, obj):
        # make 'obj' reachable from the generated code under its own name
        name = obj.__name__
        self.miniglobals[name] = obj
        return name
#________________________________________________________________
class UnwrapSpec_Check(UnwrapSpecRecipe):
    # checks for checking interp2app func argument names wrt unwrap_spec
    # and synthetizing an app-level signature

    def __init__(self, original_sig):
        self.func = original_sig.func
        # each call to self.orig_arg() yields the next interp-level
        # argument name, in declaration order
        self.orig_arg = iter(original_sig.argnames).next

    def visit_function(self, (func, cls), app_sig):
        # (converter-function, class) pair: check as if it were the class
        self.dispatch(cls, app_sig)

    def visit_self(self, cls, app_sig):
        self.visit__Wrappable(cls, app_sig)

    def checked_space_method(self, typname, app_sig):
        # unwrapped argument: its interp-level name must NOT be 'w_'-prefixed
        argname = self.orig_arg()
        assert not argname.startswith('w_'), (
            "unwrapped %s argument %s of built-in function %r should "
            "not start with 'w_'" % (typname, argname, self.func))
        app_sig.append(argname)

    def visit_index(self, index, app_sig):
        self.checked_space_method(index, app_sig)

    def visit__Wrappable(self, el, app_sig):
        name = el.__name__
        argname = self.orig_arg()
        assert not argname.startswith('w_'), (
            "unwrapped %s argument %s of built-in function %r should "
            "not start with 'w_'" % (name, argname, self.func))
        app_sig.append(argname)

    def visit__ObjSpace(self, el, app_sig):
        # the space argument is implicit at app-level: consume, don't record
        self.orig_arg()

    def visit__W_Root(self, el, app_sig):
        assert el is W_Root, "oops"
        # wrapped argument: must be 'w_'-prefixed; app-level name drops it
        argname = self.orig_arg()
        assert argname.startswith('w_'), (
            "argument %s of built-in function %r should "
            "start with 'w_'" % (argname, self.func))
        app_sig.append(argname[2:])

    def visit__Arguments(self, el, app_sig):
        # an Arguments element captures *args and **keywords at once
        argname = self.orig_arg()
        assert app_sig.varargname is None,(
            "built-in function %r has conflicting rest args specs" % self.func)
        app_sig.varargname = 'args'
        app_sig.kwargname = 'keywords'

    def visit_args_w(self, el, app_sig):
        # 'args_w': rest arguments as a list of wrapped values
        argname = self.orig_arg()
        assert argname.endswith('_w'), (
            "rest arguments arg %s of built-in function %r should end in '_w'" %
            (argname, self.func))
        assert app_sig.varargname is None,(
            "built-in function %r has conflicting rest args specs" % self.func)
        app_sig.varargname = argname[:-2]

    def visit_w_args(self, el, app_sig):
        # 'w_args': rest arguments as a single wrapped tuple
        argname = self.orig_arg()
        assert argname.startswith('w_'), (
            "rest arguments arg %s of built-in function %r should start 'w_'" %
            (argname, self.func))
        assert app_sig.varargname is None,(
            "built-in function %r has conflicting rest args specs" % self.func)
        app_sig.varargname = argname[2:]

    def visit__object(self, typ, app_sig):
        # only a few basic unwrapped types are supported
        if typ not in (int, str, float):
            assert False, "unsupported basic type in unwrap_spec"
        self.checked_space_method(typ.__name__, app_sig)
class UnwrapSpec_EmitRun(UnwrapSpecEmit):
# collect code to emit for interp2app builtin frames based on unwrap_spec
def __init__(self):
UnwrapSpecEmit.__init__(self)
self.run_args = []
def scopenext(self):
return "scope_w[%d]" % self.succ()
def visit_function(self, (func, cls)):
self.run_args.append("%s(%s)" % (self.use(func),
self.scopenext()))
def visit_self(self, typ):
self.run_args.append("space.descr_self_interp_w(%s, %s)" %
(self.use(typ), self.scopenext()))
def visit__Wrappable(self, typ):
self.run_args.append("space.interp_w(%s, %s)" % (self.use(typ),
self.scopenext()))
def visit__ObjSpace(self, el):
self.run_args.append('space')
def visit__W_Root(self, el):
self.run_args.append(self.scopenext())
def visit__Arguments(self, el):
self.miniglobals['Arguments'] = Arguments
self.run_args.append("Arguments.frompacked(space, %s, %s)"
% (self.scopenext(), self.scopenext()))
def visit_args_w(self, el):
self.run_args.append("space.unpacktuple(%s)" % self.scopenext())
def visit_w_args(self, el):
self.run_args.append(self.scopenext())
def visit__object(self, typ):
if typ not in (int, str, float):
assert False, "unsupported basic type in uwnrap_spec"
self.run_args.append("space.%s_w(%s)" %
(typ.__name__, self.scopenext()))
def visit_index(self, typ):
self.run_args.append("space.getindex_w(%s, space.w_OverflowError)"
% (self.scopenext(), ))
def _make_unwrap_activation_class(self, unwrap_spec, cache={}):
try:
key = tuple(unwrap_spec)
activation_factory_cls, run_args = cache[key]
assert run_args == self.run_args, (
"unexpected: same spec, different run_args")
return activation_factory_cls
except KeyError:
parts = []
for el in unwrap_spec:
if isinstance(el, tuple):
parts.append(''.join([getattr(subel, '__name__', subel)
for subel in el]))
else:
parts.append(getattr(el, '__name__', el))
label = '_'.join(parts)
#print label
d = {}
source = """if 1:
def _run_UWS_%s(self, space, scope_w):
return self.behavior(%s)
\n""" % (label, ', '.join(self.run_args))
exec compile2(source) in self.miniglobals, d
d['_run'] = d['_run_UWS_%s' % label]
del d['_run_UWS_%s' % label]
activation_cls = type("BuiltinActivation_UwS_%s" % label,
(BuiltinActivation,), d)
cache[key] = activation_cls, self.run_args
return activation_cls
def make_activation(unwrap_spec, func):
emit = UnwrapSpec_EmitRun()
emit.apply_over(unwrap_spec)
activation_uw_cls = emit._make_unwrap_activation_class(unwrap_spec)
return activation_uw_cls(func)
make_activation = staticmethod(make_activation)
class BuiltinActivation(object):
    """Abstract base for the activation classes generated per unwrap_spec;
    wraps the raw interp-level callable of a built-in function."""

    def __init__(self, behavior):
        """NOT_RPYTHON"""
        # the interp-level callable invoked by the generated _run()
        self.behavior = behavior

    def _run(self, space, scope_w):
        """Subclasses with behavior specific for an unwrap spec are generated"""
        raise TypeError, "abstract"
#________________________________________________________________
class FastFuncNotSupported(Exception):
    """Signals that an unwrap_spec cannot be compiled to a 'fast' function."""
class UnwrapSpec_FastFunc_Unwrap(UnwrapSpecEmit):
def __init__(self):
UnwrapSpecEmit.__init__(self)
self.args = []
self.unwrap = []
self.finger = 0
def dispatch(self, el, *args):
UnwrapSpecEmit.dispatch(self, el, *args)
self.finger += 1
if self.n > 4:
raise FastFuncNotSupported
def nextarg(self):
arg = "w%d" % self.succ()
self.args.append(arg)
return arg
def visit_function(self, (func, cls)):
raise FastFuncNotSupported
def visit_self(self, typ):
self.unwrap.append("space.descr_self_interp_w(%s, %s)" %
(self.use(typ), self.nextarg()))
def visit__Wrappable(self, typ):
self.unwrap.append("space.interp_w(%s, %s)" % (self.use(typ),
self.nextarg()))
def visit__ObjSpace(self, el):
if self.finger != 0:
raise FastFuncNotSupported
self.unwrap.append("space")
def visit__W_Root(self, el):
self.unwrap.append(self.nextarg())
def visit__Arguments(self, el):
raise FastFuncNotSupported
def visit_args_w(self, el):
raise FastFuncNotSupported
def visit_w_args(self, el):
raise FastFuncNotSupported
def visit__object(self, typ):
if typ not in (int, str, float):
assert False, "unsupported basic type in uwnrap_spec"
self.unwrap.append("space.%s_w(%s)" % (typ.__name__,
self.nextarg()))
def visit_index(self, typ):
self.unwrap.append("space.getindex_w(%s, space.w_OverflowError)"
% (self.nextarg()), )
def make_fastfunc(unwrap_spec, func):
unwrap_info = UnwrapSpec_FastFunc_Unwrap()
unwrap_info.apply_over(unwrap_spec)
narg = unwrap_info.n
args = ['space'] + unwrap_info.args
if args == unwrap_info.unwrap:
fastfunc = func
else:
# try to avoid excessive bloat
if func.__module__ == 'pypy.interpreter.astcompiler.ast':
raise FastFuncNotSupported
if (not func.__module__.startswith('pypy.module.__builtin__') and
not func.__module__.startswith('pypy.module.sys') and
not func.__module__.startswith('pypy.module.math')):
if not func.__name__.startswith('descr'):
raise FastFuncNotSupported
d = {}
unwrap_info.miniglobals['func'] = func
source = """if 1:
def fastfunc_%s_%d(%s):
return func(%s)
\n""" % (func.__name__, narg,
', '.join(args),
', '.join(unwrap_info.unwrap))
exec compile2(source) in unwrap_info.miniglobals, d
fastfunc = d['fastfunc_%s_%d' % (func.__name__, narg)]
return narg, fastfunc
make_fastfunc = staticmethod(make_fastfunc)
class BuiltinCode(eval.Code):
    "The code object implementing a built-in (interpreter-level) hook."
    hidden_applevel = True
    # set when the built-in is a method that may be invoked on an
    # instance of the wrong class (see funcrun's DescrMismatch handling):
    descrmismatch_op = None
    descr_reqcls = None

    # When a BuiltinCode is stored in a Function object,
    # you get the functionality of CPython's built-in function type.

    NOT_RPYTHON_ATTRIBUTES = ['_bltin', '_unwrap_spec']

    def __init__(self, func, unwrap_spec = None, self_type = None,
                 descrmismatch=None):
        "NOT_RPYTHON"
        # 'implfunc' is the interpreter-level function.
        # Note that this uses a lot of (construction-time) introspection.
        eval.Code.__init__(self, func.__name__)
        self.docstring = func.__doc__

        # unwrap_spec can be passed to interp2app or
        # attached as an attribute to the function.
        # It is a list of types or singleton objects:
        #  baseobjspace.ObjSpace is used to specify the space argument
        #  baseobjspace.W_Root is for wrapped arguments to keep wrapped
        #  baseobjspace.Wrappable subclasses imply interp_w and a typecheck
        #  argument.Arguments is for a final rest arguments Arguments object
        # 'args_w' for unpacktuple applied to rest arguments
        # 'w_args' for rest arguments passed as wrapped tuple
        # str,int,float: unwrap argument as such type
        # (function, cls) use function to check/unwrap argument of type cls

        # First extract the signature from the (CPython-level) code object
        from pypy.interpreter import pycode
        argnames, varargname, kwargname = pycode.cpython_code_signature(func.func_code)

        if unwrap_spec is None:
            unwrap_spec = getattr(func,'unwrap_spec',None)

        if unwrap_spec is None:
            # default spec: space first, everything else kept wrapped
            unwrap_spec = [ObjSpace]+ [W_Root] * (len(argnames)-1)
            if self_type:
                unwrap_spec = ['self'] + unwrap_spec[1:]

        if self_type:
            assert unwrap_spec[0] == 'self',"self_type without 'self' spec element"
            unwrap_spec = list(unwrap_spec)
            if descrmismatch is not None:
                assert issubclass(self_type, Wrappable)
                unwrap_spec[0] = ('self', self_type)
                self.descrmismatch_op = descrmismatch
                self.descr_reqcls = self_type
            else:
                unwrap_spec[0] = self_type
        else:
            assert descrmismatch is None, (
                "descrmismatch without a self-type specified")

        orig_sig = Signature(func, argnames, varargname, kwargname)
        app_sig = Signature(func)

        UnwrapSpec_Check(orig_sig).apply_over(unwrap_spec,
                                              app_sig #to populate
                                              )
        self.sig = argnames, varargname, kwargname = app_sig.signature()

        self.minargs = len(argnames)
        if varargname:
            self.maxargs = sys.maxint
        else:
            self.maxargs = self.minargs

        self.activation = UnwrapSpec_EmitRun.make_activation(unwrap_spec, func)
        self._bltin = func
        self._unwrap_spec = unwrap_spec

        # speed hack: for small specs, switch this instance to a
        # specialized subclass supporting the fastcall_N protocol, or
        # to a pass-through-Arguments subclass as a fallback
        if 0 <= len(unwrap_spec) <= 5:
            try:
                arity, fastfunc = UnwrapSpec_FastFunc_Unwrap.make_fastfunc(
                                                 unwrap_spec, func)
            except FastFuncNotSupported:
                if unwrap_spec == [ObjSpace, Arguments]:
                    self.__class__ = BuiltinCodePassThroughArguments0
                    self.func__args__ = func
                elif unwrap_spec == [ObjSpace, W_Root, Arguments]:
                    self.__class__ = BuiltinCodePassThroughArguments1
                    self.func__args__ = func
            else:
                self.__class__ = globals()['BuiltinCode%d' % arity]
                setattr(self, 'fastfunc_%d' % arity, fastfunc)

    def signature(self):
        return self.sig

    def getdocstring(self, space):
        return space.wrap(self.docstring)

    def funcrun(self, func, args):
        # generic call path: parse the arguments against the app-level
        # signature and run the generated activation, translating
        # interp-level exceptions into app-level OperationErrors
        space = func.space
        activation = self.activation
        scope_w = args.parse(func.name, self.sig, func.defs_w)
        try:
            w_result = activation._run(space, scope_w)
        except KeyboardInterrupt:
            raise OperationError(space.w_KeyboardInterrupt,
                                 space.w_None)
        except MemoryError:
            raise OperationError(space.w_MemoryError, space.w_None)
        except RuntimeError, e:
            raise OperationError(space.w_RuntimeError,
                                 space.wrap("internal error: " + str(e)))
        except DescrMismatch, e:
            # called on an instance of the wrong class: redispatch
            # through the generic descriptor machinery
            return scope_w[0].descr_call_mismatch(space,
                                                  self.descrmismatch_op,
                                                  self.descr_reqcls,
                                                  args)
        if w_result is None:
            w_result = space.w_None
        return w_result
# (verbose) performance hack below
class BuiltinCodePassThroughArguments0(BuiltinCode):
    # specialization for unwrap_spec == [ObjSpace, Arguments]:
    # the Arguments object is handed to the function without parsing

    def funcrun(self, func, args):
        space = func.space
        try:
            w_result = self.func__args__(space, args)
        except KeyboardInterrupt:
            raise OperationError(space.w_KeyboardInterrupt, space.w_None)
        except MemoryError:
            raise OperationError(space.w_MemoryError, space.w_None)
        except RuntimeError, e:
            raise OperationError(space.w_RuntimeError,
                                 space.wrap("internal error: " + str(e)))
        except DescrMismatch, e:
            return args.firstarg().descr_call_mismatch(space,
                                                  self.descrmismatch_op,
                                                  self.descr_reqcls,
                                                  args)
        if w_result is None:
            w_result = space.w_None
        return w_result
class BuiltinCodePassThroughArguments1(BuiltinCode):
    # specialization for unwrap_spec == [ObjSpace, W_Root, Arguments]:
    # the first wrapped argument is popped off, the rest passed through

    def funcrun(self, func, args):
        space = func.space
        try:
            w_obj, newargs = args.popfirst()
        except IndexError:
            # no first argument at all: fall back to the generic path,
            # which will produce the proper argument-count error
            return BuiltinCode.funcrun(self, func, args)
        else:
            try:
                w_result = self.func__args__(space, w_obj, newargs)
            except KeyboardInterrupt:
                raise OperationError(space.w_KeyboardInterrupt, space.w_None)
            except MemoryError:
                raise OperationError(space.w_MemoryError, space.w_None)
            except RuntimeError, e:
                raise OperationError(space.w_RuntimeError,
                                     space.wrap("internal error: " + str(e)))
            except DescrMismatch, e:
                return args.firstarg().descr_call_mismatch(space,
                                                      self.descrmismatch_op,
                                                      self.descr_reqcls,
                                                      args)
            if w_result is None:
                w_result = space.w_None
            return w_result
class BuiltinCode0(BuiltinCode):
    # fastcall specialization for zero-argument built-ins
    def fastcall_0(self, space, w_func):
        self = hint(self, deepfreeze=True)
        try:
            w_result = self.fastfunc_0(space)
        except KeyboardInterrupt:
            raise OperationError(space.w_KeyboardInterrupt, space.w_None)
        except MemoryError:
            raise OperationError(space.w_MemoryError, space.w_None)
        except (RuntimeError, DescrMismatch), e:
            # no argument to redispatch a DescrMismatch on, so it is
            # reported as an internal error here
            raise OperationError(space.w_RuntimeError,
                                 space.wrap("internal error: " + str(e)))
        if w_result is None:
            w_result = space.w_None
        return w_result
class BuiltinCode1(BuiltinCode):
    # fastcall specialization for one-argument built-ins
    def fastcall_1(self, space, w_func, w1):
        self = hint(self, deepfreeze=True)
        try:
            w_result = self.fastfunc_1(space, w1)
        except KeyboardInterrupt:
            raise OperationError(space.w_KeyboardInterrupt, space.w_None)
        except MemoryError:
            raise OperationError(space.w_MemoryError, space.w_None)
        except RuntimeError, e:
            raise OperationError(space.w_RuntimeError,
                                 space.wrap("internal error: " + str(e)))
        except DescrMismatch, e:
            # wrong-class call: redispatch on the first argument
            return w1.descr_call_mismatch(space,
                                          self.descrmismatch_op,
                                          self.descr_reqcls,
                                          Arguments(space, [w1]))
        if w_result is None:
            w_result = space.w_None
        return w_result
class BuiltinCode2(BuiltinCode):
    # fastcall specialization for two-argument built-ins
    def fastcall_2(self, space, w_func, w1, w2):
        self = hint(self, deepfreeze=True)
        try:
            w_result = self.fastfunc_2(space, w1, w2)
        except KeyboardInterrupt:
            raise OperationError(space.w_KeyboardInterrupt, space.w_None)
        except MemoryError:
            raise OperationError(space.w_MemoryError, space.w_None)
        except RuntimeError, e:
            raise OperationError(space.w_RuntimeError,
                                 space.wrap("internal error: " + str(e)))
        except DescrMismatch, e:
            # wrong-class call: redispatch on the first argument
            return w1.descr_call_mismatch(space,
                                          self.descrmismatch_op,
                                          self.descr_reqcls,
                                          Arguments(space, [w1, w2]))
        if w_result is None:
            w_result = space.w_None
        return w_result
class BuiltinCode3(BuiltinCode):
    # fastcall specialization for three-argument built-ins
    def fastcall_3(self, space, func, w1, w2, w3):
        self = hint(self, deepfreeze=True)
        try:
            w_result = self.fastfunc_3(space, w1, w2, w3)
        except KeyboardInterrupt:
            raise OperationError(space.w_KeyboardInterrupt, space.w_None)
        except MemoryError:
            raise OperationError(space.w_MemoryError, space.w_None)
        except RuntimeError, e:
            raise OperationError(space.w_RuntimeError,
                                 space.wrap("internal error: " + str(e)))
        except DescrMismatch, e:
            # wrong-class call: redispatch on the first argument
            return w1.descr_call_mismatch(space,
                                          self.descrmismatch_op,
                                          self.descr_reqcls,
                                          Arguments(space, [w1, w2, w3]))
        if w_result is None:
            w_result = space.w_None
        return w_result
class BuiltinCode4(BuiltinCode):
    # fastcall specialization for four-argument built-ins
    def fastcall_4(self, space, func, w1, w2, w3, w4):
        self = hint(self, deepfreeze=True)
        try:
            w_result = self.fastfunc_4(space, w1, w2, w3, w4)
        except KeyboardInterrupt:
            raise OperationError(space.w_KeyboardInterrupt, space.w_None)
        except MemoryError:
            raise OperationError(space.w_MemoryError, space.w_None)
        except RuntimeError, e:
            raise OperationError(space.w_RuntimeError,
                                 space.wrap("internal error: " + str(e)))
        except DescrMismatch, e:
            # wrong-class call: redispatch on the first argument
            return w1.descr_call_mismatch(space,
                                          self.descrmismatch_op,
                                          self.descr_reqcls,
                                          Arguments(space,
                                                    [w1, w2, w3, w4]))
        if w_result is None:
            w_result = space.w_None
        return w_result
class interp2app(Wrappable):
    """Build a gateway that calls 'f' at interp-level."""

    # NOTICE interp2app defaults are stored and passed as
    # wrapped values, this to avoid having scope_w be of mixed
    # wrapped and unwrapped types;
    # an exception is made for the NoneNotWrapped special value
    # which is passed around as default as an unwrapped None,
    # unwrapped None and wrapped types are compatible
    #
    # Takes optionally an unwrap_spec, see BuiltinCode

    NOT_RPYTHON_ATTRIBUTES = ['_staticdefs']

    def __init__(self, f, app_name=None, unwrap_spec = None,
                 descrmismatch=None):
        "NOT_RPYTHON"
        Wrappable.__init__(self)
        # f must be a function whose name does NOT start with 'app_'
        self_type = None
        if hasattr(f, 'im_func'):
            # (un)bound method: remember the class for 'self' handling
            self_type = f.im_class
            f = f.im_func
        if not isinstance(f, types.FunctionType):
            raise TypeError, "function expected, got %r instead" % f
        if app_name is None:
            if f.func_name.startswith('app_'):
                raise ValueError, ("function name %r suspiciously starts "
                                   "with 'app_'" % f.func_name)
            app_name = f.func_name
        self._code = BuiltinCode(f, unwrap_spec=unwrap_spec,
                                 self_type = self_type,
                                 descrmismatch=descrmismatch)
        self.__name__ = f.func_name
        self.name = app_name
        self._staticdefs = list(f.func_defaults or ())

    def _getdefaults(self, space):
        "NOT_RPYTHON"
        # wrap the static defaults; the NoneNotWrapped sentinel is kept
        # as a plain (unwrapped) None
        defs_w = []
        for val in self._staticdefs:
            if val is NoneNotWrapped:
                defs_w.append(None)
            else:
                defs_w.append(space.wrap(val))
        return defs_w

    # lazy binding to space

    def __spacebind__(self, space):
        # we first make a real Function object out of it
        # and the result is a wrapped version of this Function.
        return self.get_function(space)

    def get_function(self, space):
        return self.getcache(space).getorbuild(self)

    def getcache(self, space):
        return space.fromcache(GatewayCache)

    def get_method(self, obj):
        # to bind this as a method out of an instance, we build a
        # Function and get it.
        # the object space is implicitely fetched out of the instance
        assert self._code.ismethod, (
            'global built-in function %r used as method' %
            self._code.func)
        space = obj.space
        fn = self.get_function(space)
        w_obj = space.wrap(obj)
        return Method(space, space.wrap(fn),
                      w_obj, space.type(w_obj))
class GatewayCache(SpaceCache):
    # per-space cache building one Function object per interp2app gateway
    def build(cache, gateway):
        "NOT_RPYTHON"
        space = cache.space
        defs = gateway._getdefaults(space) # needs to be implemented by subclass
        code = gateway._code
        fn = Function(space, code, None, defs, forcename = gateway.name)
        return fn
#
# the next gateways are to be used only for
# temporary/initialization purposes
class interp2app_temp(interp2app):
    "NOT_RPYTHON"
    def getcache(self, space):
        # per-instance cache instead of the space-wide one, so that
        # temporary gateways do not pollute the global GatewayCache
        return self.__dict__.setdefault(space, GatewayCache(space))
# and now for something completely different ...
#
class ApplevelClass:
    """NOT_RPYTHON
    A container for app-level source code that should be executed
    as a module in the object space;  interphook() builds a static
    interp-level function that invokes the callable with the given
    name at app-level."""

    hidden_applevel = True

    def __init__(self, source, filename = None, modname = '__builtin__'):
        self.filename = filename
        if self.filename is None:
            self.code = py.code.Source(source).compile()
        else:
            self.code = NiceCompile(self.filename)(source)
        self.modname = modname
        # look at the first three lines for a NOT_RPYTHON tag
        first = "\n".join(source.split("\n", 3)[:3])
        if "NOT_RPYTHON" in first:
            self.can_use_geninterp = False
        else:
            self.can_use_geninterp = True

    def getwdict(self, space):
        # lazily built (and cached per space) wrapped module dict
        return space.fromcache(ApplevelCache).getorbuild(self)

    def buildmodule(self, space, name='applevel'):
        from pypy.interpreter.module import Module
        return Module(space, space.wrap(name), self.getwdict(space))

    def wget(self, space, name):
        # fetch the wrapped global 'name' out of the module dict
        if hasattr(space, '_applevelclass_hook'): # XXX for the CPyObjSpace
            return space._applevelclass_hook(self, name)
        w_globals = self.getwdict(space)
        return space.getitem(w_globals, space.wrap(name))

    def interphook(self, name):
        "NOT_RPYTHON"
        def appcaller(space, *args_w):
            if not isinstance(space, ObjSpace):
                raise TypeError("first argument must be a space instance.")
            # redirect if the space handles this specially
            # XXX can this be factored a bit less flow space dependently?
            if hasattr(space, 'specialcases'):
                sc = space.specialcases
                if ApplevelClass in sc:
                    ret_w = sc[ApplevelClass](space, self, name, args_w)
                    if ret_w is not None: # it was RPython
                        return ret_w
            # the last argument can be an Arguments
            if not args_w:
                args = Arguments(space, [])
            else:
                args = args_w[-1]
                assert args is not None
                if not isinstance(args, AbstractArguments):
                    args = Arguments(space, list(args_w))
                else:
                    # ...which is merged with the previous arguments, if any
                    if len(args_w) > 1:
                        more_args_w, more_kwds_w = args.unpack()
                        args = Arguments(space,
                                         list(args_w[:-1]) + more_args_w,
                                         more_kwds_w)
            w_func = self.wget(space, name)
            return space.call_args(w_func, args)
        def get_function(space):
            w_func = self.wget(space, name)
            return space.unwrap(w_func)
        appcaller = func_with_new_name(appcaller, name)
        appcaller.get_function = get_function
        return appcaller

    def _freeze_(self):
        return True  # hint for the annotator: applevel instances are constants
class ApplevelCache(SpaceCache):
    """NOT_RPYTHON
    The cache mapping each applevel instance to its lazily built w_dict"""

    def build(self, app):
        "NOT_RPYTHON.  Called indirectly by Applevel.getwdict()."
        # use the translated (geninterp) version when enabled and allowed
        if self.space.config.objspace.geninterp and app.can_use_geninterp:
            return PyPyCacheDir.build_applevelinterp_dict(app, self.space)
        else:
            return build_applevel_dict(app, self.space)
# __________ pure applevel version __________

def build_applevel_dict(self, space):
    "NOT_RPYTHON"
    # NOTE: module-level function, but called with an ApplevelClass
    # instance as 'self' (see ApplevelCache.build and applevel_temp)
    from pypy.interpreter.pycode import PyCode
    w_glob = space.newdict()
    space.setitem(w_glob, space.wrap('__name__'), space.wrap(self.modname))
    space.exec_(self.code, w_glob, w_glob,
                hidden_applevel=self.hidden_applevel)
    return w_glob
# __________ geninterplevel version __________

class PyPyCacheDir:
    "NOT_RPYTHON"
    # similar to applevel, but using translation to interp-level.
    # This version maintains a cache folder with single files.

    def build_applevelinterp_dict(cls, self, space):
        "NOT_RPYTHON"
        # N.B. 'self' is the ApplevelInterp; this is a class method,
        # just so that we have a convenient place to store the global state.
        if not cls._setup_done:
            cls._setup()

        from pypy.translator.geninterplevel import translate_as_module
        import marshal
        # the cache key is a seeded digest of the marshalled code object
        scramble = md5.new(cls.seed)
        scramble.update(marshal.dumps(self.code))
        key = scramble.hexdigest()
        initfunc = cls.known_code.get(key)
        if not initfunc:
            # try to get it from file
            name = key
            if self.filename:
                prename = os.path.splitext(os.path.basename(self.filename))[0]
            else:
                prename = 'zznoname'
            name = "%s_%s" % (prename, name)
            try:
                __import__("pypy._cache."+name)
            except ImportError, x:
                # print x
                pass
            else:
                # importing the cache file registered it in known_code
                initfunc = cls.known_code[key]
        if not initfunc:
            # build it and put it into a file
            initfunc, newsrc = translate_as_module(
                self.code, self.filename, self.modname)
            fname = cls.cache_path.join(name+".py").strpath
            f = file(get_tmp_file_name(fname), "w")
            print >> f, """\
# self-destruct on double-click:
if __name__ == "__main__":
    from pypy import _cache
    import os
    namestart = os.path.join(os.path.split(_cache.__file__)[0], '%s')
    for ending in ('.py', '.pyc', '.pyo'):
        try:
            os.unlink(namestart+ending)
        except os.error:
            pass""" % name
            print >> f
            print >> f, newsrc
            print >> f, "from pypy._cache import known_code"
            print >> f, "known_code[%r] = %s" % (key, initfunc.__name__)
            f.close()
            rename_tmp_to_eventual_file_name(fname)
        w_glob = initfunc(space)
        return w_glob
    build_applevelinterp_dict = classmethod(build_applevelinterp_dict)

    _setup_done = False

    def _setup(cls):
        """NOT_RPYTHON"""
        lp = py.path.local
        import pypy, os
        p = lp(pypy.__file__).new(basename='_cache').ensure(dir=1)
        cls.cache_path = p
        ini = p.join('__init__.py')
        try:
            if not ini.check():
                raise ImportError  # don't import if only a .pyc file left!!!
            from pypy._cache import known_code, \
                 GI_VERSION_RENDERED
        except ImportError:
            GI_VERSION_RENDERED = 0
        from pypy.translator.geninterplevel import GI_VERSION
        cls.seed = md5.new(str(GI_VERSION)).digest()
        if GI_VERSION != GI_VERSION_RENDERED or GI_VERSION is None:
            # version mismatch: wipe the whole cache directory
            for pth in p.listdir():
                try:
                    pth.remove()
                except: pass
            f = file(get_tmp_file_name(str(ini)), "w")
            f.write("""\
# This folder acts as a cache for code snippets which have been
# compiled by compile_as_module().
# It will get a new entry for every piece of code that has
# not been seen, yet.
#
# Caution! Only the code snippet is checked. If something
# is imported, changes are not detected. Also, changes
# to geninterplevel or gateway are also not checked.
# Exception: There is a checked version number in geninterplevel.py
#
# If in doubt, remove this file from time to time.

GI_VERSION_RENDERED = %r

known_code = {}

# self-destruct on double-click:
def harakiri():
    import pypy._cache as _c
    import py
    lp = py.path.local
    for pth in lp(_c.__file__).dirpath().listdir():
        try:
            pth.remove()
        except: pass

if __name__ == "__main__":
    harakiri()

del harakiri
""" % GI_VERSION)
            f.close()
            rename_tmp_to_eventual_file_name(str(ini))
        import pypy._cache
        cls.known_code = pypy._cache.known_code
        cls._setup_done = True
    _setup = classmethod(_setup)
def gethostname(_cache=[]):
    """Return the local host name (cached), or '' if it cannot be found.

    NOTE: the mutable default '_cache' is the intentional memoization
    slot, shared across all calls.
    """
    if not _cache:
        try:
            import socket
            hostname = socket.gethostname()
        except Exception:
            # narrowed from a bare 'except:' so that KeyboardInterrupt
            # and SystemExit are no longer swallowed here
            hostname = ''
        _cache.append(hostname)
    return _cache[0]
def get_tmp_file_name(fname):
    """Return a host- and process-specific temporary name for 'fname'."""
    suffix = '~%s~%d' % (gethostname(), os.getpid())
    return fname + suffix
def rename_tmp_to_eventual_file_name(fname):
    # generated files are first written to the host- and process-specific
    # file 'tmpname', and then atomically moved to their final 'fname'
    # to avoid problems if py.py is started several times in parallel
    tmpname = get_tmp_file_name(fname)
    try:
        os.rename(tmpname, fname)
    except (OSError, IOError):
        os.unlink(fname)    # necessary on Windows
        os.rename(tmpname, fname)
# ____________________________________________________________

def appdef(source, applevel=ApplevelClass):
    """ NOT_RPYTHON: build an app-level helper function, like for example:
    myfunc = appdef('''myfunc(x, y):
                           return x+y
                    ''')
    """
    if not isinstance(source, str):
        # assume a function object: grab and dedent its source code
        source = str(py.code.Source(source).strip())
    assert source.startswith("def "), "can only transform functions"
    source = source[4:]
    # split off the function name from '(args...): body'
    p = source.find('(')
    assert p >= 0
    funcname = source[:p].strip()
    source = source[p:]
    return applevel("def %s%s\n" % (funcname, source)).interphook(funcname)
# old names, kept so that existing callers keep working:
applevel = ApplevelClass   # backward compatibility
app2interp = appdef        # backward compatibility
class applevel_temp(ApplevelClass):
    # uncached variant for testing: rebuilds the applevel dict each call
    hidden_applevel = False
    def getwdict(self, space):   # no cache
        return build_applevel_dict(self, space)
class applevelinterp_temp(ApplevelClass):
    # uncached geninterp variant for testing
    hidden_applevel = False
    def getwdict(self, space):   # no cache
        return PyPyCacheDir.build_applevelinterp_dict(self, space)
# app2interp_temp is used for testing mainly
def app2interp_temp(func, applevel_temp=applevel_temp):
    """ NOT_RPYTHON """
    return appdef(func, applevel_temp)
| Python |
from pypy.interpreter.error import OperationError
from pypy.interpreter import function, pycode, pyframe
from pypy.interpreter.baseobjspace import Wrappable
from pypy.interpreter.mixedmodule import MixedModule
from pypy.tool.uid import uid
class Cell(Wrappable):
    "A simple container for a wrapped value."

    def __init__(self, w_value=None):
        # w_value is None while the cell is empty
        self.w_value = w_value

    def clone(self):
        return self.__class__(self.w_value)

    def empty(self):
        return self.w_value is None

    def get(self):
        if self.w_value is None:
            raise ValueError, "get() from an empty cell"
        return self.w_value

    def set(self, w_value):
        self.w_value = w_value

    def delete(self):
        if self.w_value is None:
            raise ValueError, "delete() on an empty cell"
        self.w_value = None

    def descr__eq__(self, space, w_other):
        other = space.interpclass_w(w_other)
        if not isinstance(other, Cell):
            return space.w_False
        return space.eq(self.w_value, other.w_value)

    def descr__reduce__(self, space):
        # pickle support, via the _pickle_support module
        w_mod = space.getbuiltinmodule('_pickle_support')
        mod = space.interp_w(MixedModule, w_mod)
        new_inst = mod.get('cell_new')
        if self.w_value is None:    #when would this happen?
            return space.newtuple([new_inst, space.newtuple([])])
        tup = [self.w_value]
        return space.newtuple([new_inst, space.newtuple([]),
                               space.newtuple(tup)])

    def descr__setstate__(self, space, w_state):
        self.w_value = space.getitem(w_state, space.wrap(0))

    def __repr__(self):
        """ representation for debugging purposes """
        if self.w_value is None:
            content = ""
        else:
            content = repr(self.w_value)
        return "<%s(%s) at 0x%x>" % (self.__class__.__name__,
                                     content, uid(self))
# keep references to the base-class implementations before the
# __extend__ class below overrides them on PyFrame:
super_initialize_frame_scopes = pyframe.PyFrame.initialize_frame_scopes
super_fast2locals = pyframe.PyFrame.fast2locals
super_locals2fast = pyframe.PyFrame.locals2fast
class __extend__(pyframe.PyFrame):
    """This class enhances a standard frame with nested scope abilities,
    i.e. handling of cell/free variables."""

    # Cell Vars:
    #     my local variables that are exposed to my inner functions
    # Free Vars:
    #     variables coming from a parent function in which i'm nested
    # 'closure' is a list of Cell instances: the received free vars.

    cells = None

    def initialize_frame_scopes(self, closure):
        super_initialize_frame_scopes(self, closure)
        code = self.pycode
        ncellvars = len(code.co_cellvars)
        nfreevars = len(code.co_freevars)
        if not nfreevars:
            if not ncellvars:
                return            # no self.cells needed - fast path
            if closure is None:
                closure = []
        elif closure is None:
            space = self.space
            raise OperationError(space.w_TypeError,
                                 space.wrap("directly executed code object "
                                            "may not contain free variables"))
        if len(closure) != nfreevars:
            raise ValueError("code object received a closure with "
                             "an unexpected number of free variables")
        # fresh cells for the cellvars, followed by the received free cells
        self.cells = [Cell() for i in range(ncellvars)] + closure

    def getclosure(self):
        if self.cells is None:
            return None
        ncellvars = len(self.pycode.co_cellvars)  # not part of the closure
        return self.cells[ncellvars:]

    def _getcells(self):
        return self.cells

    def _setcellvars(self, cellvars):
        ncellvars = len(self.pycode.co_cellvars)
        if len(cellvars) != ncellvars:
            raise OperationError(self.space.w_TypeError,
                                 self.space.wrap("bad cellvars"))
        if self.cells is not None:
            self.cells[:ncellvars] = cellvars

    def fast2locals(self):
        super_fast2locals(self)
        # cellvars are values exported to inner scopes
        # freevars are values coming from outer scopes
        freevarnames = self.pycode.co_cellvars + self.pycode.co_freevars
        for i in range(len(freevarnames)):
            name = freevarnames[i]
            cell = self.cells[i]
            try:
                w_value = cell.get()
            except ValueError:
                pass    # empty cell: leave the name out of w_locals
            else:
                w_name = self.space.wrap(name)
                self.space.setitem(self.w_locals, w_name, w_value)

    def locals2fast(self):
        super_locals2fast(self)
        freevarnames = self.pycode.co_cellvars + self.pycode.co_freevars
        for i in range(len(freevarnames)):
            name = freevarnames[i]
            cell = self.cells[i]
            w_name = self.space.wrap(name)
            try:
                w_value = self.space.getitem(self.w_locals, w_name)
            except OperationError, e:
                # a name missing from w_locals is fine; anything else
                # is propagated
                if not e.match(self.space, self.space.w_KeyError):
                    raise
            else:
                cell.set(w_value)

    def init_cells(self):
        if self.cells is None:
            return
        # copy the arguments that are also cellvars into fresh cells
        args_to_copy = self.pycode._args_as_cellvars
        for i in range(len(args_to_copy)):
            argnum = args_to_copy[i]
            self.cells[i] = Cell(self.fastlocals_w[argnum])

    def getfreevarname(self, index):
        freevarnames = self.pycode.co_cellvars + self.pycode.co_freevars
        return freevarnames[index]

    def iscellvar(self, index):
        # is the variable given by index a cell or a free var?
        return index < len(self.pycode.co_cellvars)

    ### extra opcodes ###

    def LOAD_CLOSURE(f, varindex, *ignored):
        # nested scopes: access the cell object
        cell = f.cells[varindex]
        w_value = f.space.wrap(cell)
        f.pushvalue(w_value)

    def LOAD_DEREF(f, varindex, *ignored):
        # nested scopes: access a variable through its cell object
        cell = f.cells[varindex]
        try:
            w_value = cell.get()
        except ValueError:
            # empty cell: raise the appropriate app-level error
            varname = f.getfreevarname(varindex)
            if f.iscellvar(varindex):
                message = "local variable '%s' referenced before assignment"%varname
                w_exc_type = f.space.w_UnboundLocalError
            else:
                message = ("free variable '%s' referenced before assignment"
                           " in enclosing scope"%varname)
                w_exc_type = f.space.w_NameError
            raise OperationError(w_exc_type, f.space.wrap(message))
        else:
            f.pushvalue(w_value)

    def STORE_DEREF(f, varindex, *ignored):
        # nested scopes: access a variable through its cell object
        w_newvalue = f.popvalue()
        #try:
        cell = f.cells[varindex]
        #except IndexError:
        #    import pdb; pdb.set_trace()
        #    raise
        cell.set(w_newvalue)

    def MAKE_CLOSURE(f, numdefaults, *ignored):
        w_codeobj = f.popvalue()
        codeobj = f.space.interp_w(pycode.PyCode, w_codeobj)
        if codeobj.magic >= 0xa0df281:    # CPython 2.5 AST branch merge
            # since 2.5, the free cells come in one wrapped tuple
            w_freevarstuple = f.popvalue()
            freevars = [f.space.interp_w(Cell, cell)
                        for cell in f.space.unpacktuple(w_freevarstuple)]
        else:
            # pre-2.5: the free cells are pushed individually on the stack
            nfreevars = len(codeobj.co_freevars)
            freevars = [f.space.interp_w(Cell, f.popvalue())
                        for i in range(nfreevars)]
            freevars.reverse()
        defaultarguments = [f.popvalue() for i in range(numdefaults)]
        defaultarguments.reverse()
        fn = function.Function(f.space, codeobj, f.w_globals,
                               defaultarguments, freevars)
        f.pushvalue(f.space.wrap(fn))
| Python |
from pypy.interpreter.baseobjspace import Wrappable
class Ellipsis(Wrappable):
    # interp-level implementation of the app-level 'Ellipsis' singleton
    # (the name deliberately shadows the CPython builtin)
    def __init__(self, space):
        self.space = space
    def descr__repr__(self):
        return self.space.wrap('Ellipsis')
class NotImplemented(Wrappable):
    # interp-level implementation of the app-level 'NotImplemented'
    # singleton (the name deliberately shadows the CPython builtin)
    def __init__(self, space):
        self.space = space
    def descr__repr__(self):
        return self.space.wrap('NotImplemented')
| Python |
"""
Miscellaneous utilities.
"""
import types
from pypy.rlib.rarithmetic import r_uint
class RootStack:
    """Common abstract base of the stack implementations below."""
class Stack(RootStack):
    """Utility class implementing a stack."""

    _annspecialcase_ = "specialize:ctr_location" # polymorphic

    def __init__(self):
        self.items = []

    def clone(self):
        # copy the stack; items that expose a clone() method are cloned too
        s = self.__class__()
        for item in self.items:
            try:
                item = item.clone()
            except AttributeError:
                pass
            s.push(item)
        return s

    def push(self, item):
        self.items.append(item)

    def pop(self):
        return self.items.pop()

    def drop(self, n):
        if n > 0:
            del self.items[-n:]

    def top(self, position=0):
        """'position' is 0 for the top of the stack, 1 for the item below,
        and so on.  It must not be negative."""
        if position < 0:
            raise ValueError, 'negative stack position'
        if position >= len(self.items):
            raise IndexError, 'not enough entries in stack'
        # items[~position] is items[-position-1], i.e. counted from the top
        return self.items[~position]

    def set_top(self, value, position=0):
        """'position' is 0 for the top of the stack, 1 for the item below,
        and so on.  It must not be negative."""
        if position < 0:
            raise ValueError, 'negative stack position'
        if position >= len(self.items):
            raise IndexError, 'not enough entries in stack'
        self.items[~position] = value

    def depth(self):
        return len(self.items)

    def empty(self):
        return len(self.items) == 0
class FixedStack(RootStack):
    # Pre-allocated, fixed-size stack; self.ptr (an r_uint) points just
    # past the last used slot.
    _annspecialcase_ = "specialize:ctr_location" # polymorphic

    # unfortunately, we have to re-do everything
    def __init__(self):
        pass

    def setup(self, stacksize):
        self.ptr = r_uint(0) # we point after the last element
        self.items = [None] * stacksize

    def clone(self):
        # this is only needed if we support flow space
        s = self.__class__()
        s.setup(len(self.items))
        for item in self.items[:self.ptr]:
            try:
                item = item.clone()
            except AttributeError:
                pass
            s.push(item)
        return s

    def push(self, item):
        ptr = self.ptr
        self.items[ptr] = item
        self.ptr = ptr + 1

    def pop(self):
        ptr = self.ptr - 1
        ret = self.items[ptr]   # you get OverflowError if the stack is empty
        # clear the slot so the reference can be collected
        self.items[ptr] = None
        self.ptr = ptr
        return ret

    def drop(self, n):
        # pop-and-clear the top n entries, releasing the references
        while n > 0:
            n -= 1
            self.ptr -= 1
            self.items[self.ptr] = None

    def top(self, position=0):
        # for a fixed stack, we assume correct indices
        return self.items[self.ptr + ~position]

    def set_top(self, value, position=0):
        # for a fixed stack, we assume correct indices
        self.items[self.ptr + ~position] = value

    def depth(self):
        return self.ptr

    def empty(self):
        return not self.ptr
class InitializedClass(type):
    """NOT_RPYTHON.  Metaclass letting a class initialize itself (or its
    subclasses) through an __initclass__() hook, which is invoked here
    with the freshly created class as its argument."""

    def __init__(self, name, bases, dict):
        super(InitializedClass, self).__init__(name, bases, dict)
        for ancestor in self.__mro__:
            hook = ancestor.__dict__.get('__initclass__')
            if isinstance(hook, types.FunctionType):
                hook(self)   # call it as a class method
class RwDictProxy(object):
    """NOT_RPYTHON.  Mutable stand-in for 'cls.__dict__'.

    New-style classes expose a read-only mapping proxy as their
    __dict__; this wrapper routes item assignment through setattr()
    while reads go straight to the underlying class dictionary."""

    def __init__(self, cls):
        self.cls = cls

    def __getitem__(self, name):
        clsdict = self.cls.__dict__
        return clsdict[name]

    def __setitem__(self, name, obj):
        setattr(self.cls, name, obj)

    def __contains__(self, name):
        return name in self.cls.__dict__

    def items(self):
        return self.cls.__dict__.items()
class ThreadLocals:
    """Pseudo thread-local storage, for 'space.threadlocals'.

    Not actually thread-local at all: the PyPy 'thread' module is
    expected to patch 'space.threadlocals' with a real implementation
    once it is initialized.
    """
    _value = None   # single shared slot standing in for per-thread state

    def getvalue(self):
        return self._value

    def setvalue(self, value):
        self._value = value

    def getmainthreadvalue(self):
        # without real threads the "main thread" value is the only value
        return self._value

    def getGIL(self):
        return None    # XXX temporary hack!
class Action(object):
    """Abstract base class for actions that must be performed regularly,
    every Nth bytecode (as selected by sys.setcheckinterval())."""

    # actions with repeat=True stay in the list and run again next round;
    # one-shot actions (the default) are removed before being performed
    repeat = False

    def perform(self):
        """To be overridden."""

    def perform_actions(actionlist):
        # walk the list in place: keep repeating actions, drop one-shot
        # ones, and perform each action after the bookkeeping
        index = 0
        while index < len(actionlist):
            action = actionlist[index]
            if action.repeat:
                index += 1            # keep action
            else:
                del actionlist[index]
            action.perform()
    perform_actions = staticmethod(perform_actions)
| Python |
# empty
| Python |
"""Parser for future statements
"""
from pypy.interpreter.pyparser.error import SyntaxError
from pypy.interpreter.astcompiler import ast
def is_future(stmt):
    """Return 1 if statement is a well-formed future statement, else 0.

    Only ast.From nodes importing from "__future__" qualify; the result
    is an int (not bool) to match the surrounding RPython-ish style.
    """
    if not isinstance(stmt, ast.From):
        return 0
    if stmt.modname == "__future__":
        return 1
    return 0
class FutureParser(ast.ASTVisitor):
    """Collects the __future__ features enabled at the top of a module
    and marks each well-placed future statement with valid_future=1."""
    features = ("nested_scopes", "generators", "division", "with_statement")
    def __init__(self):
        self.found = {} # set of enabled feature names
    def visitModule(self, node):
        body = node.node
        assert isinstance(body, ast.Stmt)
        # future imports are only valid before any other statement;
        # once something else is seen, later ones are marked invalid
        seen_other = False
        for child in body.nodes:
            if not self.check_stmt(child, seen_other):
                seen_other = True
    def check_stmt(self, stmt, invalid):
        """Record a valid future statement; return 1 iff it was one."""
        if isinstance(stmt, ast.From):
            stmt.valid_future = 0
            if invalid:
                return 0
            if is_future(stmt):
                assert isinstance(stmt, ast.From)
                for name, asname in stmt.names:
                    if name in self.features:
                        self.found[name] = 1
                    elif name == "*":
                        raise SyntaxError(
                            "future statement does not support import *",
                            filename=stmt.filename,
                            lineno=stmt.lineno)
                    else:
                        raise SyntaxError(
                            "future feature %s is not defined" % name,
                            filename=stmt.filename,
                            lineno=stmt.lineno)
                stmt.valid_future = 1
                return 1
        return 0
    def get_features(self):
        """Return list of features enabled by future statements"""
        return self.found.keys()
class BadFutureParser(ast.ASTVisitor):
    """Check for invalid future statements.

    FutureParser marks the well-placed ones with valid_future; any
    __future__ import left unmarked appears after other statements and
    is rejected here.
    """
    def visitModule(self, node):
        body = node.node
        assert isinstance(body, ast.Stmt)
        for child in body.nodes:
            if isinstance(child, ast.From):
                if child.valid_future:
                    continue        # already validated, nothing to do
                self.visitFrom(child)
            else:
                self.default(child)
    def visitFrom(self, node):
        # imports from other modules are none of our business
        if node.modname != "__future__":
            return
        raise SyntaxError("from __future__ imports must occur at the beginning of the file",
                          filename=node.filename,
                          lineno=node.lineno)
def find_futures(node):
    """Return the list of __future__ features enabled in `node`'s module,
    raising SyntaxError for misplaced or unknown future statements."""
    collector = FutureParser()
    checker = BadFutureParser()
    node.accept(collector)
    node.accept(checker)
    return collector.get_features()
if __name__ == "__main__":
import sys
from pypy.interpreter.astcompiler import parseFile
for file in sys.argv[1:]:
print file
tree = parseFile(file)
v = FutureParser()
tree.accept(v)
print v.found
print
| Python |
"""Generate ast module from specification
This script generates the ast module from a simple specification,
which makes it easy to accomodate changes in the grammar. This
approach would be quite reasonable if the grammar changed often.
Instead, it is rather complex to generate the appropriate code. And
the Node interface has changed more often than the grammar.
"""
# This is a heavily modified version from the original that adds a
# visit method to each node
import fileinput
import getopt
import re
import sys
from StringIO import StringIO
# Node specification file consumed by parse_spec() (see main()).
SPEC = "ast.txt"
# Separator used when joining generated argument/child lists.
COMMA = ", "
def strip_default(arg):
    """Return the argname from an 'arg = default' string."""
    eq = arg.find('=')
    if eq < 0:
        # no default present: the whole string is the name
        return arg
    return arg[:eq].strip()
# Argument "property" codes, decoded from suffixes on argument names in
# ast.txt (see NodeInfo.get_argprops).  Ordered so that max() selects the
# most complex kind present among a node's arguments.
P_NODE = 1      # no suffix: a plain child node
P_OTHER = 2     # '*'
P_STR = 3       # '*str'
P_INT = 4       # '*int'
P_STR_LIST = 5  # '*[str]'
P_INT_LIST = 6  # '*[int]'
P_WRAPPED = 7   # '%': already-wrapped app-level object
P_NESTED = 8    # '!': list of child nodes
P_NONE = 9      # '&': child node or None
class NodeInfo:
    """Each instance describes a specific AST node"""
    # Parsed from one "Name(Parent): args" line of ast.txt; knows how to
    # emit the Python source for the node class, its app-level wrappers
    # (descr_*_new/accept/mutate, fget_*/fset_*) and its TypeDef.
    def __init__(self, name, args, parent=None):
        self.name = name
        self.args = args.strip()
        self.argnames = self.get_argnames()
        self.argprops = self.get_argprops()
        self.nargs = len(self.argnames)
        self.init = []               # extra source lines for the generated __init__
        self.applevel_new = []       # hand-written descr_<name>_new override, if any
        self.applevel_mutate = []    # hand-written descr_<name>_mutate override, if any
        self.flatten_nodes = {}      # attr -> custom getChildNodes logic lines
        self.mutate_nodes = {}       # attr -> custom mutate() logic lines
        self.additional_methods = {} # methname -> extra method source lines
        self.parent = parent
    def setup_parent(self, classes):
        # resolve the parent's name (string from the spec) to its NodeInfo
        if self.parent:
            self.parent = classes[self.parent]
        else:
            self.parent = Node_NodeInfo
    def get_argnames(self):
        # split the comma-separated arg list, dropping "= default" parts
        args = self.args
        return [strip_default(arg.strip())
                for arg in args.split(',') if arg]
    def get_argprops(self):
        """Each argument can have a property like '*' or '!'
        XXX This method modifies the argnames in place!
        """
        d = {}
        hardest_arg = P_NODE
        for i in range(len(self.argnames)):
            arg = self.argnames[i]
            if arg.endswith('*'):
                arg = self.argnames[i] = arg[:-1]
                d[arg] = P_OTHER
                hardest_arg = max(hardest_arg, P_OTHER)
            elif arg.endswith('*int'):
                arg = self.argnames[i] = arg[:-4]
                d[arg] = P_INT
                hardest_arg = max(hardest_arg, P_INT)
            elif arg.endswith('*str'):
                arg = self.argnames[i] = arg[:-4]
                d[arg] = P_STR
                hardest_arg = max(hardest_arg, P_STR)
            elif arg.endswith('*[int]'):
                arg = self.argnames[i] = arg[:-6]
                d[arg] = P_INT_LIST
                hardest_arg = max(hardest_arg, P_INT_LIST)
            elif arg.endswith('*[str]'):
                arg = self.argnames[i] = arg[:-6]
                d[arg] = P_STR_LIST
                hardest_arg = max(hardest_arg, P_STR_LIST)
            elif arg.endswith('%'):
                arg = self.argnames[i] = arg[:-1]
                d[arg] = P_WRAPPED
                hardest_arg = max(hardest_arg, P_WRAPPED)
            elif arg.endswith('!'):
                arg = self.argnames[i] = arg[:-1]
                d[arg] = P_NESTED
                hardest_arg = max(hardest_arg, P_NESTED)
            elif arg.endswith('&'):
                arg = self.argnames[i] = arg[:-1]
                d[arg] = P_NONE
                hardest_arg = max(hardest_arg, P_NONE)
            else:
                d[arg] = P_NODE
        self.hardest_arg = hardest_arg
        # once the suffixes are decoded, strip them from the raw arg string
        if hardest_arg > P_NODE:
            self.args = self.args.replace('*str', '')
            self.args = self.args.replace('*int', '')
            self.args = self.args.replace('*[str]', '')
            self.args = self.args.replace('*[int]', '')
            self.args = self.args.replace('*', '')
            self.args = self.args.replace('!', '')
            self.args = self.args.replace('&', '')
            self.args = self.args.replace('%', '')
        return d
    def get_initargs(self):
        # full __init__ argument list: inherited args first, then our own
        if self.parent.args and self.args:
            args = self.parent.args +","+ self.args
        else:
            args = self.parent.args or self.args
        return args
    def gen_source(self):
        # assemble the complete generated source for this node as a string
        buf = StringIO()
        print >> buf, "class %s(%s):" % (self.name, self.parent.name)
        self._gen_init(buf)
        print >> buf
        self._gen_getChildren(buf)
        print >> buf
        self._gen_getChildNodes(buf)
        print >> buf
        self._gen_additional_methods(buf)
        self._gen_repr(buf)
        print >> buf
        self._gen_visit(buf)
        print >> buf
        self._gen_mutate(buf)
        print >> buf
        self._gen_attrs(buf)
        print >> buf
        self._gen_new(buf)
        print >> buf
        self._gen_typedef(buf)
        buf.seek(0, 0)
        return buf.read()
    def _gen_init(self, buf):
        # emit the generated class's __init__
        initargs = self.get_initargs()
        if initargs:
            print >> buf, "    def __init__(self, %s, lineno=-1):" % initargs
        else:
            print >> buf, "    def __init__(self, lineno=-1):"
        if self.parent.args:
            print >> buf, "        %s.__init__(self, %s, lineno)" % (self.parent.name, self.parent.args)
        else:
            print >> buf, "        Node.__init__(self, lineno)"
        if self.argnames:
            for name in self.argnames:
                if name in self.flatten_nodes:
                    print >>buf, "        %s" % self.flatten_nodes[name][0].rstrip()
                print >> buf, "        self.%s = %s" % (name, name)
        if self.init:
            print >> buf, "".join(["        " + line for line in self.init])
    def _gen_new(self, buf):
        # emit the app-level constructor descr_<name>_new
        if self.applevel_new:
            # a hand-written constructor was supplied in the spec overrides
            print >> buf, ''.join(self.applevel_new)
            return
        args = self.get_initargs()
        argprops = self.argprops
        if args:
            w_args = ['w_%s' % strip_default(arg.strip())
                      for arg in args.split(',') if arg]
            print >> buf, "def descr_%s_new(space, w_subtype, %s, lineno=-1):" % (self.name, ', '.join(w_args))
        else:
            w_args = []
            print >> buf, "def descr_%s_new(space, w_subtype, lineno=-1):" % (self.name,)
        print >> buf, "    self = space.allocate_instance(%s, w_subtype)" % (self.name,)
        # w_args = ['w_%s' % strip_default(arg.strip()) for arg in self.args.split(',') if arg]
        for w_arg in w_args:
            argname = w_arg[2:]
            prop = argprops[argname]
            # unwrap each argument according to its property code
            if prop == P_NONE:
                print >> buf, "    %s = space.interp_w(Node, %s, can_be_None=True)" % (argname, w_arg)
            elif prop == P_NODE:
                print >> buf, "    %s = space.interp_w(Node, %s, can_be_None=False)" % (argname, w_arg)
            elif prop == P_NESTED:
                print >> buf, "    %s = [space.interp_w(Node, w_node) for w_node in space.unpackiterable(%s)]" % (argname, w_arg)
            elif prop == P_STR:
                print >> buf, "    %s = space.str_w(%s)" % (argname, w_arg)
            elif prop == P_INT:
                print >> buf, "    %s = space.int_w(%s)" % (argname, w_arg)
            elif prop == P_STR_LIST:
                print >> buf, "    %s = [space.str_w(w_str) for w_str in space.unpackiterable(%s)]" % (argname, w_arg)
            elif prop == P_INT_LIST:
                print >> buf, "    %s = [space.int_w(w_int) for w_int in space.unpackiterable(%s)]" % (argname, w_arg)
            elif prop == P_WRAPPED:
                print >> buf, "    # This dummy assingment is auto-generated, astgen.py should be fixed to avoid that"
                print >> buf, "    %s = %s" % (argname, w_arg)
            else:
                raise ValueError("Don't know how to handle property '%s'" % prop)
            print >> buf, "    self.%s = %s" % (argname, argname)
        print >> buf, "    self.lineno = lineno"
        print >> buf, "    return space.wrap(self)"
    def _gen_getChildren(self, buf):
        # emit getChildren(): all arguments, nested lists flattened
        print >> buf, "    def getChildren(self):"
        print >> buf, '        "NOT_RPYTHON"'
        if len(self.argnames) == 0:
            print >> buf, "        return []"
        else:
            if self.hardest_arg < P_NESTED:
                clist = COMMA.join(["self.%s" % c
                                    for c in self.argnames])
                if self.nargs == 1:
                    print >> buf, "        return %s," % clist
                else:
                    print >> buf, "        return %s" % clist
            else:
                if len(self.argnames) == 1:
                    name = self.argnames[0]
                    if self.argprops[name] == P_NESTED:
                        print >> buf, "        return tuple(flatten(self.%s))" % name
                    else:
                        print >> buf, "        return (self.%s,)" % name
                else:
                    print >> buf, "        children = []"
                    template = "        children.%s(%sself.%s%s)"
                    for name in self.argnames:
                        if self.argprops[name] == P_NESTED:
                            print >> buf, template % ("extend", "flatten(",
                                                      name, ")")
                        else:
                            print >> buf, template % ("append", "", name, "")
                    print >> buf, "        return tuple(children)"
    def _gen_getChildNodes(self, buf):
        # emit getChildNodes(): only the node-valued arguments
        print >> buf, "    def getChildNodes(self):"
        if len(self.argnames) == 0:
            print >> buf, "        return []"
        else:
            if self.hardest_arg < P_NESTED:
                clist = ["self.%s" % c
                         for c in self.argnames
                         if self.argprops[c] == P_NODE]
                if len(clist) == 0:
                    print >> buf, "        return []"
                elif len(clist) == 1:
                    print >> buf, "        return [%s,]" % clist[0]
                else:
                    print >> buf, "        return [%s]" % COMMA.join(clist)
            else:
                print >> buf, "        nodelist = []"
                template = "        nodelist.%s(%sself.%s%s)"
                for name in self.argnames:
                    if self.argprops[name] == P_NONE:
                        tmp = ("        if self.%s is not None:\n"
                               "            nodelist.append(self.%s)")
                        print >> buf, tmp % (name, name)
                    elif self.argprops[name] == P_NESTED:
                        if name not in self.flatten_nodes:
                            print >> buf, template % ("extend", "",
                                                      name, "")
                        else:
                            # spec-provided custom flattening logic
                            flat_logic = self.flatten_nodes[name]
                            while not flat_logic[-1].strip():
                                flat_logic.pop()
                            flat_logic[-1] = flat_logic[-1].rstrip()
                            print >> buf, "".join(["        " + line for line in flat_logic])
                    elif self.argprops[name] == P_NODE:
                        print >> buf, template % ("append", "", name, "")
                print >> buf, "        return nodelist"
    def _gen_repr(self, buf):
        # emit __repr__ showing all argument values
        print >> buf, "    def __repr__(self):"
        if self.argnames:
            fmt = COMMA.join(["%s"] * self.nargs)
            if '(' in self.args:
                fmt = '(%s)' % fmt
            vals = ["self.%s.__repr__()" % name for name in self.argnames]
            vals = COMMA.join(vals)
            if self.nargs == 1:
                vals = vals + ","
            print >> buf, '        return "%s(%s)" %% (%s)' % \
                  (self.name, fmt, vals)
        else:
            print >> buf, '        return "%s()"' % self.name
    def _gen_visit(self, buf):
        # emit accept(): dispatch to the visitor's visit<Name> method
        print >> buf, "    def accept(self, visitor):"
        print >> buf, "        return visitor.visit%s(self)" % self.name
    def _gen_insertnodes_func(self, buf):
        # emit helpers for inserting nodes around an existing child
        print >> buf, "    def descr_insert_after(space, self, node, w_added_nodes):"
        print >> buf, "        added_nodes = [space.interp_w(Node, w_node) for w_node in space.unpackiterable(w_added_nodes)]"
        print >> buf, "        index = self.nodes.index(node) + 1"
        print >> buf, "        self.nodes[index:index] = added_nodes"
        print >> buf
        print >> buf, "    def descr_insert_before(space, self, node, w_added_nodes):"
        print >> buf, "        added_nodes = [space.interp_w(Node, w_node) for w_node in space.unpackiterable(w_added_nodes)]"
        print >> buf, "        index = self.nodes.index(node)"
        print >> buf, "        self.nodes[index:index] = added_nodes"
    def _gen_mutate(self, buf):
        # emit mutate(): let the visitor replace each child in place
        print >> buf, "    def mutate(self, visitor):"
        if len(self.argnames) != 0:
            for argname in self.argnames:
                if argname in self.mutate_nodes:
                    # spec-provided custom mutation logic
                    for line in self.mutate_nodes[argname]:
                        if line.strip():
                            print >> buf, '    ' + line
                elif self.argprops[argname] == P_NODE:
                    print >> buf, "        self.%s = self.%s.mutate(visitor)" % (argname,argname)
                elif self.argprops[argname] == P_NONE:
                    print >> buf, "        if self.%s is not None:" % (argname,)
                    print >> buf, "            self.%s = self.%s.mutate(visitor)" % (argname,argname)
                elif self.argprops[argname] == P_NESTED:
                    # children whose mutate() returns None are dropped
                    print >> buf, "        newlist = []"
                    print >> buf, "        for n in self.%s:"%(argname)
                    print >> buf, "            item = n.mutate(visitor)"
                    print >> buf, "            if item is not None:"
                    print >> buf, "                newlist.append(item)"
                    print >> buf, "        self.%s[:] = newlist"%(argname)
        print >> buf, "        return visitor.visit%s(self)" % self.name
    def _gen_fget_func(self, buf, attr, prop ):
        # FGET: emit the wrapping getter for one attribute
        print >> buf, "    def fget_%s( space, self):" % attr
        if prop[attr]==P_WRAPPED:
            print >> buf, "        return self.%s" % attr
        elif prop[attr] in (P_INT,P_STR, P_NODE):
            print >> buf, "        return space.wrap(self.%s)" % attr
        elif prop[attr] in (P_INT_LIST, P_STR_LIST, P_NESTED ):
            print >> buf, "        return space.newlist( [space.wrap(itm) for itm in self.%s] )" % attr
        elif prop[attr]==P_NONE:
            print >> buf, "        if self.%s is None:" % attr
            print >> buf, "            return space.w_None"
            print >> buf, "        else:"
            print >> buf, "            return space.wrap(self.%s)" % attr
        else:
            assert False, "Unkown node type"
    def _gen_fset_func(self, buf, attr, prop ):
        # FSET: emit the unwrapping setter for one attribute
        print >> buf, "    def fset_%s( space, self, w_arg):" % attr
        if prop[attr] == P_WRAPPED:
            print >> buf, "        self.%s = w_arg" % attr
        elif prop[attr] == P_INT:
            print >> buf, "        self.%s = space.int_w(w_arg)" % attr
        elif prop[attr] == P_STR:
            print >> buf, "        self.%s = space.str_w(w_arg)" % attr
        elif prop[attr] == P_INT_LIST:
            print >> buf, "        del self.%s[:]" % attr
            print >> buf, "        for itm in space.unpackiterable(w_arg):"
            print >> buf, "            self.%s.append( space.int_w(itm) )" % attr
        elif prop[attr] == P_STR_LIST:
            print >> buf, "        del self.%s[:]" % attr
            print >> buf, "        for itm in space.unpackiterable(w_arg):"
            print >> buf, "            self.%s.append( space.str_w(itm) )" % attr
        elif prop[attr] == P_NESTED:
            print >> buf, "        del self.%s[:]" % attr
            print >> buf, "        for w_itm in space.unpackiterable(w_arg):"
            print >> buf, "            self.%s.append( space.interp_w(Node, w_itm))" % attr
        elif prop[attr] == P_NONE:
            print >> buf, "        self.%s = space.interp_w(Node, w_arg, can_be_None=True)" % attr
        else: # P_NODE
            print >> buf, "        self.%s = space.interp_w(Node, w_arg, can_be_None=False)" % attr
    def _gen_attrs(self, buf):
        # emit getters/setters for every attribute, unless the spec
        # supplied a hand-written version in additional_methods
        prop = self.argprops
        for attr in self.argnames:
            if "fget_%s" % attr not in self.additional_methods:
                self._gen_fget_func( buf, attr, prop )
            if "fset_%s" % attr not in self.additional_methods:
                self._gen_fset_func( buf, attr, prop )
            if prop[attr] == P_NESTED and attr == 'nodes':
                self._gen_insertnodes_func(buf)
    def _gen_descr_mutate(self, buf):
        # emit the app-level descr_<name>_mutate wrapper
        if self.applevel_mutate:
            print >> buf, ''.join(self.applevel_mutate)
            return
        print >> buf, "def descr_%s_mutate(space, w_self, w_visitor): " % self.name
        for argname in self.argnames:
            if self.argprops[argname] in [P_NODE, P_NONE]:
                print >> buf, '    w_%s = space.getattr(w_self, space.wrap("%s"))' % (argname,argname)
                if self.argprops[argname] == P_NONE:
                    # optional child: skip mutation when it is None
                    indent = '    '
                    print >> buf, '    if not space.is_w(w_%s, space.w_None):' % (argname,)
                else:
                    indent = ''
                print >> buf, indent+'    space.setattr(w_%s, space.wrap("parent"), w_self)' % (argname,)
                print >> buf, indent+'    w_new_%s = space.call_method(w_%s, "mutate", w_visitor)'% (argname,
                                                                                                    argname)
                print >> buf, indent+'    space.setattr(w_self, space.wrap("%s"), w_new_%s)' % ( argname,
                                                                                                argname)
                print >> buf, ""
            elif self.argprops[argname] == P_NESTED:
                print >> buf, '    w_list = space.getattr(w_self, space.wrap("%s"))' % (argname,)
                print >> buf, '    list_w = space.unpackiterable(w_list)'
                print >> buf, '    newlist_w = []'
                print >> buf, '    for w_item in list_w:'
                print >> buf, '        space.setattr(w_item, space.wrap("parent"), w_self)'
                print >> buf, '        w_newitem = space.call_method(w_item, "mutate", w_visitor)'
                print >> buf, '        if not space.is_w(w_newitem, space.w_None):'
                print >> buf, '            newlist_w.append(w_newitem)'
                print >> buf, '    w_newlist = space.newlist(newlist_w)'
                print >> buf, '    space.setattr(w_self, space.wrap("%s"), w_newlist)'%(argname)
        print >> buf, '    return space.call_method(w_visitor, "visit%s", w_self)' % (self.name,)
    def _gen_typedef(self, buf):
        # emit the accept/mutate wrappers and the TypeDef registration
        initargs = [strip_default(arg.strip())
                    for arg in self.get_initargs().split(',') if arg]
        if initargs:
            new_unwrap_spec = ['ObjSpace', 'W_Root'] + ['W_Root'] * len(initargs) + ['int']
        else:
            new_unwrap_spec = ['ObjSpace', 'W_Root', 'int']
        parent_type = "%s.typedef" % self.parent.name
        print >> buf, "def descr_%s_accept( space, w_self, w_visitor):" %self.name
        print >> buf, "    return space.call_method(w_visitor, 'visit%s', w_self)" % self.name
        print >> buf, ""
        # mutate stuff
        self._gen_descr_mutate(buf)
        print >> buf, ""
        print >> buf, "%s.typedef = TypeDef('%s', %s, " % (self.name, self.name, parent_type)
        print >> buf, "    __new__ = interp2app(descr_%s_new, unwrap_spec=[%s])," % (self.name, ', '.join(new_unwrap_spec))
        print >> buf, "    accept=interp2app(descr_%s_accept, unwrap_spec=[ObjSpace, W_Root, W_Root] )," % self.name
        print >> buf, "    mutate=interp2app(descr_%s_mutate, unwrap_spec=[ObjSpace, W_Root, W_Root] )," % self.name
        for attr in self.argnames:
            print >> buf, "    %s=GetSetProperty(%s.fget_%s, %s.fset_%s )," % (attr,self.name,attr,self.name,attr)
            if self.argprops[attr] == P_NESTED and attr == "nodes":
                print >> buf, "    insert_after=interp2app(%s.descr_insert_after.im_func, unwrap_spec=[ObjSpace, %s, Node, W_Root])," % (self.name, self.name)
                print >> buf, "    insert_before=interp2app(%s.descr_insert_before.im_func, unwrap_spec=[ObjSpace, %s, Node, W_Root])," % (self.name, self.name)
        print >> buf, "    )"
        print >> buf, "%s.typedef.acceptable_as_base_class = False" % self.name
    def _gen_additional_methods(self, buf):
        # dump any hand-written methods from the spec overrides verbatim
        for key, value in self.additional_methods.iteritems():
            print >> buf, ''.join(value)
    def gen_base_visit(self, buf):
        # emit the default visit<Name> method for the ASTVisitor base class
        print >> buf, "    def visit%s(self, node):" % self.name
        print >> buf, "        return self.default( node )"
    def gen_print_visit(self, buf):
        # This is a print visitor for application level tests
        print >> buf, "    def visit%s(self, node):" % self.name
        print >> buf, "        print '%s('," % self.name
        for attr in self.argnames:
            if self.argprops[attr] == P_NODE:
                print >> buf, "        node.%s.accept(self)" % attr
                print >> buf, "        print ',',"
            if self.argprops[attr] == P_NONE:
                print >> buf, "        if node.%s: node.%s.accept(self)" % (attr,attr)
                print >> buf, "        print ',',"
            elif self.argprops[attr] == P_NESTED:
                print >> buf, "        for nd in node.%s:" % attr
                print >> buf, "            nd.accept(self)"
                print >> buf, "        print ',',"
            else:
                print >> buf, "        print node.%s,','," % attr
        print >> buf, "        print ')',"
# Implicit root of the hierarchy: the hand-written Node class from the
# prologue; spec classes without an explicit parent hang off it.
Node_NodeInfo = NodeInfo("Node","")
# Recognizers for the override section (after "== OVERRIDES ==") of ast.txt;
# see parse_spec() for how each match switches the parsing mode.
rx_init = re.compile('init\((.*)\):')
rx_flatten_nodes = re.compile('flatten_nodes\((.*)\.(.*)\):')
rx_additional_methods = re.compile('(\\w+)\.(\w+)\((.*?)\):')
rx_descr_news_methods = re.compile('def\s+descr_(\\w+)_new\((.*?)\):')
rx_descr_mutate_methods = re.compile('def\s+descr_(\\w+)_mutate\((.*?)\):')
rx_mutate = re.compile('mutate\((.*)\.(.*)\):')
def parse_spec(file):
    """Parse the ast.txt spec into NodeInfo objects.

    The file has two sections: node declarations ("Name(Parent): args"),
    then, after a "== OVERRIDES ==" marker, hand-written code fragments
    that are attached to the matching NodeInfo.  Returns the NodeInfo
    objects sorted by class name.
    """
    classes = {}
    cur = None     # NodeInfo currently receiving override lines
    kind = None    # which override section we are inside
    fiter = fileinput.input(file)
    # first section: one node declaration per line
    for line in fiter:
        if line.startswith("== OVERRIDES =="):
            break
        comment = line.strip().startswith('#')
        if comment:
            continue
        # a normal entry
        try:
            name, args = line.split(':')
        except ValueError:
            continue
        if "(" in name:
            name, parent = name.split("(")
            parent = parent[:-1]
        else:
            parent = None
        classes[name] = NodeInfo(name, args, parent)
    # second section: override fragments; each rx_* header switches the
    # mode, every other line is appended to the current fragment
    for line in fiter:
        mo = None
        mo = rx_init.match(line)
        if mo:
            kind = 'init'
            # some extra code for a Node's __init__ method
            name = mo.group(1)
            cur = classes[name]
            continue
        mo = rx_flatten_nodes.match(line)
        if mo:
            kind = 'flatten_nodes'
            # special case for getChildNodes flattening
            name = mo.group(1)
            attr = mo.group(2)
            cur = classes[name]
            _cur_ = attr
            cur.flatten_nodes[attr] = []
            flatten_expect_comment = True
            continue
        mo = rx_mutate.match(line)
        if mo:
            kind = 'mutate'
            # special case for getChildNodes flattening
            name = mo.group(1)
            attr = mo.group(2)
            cur = classes[name]
            _cur_ = attr
            cur.mutate_nodes[attr] = []
            continue
        mo = rx_additional_methods.match(line)
        if mo:
            kind = 'additional_method'
            name = mo.group(1)
            methname = mo.group(2)
            params = mo.group(3)
            cur = classes[name]
            _cur_ = methname
            cur.additional_methods[_cur_] = ['    def %s(%s):\n' % (methname, params)]
            continue
        mo = rx_descr_news_methods.match(line)
        if mo:
            kind = 'applevel_new'
            name = mo.group(1)
            cur = classes[name]
            cur.applevel_new = [mo.group(0) + '\n']
            continue
        mo = rx_descr_mutate_methods.match(line)
        if mo:
            kind = 'applevel_mutate'
            name = mo.group(1)
            cur = classes[name]
            cur.applevel_mutate = [mo.group(0) + '\n']
            continue
        if kind == 'init':
            # some code for the __init__ method
            cur.init.append(line)
        elif kind == 'flatten_nodes':
            # the first line of a flatten_nodes block must be a comment
            if flatten_expect_comment:
                assert line.strip().startswith("#")
                flatten_expect_comment=False
            cur.flatten_nodes[_cur_].append(line)
        elif kind == 'mutate':
            cur.mutate_nodes[_cur_].append(line)
        elif kind == 'additional_method':
            cur.additional_methods[_cur_].append(' '*4 + line)
        elif kind == 'applevel_new':
            cur.applevel_new.append(line)
        elif kind == 'applevel_mutate':
            cur.applevel_mutate.append(line)
    # wire up parent NodeInfo references now that every class is known
    for node in classes.values():
        node.setup_parent(classes)
    return sorted(classes.values(), key=lambda n: n.name)
ASTVISITORCLASS='''
class ASTVisitor(object):
"""This is a visitor base class used to provide the visit
method in replacement of the former visitor.visit = walker.dispatch
It could also use to identify base type for visit arguments of AST nodes
"""
def default(self, node):
for child in node.getChildNodes():
child.accept(self)
def visitExpression(self, node):
return self.default(node)
def visitEmptyNode(self, node):
return self.default(node)
'''
def gen_ast_visitor(classes):
    # Emit (to stdout) the ASTVisitor base class followed by one default
    # visitXxx method per node class.
    print ASTVISITORCLASS
    buf = StringIO()
    for info in classes:
        info.gen_base_visit(buf)
    print buf.getvalue()
def gen_print_visitor(classes, f):
    # Emit, into file object `f`, a standalone visitor module: the
    # ASTVisitor base class plus an ASTPrintVisitor that prints a nested
    # textual rendering of the tree (used by application-level tests).
    print >>f, ASTVISITORCLASS
    buf = StringIO()
    for info in classes:
        info.gen_base_visit(buf)
    print >>f, buf.getvalue()
    print >>f, "class ASTPrintVisitor(ASTVisitor):"
    buf = StringIO()
    for info in classes:
        info.gen_print_visit(buf)
    print >>f, buf.getvalue()
def main():
    # Emit the complete generated ast module on stdout: prologue, the node
    # classes (each after its parent), the visitor base class, and the
    # epilogue.  Also writes the ASTPrintVisitor test helper to ast_test.py.
    print prologue
    print
    classes = parse_spec(SPEC)
    emitted = {Node_NodeInfo: True}
    def emit(info):
        # emit each class exactly once, parent-first
        if info in emitted:
            return
        emitted[info] = True
        emit(info.parent)
        print info.gen_source()
    for info in classes:
        emit(info)
    gen_ast_visitor(classes)
    gen_print_visitor(classes,file("ast_test.py","w"))
    print epilogue
prologue = '''
"""Python abstract syntax node definitions
This file is automatically generated by astgen.py
"""
from consts import CO_VARARGS, CO_VARKEYWORDS, OP_ASSIGN
from pypy.interpreter.baseobjspace import Wrappable
from pypy.interpreter.typedef import TypeDef, GetSetProperty, interp_attrproperty
from pypy.interpreter.gateway import interp2app, W_Root, ObjSpace
from pypy.interpreter.error import OperationError
def flatten(list):
l = []
for elt in list:
t = type(elt)
if t is tuple or t is list:
for elt2 in flatten(elt):
l.append(elt2)
else:
l.append(elt)
return l
#def flatten_nodes(list):
# return [n for n in flatten(list) if isinstance(n, Node)]
nodes = {}
class Node(Wrappable):
"""Abstract base class for ast nodes."""
def __init__(self, lineno = -1):
self.lineno = lineno
self.filename = ""
self.parent = None
#self.scope = None
def getChildren(self):
pass # implemented by subclasses
def __iter__(self):
for n in self.getChildren():
yield n
def asList(self): # for backwards compatibility
return self.getChildren()
def getChildNodes(self):
return [] # implemented by subclasses
def accept(self, visitor):
raise NotImplementedError
def mutate(self, visitor):
return visitor.visitNode(self)
def flatten(self):
res = []
nodes = self.getChildNodes()
if nodes:
for n in nodes:
res.extend( n.flatten() )
else:
res.append( self )
return res
def __repr__(self):
return "Node()"
def descr_repr( self, space ):
# most of the __repr__ are not RPython, more work is needed
return space.wrap( self.__repr__() )
def fget_parent(space, self):
return space.wrap(self.parent)
def fset_parent(space, self, w_parent):
self.parent = space.interp_w(Node, w_parent, can_be_None=False)
def descr_getChildNodes( self, space ):
lst = self.getChildNodes()
return space.newlist( [ space.wrap( it ) for it in lst ] )
def descr_node_accept( space, w_self, w_visitor ):
return space.call_method( w_visitor, 'visitNode', w_self )
def descr_node_mutate(space, w_self, w_visitor):
return space.call_method(w_visitor, 'visitNode', w_self)
def descr_Node_new(space, w_subtype, lineno=-1):
node = space.allocate_instance(Node, w_subtype)
node.lineno = lineno
return space.wrap(node)
Node.typedef = TypeDef('ASTNode',
__new__ = interp2app(descr_Node_new, unwrap_spec=[ObjSpace, W_Root, int]),
#__repr__ = interp2app(Node.descr_repr, unwrap_spec=['self', ObjSpace] ),
getChildNodes = interp2app(Node.descr_getChildNodes, unwrap_spec=[ 'self', ObjSpace ] ),
accept = interp2app(descr_node_accept, unwrap_spec=[ ObjSpace, W_Root, W_Root ] ),
mutate = interp2app(descr_node_mutate, unwrap_spec=[ ObjSpace, W_Root, W_Root ] ),
lineno = interp_attrproperty('lineno', cls=Node),
filename = interp_attrproperty('filename', cls=Node),
parent=GetSetProperty(Node.fget_parent, Node.fset_parent),
)
Node.typedef.acceptable_as_base_class = False
class EmptyNode(Node):
def accept(self, visitor):
return visitor.visitEmptyNode(self)
class Expression(Node):
# Expression is an artificial node class to support "eval"
nodes["expression"] = "Expression"
def __init__(self, node):
Node.__init__(self)
self.node = node
def getChildren(self):
return [self.node,]
def getChildNodes(self):
return [self.node,]
def __repr__(self):
return "Expression(%s)" % (repr(self.node))
def accept(self, visitor):
return visitor.visitExpression(self)
def mutate(self, visitor):
self.node = self.node.mutate(visitor)
return visitor.visitExpression(self)
def fget_node(space, self):
return space.wrap(self.node)
def fset_node(space, self, w_arg):
self.node = space.interp_w(Node, w_arg, can_be_None=False)
def descr_expression_new(space, w_subtype, w_node, lineno=-1):
self = space.allocate_instance(Expression, w_subtype)
node = space.interp_w(Node, w_node, can_be_None=False)
self.node = node
self.lineno = lineno
return space.wrap(self)
def descr_expression_accept(space, w_self, w_visitor):
return space.call_method(w_visitor, 'visitExpression', w_self)
def descr_expression_mutate(space, w_self, w_visitor):
w_node = space.getattr(w_self, space.wrap("node"))
space.setattr(w_node, space.wrap('parent'), w_self)
w_new_node = space.call_method(w_node, "mutate", w_visitor)
space.setattr(w_self, space.wrap("node"), w_new_node)
return space.call_method(w_visitor, "visitExpression", w_self)
Expression.typedef = TypeDef('Expression', Node.typedef,
__new__ = interp2app(descr_expression_new, unwrap_spec=[ObjSpace, W_Root, W_Root, int]),
accept=interp2app(descr_expression_accept, unwrap_spec=[ObjSpace, W_Root, W_Root] ),
mutate=interp2app(descr_expression_mutate, unwrap_spec=[ObjSpace, W_Root, W_Root] ),
node=GetSetProperty(Expression.fget_node, Expression.fset_node ),
)
Expression.typedef.acceptable_as_base_class = False
'''
epilogue = '''
nodeclasses = []
for name, obj in globals().items():
if isinstance(obj, type) and issubclass(obj, Node):
nodes[name.lower()] = obj
nodeclasses.append(name)
'''
if __name__ == "__main__":
    # regenerate the ast module on stdout (and ast_test.py; see main())
    main()
    sys.exit(0)
| Python |
# operation flags: how a name/attribute/subscript node is being used
OP_ASSIGN = 0 # 'OP_ASSIGN'
OP_DELETE = 1 # 'OP_DELETE'
OP_APPLY = 2 # 'OP_APPLY'
OP_NONE = 3
# scope classifications (SC_*) for variable name lookups
SC_LOCAL = 1
SC_GLOBAL = 2
SC_FREE = 3
SC_CELL = 4
SC_UNKNOWN = 5
SC_DEFAULT = 6
# code-object bit flags (CO_*); values appear to mirror CPython's
# compile-time flags — TODO confirm against the target CPython version
CO_OPTIMIZED = 0x0001
CO_NEWLOCALS = 0x0002
CO_VARARGS = 0x0004
CO_VARKEYWORDS = 0x0008
CO_NESTED = 0x0010
CO_GENERATOR = 0x0020
CO_GENERATOR_ALLOWED = 0x1000
CO_FUTURE_DIVISION = 0x2000
CO_FUTURE_WITH_STATEMENT = 0x8000
| Python |
from pypy.interpreter.astcompiler import ast
def flatten(tup):
elts = []
for elt in tup:
if type(elt) == tuple:
elts = elts + flatten(elt)
else:
elts.append(elt)
return elts
class Counter:
    """A monotonically increasing integer counter."""
    def __init__(self, initial):
        self.count = initial
    def next(self):
        """Return the current value, then advance by one."""
        current = self.count
        self.count = current + 1
        return current
MANGLE_LEN = 256 # magic constant from compile.c

def mangle(name, klass):
    """Apply CPython-style private-name mangling: __name -> _Klass__name.

    Names that are not private (__x), too long, or dunder-like (__x__)
    are returned unchanged, as are names in classes whose name is empty
    or consists only of underscores.
    """
    if not name.startswith('__'):
        return name
    if len(name) + 2 >= MANGLE_LEN:
        return name
    if name.endswith('__'):
        return name
    # drop the class name's leading underscores, exactly like CPython
    stripped = klass.lstrip('_')
    if not stripped:
        # empty or all-underscore class name: no mangling at all
        return name
    klass = stripped
    tlen = len(klass) + len(name)
    if tlen > MANGLE_LEN:
        end = len(klass) + MANGLE_LEN-tlen
        if end < 0:
            klass = '' # slices of negative length are invalid in RPython
        else:
            klass = klass[:end]
    return "_%s%s" % (klass, name)
def set_filename(filename, tree):
    """Set the filename attribute to filename on every node in tree"""
    # breadth-first walk over the whole tree
    pending = [tree]
    while pending:
        node = pending.pop(0)
        assert isinstance(node, ast.Node)
        node.filename = filename
        pending.extend(node.getChildNodes())
| Python |
"""A flow graph representation for Python bytecode"""
import sys
from pypy.interpreter.astcompiler import misc, ast
from pypy.interpreter.astcompiler.consts \
import CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS
from pypy.interpreter.pycode import PyCode
from pypy.interpreter.baseobjspace import W_Root
from pypy.tool import stdlib_opcode as pythonopcode
from pypy.interpreter.error import OperationError
class BlockSet:
    """A Set implementation specific to Blocks.

    Membership is decided by the block's unique `bid`, which is used as
    the key of the underlying dict.
    """
    def __init__(self):
        self.elts = {}
    def __len__(self):
        return len(self.elts)
    def has_elt(self, elt):
        return elt.bid in self.elts
    def __contains__(self, elt):
        # `in` and has_elt() are deliberately identical
        return self.has_elt(elt)
    def add(self, elt):
        self.elts[elt.bid] = elt
    def remove(self, elt):
        del self.elts[elt.bid]
    def elements(self):
        return self.elts.values()
    def copy(self):
        """Return a shallow copy sharing the same Block objects."""
        dup = BlockSet()
        dup.elts.update(self.elts)
        return dup
class Instr:
    # Base class for one emitted bytecode instruction; `op` is the opcode
    # name string.  Bare instructions carry no argument.
    has_arg = False
    def __init__(self, op):
        self.op = op
class InstrWithArg(Instr):
    # Marker base for instructions carrying an argument; concrete
    # subclasses store it and expose it via getArg().
    has_arg = True
class InstrName(InstrWithArg):
    # Instruction whose argument is a name string.
    def __init__(self, inst, name):
        Instr.__init__(self, inst)
        self.name = name
    def getArg(self):
        "NOT_RPYTHON"
        return self.name
class InstrInt(InstrWithArg):
    # Instruction whose argument is a plain integer.
    def __init__(self, inst, intval):
        Instr.__init__(self, inst)
        self.intval = intval
    def getArg(self):
        "NOT_RPYTHON"
        return self.intval
class InstrBlock(InstrWithArg):
    # Instruction whose argument is another Block (a jump target).
    def __init__(self, inst, block):
        Instr.__init__(self, inst)
        self.block = block
    def getArg(self):
        "NOT_RPYTHON"
        return self.block
class InstrObj(InstrWithArg):
    # Instruction whose argument is an arbitrary object (e.g. a constant).
    def __init__(self, inst, obj):
        Instr.__init__(self, inst)
        self.obj = obj
    def getArg(self):
        "NOT_RPYTHON"
        return self.obj
class InstrCode(InstrWithArg):
    # Instruction whose argument is a nested code generator object.
    def __init__(self, inst, gen):
        Instr.__init__(self, inst)
        self.gen = gen
    def getArg(self):
        "NOT_RPYTHON"
        return self.gen
class FlowGraph:
def __init__(self, space):
self.space = space
self.current = self.entry = Block(space)
self.exit = Block(space,"exit")
self.blocks = BlockSet()
self.blocks.add(self.entry)
self.blocks.add(self.exit)
    def startBlock(self, block):
        """Make `block` the one new instructions are emitted into."""
        if self._debug:
            if self.current:
                print "end", repr(self.current)
                print "    next", self.current.next
                print "   ", self.current.get_children()
            print repr(block)
        assert block is not None
        self.current = block
def nextBlock(self, block=None):
# XXX think we need to specify when there is implicit transfer
# from one block to the next. might be better to represent this
# with explicit JUMP_ABSOLUTE instructions that are optimized
# out when they are unnecessary.
#
# I think this strategy works: each block has a child
# designated as "next" which is returned as the last of the
# children. because the nodes in a graph are emitted in
# reverse post order, the "next" block will always be emitted
# immediately after its parent.
# Worry: maintaining this invariant could be tricky
if block is None:
block = self.newBlock()
# Note: If the current block ends with an unconditional
# control transfer, then it is incorrect to add an implicit
# transfer to the block graph. The current code requires
# these edges to get the blocks emitted in the right order,
# however. :-( If a client needs to remove these edges, call
# pruneEdges().
self.current.addNext(block)
self.startBlock(block)
def newBlock(self):
b = Block(self.space)
self.blocks.add(b)
return b
    def startExitBlock(self):
        """Switch emission to the graph's dedicated exit block."""
        self.startBlock(self.exit)
    # debug tracing of graph construction (see startBlock and the emit*
    # methods); toggled via the two helpers below
    _debug = 0
    def _enable_debug(self):
        self._debug = 1
    def _disable_debug(self):
        self._debug = 0
def emit(self, inst):
if self._debug:
print "\t", inst
if inst in ['RETURN_VALUE', 'YIELD_VALUE']:
self.current.addOutEdge(self.exit)
self.current.emit( Instr(inst) )
#def emitop(self, inst, arg ):
# if self._debug:
# print "\t", inst, arg
# self.current.emit( (inst,arg) )
def emitop_obj(self, inst, obj ):
if self._debug:
print "\t", inst, repr(obj)
self.current.emit( InstrObj(inst,obj) )
def emitop_code(self, inst, obj ):
if self._debug:
print "\t", inst, repr(obj)
self.current.emit( InstrCode(inst, obj) )
def emitop_int(self, inst, intval ):
if self._debug:
print "\t", inst, intval
assert isinstance(intval,int)
self.current.emit( InstrInt(inst,intval) )
def emitop_block(self, inst, block):
if self._debug:
print "\t", inst, block
assert isinstance(block, Block)
self.current.addOutEdge( block )
self.current.emit( InstrBlock(inst,block) )
def emitop_name(self, inst, name ):
if self._debug:
print "\t", inst, name
assert isinstance(name,str)
self.current.emit( InstrName(inst,name) )
def getBlocksInOrder(self):
"""Return the blocks in reverse postorder
i.e. each node appears before all of its successors
"""
# TODO: What we need here is a topological sort that
# XXX make sure every node that doesn't have an explicit next
# is set so that next points to exit
for b in self.blocks.elements():
if b is self.exit:
continue
if not b.next:
b.addNext(self.exit)
order = dfs_postorder(self.entry, {})
order.reverse()
self.fixupOrder(order, self.exit)
# hack alert
if not self.exit in order:
order.append(self.exit)
return order
def fixupOrder(self, blocks, default_next):
"""Fixup bad order introduced by DFS."""
# XXX This is a total mess. There must be a better way to get
# the code blocks in the right order.
self.fixupOrderHonorNext(blocks, default_next)
self.fixupOrderForward(blocks, default_next)
def fixupOrderHonorNext(self, blocks, default_next):
"""Fix one problem with DFS.
The DFS uses child block, but doesn't know about the special
"next" block. As a result, the DFS can order blocks so that a
block isn't next to the right block for implicit control
transfers.
"""
new_blocks = blocks
blocks = blocks[:]
del new_blocks[:]
i = 0
while i < len(blocks) - 1:
b = blocks[i]
n = blocks[i + 1]
i += 1
new_blocks.append(b)
if not b.next or b.next[0] == default_next or b.next[0] == n:
continue
# The blocks are in the wrong order. Find the chain of
# blocks to insert where they belong.
cur = b
chain = []
elt = cur
while elt.next and elt.next[0] != default_next:
chain.append(elt.next[0])
elt = elt.next[0]
# Now remove the blocks in the chain from the current
# block list, so that they can be re-inserted.
for b in chain:
for j in range(i + 1, len(blocks)):
if blocks[j] == b:
del blocks[j]
break
else:
assert False, "Can't find block"
new_blocks.extend(chain)
if i == len(blocks) - 1:
new_blocks.append(blocks[i])
def fixupOrderForward(self, blocks, default_next):
"""Make sure all JUMP_FORWARDs jump forward"""
index = {}
chains = []
cur = []
for b in blocks:
index[b.bid] = len(chains)
cur.append(b)
if b.next and b.next[0] == default_next:
chains.append(cur)
cur = []
chains.append(cur)
while 1:
constraints = []
for i in range(len(chains)):
l = chains[i]
for b in l:
for c in b.get_children():
if index[c.bid] < i:
forward_p = 0
for inst in b.insts:
if inst.op == 'JUMP_FORWARD':
assert isinstance(inst, InstrBlock)
if inst.block == c:
forward_p = 1
if not forward_p:
continue
constraints.append((index[c.bid], i))
if not constraints:
break
# XXX just do one for now
# do swaps to get things in the right order
goes_before, a_chain = constraints[0]
assert a_chain > goes_before >= 0
c = chains[a_chain]
del chains[a_chain]
chains.insert(goes_before, c)
del blocks[:]
for c in chains:
for b in c:
blocks.append(b)
def getBlocks(self):
return self.blocks.elements()
def getRoot(self):
"""Return nodes appropriate for use with dominator"""
return self.entry
def getContainedGraphs(self):
l = []
for b in self.getBlocks():
l.extend(b.getContainedGraphs())
return l
def dfs_postorder(b, seen):
    """Depth-first search of tree rooted at b, return in postorder.

    'seen' maps visited block ids to blocks; already-seen children are
    skipped, so shared/recursive edges are traversed only once.
    """
    seen[b.bid] = b
    result = []
    for child in b.get_children():
        if child.bid not in seen:
            result.extend(dfs_postorder(child, seen))
    result.append(b)
    return result
# Module-wide counter handing out a unique id (Block.bid) to every Block.
BlockCounter = misc.Counter(0)
class Block:
    """A basic block: a straight-line run of instructions plus the
    explicit in/out edges and the implicit fall-through 'next' edge."""

    def __init__(self, space, label=''):
        self.insts = []            # list of Instr objects
        self.inEdges = BlockSet()  # predecessor blocks
        self.outEdges = BlockSet() # successor blocks via explicit jumps
        self.label = label
        self.bid = BlockCounter.next()   # unique block id
        self.next = []             # implicit fall-through target (0 or 1)
        self.space = space

    def __repr__(self):
        if self.label:
            return "<block %s id=%d>" % (self.label, self.bid)
        else:
            return "<block id=%d>" % (self.bid)

    def __str__(self):
        insts = [ str(i) for i in self.insts ]
        return "<block %s %d:\n%s>" % (self.label, self.bid,
                                       '\n'.join(insts))

    def emit(self, inst):
        """Append 'inst'; jump instructions also record an out-edge."""
        op = inst.op
        if op[:4] == 'JUMP':
            assert isinstance(inst, InstrBlock)
            self.outEdges.add(inst.block)
##        if op=="LOAD_CONST":
##            assert isinstance( inst[1], W_Root ) or hasattr( inst[1], 'getCode')
        self.insts.append( inst )

    def getInstructions(self):
        return self.insts

    def addInEdge(self, block):
        self.inEdges.add(block)

    def addOutEdge(self, block):
        self.outEdges.add(block)

    def addNext(self, block):
        # a block may have at most one fall-through successor
        self.next.append(block)
        assert len(self.next) == 1, [ str(i) for i in self.next ]

    # opcodes after which control never falls through to 'next'
    _uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS', 'YIELD_VALUE',
                        'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP')

    def pruneNext(self):
        """Remove bogus edge for unconditional transfers
        Each block has a next edge that accounts for implicit control
        transfers, e.g. from a JUMP_IF_FALSE to the block that will be
        executed if the test is true.
        These edges must remain for the current assembler code to
        work.  If they are removed, the dfs_postorder gets things in
        weird orders.  However, they shouldn't be there for other
        purposes, e.g. conversion to SSA form.  This method will
        remove the next edge when it follows an unconditional control
        transfer.
        """
        try:
            inst = self.insts[-1]
        except (IndexError, ValueError):
            return
        if inst.op in self._uncond_transfer:
            self.next = []

    def get_children(self):
        # NOTE: mutates outEdges so the fall-through block appears only
        # once, and always last (required by the reverse-postorder walk).
        if self.next and self.next[0].bid in self.outEdges.elts:
            self.outEdges.remove(self.next[0])
        return self.outEdges.elements() + self.next

    def getContainedGraphs(self):
        """Return all graphs contained within this block.
        For example, a MAKE_FUNCTION block will contain a reference to
        the graph for the function body.
        """
        contained = []
        for inst in self.insts:
            if isinstance(inst, InstrCode):
                gen = inst.gen
                if gen:
                    contained.append(gen)
        return contained
# flags for code objects
# the FlowGraph is transformed in place; it exists in one of these states
# (progression: RAW -> CONV -> FLAT -> DONE, driven by PyFlowGraph.getCode)
RAW = "RAW"
FLAT = "FLAT"
CONV = "CONV"
DONE = "DONE"
class PyFlowGraph(FlowGraph):
    """FlowGraph specialized for producing a Python code object.

    The graph passes through the stages RAW -> CONV (symbolic arguments
    converted to indices) -> FLAT (blocks linearized, jumps resolved)
    -> DONE (bytecode assembled); see getCode()."""

    def __init__(self, space, name, filename, argnames=None,
                 optimized=0, klass=0, newlocals=0):
        FlowGraph.__init__(self, space)
        if argnames is None:
            argnames = []
        self.name = name            # name of the function/class/module
        self.filename = filename
        self.docstring = space.w_None
        self.argcount = len(argnames)
        self.klass = klass
        self.flags = 0
        if optimized:
            self.flags |= CO_OPTIMIZED
        if newlocals:
            self.flags |= CO_NEWLOCALS
        # XXX we need to build app-level dict here, bleh
        self.w_consts = space.newdict()
        #self.const_list = []
        self.names = []
        # Free variables found by the symbol table scan, including
        # variables used only in nested scopes, are included here.
        self.freevars = []
        self.cellvars = []
        # The closure list is used to track the order of cell
        # variables and free variables in the resulting code object.
        # The offsets used by LOAD_CLOSURE/LOAD_DEREF refer to both
        # kinds of variables.
        self.closure = []
        self.varnames = list(argnames)
        self.stage = RAW
        self.orderedblocks = []

    def setDocstring(self, doc):
        self.docstring = doc

    def setFlag(self, flag):
        self.flags = self.flags | flag
        if flag == CO_VARARGS:
            self.argcount = self.argcount - 1

    def checkFlag(self, flag):
        # returns 1 if set, None otherwise (callers rely on truthiness)
        if self.flags & flag:
            return 1

    def setFreeVars(self, names):
        self.freevars = list(names)

    def setCellVars(self, names):
        self.cellvars = names

    def getCode(self):
        """Get a Python code object"""
        # drive the graph through the remaining stages, in order
        if self.stage == RAW:
            self.computeStackDepth()
            self.convertArgs()
        if self.stage == CONV:
            self.flattenGraph()
        if self.stage == FLAT:
            self.makeByteCode()
        if self.stage == DONE:
            return self.newCodeObject()
        raise RuntimeError, "inconsistent PyFlowGraph state"

    def dump(self, io=None):
        """Debug helper: print the flattened instruction stream."""
        if io:
            save = sys.stdout
            sys.stdout = io
        pc = 0
        for t in self.insts:
            opname = t.op
            if opname == "SET_LINENO":
                print
            if not t.has_arg:
                print "\t", "%3d" % pc, opname
                pc = pc + 1
            else:
                print "\t", "%3d" % pc, opname, t.getArg()
                pc = pc + 3
        if io:
            sys.stdout = save

    def _max_depth(self, depth, seen, b, d):
        # recursively find the path through the graph with the largest
        # cumulative stack effect; 'depth' maps block -> its own effect
        if b in seen:
            return d
        seen[b] = 1
        d = d + depth[b]
        children = b.get_children()
        if children:
            maxd = -1
            for c in children:
                childd =self._max_depth(depth, seen, c, d)
                if childd > maxd:
                    maxd = childd
            return maxd
        else:
            if not b.label == "exit":
                return self._max_depth(depth, seen, self.exit, d)
            else:
                return d

    def computeStackDepth(self):
        """Compute the max stack depth.
        Approach is to compute the stack effect of each basic block.
        Then find the path through the code with the largest total
        effect.
        """
        depth = {}
        exit = None
        for b in self.getBlocks():
            depth[b] = findDepth(b.getInstructions())
        seen = {}
        self.stacksize = self._max_depth( depth, seen, self.entry, 0)

    def flattenGraph(self):
        """Arrange the blocks in order and resolve jumps"""
        assert self.stage == CONV
        self.insts = insts = []
        firstline = 0
        pc = 0
        begin = {}          # block -> bytecode offset of its first instr
        end = {}            # block -> offset just past its last instr
        forward_refs = []   # (target block, placeholder InstrInt, pc) to patch
        for b in self.orderedblocks:
            # Prune any setlineno before the 'implicit return' block.
            if b is self.exit:
                while len(insts) and insts[-1].op == "SET_LINENO":
                    insts.pop()
            begin[b] = pc
            for inst in b.getInstructions():
                if not inst.has_arg:
                    insts.append(inst)
                    pc = pc + 1
                elif inst.op != "SET_LINENO":
                    if inst.op in self.hasjrel:
                        assert isinstance(inst, InstrBlock)
                        # relative jump - no extended arg
                        block = inst.block
                        inst = InstrInt(inst.op, 0)
                        forward_refs.append( (block, inst, pc) )
                        insts.append(inst)
                        pc = pc + 3
                    elif inst.op in self.hasjabs:
                        # absolute jump - can be extended if backward
                        assert isinstance(inst, InstrBlock)
                        arg = inst.block
                        if arg in begin:
                            # can only extend argument if backward
                            offset = begin[arg]
                            hi = offset // 65536
                            lo = offset % 65536
                            if hi>0:
                                # extended argument
                                insts.append( InstrInt("EXTENDED_ARG", hi) )
                                pc = pc + 3
                            inst = InstrInt(inst.op, lo)
                        else:
                            inst = InstrInt(inst.op, 0)
                            forward_refs.append( (arg, inst, pc ) )
                        insts.append(inst)
                        pc = pc + 3
                    else:
                        assert isinstance(inst, InstrInt)
                        arg = inst.intval
                        # numerical arg
                        hi = arg // 65536
                        lo = arg % 65536
                        if hi>0:
                            # extended argument
                            insts.append( InstrInt("EXTENDED_ARG", hi) )
                            inst.intval = lo
                            pc = pc + 3
                        insts.append(inst)
                        pc = pc + 3
                else:
                    # SET_LINENO: kept symbolically for the lnotab pass
                    insts.append(inst)
                    if firstline == 0:
                        firstline = inst.intval
            end[b] = pc
        pc = 0
        # patch the forward references now that all offsets are known
        for block, inst, pc in forward_refs:
            opname = inst.op
            abspos = begin[block]
            if opname in self.hasjrel:
                offset = abspos - pc - 3
                inst.intval = offset
            else:
                inst.intval = abspos
        self.firstline = firstline
        self.stage = FLAT

    # sets of opnames taking relative/absolute jump targets,
    # derived once at class-definition time from the opcode tables
    hasjrel = {}
    for i in pythonopcode.hasjrel:
        hasjrel[pythonopcode.opname[i]] = True
    hasjabs = {}
    for i in pythonopcode.hasjabs:
        hasjabs[pythonopcode.opname[i]] = True

    def setconst(self, w_consts, w_item, value):
        # key on (value, type) so e.g. 2 and 2L stay distinct constants
        space = self.space
        w_item_type = space.type(w_item)
        w_key = space.newtuple([w_item, w_item_type])
        space.setitem(w_consts, w_key, space.wrap(value))

    def convertArgs(self):
        """Convert arguments from symbolic to concrete form"""
        assert self.stage == RAW
        space = self.space
        self.orderedblocks = self.getBlocksInOrder()
        self.setconst(self.w_consts, self.docstring, 0)
        #self.const_list.insert(0, self.docstring)
        self.sort_cellvars()
        for b in self.orderedblocks:
            insts = b.getInstructions()
            for i in range(len(insts)):
                inst = insts[i]
                if inst.has_arg:
                    opname = inst.op
                    conv = self._converters.get(opname, None)
                    if conv:
                        insts[i] = conv(self, inst)
        self.stage = CONV

    def sort_cellvars(self):
        """Sort cellvars in the order of varnames and prune from freevars.
        """
        cells = {}
        for name in self.cellvars:
            cells[name] = 1
        self.cellvars = [name for name in self.varnames
                         if name in cells]
        for name in self.cellvars:
            del cells[name]
        self.cellvars = self.cellvars + cells.keys()
        self.closure = self.cellvars + self.freevars

    def _lookupName(self, name, list):
        """Return index of name in list, appending if necessary
        """
        assert isinstance(name, str)
        for i in range(len(list)):
            if list[i] == name:
                return i
        end = len(list)
        list.append(name)
        return end

    def _cmpConsts(self, w_left, w_right):
        # structural equality for constants; tuples compared elementwise
        # so that their members' types are also taken into account
        space = self.space
        t = space.type(w_left)
        if space.is_w(t, space.type(w_right)):
            if space.is_w(t, space.w_tuple):
                left_len = space.int_w(space.len(w_left))
                right_len = space.int_w(space.len(w_right))
                if left_len == right_len:
                    for i in range(left_len):
                        w_lefti = space.getitem(w_left, space.wrap(i))
                        w_righti = space.getitem(w_right, space.wrap(i))
                        if not self._cmpConsts(w_lefti, w_righti):
                            return False
                    return True
            elif space.eq_w(w_left, w_right):
                return True
        return False

    def _lookupConst(self, w_obj, w_dict):
        """
        This routine uses a list instead of a dictionary, because a
        dictionary can't store two different keys if the keys have the
        same value but different types, e.g. 2 and 2L.  The compiler
        must treat these two separately, so it does an explicit type
        comparison before comparing the values.
        """
        space = self.space
        w_obj_type = space.type(w_obj)
        try:
            w_key = space.newtuple([w_obj, w_obj_type])
            return space.int_w(space.getitem(w_dict, w_key))
        except OperationError, operr:
            if not operr.match(space, space.w_KeyError):
                raise
            # not seen before: assign the next index
            lgt = space.int_w(space.len(w_dict))
            self.setconst(w_dict, w_obj, lgt)
            return lgt

    # opname -> bound converter method; filled by the locals() scan below
    _converters = {}

    def _convert_LOAD_CONST(self, inst):
        if isinstance(inst, InstrCode):
            w_obj = inst.gen.getCode()
        else:
            assert isinstance(inst, InstrObj)
            w_obj = inst.obj
        #assert w_obj is not None
        index = self._lookupConst(w_obj, self.w_consts)
        return InstrInt(inst.op, index)

    def _convert_LOAD_FAST(self, inst):
        assert isinstance(inst, InstrName)
        arg = inst.name
        self._lookupName(arg, self.names)
        index= self._lookupName(arg, self.varnames)
        return InstrInt(inst.op, index)
    _convert_STORE_FAST = _convert_LOAD_FAST
    _convert_DELETE_FAST = _convert_LOAD_FAST

    def _convert_NAME(self, inst):
        assert isinstance(inst, InstrName)
        arg = inst.name
        index = self._lookupName(arg, self.names)
        return InstrInt(inst.op, index)
    _convert_LOAD_NAME = _convert_NAME
    _convert_STORE_NAME = _convert_NAME
    _convert_DELETE_NAME = _convert_NAME
    _convert_IMPORT_NAME = _convert_NAME
    _convert_IMPORT_FROM = _convert_NAME
    _convert_STORE_ATTR = _convert_NAME
    _convert_LOAD_ATTR = _convert_NAME
    _convert_DELETE_ATTR = _convert_NAME
    _convert_LOAD_GLOBAL = _convert_NAME
    _convert_STORE_GLOBAL = _convert_NAME
    _convert_DELETE_GLOBAL = _convert_NAME
    _convert_LOOKUP_METHOD = _convert_NAME

    def _convert_DEREF(self, inst):
        assert isinstance(inst, InstrName)
        arg = inst.name
        self._lookupName(arg, self.names)
        index = self._lookupName(arg, self.closure)
        return InstrInt(inst.op, index)
    _convert_LOAD_DEREF = _convert_DEREF
    _convert_STORE_DEREF = _convert_DEREF

    def _convert_LOAD_CLOSURE(self, inst):
        assert isinstance(inst, InstrName)
        arg = inst.name
        index = self._lookupName(arg, self.closure)
        return InstrInt(inst.op, index)

    _cmp = list(pythonopcode.cmp_op)
    def _convert_COMPARE_OP(self, inst):
        assert isinstance(inst, InstrName)
        arg = inst.name
        index = self._cmp.index(arg)
        return InstrInt(inst.op, index)

    # similarly for other opcodes...
    # register every _convert_XXX method in _converters, keyed by opname
    for name, obj in locals().items():
        if name[:9] == "_convert_":
            opname = name[9:]
            _converters[opname] = obj
    del name, obj, opname

    def makeByteCode(self):
        """Assemble the flattened instruction list into raw bytecode."""
        assert self.stage == FLAT
        self.lnotab = lnotab = LineAddrTable(self.firstline)
        for t in self.insts:
            opname = t.op
            if self._debug:
                if not t.has_arg:
                    print "x",opname
                else:
                    print "x",opname, t.getArg()
            if not t.has_arg:
                lnotab.addCode1(self.opnum[opname])
            else:
                assert isinstance(t, InstrInt)
                oparg = t.intval
                if opname == "SET_LINENO":
                    # consumed here: becomes an lnotab entry, not bytecode
                    lnotab.nextLine(oparg)
                    continue
                hi, lo = twobyte(oparg)
                try:
                    lnotab.addCode3(self.opnum[opname], lo, hi)
                except ValueError:
                    if self._debug:
                        print opname, oparg
                        print self.opnum[opname], lo, hi
                    raise

        self.stage = DONE

    # opname -> numeric opcode, derived once from the opcode table
    opnum = {}
    for num in range(len(pythonopcode.opname)):
        opnum[pythonopcode.opname[num]] = num
    # This seems to duplicate dis.opmap from opcode.opmap
    del num

    def newCodeObject(self):
        """Build the final PyCode from the assembled pieces."""
        assert self.stage == DONE
        if (self.flags & CO_NEWLOCALS) == 0:
            nlocals = 0
        else:
            nlocals = len(self.varnames)
        argcount = self.argcount
        if self.flags & CO_VARKEYWORDS:
            argcount = argcount - 1
        # was return new.code, now we just return the parameters and let
        # the caller create the code object
        return PyCode( self.space, argcount, nlocals,
                       self.stacksize, self.flags,
                       self.lnotab.getCode(),
                       self.getConsts(),
                       self.names,
                       self.varnames,
                       self.filename, self.name,
                       self.firstline,
                       self.lnotab.getTable(),
                       self.freevars,
                       self.cellvars
                       )

    def getConsts(self):
        """Return a tuple for the const slot of the code object
        Must convert references to code (MAKE_FUNCTION) to code
        objects recursively.
        """
        space = self.space
        l_w = [None] * space.int_w(space.len(self.w_consts))
        keys_w = space.unpackiterable(self.w_consts)
        for w_key in keys_w:
            index = space.int_w(space.getitem(self.w_consts, w_key))
            w_v = space.unpacktuple(w_key)[0]
            l_w[index] = w_v
        return l_w
def isJump(opname):
    """Return 1 if 'opname' is a jump instruction, None otherwise.

    All jump opcodes share the 'JUMP' prefix; callers rely only on
    truthiness of the result.
    """
    if opname.startswith('JUMP'):
        return 1
def twobyte(val):
    """Convert an int argument into (high, low) bytes."""
    assert isinstance(val, int)
    return divmod(val, 256)
class LineAddrTable:
    """lnotab
    This class builds the lnotab, which is documented in compile.c.
    Here's a brief recap:
    For each SET_LINENO instruction after the first one, two bytes are
    added to lnotab.  (In some cases, multiple two-byte entries are
    added.)  The first byte is the distance in bytes between the
    instruction for the last SET_LINENO and the current SET_LINENO.
    The second byte is offset in line numbers.  If either offset is
    greater than 255, multiple two-byte entries are added -- see
    compile.c for the delicate details.
    """

    def __init__(self, firstline):
        self.code = []          # accumulated bytecode, one chr per byte
        self.codeOffset = 0     # current bytecode length in bytes
        self.firstline = firstline
        self.lastline = firstline   # line of the previous lnotab entry
        self.lastoff = 0            # code offset of the previous entry
        self.lnotab = []            # flat list of (addr, line) delta bytes

    def addCode1(self, op ):
        """Append a one-byte (argument-less) instruction."""
        self.code.append(chr(op))
        self.codeOffset = self.codeOffset + 1

    def addCode3(self, op, hi, lo):
        """Append a three-byte instruction: opcode, low byte, high byte."""
        self.code.append(chr(op))
        self.code.append(chr(hi))
        self.code.append(chr(lo))
        self.codeOffset = self.codeOffset + 3

    def nextLine(self, lineno):
        """Record that 'lineno' starts at the current code offset."""
        # compute deltas
        addr = self.codeOffset - self.lastoff
        line = lineno - self.lastline
        # Python assumes that lineno always increases with
        # increasing bytecode address (lnotab is unsigned char).
        # Depending on when SET_LINENO instructions are emitted
        # this is not always true.  Consider the code:
        #     a = (1,
        #          b)
        # In the bytecode stream, the assignment to "a" occurs
        # after the loading of "b".  This works with the C Python
        # compiler because it only generates a SET_LINENO instruction
        # for the assignment.
        if line >= 0:
            push = self.lnotab.append
            # deltas > 255 must be split across several entries
            while addr > 255:
                push(255); push(0)
                addr -= 255
            while line > 255:
                push(addr); push(255)
                line -= 255
                addr = 0
            if addr > 0 or line > 0:
                push(addr); push(line)
            self.lastline = lineno
            self.lastoff = self.codeOffset

    def getCode(self):
        return ''.join(self.code)

    def getTable(self):
        return ''.join( [ chr(i) for i in self.lnotab ] )
def depth_UNPACK_SEQUENCE(count):
    """Stack effect of UNPACK_SEQUENCE: pops 1, pushes 'count' items."""
    return count - 1
def depth_BUILD_TUPLE(count):
    """Stack effect of BUILD_TUPLE: pops 'count' items, pushes 1 tuple."""
    return 1 - count
def depth_BUILD_LIST(count):
    """Stack effect of BUILD_LIST: pops 'count' items, pushes 1 list."""
    return 1 - count
def depth_CALL_FUNCTION(argc):
    """Stack effect of CALL_FUNCTION.

    The low byte of 'argc' is the positional-argument count (1 slot
    each); the high byte is the keyword-argument count (2 slots each:
    name + value).  The call consumes them all.
    """
    nkw, npos = divmod(argc, 256)
    return -(npos + 2 * nkw)
def depth_CALL_FUNCTION_VAR(argc):
    """Stack effect of CALL_FUNCTION_VAR: CALL_FUNCTION plus the *args
    tuple (one extra stack slot)."""
    nkw, npos = divmod(argc, 256)
    return -(npos + 2 * nkw) - 1
def depth_CALL_FUNCTION_KW(argc):
    """Stack effect of CALL_FUNCTION_KW: CALL_FUNCTION plus the **kwargs
    dict (one extra stack slot)."""
    nkw, npos = divmod(argc, 256)
    return -(npos + 2 * nkw) - 1
def depth_CALL_FUNCTION_VAR_KW(argc):
    """Stack effect of CALL_FUNCTION_VAR_KW: CALL_FUNCTION plus both the
    *args tuple and the **kwargs dict (two extra stack slots)."""
    nkw, npos = divmod(argc, 256)
    return -(npos + 2 * nkw) - 2
def depth_CALL_METHOD(argc):
    """Stack effect of CALL_METHOD: pops 'argc' arguments plus the bound
    receiver slot, pushes the result."""
    return -(argc + 1)
def depth_MAKE_FUNCTION(argc):
    """Stack effect of MAKE_FUNCTION: pops 'argc' default values (the
    code object popped and the function pushed cancel out)."""
    return -argc
def depth_MAKE_CLOSURE(argc):
    """Stack effect of MAKE_CLOSURE: pops 'argc' default values."""
    # XXX need to account for free variables too!
    return -argc
def depth_BUILD_SLICE(argc):
    """Stack effect of BUILD_SLICE: pops 'argc' slice parts (2 or 3),
    pushes one slice object."""
    if argc == 3:
        return -2
    elif argc == 2:
        return -1
    assert False, 'Unexpected argument %s to depth_BUILD_SLICE' % argc
def depth_DUP_TOPX(argc):
    """Stack effect of DUP_TOPX: duplicates the top 'argc' stack items."""
    return argc
# Maps opcodes whose stack effect depends on their integer argument to
# the function that computes the effect from that argument; consulted by
# StackDepthTracker.findDepth() as a last resort.
DEPTH_OP_TRACKER = {
    "UNPACK_SEQUENCE" : depth_UNPACK_SEQUENCE,
    "BUILD_TUPLE" : depth_BUILD_TUPLE,
    "BUILD_LIST" : depth_BUILD_LIST,
    "CALL_FUNCTION" : depth_CALL_FUNCTION,
    "CALL_FUNCTION_VAR" : depth_CALL_FUNCTION_VAR,
    "CALL_FUNCTION_KW" : depth_CALL_FUNCTION_KW,
    "CALL_FUNCTION_VAR_KW" : depth_CALL_FUNCTION_VAR_KW,
    "MAKE_FUNCTION" : depth_MAKE_FUNCTION,
    "MAKE_CLOSURE" : depth_MAKE_CLOSURE,
    "BUILD_SLICE" : depth_BUILD_SLICE,
    "DUP_TOPX" : depth_DUP_TOPX,
    }
class StackDepthTracker:
    """Estimates the maximum stack depth of a straight-line instruction
    sequence by summing per-opcode stack effects.

    Effects are looked up, in order, in: the 'effect' table, the prefix
    'patterns' list, and finally DEPTH_OP_TRACKER (argument-dependent
    opcodes).  Unknown opcodes are treated as effect 0."""
    # XXX 1. need to keep track of stack depth on jumps
    # XXX 2. at least partly as a result, this code is broken
    # XXX 3. Don't need a class here!

    def findDepth(self, insts, debug=0):
        depth = 0
        maxDepth = 0
        for i in insts:
            opname = i.op
            if debug:
                print i,
            # sys.maxint is used as a "not found" sentinel
            delta = self.effect.get(opname, sys.maxint)
            if delta != sys.maxint:
                depth = depth + delta
            else:
                # now check patterns
                for pat, pat_delta in self.patterns:
                    if opname[:len(pat)] == pat:
                        delta = pat_delta
                        depth = depth + delta
                        break
                # if we still haven't found a match
                if delta == sys.maxint:
                    meth = DEPTH_OP_TRACKER.get( opname, None )
                    if meth is not None:
                        assert isinstance(i, InstrInt)
                        depth = depth + meth(i.intval)
            if depth > maxDepth:
                maxDepth = depth
            if debug:
                print depth, maxDepth
        return maxDepth

    # fixed per-opcode stack effects
    effect = {
        'POP_TOP': -1,
        'DUP_TOP': 1,
        'SLICE+1': -1,
        'SLICE+2': -1,
        'SLICE+3': -2,
        'STORE_SLICE+0': -1,
        'STORE_SLICE+1': -2,
        'STORE_SLICE+2': -2,
        'STORE_SLICE+3': -3,
        'DELETE_SLICE+0': -1,
        'DELETE_SLICE+1': -2,
        'DELETE_SLICE+2': -2,
        'DELETE_SLICE+3': -3,
        'STORE_SUBSCR': -3,
        'DELETE_SUBSCR': -2,
        # PRINT_EXPR?
        'PRINT_ITEM': -1,
        'RETURN_VALUE': -1,
        'YIELD_VALUE': -1,
        'EXEC_STMT': -3,
        'BUILD_CLASS': -2,
        'STORE_NAME': -1,
        'STORE_ATTR': -2,
        'DELETE_ATTR': -1,
        'STORE_GLOBAL': -1,
        'BUILD_MAP': 1,
        'COMPARE_OP': -1,
        'STORE_FAST': -1,
        'IMPORT_STAR': -1,
        'IMPORT_NAME': 0,
        'IMPORT_FROM': 1,
        'LOAD_ATTR': 0,     # unlike other loads
        # close enough...
        'SETUP_EXCEPT': 3,
        'SETUP_FINALLY': 3,
        'FOR_ITER': 1,
        'WITH_CLEANUP': 3,
        'LOOKUP_METHOD': 1,
        }
    # use pattern match
    patterns = [
        ('BINARY_', -1),
        ('LOAD_', 1),
        ]
# Module-level convenience binding used by PyFlowGraph.computeStackDepth().
findDepth = StackDepthTracker().findDepth
| Python |
"""Package for parsing and compiling Python source code
There are several functions defined at the top level that are imported
from modules contained in the package.
parse(buf, mode="exec") -> AST
Converts a string containing Python source code to an abstract
syntax tree (AST). The AST is defined in compiler.ast.
parseFile(path) -> AST
The same as parse(open(path))
walk(ast, visitor, verbose=None)
Does a pre-order walk over the ast using the visitor instance.
See compiler.visitor for details.
compile(source, filename, mode, flags=None, dont_inherit=None)
Returns a code object. A replacement for the builtin compile() function.
compileFile(filename)
Generates a .pyc file by compiling filename.
"""
# from transformer import parse, parseFile
# from visitor import walk
# from pycodegen import compile, compileFile
| Python |
"""Check for errs in the AST.
The Python parser does not catch all syntax errors. Others, like
assignments with invalid targets, are caught in the code generation
phase.
The compiler package catches some errors in the transformer module.
But it seems clearer to write checkers that use the AST to detect
errors.
"""
from pypy.interpreter.astcompiler import ast, walk
def check(tree, multi=None):
    """Walk 'tree' with a SyntaxErrorChecker and return the error count.

    'multi' is forwarded to SyntaxErrorChecker: when not None, errors
    are printed instead of raising on the first one.
    """
    checker = SyntaxErrorChecker(multi)
    walk(tree, checker)
    return checker.errors
class SyntaxErrorChecker:
    """A visitor to find syntax errors in the AST."""

    def __init__(self, multi=None):
        """Create new visitor object.
        If optional argument multi is not None, then print messages
        for each error rather than raising a SyntaxError for the
        first.
        """
        self.multi = multi
        self.errors = 0   # running count of errors found

    def error(self, node, msg):
        """Record one error: print it (multi mode) or raise immediately."""
        self.errors = self.errors + 1
        if self.multi is not None:
            print "%s:%s: %s" % (node.filename, node.lineno, msg)
        else:
            raise SyntaxError, "%s (%s:%s)" % (msg, node.filename, node.lineno)

    def visitAssign(self, node):
        # the transformer module handles many of these
        # (the list-comprehension check below is currently disabled)
        for target in node.nodes:
            pass
##            if isinstance(target, ast.AssList):
##                if target.lineno is None:
##                    target.lineno = node.lineno
##                self.error(target, "can't assign to list comprehension")
| Python |
"""Module symbol-table generator"""
from pypy.interpreter.astcompiler import ast
from pypy.interpreter.astcompiler.consts import SC_LOCAL, SC_GLOBAL, \
SC_FREE, SC_CELL, SC_UNKNOWN, SC_DEFAULT
from pypy.interpreter.astcompiler.misc import mangle, Counter
from pypy.interpreter.pyparser.error import SyntaxError
from pypy.interpreter import gateway
import sys
# the 'role' of variables records how the variable is
# syntactically used in a given scope.
ROLE_NONE    = ' '
ROLE_USED    = 'U'   # used only
ROLE_DEFINED = 'D'   # defined (i.e. assigned to) in the current scope
ROLE_GLOBAL  = 'G'   # marked with the 'global' keyword in the current scope
ROLE_PARAM   = 'P'   # function parameter
class Scope:
    """One lexical scope discovered by the symbol-table pass.

    Records each variable's syntactic role (varroles) and, after
    build_var_scopes(), its resolved storage class (varscopes:
    SC_LOCAL/SC_GLOBAL/SC_FREE/SC_CELL/SC_DEFAULT)."""
    bare_exec = False     # scope contains an unqualified 'exec'
    import_star = False   # scope contains an 'import *'

    def __init__(self, name, parent):
        self.name = name
        self.varroles = {}      # {variable: role}
        self.children = []      # children scopes
        self.varscopes = None   # initialized by build_var_scopes()
        self.freevars = {}      # vars to show up in the code object's
                                # co_freevars.  Note that some vars may
                                # be only in this dict and not in
                                # varscopes; see need_passthrough_name()
        self.parent = parent
        if parent is not None:
            parent.children.append(self)

    def mangle(self, name):
        # only ClassScope actually mangles; others delegate upward
        if self.parent is None:
            return name
        else:
            return self.parent.mangle(name)

    def locals_fully_known(self):
        # False when 'exec' or 'import *' may introduce arbitrary names
        return not self.bare_exec and not self.import_star

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.name)

    def add_use(self, name):
        """Record a read of 'name' (does not override a stronger role)."""
        name = self.mangle(name)
        if name not in self.varroles:
            self.varroles[name] = ROLE_USED

    def add_def(self, name):
        """Record an assignment to 'name' (unless global/param already)."""
        name = self.mangle(name)
        if self.varroles.get(name, ROLE_USED) == ROLE_USED:
            self.varroles[name] = ROLE_DEFINED

    def add_global(self, name):
        """Record a 'global name' declaration; return the previous role."""
        name = self.mangle(name)
        prevrole = self.varroles.get(name, ROLE_NONE)
        self.varroles[name] = ROLE_GLOBAL
        return prevrole

    def add_return(self, node):
        # only legal inside a FunctionScope, which overrides this
        raise SyntaxError("'return' outside function")

    def add_yield(self):
        # only legal inside a FunctionScope, which overrides this
        raise SyntaxError("'yield' outside function")

    def DEBUG(self):
        print >> sys.stderr, self
        print >> sys.stderr, "\troles: ", self.varroles
        print >> sys.stderr, "\tscopes: ", self.varscopes

    def build_var_scopes(self, names_from_enclosing_funcs):
        """Build the varscopes dictionary of this scope and all children.
        The names_from_enclosing_funcs are the names that come from
        enclosing scopes.  It is a dictionary {name: source_function_scope},
        where the source_function_scope might be None to mean 'from the
        global scope'.  The whole names_from_enclosing_funcs can also be
        None, to mean that we don't know anything statically because of a
        bare exec or import *.
        A call to build_var_scopes() that uses a variable from an enclosing
        scope must patch the varscopes of that enclosing scope, to make the
        variable SC_CELL instead of SC_LOCAL, as well as the intermediate
        scopes, to make the variable SC_FREE in them.
        """
        newnames = {}   # new names that this scope potentially exports
                        # to its children (if it is a FunctionScope)
        self.varscopes = {}
        for name, role in self.varroles.items():
            if role == ROLE_USED:
                # where does this variable come from?
                if names_from_enclosing_funcs is None:
                    msg = self.parent.get_ambiguous_name_msg(
                        "it contains a nested function using the "
                        "variable '%s'" % (name,))
                    raise SyntaxError(msg)
                if name in names_from_enclosing_funcs:
                    enclosingscope = names_from_enclosing_funcs[name]
                    if enclosingscope is None:
                        # it is a global var
                        scope = SC_GLOBAL
                    else:
                        if not self.locals_fully_known():
                            msg = self.get_ambiguous_name_msg(
                                "it is a nested function, so the origin of "
                                "the variable '%s' is ambiguous" % (name,))
                            raise SyntaxError(msg)
                        # promote the defining scope's local to a cell and
                        # thread the name through all intermediate scopes
                        enclosingscope.varscopes[name] = SC_CELL
                        parent = self.parent
                        while parent is not enclosingscope:
                            parent.need_passthrough_name(name)
                            parent = parent.parent
                        self.freevars[name] = True
                        scope = SC_FREE
                else:
                    scope = SC_DEFAULT
                self._use_var()
            elif role == ROLE_GLOBAL:
                # a global var
                newnames[name] = None
                scope = SC_GLOBAL
            else:
                # a ROLE_DEFINED or ROLE_PARAM local var
                newnames[name] = self
                scope = SC_LOCAL
            self.varscopes[name] = scope
        # call build_var_scopes() on all the children
        names_enclosing_children = self.export_names_to_children(
            names_from_enclosing_funcs,
            newnames)
        for subscope in self.children:
            subscope.build_var_scopes(names_enclosing_children)

    def export_names_to_children(self, names_from_enclosing_funcs, newnames):
        # by default, scopes don't export names to their children
        # (only FunctionScopes do)
        return names_from_enclosing_funcs

    def need_passthrough_name(self, name):
        # make the 'name' pass through the 'self' scope, without showing
        # up in the normal way in the scope.  This case occurs when a
        # free variable is needed in some inner sub-scope, and comes from
        # some outer super-scope.  Hiding the name is needed for e.g. class
        # scopes, otherwise the name sometimes end up in the class __dict__.
        # Note that FunctionScope override this to *not* hide the name,
        # because users might expect it to show up in the function's locals
        # then...
        self.freevars[name] = True

    def _use_var(self):
        # hook for FunctionScope's extra CPython-compatibility checks
        pass

    def get_ambiguous_name_msg(self, reason):
        # find the nearest enclosing scope responsible for the ambiguity
        if self.bare_exec:
            cause = "unqualified exec"
        elif self.import_star:
            cause = "import *"
        else:
            assert self.parent
            return self.parent.get_ambiguous_name_msg(reason)
        return "%s is not allowed in '%s' because %s" % (cause, self.name,
                                                         reason)

    def check_name(self, name):
        """Return scope of name.
        """
        return self.varscopes.get(name, SC_UNKNOWN)

    def get_free_vars_in_scope(self):
        # list the names of the free variables, giving them the name they
        # should have inside this scope
        result = []
        for name in self.freevars:
            if self.check_name(name) != SC_FREE:
                # it's not considered as a free variable within this scope,
                # but only a need_passthrough_name().  We need to hide the
                # name to avoid confusion with another potential use of the
                # name in the 'self' scope.
                name = hiddenname(name)
            result.append(name)
        return result

    def get_free_vars_in_parent(self):
        # list the names of the free variables, giving them the name they
        # should have in the parent scope
        result = []
        for name in self.freevars:
            if self.parent.check_name(name) not in (SC_FREE, SC_CELL):
                # it's not considered as a free variable in the parent scope,
                # but only a need_passthrough_name().  We need to hide the
                # name to avoid confusion with another potential use of the
                # name in the parent scope.
                name = hiddenname(name)
            result.append(name)
        return result

    def get_cell_vars(self):
        return [name for name, scope in self.varscopes.items()
                if scope == SC_CELL]
class ModuleScope(Scope):
    """The outermost (global) scope of a module."""
    def __init__(self):
        Scope.__init__(self, "global", None)
    def finished(self):
        # module scope is the root: no names come from enclosing scopes
        self.build_var_scopes({})
class FunctionScope(Scope):
    """Scope of a 'def' (also base for lambdas and genexprs)."""
    generator = False        # set when a 'yield' is seen
    return_with_arg = None   # or the node

    def add_param(self, name):
        """Declare a formal parameter; duplicates are a SyntaxError."""
        name = self.mangle(name)
        if name in self.varroles:
            msg = "duplicate argument '%s' in function definition" % (name,)
            raise SyntaxError(msg)
        self.varroles[name] = ROLE_PARAM

    def add_return(self, node):
        if node.value is not None:
            # record the first 'return expr' that we see, for error checking
            if self.return_with_arg is None:
                self.return_with_arg = node

    def add_yield(self):
        self.generator = True

    def export_names_to_children(self, names_from_enclosing_funcs, newnames):
        # function scopes export their locals/globals to nested scopes,
        # unless static knowledge is lost (bare exec / import *)
        if names_from_enclosing_funcs is None:
            return None
        if not self.locals_fully_known():
            return None
        d = names_from_enclosing_funcs.copy()
        d.update(newnames)
        return d

    def need_passthrough_name(self, name):
        # overrides Scope.need_passthrough_name(), see comments there
        if name not in self.varscopes:
            self.varscopes[name] = SC_FREE
        self.freevars[name] = True

    def _use_var(self):
        # some extra checks just for CPython compatibility -- the logic
        # of build_var_scopes() in symbols.py should be able to detect
        # all the cases that would really produce broken code, but CPython
        # insists on raising SyntaxError in some more cases
        if self._is_nested_function():
            if self.bare_exec:
                raise SyntaxError("for CPython compatibility, an unqualified "
                                  "exec is not allowed here")
            if self.import_star:
                raise SyntaxError("for CPython compatibility, import * "
                                  "is not allowed here")

    def _is_nested_function(self):
        # True when any enclosing scope is itself a function scope
        scope = self.parent
        while scope is not None:
            if isinstance(scope, FunctionScope):
                return True
            scope = scope.parent
        return False
class GenExprScope(FunctionScope):
    # Scope of a generator expression.  Each instance gets a unique
    # display name and an implicit parameter that receives the
    # precomputed outermost iterable.
    _counter = Counter(1)
    def __init__(self, parent):
        num = GenExprScope._counter.next()
        FunctionScope.__init__(self, "generator expression<%d>" % num, parent)
        self.add_param('[outmost-iterable]')
class LambdaScope(FunctionScope):
    # Scope of a lambda; gets a unique "lambda.N" display name.
    _counter = Counter(1)
    def __init__(self, parent):
        num = LambdaScope._counter.next()
        FunctionScope.__init__(self, "lambda.%d" % num, parent)
class ClassScope(Scope):
    # Scope of a 'class' body: names are mangled with the class name
    # (the __private -> _Class__private convention).
    def mangle(self, name):
        return mangle(name, self.name)
def hiddenname(name):
    # Decorate *name* so it cannot clash with any real identifier:
    # parentheses are never legal inside a Python name.
    return '.(' + name + ')'
app = gateway.applevel(r'''
def issue_warning(msg, filename, lineno):
import warnings
try:
warnings.warn_explicit(msg, SyntaxWarning, filename, lineno,
None, None)
except SyntaxWarning:
raise SyntaxError(msg, filename, lineno)
''')
_issue_warning = app.interphook('issue_warning')
def issue_warning(space, msg, filename, lineno):
    # Interp-level entry point: wrap all arguments before calling the
    # applevel helper.
    w_msg = space.wrap(msg)
    w_filename = space.wrap(filename)
    w_lineno = space.wrap(lineno)
    _issue_warning(space, w_msg, w_filename, w_lineno)
class SymbolVisitor(ast.ASTVisitor):
    """AST walk that builds the Scope tree and records, per scope,
    which names are defined, used, parameters, or declared global."""
    def __init__(self, space):
        self.space = space
        self.scope_stack = []
        # stack of booleans: is the currently visited node an
        # assignment target?
        self.assign_stack = [ False ]
    def cur_assignment(self):
        return self.assign_stack[-1]
    def push_assignment(self, val ):
        self.assign_stack.append( val )
    def pop_assignment(self):
        self.assign_stack.pop()
    def push_scope( self, scope ):
        self.scope_stack.append( scope )
    def pop_scope( self ):
        self.scope_stack.pop()
    def cur_scope( self ):
        return self.scope_stack[-1]
    # nodes that define new scopes
    def visitModule(self, node):
        scope = self.module = node.scope = ModuleScope()
        if node.w_doc is not None:
            scope.add_def('__doc__')
        self.push_scope(scope)
        node.node.accept(self)
        self.pop_scope()
        scope.finished()
    def visitExpression(self, node):
        # 'eval' mode: a module scope with no docstring handling.
        scope = self.module = node.scope = ModuleScope()
        self.push_scope(scope)
        node.node.accept(self)
        self.pop_scope()
        scope.finished()
    def visitFunction(self, node):
        parent = self.cur_scope()
        # decorators and default expressions are evaluated in the
        # enclosing scope, not in the function's own scope
        if node.decorators:
            node.decorators.accept(self)
        parent.add_def(node.name)
        for n in node.defaults:
            n.accept( self )
        scope = FunctionScope(node.name, parent)
        node.scope = scope
        self._do_args(scope, node.argnames)
        self.push_scope( scope )
        node.code.accept(self )
        self.pop_scope()
    def visitExec(self, node):
        # a bare 'exec' (no 'in globals, locals') defeats static name
        # resolution in the enclosing function scope
        if not (node.globals or node.locals):
            parent = self.cur_scope()
            parent.bare_exec = True
        ast.ASTVisitor.visitExec(self, node)
    def visitGenExpr(self, node ):
        parent = self.cur_scope()
        scope = GenExprScope(parent)
        node.scope = scope
        self.push_scope(scope)
        node.code.accept(self)
        self.pop_scope()
    def visitGenExprInner(self, node ):
        for genfor in node.quals:
            genfor.accept( self )
        node.expr.accept( self )
    def visitGenExprFor(self, node ):
        self.push_assignment( True )
        node.assign.accept(self)
        self.pop_assignment()
        if node.is_outmost:
            # the outermost iterable is evaluated in the parent scope
            # and passed in as the implicit '[outmost-iterable]' param
            curscope = self.cur_scope()
            self.pop_scope()
            node.iter.accept(self) # in the parent scope
            self.push_scope(curscope)
        else:
            node.iter.accept(self )
        for if_ in node.ifs:
            if_.accept( self )
    def visitGenExprIf(self, node ):
        node.test.accept( self )
    def visitLambda(self, node ):
        # Lambda is an expression, so it could appear in an expression
        # context where assign is passed.  The transformer should catch
        # any code that has a lambda on the left-hand side.
        assert not self.cur_assignment()
        parent = self.cur_scope()
        for n in node.defaults:
            n.accept( self )
        scope = LambdaScope(parent)
        node.scope = scope
        self._do_args(scope, node.argnames)
        self.push_scope(scope)
        node.code.accept(self)
        self.pop_scope()
    def _do_args(self, scope, args):
        # Register formal parameters; tuple arguments are flattened.
        for arg in args:
            if isinstance( arg, ast.AssName ):
                scope.add_param( arg.name )
            elif isinstance( arg, ast.AssTuple ):
                self._do_args( scope, arg.flatten() )
            else:
                #msg = "Argument list contains %s of type %s" % (arg, type(arg) )
                msg = "Argument list contains ASTNodes other than AssName or AssTuple"
                raise TypeError( msg )
    def visitClass(self, node):
        parent = self.cur_scope()
        parent.add_def(node.name)
        # base-class expressions are evaluated in the enclosing scope
        for n in node.bases:
            n.accept(self)
        scope = ClassScope(node.name, parent)
        if node.w_doc is not None:
            scope.add_def('__doc__')
        scope.add_def('__module__')
        node.scope = scope
        self.push_scope( scope )
        node.code.accept(self)
        self.pop_scope()
    # name can be a def or a use
    # XXX a few calls and nodes expect a third "assign" arg that is
    # true if the name is being used as an assignment.  only
    # expressions contained within statements may have the assign arg.
    def visitName(self, node ):
        scope = self.cur_scope()
        if self.cur_assignment():
            scope.add_def(node.varname)
        else:
            scope.add_use(node.varname)
    # operations that bind new names
    def visitFor(self, node ):
        self.push_assignment( True )
        node.assign.accept( self )
        self.pop_assignment()
        node.list.accept( self )
        node.body.accept( self )
        if node.else_:
            node.else_.accept( self )
    def visitFrom(self, node ):
        scope = self.cur_scope()
        for name, asname in node.names:
            if name == "*":
                # 'import *' defeats static name resolution
                scope.import_star = True
                continue
            scope.add_def(asname or name)
    def visitImport(self, node ):
        scope = self.cur_scope()
        for name, asname in node.names:
            # 'import a.b' (without 'as') binds only the top package 'a'
            i = name.find(".")
            if i >= 0:
                name = name[:i]
            scope.add_def(asname or name)
    def visitGlobal(self, node ):
        scope = self.cur_scope()
        for name in node.names:
            prevrole = scope.add_global(name)
            if prevrole == ROLE_PARAM:
                msg = "name '%s' is a function parameter and declared global"
                raise SyntaxError(msg % (name,))
            elif prevrole == ROLE_DEFINED:
                msg = "name '%s' is assigned to before global declaration"
                issue_warning(self.space, msg % (name,),
                              node.filename, node.lineno)
            elif prevrole == ROLE_USED:
                msg = "name '%s' is used prior to global declaration"
                issue_warning(self.space, msg % (name,),
                              node.filename, node.lineno)
    def visitAssign(self, node ):
        """Propagate assignment flag down to child nodes.
        The Assign node doesn't itself contains the variables being
        assigned to.  Instead, the children in node.nodes are visited
        with the assign flag set to true.  When the names occur in
        those nodes, they are marked as defs.
        Some names that occur in an assignment target are not bound by
        the assignment, e.g. a name occurring inside a slice.  The
        visitor handles these nodes specially; they do not propagate
        the assign flag to their children.
        """
        self.push_assignment( True )
        for n in node.nodes:
            n.accept( self )
        self.pop_assignment()
        node.expr.accept( self )
    def visitAssName(self, node ):
        scope = self.cur_scope()
        scope.add_def(node.name)
    def visitAssAttr(self, node ):
        # 'x.attr = ...' binds nothing; 'x' itself is only used
        self.push_assignment( False )
        node.expr.accept( self )
        self.pop_assignment()
    def visitSubscript(self, node ):
        # 'x[i] = ...' binds nothing; both x and i are only used
        self.push_assignment( False )
        node.expr.accept( self )
        node.sub.accept( self )
        self.pop_assignment()
    def visitSlice(self, node ):
        self.push_assignment( False )
        node.expr.accept( self )
        if node.lower:
            node.lower.accept( self )
        if node.upper:
            node.upper.accept( self )
        self.pop_assignment()
    def visitAugAssign(self, node ):
        # If the LHS is a name, then this counts as assignment.
        # Otherwise, it's just use.
        node.node.accept( self )
        if isinstance(node.node, ast.Name):
            # NOTE(review): the target Name is visited twice, once as a
            # use and once as a def -- mirrors CPython's compiler package
            self.push_assignment( True ) # XXX worry about this
            node.node.accept( self )
            self.pop_assignment()
        node.expr.accept( self )
    # prune if statements if tests are false
    # a yield statement signals a generator
    def visitYield(self, node ):
        scope = self.cur_scope()
        scope.add_yield()
        node.value.accept( self )
    def visitReturn(self, node):
        scope = self.cur_scope()
        scope.add_return(node)
        if node.value is not None:
            node.value.accept(self)
    def visitCondExpr(self, node):
        issue_warning(self.space, "conditional expression",
                      node.filename, node.lineno)
        ast.ASTVisitor.visitCondExpr(self, node)
def sort(l):
    # Return a sorted copy of *l* without mutating the argument.
    return sorted(l)
def list_eq(l1, l2):
    # True if both lists hold the same elements, ignoring order.
    return sorted(l1) == sorted(l2)
# Debug harness: compare our symbol pass against CPython's 'symtable'
# module on the files given on the command line.
# NOTE(review): SymbolVisitor() is called below without the required
# 'space' argument, and 's.scopes' no longer exists -- this harness is
# broken, as the inline comment admits.  Kept for reference only.
if __name__ == "__main__":
    import sys
    from pypy.interpreter.astcompiler import parseFile
    import symtable
    def get_names(syms):
        # all symtable names except compiler-generated ones ('_[...]', '.N')
        return [s for s in [s.get_name() for s in syms.get_symbols()]
                if not (s.startswith('_[') or s.startswith('.'))]
    for file in sys.argv[1:]:
        print file
        f = open(file)
        buf = f.read()
        f.close()
        syms = symtable.symtable(buf, file, "exec")
        mod_names = get_names(syms)
        tree = parseFile(file)
        s = SymbolVisitor()
        tree.accept(s)
        # compare module-level symbols
        names2 = tree.scope.get_names()
        if not list_eq(mod_names, names2):
            print
            print "oops", file
            print sort(mod_names)
            print sort(names2)
            sys.exit(-1)
        d = {}
        # this part won't work anymore
        d.update(s.scopes)
        del d[tree]
        scopes = d.values()
        del d
        for s in syms.get_symbols():
            if s.is_namespace():
                l = [sc for sc in scopes
                     if sc.name == s.get_name()]
                if len(l) > 1:
                    print "skipping", s.get_name()
                else:
                    if not list_eq(get_names(s.get_namespace()),
                                   l[0].get_names()):
                        print s.get_name()
                        print sort(get_names(s.get_namespace()))
                        print sort(l[0].get_names())
                        sys.exit(-1)
| Python |
import imp
import os
import marshal
import struct
import sys
#from pypy.interpreter.astcompiler import ast, parse, walk, syntax
from pypy.interpreter.astcompiler import ast
from pypy.interpreter.astcompiler import pyassem, misc, future, symbols
from pypy.interpreter.astcompiler.consts import SC_LOCAL, SC_GLOBAL, \
SC_FREE, SC_CELL, SC_DEFAULT, OP_APPLY, OP_ASSIGN, OP_DELETE, OP_NONE
from pypy.interpreter.astcompiler.consts import CO_VARARGS, CO_VARKEYWORDS, \
CO_NEWLOCALS, CO_NESTED, CO_GENERATOR, CO_GENERATOR_ALLOWED, \
CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT
from pypy.interpreter.pyparser.error import SyntaxError
# drop VERSION dependency since it the ast transformer for 2.4 doesn't work with 2.3 anyway
VERSION = 2
callfunc_opcode_info = [
    # (Have *args, Have **args) : opcode
    "CALL_FUNCTION",
    "CALL_FUNCTION_KW",
    "CALL_FUNCTION_VAR",
    "CALL_FUNCTION_VAR_KW",
    ]
# Kinds of pending setup blocks, pushed on CodeGenerator.setups while
# the corresponding construct is being compiled.
LOOP = 1
EXCEPT = 2
TRY_FINALLY = 3
END_FINALLY = 4
from pypy.module.__builtin__.__init__ import BUILTIN_TO_INDEX
def compileFile(filename, display=0):
    """Compile the Python source in *filename* and write *filename*c.

    display: if true, pretty-print the AST during compilation.
    Any exception from the compiler (e.g. SyntaxError) propagates and
    no output file is written.
    """
    # Fix: close the files even when read()/dump() raises; the previous
    # 'try/except SyntaxError: raise/else' was a no-op construct.
    f = open(filename, 'U')
    try:
        buf = f.read()
    finally:
        f.close()
    mod = Module(buf, filename)
    mod.compile(display)
    f = open(filename + "c", "wb")
    try:
        mod.dump(f)
    finally:
        f.close()
def compile(source, filename, mode, flags=None, dont_inherit=None):
    """Replacement for builtin compile() function"""
    if flags is not None or dont_inherit is not None:
        raise RuntimeError("not implemented yet")
    # dispatch on the compile mode
    factories = {"single": Interactive, "exec": Module, "eval": Expression}
    if mode not in factories:
        raise ValueError("compile() 3rd arg must be 'exec' or "
                         "'eval' or 'single'")
    gen = factories[mode](source, filename)
    gen.compile()
    return gen.code
class AbstractCompileMode:
    """Common driver for the three compile modes (exec/eval/single)."""

    def __init__(self, source, filename):
        self.source = source
        self.filename = filename
        self.code = None     # filled in by subclass compile()

    def _get_tree(self):
        # NOTE(review): 'parse' and 'syntax' are not among this module's
        # imports (the import that provided them is commented out), so
        # calling this would raise NameError -- confirm this path is used.
        tree = parse(self.source, self.mode)
        misc.set_filename(self.filename, tree)
        syntax.check(tree)
        return tree

    def compile(self):
        pass # implemented by subclass

    def getCode(self):
        """Return the compiled code object, or None before compile()."""
        return self.code
class Expression(AbstractCompileMode):
    # 'eval' mode: compile a single expression.
    mode = "eval"
    def compile(self):
        ast_tree = self._get_tree()
        codegen = ExpressionCodeGenerator(ast_tree)
        self.code = codegen.getCode()
class Interactive(AbstractCompileMode):
    # 'single' mode: compile one interactive statement.
    mode = "single"
    def compile(self):
        ast_tree = self._get_tree()
        codegen = InteractiveCodeGenerator(ast_tree)
        self.code = codegen.getCode()
class Module(AbstractCompileMode):
    """'exec' mode: compile a whole module; can emit a .pyc image."""
    mode = "exec"
    def compile(self, display=0):
        # display: pretty-print the AST while compiling.
        tree = self._get_tree()
        gen = ModuleCodeGenerator(tree)
        if display:
            import pprint
            # Fix: was 'print pprint.pprint(tree)', which printed the
            # tree and then a spurious 'None' (pprint returns None).
            pprint.pprint(tree)
        self.code = gen.getCode()
    def dump(self, f):
        # Write a complete .pyc image: header, then marshalled code.
        f.write(self.getPycHeader())
        marshal.dump(self.code, f)
    MAGIC = imp.get_magic()
    def getPycHeader(self):
        # compile.c uses marshal to write a long directly, with
        # calling the interface that would also generate a 1-byte code
        # to indicate the type of the value.  simplest way to get the
        # same effect is to call marshal and then skip the code.
        mtime = os.path.getmtime(self.filename)
        # Fix: getmtime() may return a float; the header field is a
        # 32-bit little-endian int, so truncate explicitly for pack().
        mtime = struct.pack('<i', int(mtime))
        return self.MAGIC + mtime
def is_constant_false(space, node):
    # 1 if *node* is a Const whose value is false, else 0.
    if not isinstance(node, ast.Const):
        return 0
    if space.is_true(node.value):
        return 0
    return 1
def is_constant_true(space, node):
    # 1 if *node* is a Const whose value is true, else 0.
    if not isinstance(node, ast.Const):
        return 0
    if space.is_true(node.value):
        return 1
    return 0
class CodeGenerator(ast.ASTVisitor):
    """Defines basic code generator for Python bytecode
    """
    # whether the symbol pass proved all local names statically known
    # (no bare exec / import *); may be overridden per-instance
    localsfullyknown = False
    def __init__(self, space, graph):
        self.space = space
        self.setups = []                    # stack of (kind, block) for active setups
        self.last_lineno = -1               # last lineno emitted via SET_LINENO
        self._div_op = "BINARY_DIVIDE"
        self.genexpr_cont_stack = []
        self.graph = graph
        self.optimized = 0 # is namespace access optimized?
        # XXX set flags based on future features
        futures = self.get_module().futures
        for feature in futures:
            if feature == "division":
                self.graph.setFlag(CO_FUTURE_DIVISION)
                self._div_op = "BINARY_TRUE_DIVIDE"
            elif feature == "generators":
                self.graph.setFlag(CO_GENERATOR_ALLOWED)
            elif feature == "with_statement":
                self.graph.setFlag(CO_FUTURE_WITH_STATEMENT)
    def emit(self, inst ):
        # graph delegation: emit an argument-less instruction
        return self.graph.emit( inst )
    def emitop(self, inst, op):
        # graph delegation: emit an instruction with a name argument
        return self.graph.emitop_name( inst, op )
    def emitop_obj(self, inst, obj):
        # graph delegation: emit an instruction with a wrapped-object arg
        return self.graph.emitop_obj( inst, obj )
    def emitop_code(self, inst, gen):
        # graph delegation: emit an instruction whose arg is a sub-generator
        return self.graph.emitop_code( inst, gen )
    def emitop_int(self, inst, op):
        # graph delegation: emit an instruction with an int argument
        assert isinstance(op, int)
        return self.graph.emitop_int( inst, op )
    def emitop_block(self, inst, block):
        # graph delegation: emit a jump-style instruction targeting a block
        return self.graph.emitop_block( inst, block )
    def nextBlock(self, block=None ):
        """graph delegation"""
        return self.graph.nextBlock( block )
    def startBlock(self, block ):
        """graph delegation"""
        return self.graph.startBlock( block )
    def newBlock(self):
        """graph delegation"""
        return self.graph.newBlock()
    def setDocstring(self, doc):
        """graph delegation"""
        return self.graph.setDocstring( doc )
    def getCode(self):
        """Return a code object"""
        return self.graph.getCode()
    def mangle(self, name):
        # delegate private-name mangling to the current scope
        return self.scope.mangle(name)
    def parseSymbols(self, tree):
        # run the symbol pass over *tree*; it attaches .scope to nodes
        s = symbols.SymbolVisitor(self.space)
        tree.accept(s)
def get_module(self):
raise RuntimeError, "should be implemented by subclasses"
# Next five methods handle name access
def storeName(self, name, lineno):
if name in ('None', '__debug__'):
raise SyntaxError('assignment to %s is not allowed' % name, lineno)
self._nameOp('STORE', name)
    def loadName(self, name, lineno):
        # emit the scope-appropriate LOAD op for *name*
        self._nameOp('LOAD', name)
def delName(self, name, lineno):
if name in ('None', '__debug__'):
raise SyntaxError('deleting %s is not allowed' % name, lineno)
scope = self.scope.check_name(self.mangle(name))
if scope == SC_CELL:
raise SyntaxError("cannot delete variable '%s' "
"referenced in nested scope" % name, lineno)
self._nameOp('DELETE', name)
    def _nameOp(self, prefix, name):
        # Emit '<prefix>_{NAME,FAST,GLOBAL,DEREF}' depending on the
        # scope classification of the (mangled) name.
        name = self.mangle(name)
        scope = self.scope.check_name(name)
        if scope == SC_LOCAL:
            if not self.optimized:
                self.emitop(prefix + '_NAME', name)
            else:
                self.emitop(prefix + '_FAST', name)
        elif scope == SC_GLOBAL:
            self.emitop(prefix + '_GLOBAL', name)
        elif scope == SC_FREE or scope == SC_CELL:
            self.emitop(prefix + '_DEREF', name)
        elif scope == SC_DEFAULT:
            # unclassified name: can still use _GLOBAL when the namespace
            # is optimized and all locals are statically known
            if self.optimized and self.localsfullyknown:
                self.emitop(prefix + '_GLOBAL', name)
            else:
                self.emitop(prefix + '_NAME', name)
        else:
            raise RuntimeError, "unsupported scope for var %s in %s: %d" % \
                  (name, self.scope.name, scope)
def _implicitNameOp(self, prefix, name):
"""Emit name ops for names generated implicitly by for loops
The interpreter generates names that start with a period or
dollar sign. The symbol table ignores these names because
they aren't present in the program text.
"""
if self.optimized:
self.emitop(prefix + '_FAST', name)
else:
self.emitop(prefix + '_NAME', name)
# The set_lineno() function and the explicit emit() calls for
# SET_LINENO below are only used to generate the line number table.
# As of Python 2.3, the interpreter does not have a SET_LINENO
# instruction. pyassem treats SET_LINENO opcodes as a special case.
def set_lineno(self, node, force=False):
"""Emit SET_LINENO if necessary.
The instruction is considered necessary if the node has a
lineno attribute and it is different than the last lineno
emitted.
Returns true if SET_LINENO was emitted.
There are no rules for when an AST node should have a lineno
attribute. The transformer and AST code need to be reviewed
and a consistent policy implemented and documented. Until
then, this method works around missing line numbers.
"""
if node is None:
return False
lineno = node.lineno
if lineno != -1 and (lineno != self.last_lineno
or force):
self.emitop_int('SET_LINENO', lineno)
self.last_lineno = lineno
return True
return False
    # The first few visitor methods handle nodes that generate new
    # code objects.  They use class attributes to determine what
    # specialized code generators to use.
    def visitModule(self, node):
        # Top-level module code: optional docstring store, the body,
        # then an implicit 'return None'.
        space = self.space
        self.parseSymbols(node)
        assert node.scope is not None
        self.scope = node.scope
        self.emitop_int('SET_LINENO', 0)
        if not space.is_w(node.w_doc, space.w_None):
            self.setDocstring(node.w_doc)
            self.set_lineno(node)
            self.emitop_obj('LOAD_CONST', node.w_doc)
            self.storeName('__doc__', node.lineno)
        node.node.accept( self )
        self.emitop_obj('LOAD_CONST', space.w_None )
        self.emit('RETURN_VALUE')
    def visitExpression(self, node):
        # 'eval' mode: compile a single expression and return its value.
        self.set_lineno(node)
        self.parseSymbols(node)
        assert node.scope is not None
        self.scope = node.scope
        node.node.accept( self )
        self.emit('RETURN_VALUE')
    def visitFunction(self, node):
        # 'def': build the function object, then bind it to its name.
        self._visitFuncOrLambda(node, isLambda=0)
        space = self.space
        if not space.is_w(node.w_doc, space.w_None):
            self.setDocstring(node.w_doc)
        self.storeName(node.name, node.lineno)
    def visitLambda(self, node):
        # lambda: like a def, but anonymous and left on the stack
        self._visitFuncOrLambda(node, isLambda=1)
    def _visitFuncOrLambda(self, node, isLambda=0):
        # Compile the body with a sub-generator, then emit the code to
        # build the function object: decorators, defaults, closure cells
        # (if any), MAKE_FUNCTION/MAKE_CLOSURE, decorator calls.
        if not isLambda and node.decorators:
            for decorator in node.decorators.nodes:
                decorator.accept( self )
            ndecorators = len(node.decorators.nodes)
        else:
            ndecorators = 0
        gen = FunctionCodeGenerator(self.space, node, isLambda,
                                    self.get_module())
        node.code.accept( gen )
        gen.finish()
        self.set_lineno(node)
        for default in node.defaults:
            default.accept( self )
        frees = gen.scope.get_free_vars_in_parent()
        if frees:
            # free variables: push the cells, then MAKE_CLOSURE
            for name in frees:
                self.emitop('LOAD_CLOSURE', name)
            self.emitop_code('LOAD_CONST', gen)
            self.emitop_int('MAKE_CLOSURE', len(node.defaults))
        else:
            self.emitop_code('LOAD_CONST', gen)
            self.emitop_int('MAKE_FUNCTION', len(node.defaults))
        for i in range(ndecorators):
            self.emitop_int('CALL_FUNCTION', 1)
    def visitClass(self, node):
        # 'class': compile the body, call it to get the namespace dict,
        # BUILD_CLASS from (name, bases, dict), bind the result.
        gen = ClassCodeGenerator(self.space, node,
                                 self.get_module())
        node.code.accept( gen )
        gen.finish()
        self.set_lineno(node)
        self.emitop_obj('LOAD_CONST', self.space.wrap(node.name) )
        for base in node.bases:
            base.accept( self )
        self.emitop_int('BUILD_TUPLE', len(node.bases))
        frees = gen.scope.get_free_vars_in_parent()
        if frees:
            for name in frees:
                self.emitop('LOAD_CLOSURE', name)
            self.emitop_code('LOAD_CONST', gen)
            self.emitop_int('MAKE_CLOSURE', 0)
        else:
            self.emitop_code('LOAD_CONST', gen)
            self.emitop_int('MAKE_FUNCTION', 0)
        self.emitop_int('CALL_FUNCTION', 0)
        self.emit('BUILD_CLASS')
        self.storeName(node.name, node.lineno)
    # The rest are standard visitor methods
    # The next few implement control-flow statements
    def visitIf(self, node):
        # Chain of (test, suite) pairs: a false test jumps to the next
        # test; a taken suite jumps to 'end'.  Constant-false tests are
        # dropped entirely.
        end = self.newBlock()
        for test, suite in node.tests:
            if is_constant_false(self.space, test):
                # XXX will need to check generator stuff here
                continue
            self.set_lineno(test)
            test.accept( self )
            nextTest = self.newBlock()
            self.emitop_block('JUMP_IF_FALSE', nextTest)
            self.nextBlock()
            self.emit('POP_TOP')
            suite.accept( self )
            self.emitop_block('JUMP_FORWARD', end)
            self.startBlock(nextTest)
            self.emit('POP_TOP')
        if node.else_:
            node.else_.accept( self )
        self.nextBlock(end)
    def visitWhile(self, node):
        # SETUP_LOOP / test / body / JUMP_ABSOLUTE back, with an 'else_'
        # landing block for the normal (test-false) exit.
        self.set_lineno(node)
        loop = self.newBlock()
        else_ = self.newBlock()
        after = self.newBlock()
        self.emitop_block('SETUP_LOOP', after)
        self.nextBlock(loop)
        self.setups.append((LOOP, loop))
        self.set_lineno(node, force=True)
        if is_constant_true(self.space, node.test):
            # 'while True': no test, no conditional jump
            self.nextBlock()
        else:
            node.test.accept( self )
            # NOTE(review): 'else_' is always a fresh (truthy) block, so
            # 'else_ or after' always selects 'else_' -- confirm intent.
            self.emitop_block('JUMP_IF_FALSE', else_ or after)
            self.nextBlock()
            self.emit('POP_TOP')
        node.body.accept( self )
        self.emitop_block('JUMP_ABSOLUTE', loop)
        self.startBlock(else_) # or just the POPs if not else clause
        self.emit('POP_TOP')
        self.emit('POP_BLOCK')
        self.setups.pop()
        if node.else_:
            node.else_.accept( self )
        self.nextBlock(after)
    def visitFor(self, node):
        # GET_ITER / FOR_ITER loop; 'anchor' is the exhausted-iterator
        # exit, 'after' lies past the optional else-clause.
        start = self.newBlock()
        anchor = self.newBlock()
        after = self.newBlock()
        self.setups.append((LOOP, start))
        self.set_lineno(node)
        self.emitop_block('SETUP_LOOP', after)
        node.list.accept( self )
        self.emit('GET_ITER')
        self.nextBlock(start)
        self.set_lineno(node, force=1)
        self.emitop_block('FOR_ITER', anchor)
        node.assign.accept( self )
        node.body.accept( self )
        self.emitop_block('JUMP_ABSOLUTE', start)
        self.nextBlock(anchor)
        self.emit('POP_BLOCK')
        self.setups.pop()
        if node.else_:
            node.else_.accept( self )
        self.nextBlock(after)
def visitBreak(self, node):
if len(self.setups) == 0:
raise SyntaxError( "'break' outside loop", node.lineno)
self.set_lineno(node)
self.emit('BREAK_LOOP')
    def visitContinue(self, node):
        # 'continue' directly jumps when the innermost setup is the loop;
        # inside try/except it needs CONTINUE_LOOP, and inside a finally
        # clause it is not supported at all.
        if len(self.setups) == 0:
            raise SyntaxError( "'continue' not properly in loop", node.lineno)
        kind, block = self.setups[-1]
        if kind == LOOP:
            self.set_lineno(node)
            self.emitop_block('JUMP_ABSOLUTE', block)
            self.nextBlock()
        elif kind == EXCEPT or kind == TRY_FINALLY:
            self.set_lineno(node)
            # find the block that starts the loop
            top = len(self.setups)
            loop_block = None
            while top > 0:
                top = top - 1
                kind, loop_block = self.setups[top]
                if kind == LOOP:
                    break
                elif kind == END_FINALLY:
                    msg = "'continue' not supported inside 'finally' clause"
                    raise SyntaxError( msg, node.lineno )
            if kind != LOOP:
                raise SyntaxError( "'continue' not properly in loop", node.lineno)
            self.emitop_block('CONTINUE_LOOP', loop_block)
            self.nextBlock()
        elif kind == END_FINALLY:
            msg = "'continue' not supported inside 'finally' clause"
            raise SyntaxError( msg, node.lineno )
    def _visitTest(self, node, jump):
        # Short-circuit and/or: every operand but the last jumps to
        # 'end' (leaving its value on the stack) via *jump*.
        end = self.newBlock()
        for child in node.nodes[:-1]:
            child.accept( self )
            self.emitop_block(jump, end)
            self.nextBlock()
            self.emit('POP_TOP')
        node.nodes[-1].accept( self )
        self.nextBlock(end)
    def visitAnd(self, node):
        # short-circuit on the first false operand
        self._visitTest(node, 'JUMP_IF_FALSE')
    def visitOr(self, node):
        # short-circuit on the first true operand
        self._visitTest(node, 'JUMP_IF_TRUE')
    def visitCondExpr(self, node):
        # 'a if test else b'
        node.test.accept(self)
        end = self.newBlock()
        falseblock = self.newBlock()
        self.emitop_block('JUMP_IF_FALSE', falseblock)
        # NOTE(review): other JUMP_IF_FALSE sites call self.nextBlock()
        # before the POP_TOP; this one does not -- confirm it is benign.
        self.emit('POP_TOP')
        node.true_expr.accept(self)
        self.emitop_block('JUMP_FORWARD', end)
        self.nextBlock(falseblock)
        self.emit('POP_TOP')
        node.false_expr.accept(self)
        self.nextBlock(end)
    # counter used to make the hidden $exit/$var names unique per 'with'
    __with_count = 0
    def visitWith(self, node):
        # 'with expr [as var]: body' -- stores __exit__ in a hidden
        # local, calls __enter__, and wraps the body in SETUP_FINALLY /
        # WITH_CLEANUP / END_FINALLY.
        node.expr.accept(self)
        self.emit('DUP_TOP')
        ## exit = ctx.__exit__
        self.emitop('LOAD_ATTR', '__exit__')
        exit = "$exit%d" % self.__with_count
        var = "$var%d" % self.__with_count
        self.__with_count = self.__with_count + 1
        self._implicitNameOp('STORE', exit)
        self.emitop('LOAD_ATTR', '__enter__')
        self.emitop_int('CALL_FUNCTION', 0)
        finally_block = self.newBlock()
        body = self.newBlock()
        self.setups.append((TRY_FINALLY, body))
        if node.var is not None:      # VAR is present
            # stash __enter__()'s result until inside the SETUP_FINALLY,
            # then assign it to the 'as' target
            self._implicitNameOp('STORE', var)
            self.emitop_block('SETUP_FINALLY', finally_block)
            self.nextBlock(body)
            self._implicitNameOp('LOAD', var)
            self._implicitNameOp('DELETE', var)
            node.var.accept(self)
        else:
            self.emit('POP_TOP')
            self.emitop_block('SETUP_FINALLY', finally_block)
            self.nextBlock(body)
        node.body.accept(self)
        self.emit('POP_BLOCK')
        self.setups.pop()
        self.emitop_obj('LOAD_CONST', self.space.w_None) # WITH_CLEANUP checks for normal exit
        self.nextBlock(finally_block)
        self.setups.append((END_FINALLY, finally_block))
        # find local variable with is context.__exit__
        self._implicitNameOp('LOAD', exit)
        self._implicitNameOp('DELETE', exit)
        self.emit('WITH_CLEANUP')
        self.emit('END_FINALLY')
        self.setups.pop()
    def visitCompare(self, node):
        # Chained comparison 'a < b < c': each intermediate result is
        # DUPed/ROTed so a false link can short-circuit to 'cleanup'.
        node.expr.accept( self )
        cleanup = self.newBlock()
        for op, code in node.ops[:-1]:
            code.accept( self )
            self.emit('DUP_TOP')
            self.emit('ROT_THREE')
            self.emitop('COMPARE_OP', op)
            self.emitop_block('JUMP_IF_FALSE', cleanup)
            self.nextBlock()
            self.emit('POP_TOP')
        # now do the last comparison
        if node.ops:
            op, code = node.ops[-1]
            code.accept( self )
            self.emitop('COMPARE_OP', op)
        if len(node.ops) > 1:
            # discard the leftover operand from a short-circuited chain
            end = self.newBlock()
            self.emitop_block('JUMP_FORWARD', end)
            self.startBlock(cleanup)
            self.emit('ROT_TWO')
            self.emit('POP_TOP')
            self.nextBlock(end)
    # list comprehensions
    # counter used to make the hidden $append name unique per listcomp
    __list_count = 0
    def visitListComp(self, node):
        # Build [] and cache its 'append' bound method in a hidden local;
        # nested for/if clauses are emitted inside-out via 'stack'.
        self.set_lineno(node)
        # setup list
        append = "$append%d" % self.__list_count
        self.__list_count = self.__list_count + 1
        self.emitop_int('BUILD_LIST', 0)
        self.emit('DUP_TOP')
        self.emitop('LOAD_ATTR', 'append')
        self._implicitNameOp('STORE', append)
        stack = []
        i = 0
        for for_ in node.quals:
            assert isinstance(for_, ast.ListCompFor)
            start, anchor = self._visitListCompFor(for_)
            self.genexpr_cont_stack.append( None )
            for if_ in for_.ifs:
                if self.genexpr_cont_stack[-1] is None:
                    self.genexpr_cont_stack[-1] = self.newBlock()
                if_.accept( self )
            stack.insert(0, (start, self.genexpr_cont_stack[-1], anchor))
            self.genexpr_cont_stack.pop()
            i += 1
        self._implicitNameOp('LOAD', append)
        node.expr.accept( self )
        self.emitop_int('CALL_FUNCTION', 1)
        self.emit('POP_TOP')
        # close the loops from the innermost outwards
        for start, cont, anchor in stack:
            if cont:
                skip_one = self.newBlock()
                self.emitop_block('JUMP_FORWARD', skip_one)
                self.startBlock(cont)
                self.emit('POP_TOP')
                self.nextBlock(skip_one)
            self.emitop_block('JUMP_ABSOLUTE', start)
            self.startBlock(anchor)
        self._implicitNameOp('DELETE', append)
        self.__list_count = self.__list_count - 1
    def _visitListCompFor(self, node):
        # Open one 'for' clause of a list comprehension; returns the
        # (loop-start, exhausted-iterator) blocks for later closing.
        start = self.newBlock()
        anchor = self.newBlock()
        node.list.accept( self )
        self.emit('GET_ITER')
        self.nextBlock(start)
        self.set_lineno(node, force=True)
        self.emitop_block('FOR_ITER', anchor)
        self.nextBlock()
        node.assign.accept( self )
        return start, anchor
    def visitListCompIf(self, node):
        # 'if' clause: a false test jumps to the continue block shared
        # by all ifs of the same 'for' (top of genexpr_cont_stack).
        branch = self.genexpr_cont_stack[-1]
        self.set_lineno(node, force=True)
        node.test.accept( self )
        self.emitop_block('JUMP_IF_FALSE', branch)
        self.newBlock()
        self.emit('POP_TOP')
    def visitGenExpr(self, node):
        # Compile the genexpr body as a nested generator function, then
        # call it immediately with the precomputed outermost iterable.
        gen = GenExprCodeGenerator(self.space, node, self.get_module())
        inner = node.code
        assert isinstance(inner, ast.GenExprInner)
        inner.accept( gen )
        gen.finish()
        self.set_lineno(node)
        frees = gen.scope.get_free_vars_in_parent()
        if frees:
            for name in frees:
                self.emitop('LOAD_CLOSURE', name)
            self.emitop_code('LOAD_CONST', gen)
            self.emitop_int('MAKE_CLOSURE', 0)
        else:
            self.emitop_code('LOAD_CONST', gen)
            self.emitop_int('MAKE_FUNCTION', 0)
        # precomputation of outmost iterable
        qual0 = inner.quals[0]
        assert isinstance(qual0, ast.GenExprFor)
        qual0.iter.accept( self )
        self.emit('GET_ITER')
        self.emitop_int('CALL_FUNCTION', 1)
    def visitGenExprInner(self, node):
        # Body of the generated function: nested for/if clauses around a
        # YIELD_VALUE of the expression, then an implicit None return.
        self.set_lineno(node)
        # setup list
        stack = []
        i = 0
        for for_ in node.quals:
            assert isinstance(for_, ast.GenExprFor)
            start, anchor = self._visitGenExprFor(for_)
            self.genexpr_cont_stack.append( None )
            for if_ in for_.ifs:
                if self.genexpr_cont_stack[-1] is None:
                    self.genexpr_cont_stack[-1] = self.newBlock()
                if_.accept( self )
            stack.insert(0, (start, self.genexpr_cont_stack[-1], anchor))
            self.genexpr_cont_stack.pop()
            i += 1
        node.expr.accept( self )
        self.emit('YIELD_VALUE')
        # close the loops from the innermost outwards
        for start, cont, anchor in stack:
            if cont:
                skip_one = self.newBlock()
                self.emitop_block('JUMP_FORWARD', skip_one)
                self.startBlock(cont)
                self.emit('POP_TOP')
                self.nextBlock(skip_one)
            self.emitop_block('JUMP_ABSOLUTE', start)
            self.startBlock(anchor)
        self.emitop_obj('LOAD_CONST', self.space.w_None)
    def _visitGenExprFor(self, node):
        # Open one 'for' clause; the outermost clause iterates the
        # implicit '[outmost-iterable]' parameter instead of its own
        # iterable (which was evaluated in the enclosing scope).
        start = self.newBlock()
        anchor = self.newBlock()
        if node.is_outmost:
            self.loadName('[outmost-iterable]', node.lineno)
        else:
            node.iter.accept( self )
            self.emit('GET_ITER')
        self.nextBlock(start)
        self.set_lineno(node, force=True)
        self.emitop_block('FOR_ITER', anchor)
        self.nextBlock()
        node.assign.accept( self )
        return start, anchor
    def visitGenExprIf(self, node ):
        # 'if' clause of a genexpr; same continue-block scheme as
        # visitListCompIf.
        branch = self.genexpr_cont_stack[-1]
        self.set_lineno(node, force=True)
        node.test.accept( self )
        self.emitop_block('JUMP_IF_FALSE', branch)
        self.newBlock()
        self.emit('POP_TOP')
# exception related
def visitAssert(self, node):
# XXX would be interesting to implement this via a
# transformation of the AST before this stage
if __debug__:
end = self.newBlock()
self.set_lineno(node)
# XXX AssertionError appears to be special case -- it is always
# loaded as a global even if there is a local name. I guess this
# is a sort of renaming op.
self.nextBlock()
node.test.accept( self )
self.emitop_block('JUMP_IF_TRUE', end)
self.nextBlock()
self.emit('POP_TOP')
self.emitop('LOAD_GLOBAL', 'AssertionError')
if node.fail:
node.fail.accept( self )
self.emitop_int('RAISE_VARARGS', 2)
else:
self.emitop_int('RAISE_VARARGS', 1)
self.nextBlock(end)
self.emit('POP_TOP')
def visitRaise(self, node):
self.set_lineno(node)
n = 0
if node.expr1:
node.expr1.accept( self )
n = n + 1
if node.expr2:
node.expr2.accept( self )
n = n + 1
if node.expr3:
node.expr3.accept( self )
n = n + 1
self.emitop_int('RAISE_VARARGS', n)
    def visitTryExcept(self, node):
        # SETUP_EXCEPT around the body; each handler compares the raised
        # exception ('exception match') and falls through to the next
        # handler on mismatch; END_FINALLY re-raises if nothing matched.
        body = self.newBlock()
        handlers = self.newBlock()
        end = self.newBlock()
        if node.else_:
            lElse = self.newBlock()
        else:
            lElse = end
        self.set_lineno(node)
        self.emitop_block('SETUP_EXCEPT', handlers)
        self.nextBlock(body)
        self.setups.append((EXCEPT, body))
        node.body.accept( self )
        self.emit('POP_BLOCK')
        self.setups.pop()
        self.emitop_block('JUMP_FORWARD', lElse)
        self.startBlock(handlers)
        last = len(node.handlers) - 1
        next = None
        for expr, target, body in node.handlers:
            if expr:
                self.set_lineno(expr)
                self.emit('DUP_TOP')
                expr.accept( self )
                self.emitop('COMPARE_OP', 'exception match')
                next = self.newBlock()
                self.emitop_block('JUMP_IF_FALSE', next)
                self.nextBlock()
                self.emit('POP_TOP')
            else:
                next = None
                self.emit('POP_TOP')
            if target:
                target.accept( self )
            else:
                self.emit('POP_TOP')
            self.emit('POP_TOP')
            body.accept( self )
            self.emitop_block('JUMP_FORWARD', end)
            self.nextBlock(next)
            if expr: # XXX
                self.emit('POP_TOP')
        self.emit('END_FINALLY')
        if node.else_:
            self.nextBlock(lElse)
            node.else_.accept( self )
        self.nextBlock(end)
    def visitTryFinally(self, node):
        # SETUP_FINALLY around the body; LOAD_CONST None marks the
        # normal (no-exception) path before the finally suite runs.
        body = self.newBlock()
        final = self.newBlock()
        self.set_lineno(node)
        self.emitop_block('SETUP_FINALLY', final)
        self.nextBlock(body)
        self.setups.append((TRY_FINALLY, body))
        node.body.accept( self )
        self.emit('POP_BLOCK')
        self.setups.pop()
        self.emitop_obj('LOAD_CONST', self.space.w_None)
        self.nextBlock(final)
        self.setups.append((END_FINALLY, final))
        node.final.accept( self )
        self.emit('END_FINALLY')
        self.setups.pop()
# misc
def visitDiscard(self, node):
# Important: this function is overridden in InteractiveCodeGenerator,
# which also has the effect that the following test only occurs in
# non-'single' modes.
if isinstance(node.expr, ast.Const):
return # skip LOAD_CONST/POP_TOP pairs (for e.g. docstrings)
self.set_lineno(node)
node.expr.accept( self )
self.emit('POP_TOP')
    def visitConst(self, node):
        space = self.space
        # only tuple constants get their own line number record here
        if space.is_true(space.isinstance(node.value, space.w_tuple)):
            self.set_lineno(node)
        self.emitop_obj('LOAD_CONST', node.value)
    def visitKeyword(self, node):
        # keyword argument in a call: push the name, then the value
        self.emitop_obj('LOAD_CONST', self.space.wrap(node.name) )
        node.expr.accept( self )
    def visitGlobal(self, node):
        # no code to generate
        pass
    def visitName(self, node):
        # a plain name in expression position: always a load
        self.set_lineno(node)
        self.loadName(node.varname, node.lineno)
    def visitPass(self, node):
        # only the line number record; no instructions
        self.set_lineno(node)
    def visitImport(self, node):
        # 'import a.b [as c]': IMPORT_NAME pushes the top package; with
        # 'as' we walk down to the dotted submodule before storing.
        self.set_lineno(node)
        for name, alias in node.names:
            self.emitop_obj('LOAD_CONST', self.space.w_None)
            self.emitop('IMPORT_NAME', name)
            mod = name.split(".")[0]
            if alias:
                self._resolveDots(name)
                self.storeName(alias, node.lineno)
            else:
                self.storeName(mod, node.lineno)
def visitFrom(self, node):
self.set_lineno(node)
fromlist = [ self.space.wrap(name) for name,alias in node.names ]
self.emitop_obj('LOAD_CONST', self.space.newtuple(fromlist))
self.emitop('IMPORT_NAME', node.modname)
for name, alias in node.names:
if name == '*':
self.namespace = 0
self.emit('IMPORT_STAR')
# There can only be one name w/ from ... import *
assert len(node.names) == 1
return
else:
self.emitop('IMPORT_FROM', name)
self._resolveDots(name)
self.storeName(alias or name, node.lineno)
self.emit('POP_TOP')
def _resolveDots(self, name):
elts = name.split(".")
if len(elts) == 1:
return
for elt in elts[1:]:
self.emitop('LOAD_ATTR', elt)
    def visitGetattr(self, node):
        """Attribute load: evaluate the object, then LOAD_ATTR."""
        node.expr.accept( self )
        self.emitop('LOAD_ATTR', self.mangle(node.attrname))
    # next five implement assignments
    def visitAssign(self, node):
        """Assignment statement, possibly chained (a = b = expr):
        the value is duplicated once per extra target."""
        self.set_lineno(node)
        node.expr.accept( self )
        dups = len(node.nodes) - 1
        for i in range(len(node.nodes)):
            elt = node.nodes[i]
            if i < dups:
                # keep a copy of the value for the remaining targets
                self.emit('DUP_TOP')
            if isinstance(elt, ast.Node):
                elt.accept( self )
    def visitAssName(self, node):
        """Name target: store or delete depending on node.flags."""
        if node.flags == OP_ASSIGN:
            self.storeName(node.name, node.lineno)
        elif node.flags == OP_DELETE:
            self.set_lineno(node)
            self.delName(node.name, node.lineno)
        else:
            assert False, "visitAssName unexpected flags: %d" % node.flags
    def visitAssAttr(self, node):
        """Attribute target: STORE_ATTR / DELETE_ATTR on the evaluated object."""
        node.expr.accept( self )
        if node.flags == OP_ASSIGN:
            if node.attrname == 'None':
                raise SyntaxError('assignment to None is not allowed', node.lineno)
            self.emitop('STORE_ATTR', self.mangle(node.attrname))
        elif node.flags == OP_DELETE:
            if node.attrname == 'None':
                raise SyntaxError('deleting None is not allowed', node.lineno)
            self.emitop('DELETE_ATTR', self.mangle(node.attrname))
        else:
            assert False, "visitAssAttr unexpected flags: %d" % node.flags
    def _visitAssSequence(self, node, op='UNPACK_SEQUENCE'):
        """Tuple/list target: unpack the value, then assign each child.
        Deletions need no unpack opcode — only the child deletes."""
        if findOp(node) != OP_DELETE:
            self.emitop_int(op, len(node.nodes))
        for child in node.nodes:
            child.accept( self )
    visitAssTuple = _visitAssSequence
    visitAssList = _visitAssSequence
    # augmented assignment
    def visitAugAssign(self, node):
        """``x += expr`` etc.: load the target (keeping the pieces needed
        for the store on the stack), apply the in-place opcode, store."""
        self.set_lineno(node)
        node.node.accept( AugLoadVisitor(self) )
        node.expr.accept( self )
        self.emit(self._augmented_opcode[node.op])
        node.node.accept( AugStoreVisitor(self) )
    # source operator -> in-place bytecode name
    _augmented_opcode = {
        '+=' : 'INPLACE_ADD',
        '-=' : 'INPLACE_SUBTRACT',
        '*=' : 'INPLACE_MULTIPLY',
        '/=' : 'INPLACE_DIVIDE',
        '//=': 'INPLACE_FLOOR_DIVIDE',
        '%=' : 'INPLACE_MODULO',
        '**=': 'INPLACE_POWER',
        '>>=': 'INPLACE_RSHIFT',
        '<<=': 'INPLACE_LSHIFT',
        '&=' : 'INPLACE_AND',
        '^=' : 'INPLACE_XOR',
        '|=' : 'INPLACE_OR',
        }
    def visitExec(self, node):
        """``exec expr [in globals [, locals]]``; a missing globals dict
        is signalled by duplicating the locals (or None) on the stack."""
        node.expr.accept( self )
        if node.locals is None:
            self.emitop_obj('LOAD_CONST', self.space.w_None)
        else:
            node.locals.accept( self )
        if node.globals is None:
            self.emit('DUP_TOP')
        else:
            node.globals.accept( self )
        self.emit('EXEC_STMT')
    def visitCallFunc(self, node):
        """Emit a call, trying the optimized CALL_LIKELY_BUILTIN and
        CALL_METHOD forms first, then the generic CALL_FUNCTION* family.

        The generic operand packs the argument counts as (kw << 8) | pos.
        """
        self.set_lineno(node)
        if self.emit_builtin_call(node):
            return
        if self.emit_method_call(node):
            return
        pos = 0
        kw = 0
        node.node.accept( self )
        for arg in node.args:
            arg.accept( self )
            if isinstance(arg, ast.Keyword):
                kw = kw + 1
            else:
                pos = pos + 1
        if node.star_args is not None:
            node.star_args.accept( self )
        if node.dstar_args is not None:
            node.dstar_args.accept( self )
        have_star = node.star_args is not None
        have_dstar = node.dstar_args is not None
        # index selects CALL_FUNCTION / _VAR / _KW / _VAR_KW
        opcode = callfunc_opcode_info[ have_star*2 + have_dstar]
        self.emitop_int(opcode, kw << 8 | pos)
def check_simple_call_args(self, node):
if node.star_args is not None or node.dstar_args is not None:
return False
# check for kw args
for arg in node.args:
if isinstance(arg, ast.Keyword):
return False
return True
    def emit_builtin_call(self, node):
        """Try to emit CALL_LIKELY_BUILTIN for a simple call to a
        known builtin name; return True if the call was emitted.

        Only safe when the name cannot be shadowed locally: either it
        resolves globally, or the local namespace is fully known.
        """
        if not self.space.config.objspace.opcodes.CALL_LIKELY_BUILTIN:
            return False
        if not self.check_simple_call_args(node):
            return False
        func = node.node
        if not isinstance(func, ast.Name):
            return False
        name = func.varname
        scope = self.scope.check_name(name)
        # YYY
        index = BUILTIN_TO_INDEX.get(name, -1)
        if ((scope == SC_GLOBAL or
             (scope == SC_DEFAULT and self.optimized and self.localsfullyknown))
            and index != -1):
            for arg in node.args:
                arg.accept(self)
            # operand packs the builtin table index and the arg count
            self.emitop_int("CALL_LIKELY_BUILTIN", index << 8 | len(node.args))
            return True
        return False
    def emit_method_call(self, node):
        """Try to emit LOOKUP_METHOD/CALL_METHOD for a simple
        ``obj.meth(args)`` call; return True if emitted."""
        if not self.space.config.objspace.opcodes.CALL_METHOD:
            return False
        meth = node.node
        if not isinstance(meth, ast.Getattr):
            return False
        if not self.check_simple_call_args(node):
            return False
        meth.expr.accept(self)
        self.emitop('LOOKUP_METHOD', self.mangle(meth.attrname))
        for arg in node.args:
            arg.accept(self)
        self.emitop_int('CALL_METHOD', len(node.args))
        return True
    def visitPrint(self, node):
        """``print a, b,`` (trailing comma — no newline).

        NOTE(review): body duplicates visitPrintnl except for the final
        newline handling; kept separate to match CPython's compiler.
        """
        self.set_lineno(node)
        if node.dest:
            node.dest.accept( self )
        for child in node.nodes:
            if node.dest:
                # keep the destination file below each printed item
                self.emit('DUP_TOP')
            child.accept( self )
            if node.dest:
                self.emit('ROT_TWO')
                self.emit('PRINT_ITEM_TO')
            else:
                self.emit('PRINT_ITEM')
        if node.dest:
            self.emit('POP_TOP')
    def visitPrintnl(self, node):
        """``print a, b`` — like visitPrint but ends with a newline,
        which also consumes the destination file if present."""
        self.set_lineno(node)
        if node.dest:
            node.dest.accept( self )
        for child in node.nodes:
            if node.dest:
                self.emit('DUP_TOP')
            child.accept( self )
            if node.dest:
                self.emit('ROT_TWO')
                self.emit('PRINT_ITEM_TO')
            else:
                self.emit('PRINT_ITEM')
        if node.dest:
            self.emit('PRINT_NEWLINE_TO')
        else:
            self.emit('PRINT_NEWLINE')
    def visitReturn(self, node):
        """``return [expr]``; a bare return returns None."""
        self.set_lineno(node)
        if node.value is None:
            self.emitop_obj('LOAD_CONST', self.space.w_None)
        else:
            node.value.accept( self )
        self.emit('RETURN_VALUE')
    def visitYield(self, node):
        """``yield expr``; rejected inside try/finally (pre-2.5 rule)."""
        if len(self.setups):
            kind, block = self.setups[-1]
            if kind == TRY_FINALLY:
                raise SyntaxError("'yield' not allowed in a 'try' block "
                                  "with a 'finally' clause",
                                  node.lineno)
        self.set_lineno(node)
        node.value.accept( self )
        self.emit('YIELD_VALUE')
    # slice and subscript stuff
    def visitSlice(self, node):
        return self._visitSlice(node, False)
    def _visitSlice(self, node, aug_flag):
        # aug_flag is used by visitAugSlice
        node.expr.accept( self )
        # slice encodes which bounds are present: bit 1 = lower, bit 2 = upper;
        # it selects among the SLICE+0..SLICE+3 opcode variants
        slice = 0
        if node.lower:
            node.lower.accept( self )
            slice = slice | 1
        if node.upper:
            node.upper.accept( self )
            slice = slice | 2
        if aug_flag:
            # duplicate object (and bounds) so the following store can
            # reuse them after the in-place operation
            if slice == 0:
                self.emit('DUP_TOP')
            elif slice == 3:
                self.emitop_int('DUP_TOPX', 3)
            else:
                self.emitop_int('DUP_TOPX', 2)
        if node.flags == OP_APPLY:
            self.emit('SLICE+%d' % slice)
        elif node.flags == OP_ASSIGN:
            self.emit('STORE_SLICE+%d' % slice)
        elif node.flags == OP_DELETE:
            self.emit('DELETE_SLICE+%d' % slice)
        else:
            assert False, "weird slice %d" % node.flags
def visitSubscript(self, node):
return self._visitSubscript(node, False)
def _visitSubscript(self, node, aug_flag):
node.expr.accept( self )
node.sub.accept( self )
if aug_flag:
self.emitop_int('DUP_TOPX', 2)
if node.flags == OP_APPLY:
self.emit('BINARY_SUBSCR')
elif node.flags == OP_ASSIGN:
self.emit('STORE_SUBSCR')
elif node.flags == OP_DELETE:
self.emit('DELETE_SUBSCR')
# binary ops
def binaryOp(self, node, op):
node.left.accept( self )
node.right.accept( self )
self.emit(op)
    # one thin wrapper per binary AST node; each just picks the opcode
    def visitAdd(self, node):
        return self.binaryOp(node, 'BINARY_ADD')
    def visitSub(self, node):
        return self.binaryOp(node, 'BINARY_SUBTRACT')
    def visitMul(self, node):
        return self.binaryOp(node, 'BINARY_MULTIPLY')
    def visitDiv(self, node):
        # _div_op is true or floor division depending on __future__ flags
        return self.binaryOp(node, self._div_op)
    def visitFloorDiv(self, node):
        return self.binaryOp(node, 'BINARY_FLOOR_DIVIDE')
    def visitMod(self, node):
        return self.binaryOp(node, 'BINARY_MODULO')
    def visitPower(self, node):
        return self.binaryOp(node, 'BINARY_POWER')
    def visitLeftShift(self, node):
        return self.binaryOp(node, 'BINARY_LSHIFT')
    def visitRightShift(self, node):
        return self.binaryOp(node, 'BINARY_RSHIFT')
# unary ops
def unaryOp(self, node, op):
node.expr.accept( self )
self.emit(op)
    # thin wrappers mapping unary AST nodes to their opcodes
    def visitInvert(self, node):
        return self.unaryOp(node, 'UNARY_INVERT')
    def visitUnarySub(self, node):
        return self.unaryOp(node, 'UNARY_NEGATIVE')
    def visitUnaryAdd(self, node):
        return self.unaryOp(node, 'UNARY_POSITIVE')
    def visitUnaryInvert(self, node):
        return self.unaryOp(node, 'UNARY_INVERT')
    def visitNot(self, node):
        return self.unaryOp(node, 'UNARY_NOT')
    def visitBackquote(self, node):
        # `expr` (repr) syntax
        return self.unaryOp(node, 'UNARY_CONVERT')
# bit ops
def bitOp(self, nodes, op):
nodes[0].accept( self )
for node in nodes[1:]:
node.accept( self )
self.emit(op)
    def visitBitand(self, node):
        return self.bitOp(node.nodes, 'BINARY_AND')
    def visitBitor(self, node):
        return self.bitOp(node.nodes, 'BINARY_OR')
    def visitBitxor(self, node):
        return self.bitOp(node.nodes, 'BINARY_XOR')
    # object constructors
    def visitEllipsis(self, node):
        return self.emitop_obj('LOAD_CONST', self.space.w_Ellipsis)
    def visitTuple(self, node):
        """Push every element, then BUILD_TUPLE collects them."""
        self.set_lineno(node)
        for elt in node.nodes:
            elt.accept( self )
        self.emitop_int('BUILD_TUPLE', len(node.nodes))
    def visitList(self, node):
        """Push every element, then BUILD_LIST collects them."""
        self.set_lineno(node)
        for elt in node.nodes:
            elt.accept( self )
        self.emitop_int('BUILD_LIST', len(node.nodes))
    def visitSliceobj(self, node):
        """Extended slice object (``a[x:y:z]``): BUILD_SLICE from parts."""
        for child in node.nodes:
            child.accept( self )
        self.emitop_int('BUILD_SLICE', len(node.nodes))
    def visitDict(self, node):
        """Dict display: start from an empty map, then for each pair
        duplicate the dict, push key and value, rotate the dict copy
        below them and STORE_SUBSCR into it."""
        self.set_lineno(node)
        self.emitop_int('BUILD_MAP', 0)
        for k, v in node.items:
            self.emit('DUP_TOP')
            k.accept( self )
            v.accept( self )
            self.emit('ROT_THREE')
            self.emit('STORE_SUBSCR')
class ModuleCodeGenerator(CodeGenerator):
    """Code generator for a whole module ('exec' compile mode)."""

    def __init__(self, space, tree, futures=None):
        """Compile *tree* into a "<module>" flow graph.

        futures: optional extra __future__ feature names to enable on
        top of those declared in the source.  (The mutable default []
        was replaced by None; behavior is unchanged.)
        """
        graph = pyassem.PyFlowGraph(space, "<module>", tree.filename)
        self.futures = future.find_futures(tree)
        if futures is not None:
            for f in futures:
                if f not in self.futures:
                    self.futures.append(f)
        CodeGenerator.__init__(self, space, graph)
        tree.accept(self) # yuck

    def get_module(self):
        # the module generator is its own "module" for nested scopes
        return self
class ExpressionCodeGenerator(CodeGenerator):
    """Code generator for a single expression ('eval' compile mode)."""

    def __init__(self, space, tree, futures=None):
        """Compile *tree* into an "<expression>" flow graph.

        futures: optional __future__ feature names.  (The mutable
        default [] was replaced by None; a private copy is still made.)
        """
        graph = pyassem.PyFlowGraph(space, "<expression>", tree.filename)
        if futures:
            # copy so later mutation of self.futures cannot leak back
            self.futures = list(futures)
        else:
            self.futures = []
        CodeGenerator.__init__(self, space, graph)
        tree.accept(self) # yuck

    def get_module(self):
        return self
class InteractiveCodeGenerator(CodeGenerator):
    """Code generator for 'single' (interactive) mode: expression
    statements are printed via PRINT_EXPR instead of being discarded."""

    def __init__(self, space, tree, futures=None):
        """Compile *tree* into an "<interactive>" flow graph.

        futures: optional extra __future__ feature names merged with
        those found in the source.  (The mutable default [] was
        replaced by None; behavior is unchanged.)
        """
        graph = pyassem.PyFlowGraph(space, "<interactive>", tree.filename)
        self.futures = future.find_futures(tree)
        if futures is not None:
            for f in futures:
                if f not in self.futures:
                    self.futures.append(f)
        CodeGenerator.__init__(self, space, graph)
        self.set_lineno(tree)
        tree.accept(self) # yuck
        self.emit('RETURN_VALUE')

    def get_module(self):
        return self

    def visitDiscard(self, node):
        # XXX Discard means it's an expression. Perhaps this is a bad
        # name.
        node.expr.accept( self )
        self.emit('PRINT_EXPR')
class AbstractFunctionCode(CodeGenerator):
    """Shared code generation for function-like scopes (functions,
    lambdas, generator expressions)."""
    def __init__(self, space, func, isLambda, mod):
        # mod: the enclosing module generator, returned by get_module()
        self.module = mod
        if isLambda:
            name = "<lambda>"
        else:
            assert isinstance(func, ast.Function)
            name = func.name
        # Find duplicated arguments.
        argnames = {}
        for arg in func.argnames:
            if isinstance(arg, ast.AssName):
                argname = self.mangle(arg.name)
                if argname in argnames:
                    raise SyntaxError("duplicate argument '%s' in function definition" % argname, func.lineno)
                argnames[argname] = 1
            elif isinstance(arg, ast.AssTuple):
                # tuple parameters contribute all their nested names
                for argname in arg.getArgNames():
                    argname = self.mangle(argname)
                    if argname in argnames:
                        raise SyntaxError("duplicate argument '%s' in function definition" % argname, func.lineno)
                    argnames[argname] = 1
        if 'None' in argnames:
            raise SyntaxError('assignment to None is not allowed', func.lineno)
        # rebuild the flat argument-name list for the flow graph; tuple
        # parameters get the hidden '.N' names CPython also uses
        argnames = []
        for i in range(len(func.argnames)):
            var = func.argnames[i]
            if isinstance(var, ast.AssName):
                argnames.append(self.mangle(var.name))
            elif isinstance(var, ast.AssTuple):
                argnames.append('.%d' % (2 * i))
                # (2 * i) just because CPython does that too
        graph = pyassem.PyFlowGraph(space, name, func.filename, argnames,
                                    optimized=self.localsfullyknown,
                                    newlocals=1)
        self.isLambda = isLambda
        CodeGenerator.__init__(self, space, graph)
        self.optimized = 1
        if not isLambda and not space.is_w(func.w_doc, space.w_None):
            self.setDocstring(func.w_doc)
        if func.varargs:
            self.graph.setFlag(CO_VARARGS)
        if func.kwargs:
            self.graph.setFlag(CO_VARKEYWORDS)
        self.set_lineno(func)
        self.generateArgUnpack(func.argnames)
    def get_module(self):
        return self.module
    def finish(self):
        """Terminate the code object: non-lambdas fall off the end by
        returning None; a lambda's body already left its value."""
        self.graph.startExitBlock()
        if not self.isLambda:
            self.emitop_obj('LOAD_CONST', self.space.w_None)
        self.emit('RETURN_VALUE')
    def generateArgUnpack(self, args):
        # unpack each tuple parameter from its hidden '.N' slot
        for i in range(len(args)):
            arg = args[i]
            if isinstance(arg, ast.AssTuple):
                self.emitop('LOAD_FAST', '.%d' % (i * 2))
                self.unpackSequence(arg)
    def unpackSequence(self, tup):
        """Recursively unpack a (possibly nested) tuple parameter."""
        if VERSION > 1:
            self.emitop_int('UNPACK_SEQUENCE', len(tup.nodes))
        else:
            self.emitop_int('UNPACK_TUPLE', len(tup.nodes))
        for elt in tup.nodes:
            if isinstance(elt, ast.AssName):
                self.storeName(elt.name, elt.lineno)
            elif isinstance(elt, ast.AssTuple):
                self.unpackSequence( elt )
            else:
                #raise TypeError( "Got argument %s of type %s" % (elt,type(elt)))
                raise TypeError( "Got unexpected argument" )
    unpackTuple = unpackSequence
class FunctionCodeGenerator(AbstractFunctionCode):
    """Code generator for ordinary functions and lambdas."""
    def __init__(self, space, func, isLambda, mod):
        assert func.scope is not None
        self.scope = func.scope
        self.localsfullyknown = self.scope.locals_fully_known()
        AbstractFunctionCode.__init__(self, space, func, isLambda, mod)
        self.graph.setFreeVars(self.scope.get_free_vars_in_scope())
        self.graph.setCellVars(self.scope.get_cell_vars())
        if self.scope.generator:
            self.graph.setFlag(CO_GENERATOR)
            # a generator may not 'return expr' (pre-2.5 rule)
            if self.scope.return_with_arg is not None:
                node = self.scope.return_with_arg
                raise SyntaxError("'return' with argument inside generator",
                                  node.lineno)
class GenExprCodeGenerator(AbstractFunctionCode):
    """Code generator for generator expressions; compiled like a lambda
    but always flagged as a generator."""
    def __init__(self, space, gexp, mod):
        assert gexp.scope is not None
        self.scope = gexp.scope
        self.localsfullyknown = self.scope.locals_fully_known()
        AbstractFunctionCode.__init__(self, space, gexp, 1, mod)
        self.graph.setFreeVars(self.scope.get_free_vars_in_scope())
        self.graph.setCellVars(self.scope.get_cell_vars())
        self.graph.setFlag(CO_GENERATOR)
class AbstractClassCode(CodeGenerator):
    """Shared code generation for class bodies."""
    def __init__(self, space, klass, module):
        self.module = module
        # class bodies are never 'optimized': names go through the dict
        graph = pyassem.PyFlowGraph( space, klass.name, klass.filename,
                                     optimized=0, klass=1)
        CodeGenerator.__init__(self, space, graph)
        self.graph.setFlag(CO_NEWLOCALS)
        if not space.is_w(klass.w_doc, space.w_None):
            self.setDocstring(klass.w_doc)
    def get_module(self):
        return self.module
    def finish(self):
        # the class body's return value is its local namespace,
        # consumed by BUILD_CLASS
        self.graph.startExitBlock()
        self.emit('LOAD_LOCALS')
        self.emit('RETURN_VALUE')
class ClassCodeGenerator(AbstractClassCode):
    """Code generator for a class statement body."""
    def __init__(self, space, klass, module):
        assert klass.scope is not None
        self.scope = klass.scope
        AbstractClassCode.__init__(self, space, klass, module)
        self.graph.setFreeVars(self.scope.get_free_vars_in_scope())
        self.graph.setCellVars(self.scope.get_cell_vars())
        self.set_lineno(klass)
        # __module__ is seeded from the defining module's __name__
        self.emitop("LOAD_GLOBAL", "__name__")
        self.storeName("__module__", klass.lineno)
        if not space.is_w(klass.w_doc, space.w_None):
            self.emitop_obj("LOAD_CONST", klass.w_doc)
            self.storeName('__doc__', klass.lineno)
def findOp(node):
    """Find the op (DELETE, LOAD, STORE) in an AssTuple tree"""
    # delegates to OpFinder, which also raises on mixed ops
    v = OpFinder()
    node.accept(v)
    return v.op
class OpFinder(ast.ASTVisitor):
    """Determine the single operation (node.flags) used by an
    assignment-target subtree; all targets in one statement must agree."""

    def __init__(self):
        self.op = OP_NONE

    def _note_flags(self, flags):
        # shared logic formerly duplicated across the three visit methods
        if self.op is OP_NONE:
            self.op = flags
        elif self.op != flags:
            raise ValueError("mixed ops in stmt")

    def visitAssName(self, node):
        self._note_flags(node.flags)

    def visitAssAttr(self, node):
        self._note_flags(node.flags)

    def visitSubscript(self, node):
        self._note_flags(node.flags)
class AugLoadVisitor(ast.ASTVisitor):
    """Load phase of augmented assignment: evaluate the target while
    leaving on the stack whatever AugStoreVisitor needs to store back."""
    def __init__(self, main_visitor):
        self.main = main_visitor
    def default(self, node):
        # only Name/Getattr/Slice/Subscript are legal aug-assign targets
        raise RuntimeError("shouldn't arrive here!")
    def visitName(self, node ):
        self.main.loadName(node.varname, node.lineno)
    def visitGetattr(self, node):
        node.expr.accept( self.main )
        # keep the object for the later STORE_ATTR
        self.main.emit('DUP_TOP')
        self.main.emitop('LOAD_ATTR', self.main.mangle(node.attrname))
    def visitSlice(self, node):
        self.main._visitSlice(node, True)
    def visitSubscript(self, node):
        self.main._visitSubscript(node, True)
class AugStoreVisitor(ast.ASTVisitor):
    """Store phase of augmented assignment: rotate the stack so the
    values duplicated by AugLoadVisitor line up for the store opcode."""
    def __init__(self, main_visitor):
        self.main = main_visitor
    def default(self, node):
        raise RuntimeError("shouldn't arrive here!")
    def visitName(self, node):
        self.main.storeName(node.varname, node.lineno)
    def visitGetattr(self, node):
        self.main.emit('ROT_TWO')
        self.main.emitop('STORE_ATTR', self.main.mangle(node.attrname))
    def visitSlice(self, node):
        # recompute the bounds bitmask to pick the STORE_SLICE variant
        slice = 0
        if node.lower:
            slice = slice | 1
        if node.upper:
            slice = slice | 2
        if slice == 0:
            self.main.emit('ROT_TWO')
        elif slice == 3:
            self.main.emit('ROT_FOUR')
        else:
            self.main.emit('ROT_THREE')
        self.main.emit('STORE_SLICE+%d' % slice)
    def visitSubscript(self, node):
        self.main.emit('ROT_THREE')
        self.main.emit('STORE_SUBSCR')
# NOTE(review): neither `sys` nor `compileFile` is visible in this chunk;
# presumably both come from earlier in the module — confirm they exist.
if __name__ == "__main__":
    for file in sys.argv[1:]:
        compileFile(file)
| Python |
from pypy.interpreter.astcompiler import ast
# XXX should probably rename ASTVisitor to ASTWalker
# XXX can it be made even more generic?
class ASTVisitor:
    """Performs a depth-first walk of the AST
    The ASTVisitor will walk the AST, performing either a preorder or
    postorder traversal depending on which method is called.
    methods:
        preorder(tree, visitor)
        postorder(tree, visitor)
        tree: an instance of ast.Node
        visitor: an instance with visitXXX methods
    The ASTVisitor is responsible for walking over the tree in the
    correct order.  For each node, it checks the visitor argument for
    a method named 'visitNodeType' where NodeType is the name of the
    node's class, e.g. Class.  If the method exists, it is called
    with the node as its sole argument.
    The visitor method for a particular node type can control how
    child nodes are visited during a preorder walk.  (It can't control
    the order during a postorder walk, because it is called _after_
    the walk has occurred.)  The ASTVisitor modifies the visitor
    argument by adding a visit method to the visitor; this method can
    be used to visit a child node of arbitrary type.
    """
    VERBOSE = 0
    def __init__(self):
        self.node = None
        # klass -> bound visit method, so getattr happens once per type
        self._cache = {}
    def default(self, node, *args):
        # no visitXXX method found: recurse into the children
        for child in node.getChildNodes():
            self.dispatch(child, *args)
    def dispatch(self, node, *args):
        """Route *node* to self.visitor's visitClassName method (cached),
        falling back to self.default."""
        self.node = node
        klass = node.__class__
        meth = self._cache.get(klass, None)
        if meth is None:
            className = klass.__name__
            # bound method, so it is called without an explicit self below
            meth = getattr(self.visitor, 'visit' + className, self.default)
            self._cache[klass] = meth
##        if self.VERBOSE > 0:
##            className = klass.__name__
##            if self.VERBOSE == 1:
##                if meth == 0:
##                    print "dispatch", className
##            else:
##                print "dispatch", className, (meth and meth.__name__ or '')
        return meth(node, *args)
    def preorder(self, tree, visitor, *args):
        """Do preorder walk of tree using visitor"""
        # self.visitor must be set before dispatch() is ever called
        self.visitor = visitor
        visitor.visit = self.dispatch
        self.dispatch(tree, *args) # XXX *args make sense?
class ExampleASTVisitor(ASTVisitor):
    """Prints examples of the nodes that aren't visited
    This visitor-driver is only useful for development, when it's
    helpful to develop a visitor incrementally, and get feedback on what
    you still have to do.
    """
    # class-level so examples accumulate across instances
    examples = {}
    def dispatch(self, node, *args):
        self.node = node
        meth = self._cache.get(node.__class__, None)
        className = node.__class__.__name__
        if meth is None:
            # 0 (not None) marks "no visit method" in the cache
            meth = getattr(self.visitor, 'visit' + className, 0)
            self._cache[node.__class__] = meth
        if self.VERBOSE > 1:
            print "dispatch", className, (meth and meth.__name__ or '')
        if meth:
            meth(node, *args)
        elif self.VERBOSE > 0:
            # first time we see an unhandled node type, dump its attributes
            klass = node.__class__
            if klass not in self.examples:
                self.examples[klass] = klass
                print
                print self.visitor
                print klass
                for attr in dir(node):
                    if attr[0] != '_':
                        print "\t", "%-12.12s" % attr, getattr(node, attr)
                print
            return self.default(node, *args)
# XXX this is an API change
_walker = ASTVisitor
# NOTE(review): this walk() is immediately shadowed by the simpler
# accept()-based walk() defined right below it; it is dead code here.
def walk(tree, visitor, walker=None, verbose=None):
    """Walk *tree* with *visitor* via an ASTVisitor driver (legacy API)."""
    if walker is None:
        walker = _walker()
    if verbose is not None:
        walker.VERBOSE = verbose
    walker.preorder(tree, visitor)
    return walker.visitor
def walk(tree, visitor, verbose=-1):
    """Walk *tree* using the nodes' own accept() protocol.

    verbose is accepted only for compatibility with the older
    ASTVisitor-based walk() signature and is ignored.
    """
    tree.accept(visitor)
    return visitor
def dumpNode(node):
    """Debug helper: print a node's class and public attributes."""
    print node.__class__
    for attr in dir(node):
        if attr[0] != '_':
            print "\t", "%-10.10s" % attr, getattr(node, attr)
| Python |
#empty
| Python |
#!/usr/bin/env python
"""This module loads the python Grammar (2.3 or 2.4) and builds
the parser for this grammar in the global PYTHON_PARSER
helper functions are provided that use the grammar to parse
using file_input, single_input and eval_input targets
"""
import sys
import os
from pypy.interpreter.error import OperationError, debug_print
from pypy.interpreter import gateway
from pypy.interpreter.pyparser.error import SyntaxError
from pypy.interpreter.pyparser.pythonlexer import Source, match_encoding_declaration
from pypy.interpreter.astcompiler.consts import CO_FUTURE_WITH_STATEMENT
import pypy.interpreter.pyparser.pysymbol as pysymbol
import pypy.interpreter.pyparser.pytoken as pytoken
import pypy.interpreter.pyparser.ebnfparse as ebnfparse
from pypy.interpreter.pyparser.ebnflexer import GrammarSource
from pypy.interpreter.pyparser.ebnfgrammar import GRAMMAR_GRAMMAR
import pypy.interpreter.pyparser.grammar as grammar
from pypy.interpreter.pyparser.pythonutil import build_parser_for_version, build_parser
# try:
from pypy.interpreter.pyparser import symbol
# except ImportError:
# # for standalone testing
# import symbol
from codeop import PyCF_DONT_IMPLY_DEDENT
ENABLE_GRAMMAR_VERSION = "2.4"
## files encoding management ############################################
_recode_to_utf8 = gateway.applevel(r'''
def _recode_to_utf8(text, encoding):
return unicode(text, encoding).encode("utf-8")
''').interphook('_recode_to_utf8')
def recode_to_utf8(space, text, encoding):
    """Re-encode *text* from *encoding* to utf-8 via the app-level helper."""
    return space.str_w(_recode_to_utf8(space, space.wrap(text),
                                       space.wrap(encoding)))
def _normalize_encoding(encoding):
"""returns normalized name for <encoding>
see dist/src/Parser/tokenizer.c 'get_normal_name()'
for implementation details / reference
NOTE: for now, parser.suite() raises a MemoryError when
a bad encoding is used. (SF bug #979739)
"""
if encoding is None:
return None
# lower() + '_' / '-' conversion
encoding = encoding.replace('_', '-').lower()
if encoding.startswith('utf-8'):
return 'utf-8'
for variant in ['latin-1', 'iso-latin-1', 'iso-8859-1']:
if encoding.startswith(variant):
return 'iso-8859-1'
return encoding
def _check_for_encoding(s):
    """Look for a coding declaration on the first two lines of *s*.

    PEP 263 only allows the magic comment on line 1 or line 2; returns
    the declared encoding or None.
    """
    first_eol = s.find('\n')
    if first_eol < 0:
        # single-line source: only one candidate line
        return _check_line_for_encoding(s)
    enc = _check_line_for_encoding(s[:first_eol])
    if enc:
        return enc
    second_eol = s.find('\n', first_eol + 1)
    if second_eol < 0:
        second_eol = len(s)
    return _check_line_for_encoding(s[first_eol + 1:second_eol])
def _check_line_for_encoding(line):
    """Return the encoding declared on *line*, or None.

    Only whitespace may precede the '#' that starts the declaration
    comment; any other leading character means this line cannot carry
    a declaration.
    """
    i = 0
    while i < len(line):
        ch = line[i]
        if ch == '#':
            break
        if ch not in ' \t\014':
            # real code before any comment: no declaration possible
            return None
        i += 1
    return match_encoding_declaration(line[i:])
## Python Source Parser ###################################################
class PythonParser(grammar.Parser):
    """Wrapper class for python grammar"""
    # compile mode -> grammar goal symbol
    targets = {
        'eval' : "eval_input",
        'single' : "single_input",
        'exec' : "file_input",
        }

    def __init__(self): # , predefined_symbols=None):
        grammar.Parser.__init__(self)
        pytoken.setup_tokens(self)
        # remember how many tokens were loaded
        self._basetokens_count = self._sym_count
        # if predefined_symbols:
        #     self.load_symbols(predefined_symbols)
        self.keywords = []

    def is_base_token(self, tokvalue):
        # NOTE(review): returns True for codes *outside* the range of the
        # initially loaded tokens, which the name seems to contradict —
        # confirm intended polarity before relying on it.
        return tokvalue < 0 or tokvalue >= self._basetokens_count

    def parse_source(self, textsrc, mode, builder, flags=0):
        """Parse a python source according to goal"""
        goal = self.targets[mode]

        # Detect source encoding.
        if textsrc[:3] == '\xEF\xBB\xBF':
            # utf-8 byte-order mark
            textsrc = textsrc[3:]
            enc = 'utf-8'
        else:
            enc = _normalize_encoding(_check_for_encoding(textsrc))
            if enc is not None and enc not in ('utf-8', 'iso-8859-1'):
                textsrc = recode_to_utf8(builder.space, textsrc, enc)

        lines = [line + '\n' for line in textsrc.split('\n')]
        builder.source_encoding = enc
        if len(textsrc) and textsrc[-1] == '\n':
            # avoid a spurious trailing empty line
            lines.pop()
            flags &= ~PyCF_DONT_IMPLY_DEDENT
        return self.parse_lines(lines, goal, builder, flags)

    def parse_lines(self, lines, goal, builder, flags=0):
        # builder.keywords = self.keywords.copy()
        # if flags & CO_FUTURE_WITH_STATEMENT:
        #     builder.enable_with()
        goalnumber = self.symbols[goal]
        target = self.root_rules[goalnumber]
        src = Source(self, lines, flags)

        if not target.match(src, builder):
            line, lineno = src.debug()
            # XXX needs better error messages
            raise SyntaxError("invalid syntax", lineno, -1, line)
            # return None
        return builder

    def update_rules_references(self):
        """update references to old rules"""
        # brute force algorithm
        for rule in self.all_rules:
            for i in range(len(rule.args)):
                arg = rule.args[i]
                if arg.codename in self.root_rules:
                    real_rule = self.root_rules[arg.codename]
                    # This rule has been updated
                    if real_rule is not rule.args[i]:
                        rule.args[i] = real_rule

    def insert_rule(self, ruledef):
        """parses <ruledef> and inserts corresponding rules in the parser"""
        # parse the ruledef(s)
        source = GrammarSource(GRAMMAR_GRAMMAR, ruledef)
        builder = ebnfparse.EBNFBuilder(GRAMMAR_GRAMMAR, dest_parser=self)
        GRAMMAR_GRAMMAR.root_rules['grammar'].match(source, builder)
        # remove proxy objects if any
        builder.resolve_rules()
        # update keywords
        self.keywords.extend(builder.keywords)
        # update old references in case an existing rule was modified
        self.update_rules_references()
        # recompute first sets
        self.build_first_sets()
def make_pyparser(version=ENABLE_GRAMMAR_VERSION):
    """Build a fresh PythonParser loaded with the grammar for *version*."""
    parser = PythonParser()
    return build_parser_for_version(version, parser=parser)

# module-level singleton parser used by the rest of the interpreter
PYTHON_PARSER = make_pyparser()
def translation_target(grammardef):
    """RPython translation entry point: build a parser from *grammardef*.

    Exists so the grammar-loading machinery is reachable from the
    translated executable; the return value (0) is the process exit code.
    """
    parser = PythonParser() # predefined_symbols=symbol.sym_name)
    source = GrammarSource(GRAMMAR_GRAMMAR, grammardef)
    builder = ebnfparse.EBNFBuilder(GRAMMAR_GRAMMAR, dest_parser=parser)
    GRAMMAR_GRAMMAR.root_rules['grammar'].match(source, builder)
    builder.resolve_rules()
    parser.build_first_sets()
    parser.keywords = builder.keywords
    return 0
## XXX BROKEN
## def parse_grammar(space, w_src):
## """Loads the grammar using the 'dynamic' rpython parser"""
## src = space.str_w( w_src )
## ebnfbuilder = ebnfparse.parse_grammar_text( src )
## ebnfbuilder.resolve_rules()
## grammar.build_first_sets(ebnfbuilder.all_rules)
## return space.wrap( ebnfbuilder.root_rules )
def grammar_rules( space ):
    """Expose the parser's rule table to app-level as a wrapped dict."""
    w_rules = space.newdict()
    parser = make_pyparser()
    for key, value in parser.rules.iteritems():
        space.setitem(w_rules, space.wrap(key), space.wrap(value))
    return w_rules
| Python |
from pypy.interpreter.pyparser import symbol
# try to avoid numeric values conflict with tokens
# it's important for CPython, but I'm not so sure it's still
# important here
class SymbolMapper(object):
    """Bidirectional name<->code table for grammar symbols.  XXX dead

    Named symbols get positive codes counting up from the largest value
    seen at construction time; anonymous symbols get negative codes
    counting down from -10, keeping them clear of token numbers.
    """
    def __init__(self, sym_name=None):
        """Create an empty mapper, optionally seeded from a
        {value: name} dict such as the stdlib symbol.sym_name table."""
        self._anoncount = _anoncount = -10
        self._count = _count = 0
        self.sym_name = {}    # code -> name
        self.sym_values = {}  # name -> code
        if sym_name is not None:
            for _value, _name in sym_name.items():
                # track the extreme codes so new symbols never collide
                if _value < _anoncount:
                    _anoncount = _value
                if _value > _count:
                    _count = _value
                self.sym_values[_name] = _value
                self.sym_name[_value] = _name
            self._anoncount = _anoncount
            self._count = _count

    def add_symbol(self, sym):
        """Return the code for named symbol *sym*, allocating the next
        positive code on first sight."""
        # assert isinstance(sym, str)
        if sym not in self.sym_values:
            self._count += 1
            val = self._count
            self.sym_values[sym] = val
            self.sym_name[val] = sym
            return val
        return self.sym_values[sym]

    def add_anon_symbol(self, sym):
        """Return the code for anonymous symbol *sym*, allocating the
        next negative code on first sight."""
        # assert isinstance(sym, str)
        if sym not in self.sym_values:
            self._anoncount -= 1
            val = self._anoncount
            self.sym_values[sym] = val
            self.sym_name[val] = sym
            return val
        return self.sym_values[sym]

    def __getitem__(self, sym):
        """NOT RPYTHON"""
        # assert isinstance(sym, str)
        return self.sym_values[sym]

    def __contains__(self, sym):
        """NOT RPYTHON"""
        return sym in self.sym_values
_cpython_symbols = SymbolMapper( symbol.sym_name )
# There is no symbol in this module until the grammar is loaded
# once loaded the grammar parser will fill the mappings with the
# grammar symbols
# XXX: is this completly dead ?
## # prepopulate symbol table from symbols used by CPython
## for _value, _name in _cpython_symbols.sym_name.items():
## globals()[_name] = _value
def gen_symbol_file(fname):
    """
    Generate a compatible symbol file for symbol.py, using the grammar that has
    been generated from the grammar in the PyPy parser. (Note that we assume
    that the parser generates the tokens deterministically.)
    """
    import ebnfparse
    gram = ebnfparse.parse_grammar( file(fname) )
    import os.path
    # overwrite symbol.py next to this module
    f = open(os.path.join(os.path.dirname(__file__), 'symbol.py'), 'w')
    print >> f, """
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, call this function from python_grammar()
# and call PyPy.
"""
    # only named (non-anonymous, i.e. non-negative) symbols are emitted
    for k, v in gram.symbols.sym_name.iteritems():
        if k >= 0:
            print >> f, '%s = %s' % (v, k)
    print >> f, """
# Generate sym_name
sym_name = {}
for _name, _value in globals().items():
    if type(_value) is type(0):
        sym_name[_value] = _name
"""
    f.close()
# command-line entry point: regenerate symbol.py from a grammar file
if __name__ == '__main__':
    import sys
    if len(sys.argv) != 2:
        print 'Call pysymbol with the filename of the python grammar'
        sys.exit(0)
    gen_symbol_file(sys.argv[1])
| Python |
"""This is a lexer for a Python recursive descent parser
it obeys the TokenSource interface defined for the grammar
analyser in grammar.py
"""
import sys
from codeop import PyCF_DONT_IMPLY_DEDENT
from pypy.interpreter.pyparser.grammar import TokenSource, Token, AbstractContext, Parser
from pypy.interpreter.pyparser.error import SyntaxError
import pytoken
# Don't import string for that ...
NAMECHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_'
NUMCHARS = '0123456789'
ALNUMCHARS = NAMECHARS + NUMCHARS
EXTENDED_ALNUMCHARS = ALNUMCHARS + '-.'
WHITESPACES = ' \t\n\r\v\f'

def match_encoding_declaration(comment):
    """Return the encoding declared in *comment*, or None.

    Hand-rolled (RPython-friendly) replacement for
    re.search(r"coding[:=]\\s*([-\\w.]+)", comment).
    """
    index = comment.find('coding')
    if index < 0:
        return None
    # guard: 'coding' may sit at the very end of the comment, in which
    # case there is no ':'/'=' after it (used to raise IndexError)
    if index + 6 >= len(comment):
        return None
    next_char = comment[index + 6]
    if next_char not in ':=':
        return None
    end_of_decl = comment[index + 7:]
    index = 0
    for char in end_of_decl:
        if char not in WHITESPACES:
            break
        index += 1
    else:
        # nothing but whitespace after the ':'/'='
        return None
    encoding = ''
    for char in end_of_decl[index:]:
        if char in EXTENDED_ALNUMCHARS:
            encoding += char
        else:
            break
    if encoding != '':
        return encoding
    return None
################################################################################
from pypy.interpreter.pyparser import pytoken
from pytokenize import tabsize, whiteSpaceDFA, triple_quoted, endDFAs, \
single_quoted, pseudoDFA
import automata
class TokenError(SyntaxError):
    """Raised for lexer errors, e.g. when EOF is found prematurely"""
    def __init__(self, msg, line, strstart, token_stack):
        # strstart: (lineno, offset) where the offending token started
        lineno, offset = strstart
        SyntaxError.__init__(self, msg, lineno, offset, line)
        # tokens collected so far, kept for error reporting/debugging
        self.token_stack = token_stack
def generate_tokens( parser, lines, flags):
    """
    This is a rewrite of pypy.module.parser.pytokenize.generate_tokens since
    the original function is not RPYTHON (uses yield)
    It was also slightly modified to generate Token instances instead
    of the original 5-tuples -- it's now a 4-tuple of
    * the Token instance
    * the whole line as a string
    * the line number (the real one, counting continuation lines)
    * the position on the line of the end of the token.

    Original docstring ::

        The generate_tokens() generator requires one argment, readline, which
        must be a callable object which provides the same interface as the
        readline() method of built-in file objects. Each call to the function
        should return one line of input as a string.

        The generator produces 5-tuples with these members: the token type; the
        token string; a 2-tuple (srow, scol) of ints specifying the row and
        column where the token begins in the source; a 2-tuple (erow, ecol) of
        ints specifying the row and column where the token ends in the source;
        and the line on which the token was found. The line passed is the
        logical line; continuation lines are included.
    """
    #for line in lines:
    #    print repr(line)
    #print '------------------- flags=%s ---->' % flags
    assert isinstance( parser, Parser )
    token_list = []
    lnum = parenlev = continued = 0
    namechars = NAMECHARS
    numchars = NUMCHARS
    contstr, needcont = '', 0   # pending (unterminated) string and whether a
                                # backslash continuation is required
    contline = None
    indents = [0]               # stack of indentation columns, like tokenize.py
    last_comment = ''
    # make the annotator happy
    pos = -1
    lines.append('') # XXX HACK probably not needed
    # look for the bom (byte-order marker) for utf-8

    # make the annotator happy
    endDFA = automata.DFA([], [])
    # make the annotator happy
    line = ''
    for line in lines:
        lnum = lnum + 1
        pos, max = 0, len(line)

        if contstr:                                         # continued string
            if not line:
                raise TokenError("EOF while scanning triple-quoted string", line,
                                 (lnum-1, 0), token_list)
            endmatch = endDFA.recognize(line)
            if endmatch >= 0:
                # closing quote found on this line: emit the whole string
                pos = end = endmatch
                tok = parser.build_token(parser.tokens['STRING'], contstr + line[:end])
                token_list.append((tok, line, lnum, pos))
                last_comment = ''
                # token_list.append((STRING, contstr + line[:end],
                #                    strstart, (lnum, end), contline + line))
                contstr, needcont = '', 0
                contline = None
            elif (needcont and not line.endswith('\\\n') and
                               not line.endswith('\\\r\n')):
                # single-quoted continuation without the required backslash
                tok = parser.build_token(parser.tokens['ERRORTOKEN'], contstr + line)
                token_list.append((tok, line, lnum, pos))
                last_comment = ''
                # token_list.append((ERRORTOKEN, contstr + line,
                #                    strstart, (lnum, len(line)), contline))
                contstr = ''
                contline = None
                continue
            else:
                # string still open: accumulate and go on to the next line
                contstr = contstr + line
                contline = contline + line
                continue

        elif parenlev == 0 and not continued:               # new statement
            if not line: break
            column = 0
            while pos < max:                    # measure leading whitespace
                if line[pos] == ' ': column = column + 1
                # NOTE: '/' is Python-2 integer division here (tab stops)
                elif line[pos] == '\t': column = (column/tabsize + 1)*tabsize
                elif line[pos] == '\f': column = 0
                else: break
                pos = pos + 1
            if pos == max: break

            if line[pos] in '#\r\n':            # skip comments or blank lines
                if line[pos] == '#':
                    tok = parser.build_token(parser.tokens['COMMENT'], line[pos:])
                    last_comment = line[pos:]
                else:
                    tok = parser.build_token(parser.tokens['NL'], line[pos:])
                    last_comment = ''
                # XXX Skip NL and COMMENT Tokens
                # token_list.append((tok, line, lnum, pos))
                continue

            if column > indents[-1]:            # count indents or dedents
                indents.append(column)
                tok = parser.build_token(parser.tokens['INDENT'], line[:pos])
                token_list.append((tok, line, lnum, pos))
                last_comment = ''
            while column < indents[-1]:
                indents = indents[:-1]
                tok = parser.build_token(parser.tokens['DEDENT'], '')
                token_list.append((tok, line, lnum, pos))
                last_comment = ''

        else:                                               # continued statement
            if not line:
                raise TokenError("EOF in multi-line statement", line,
                                 (lnum, 0), token_list)
            continued = 0

        while pos < max:
            pseudomatch = pseudoDFA.recognize(line, pos)
            if pseudomatch >= 0:                            # scan for tokens
                # JDR: Modified
                start = whiteSpaceDFA.recognize(line, pos)
                if start < 0:
                    start = pos
                end = pseudomatch
                if start == end:
                    # Nothing matched!!!
                    raise TokenError("Unknown character", line,
                                     (lnum, start), token_list)
                pos = end
                token, initial = line[start:end], line[start]
                if initial in numchars or \
                   (initial == '.' and token != '.'):       # ordinary number
                    tok = parser.build_token(parser.tokens['NUMBER'], token)
                    token_list.append((tok, line, lnum, pos))
                    last_comment = ''
                elif initial in '\r\n':
                    if parenlev > 0:
                        tok = parser.build_token(parser.tokens['NL'], token)
                        last_comment = ''
                        # XXX Skip NL
                    else:
                        tok = parser.build_token(parser.tokens['NEWLINE'], token)
                        # XXX YUCK !
                        tok.value = last_comment
                        token_list.append((tok, line, lnum, pos))
                        last_comment = ''
                elif initial == '#':
                    tok = parser.build_token(parser.tokens['COMMENT'], token)
                    last_comment = token
                    # XXX Skip # token_list.append((tok, line, lnum, pos))
                    # token_list.append((COMMENT, token, spos, epos, line))
                elif token in triple_quoted:
                    endDFA = endDFAs[token]
                    endmatch = endDFA.recognize(line, pos)
                    if endmatch >= 0:                       # all on one line
                        pos = endmatch
                        token = line[start:pos]
                        tok = parser.build_token(parser.tokens['STRING'], token)
                        token_list.append((tok, line, lnum, pos))
                        last_comment = ''
                    else:
                        # triple-quoted string continues on the next line
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in single_quoted or \
                     token[:2] in single_quoted or \
                     token[:3] in single_quoted:
                    if token[-1] == '\n':                   # continued string
                        endDFA = (endDFAs[initial] or endDFAs[token[1]] or
                                  endDFAs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else:                                   # ordinary string
                        tok = parser.build_token(parser.tokens['STRING'], token)
                        token_list.append((tok, line, lnum, pos))
                        last_comment = ''
                elif initial in namechars:                  # ordinary name
                    tok = parser.build_token(parser.tokens['NAME'], token)
                    token_list.append((tok, line, lnum, pos))
                    last_comment = ''
                elif initial == '\\':                       # continued stmt
                    continued = 1
                    # lnum -= 1 disabled: count continuation lines separately
                else:
                    if initial in '([{':
                        parenlev = parenlev + 1
                    elif initial in ')]}':
                        parenlev = parenlev - 1
                        if parenlev < 0:
                            raise TokenError("unmatched '%s'" % initial, line,
                                             (lnum-1, 0), token_list)
                    # punctuation gets its dedicated token when known,
                    # otherwise a generic OP token
                    if token in parser.tok_values:
                        punct = parser.tok_values[token]
                        tok = parser.build_token(punct)
                    else:
                        tok = parser.build_token(parser.tokens['OP'], token)
                    token_list.append((tok, line, lnum, pos))
                    last_comment = ''
            else:
                # no pseudo-token matched: emit a one-character ERRORTOKEN
                start = whiteSpaceDFA.recognize(line, pos)
                if start < 0:
                    start = pos
                if start<max and line[start] in single_quoted:
                    raise TokenError("EOL while scanning single-quoted string", line,
                                     (lnum, start), token_list)
                tok = parser.build_token(parser.tokens['ERRORTOKEN'], line[pos])
                token_list.append((tok, line, lnum, pos))
                last_comment = ''
                pos = pos + 1

    # compensate for the '' line appended above
    lnum -= 1
    if not (flags & PyCF_DONT_IMPLY_DEDENT):
        # imply a trailing NEWLINE and pop any open indentation levels
        if token_list and token_list[-1][0].codename != parser.tokens['NEWLINE']:
            token_list.append((parser.build_token(parser.tokens['NEWLINE'], ''), '\n', lnum, 0))
        for indent in indents[1:]:              # pop remaining indent levels
            tok = parser.build_token(parser.tokens['DEDENT'], '')
            token_list.append((tok, line, lnum, pos))
    #if token_list and token_list[-1][0].codename != pytoken.NEWLINE:
    token_list.append((parser.build_token(parser.tokens['NEWLINE'], ''), '\n', lnum, 0))

    tok = parser.build_token(parser.tokens['ENDMARKER'], '',)
    token_list.append((tok, line, lnum, pos))
    #for t in token_list:
    #    print '%20s %-25s %d' % (pytoken.tok_name.get(t[0].codename, '?'), t[0], t[-2])
    #print '----------------------------------------- pyparser/pythonlexer.py'
    return token_list
class PythonSourceContext(AbstractContext):
    """Opaque backtracking context for PythonSource: remembers the index
    into the pre-computed token stack so restore() can rewind to it."""
    def __init__(self, pos ):
        self.pos = pos
class PythonSource(TokenSource):
    """This source uses Jonathan's tokenizer.

    The whole input (a list of source lines) is tokenized eagerly by
    generate_tokens(); this class then replays the resulting token list,
    supporting save/restore of the read position for backtracking.
    """
    def __init__(self, parser, strings, flags=0):
        # TokenSource.__init__(self)
        #self.parser = parser
        tokens = generate_tokens( parser, strings, flags)
        self.token_stack = tokens
        self._current_line = '' # the current line (as a string)
        self._lineno = -1
        self._token_lnum = 0
        self._offset = 0
        self.stack_pos = 0      # index of the next token to hand out

    def next(self):
        """Returns the next parsed token"""
        if self.stack_pos >= len(self.token_stack):
            raise StopIteration
        tok, line, lnum, pos = self.token_stack[self.stack_pos]
        self.stack_pos += 1
        self._current_line = line
        # _lineno only ever grows: continuation lines must not rewind it
        self._lineno = max(self._lineno, lnum)
        self._token_lnum = lnum
        self._offset = pos
        return tok

    def current_linesource(self):
        """Returns the current line being parsed"""
        return self._current_line

    def current_lineno(self):
        """Returns the current lineno"""
        return self._lineno

    def context(self):
        """Returns an opaque context object for later restore"""
        return PythonSourceContext(self.stack_pos)

    def restore(self, ctx):
        """Restores a context"""
        assert isinstance(ctx, PythonSourceContext)
        self.stack_pos = ctx.pos

    def peek(self):
        """returns next token without consuming it"""
        ctx = self.context()
        token = self.next()
        self.restore(ctx)
        return token

    #### methods below have to be translated
    def offset(self, ctx=None):
        """Token-stack offset, either current or of a saved context."""
        if ctx is None:
            return self.stack_pos
        else:
            assert type(ctx) == int
            return ctx

    def get_pos(self):
        """Return the (lineno, pos) of the next token to be read.

        BUG FIX: the original referenced self.stack and self.pos, neither of
        which is ever defined on this class, so any call raised
        AttributeError.  Use the real token_stack, and fall back to the last
        recorded token position when the stack is exhausted.
        """
        if self.stack_pos >= len(self.token_stack):
            return (self._token_lnum, self._offset)
        else:
            token, line, lnum, pos = self.token_stack[self.stack_pos]
            return lnum, pos

    def get_source_text(self, pos0, pos1):
        # NOTE(review): self.input is never assigned in this class; this can
        # only work if a subclass or caller sets .input first -- verify
        # before relying on it.
        return self.input[pos0:pos1]

    def debug(self):
        """return context for debug information"""
        return (self._current_line, self._lineno)
        # return 'line %s : %s' % ('XXX', self._current_line)
#NONE_LIST = [pytoken.ENDMARKER, pytoken.INDENT, pytoken.DEDENT]
#NAMED_LIST = [pytoken.OP]

# Generic alias: the parser machinery looks up a class named `Source`.
Source = PythonSource
def tokenize_file(filename):
    """Debug helper: print every token of *filename* (Python 2 only).

    NOTE(review): this looks stale -- Source/PythonSource now expects
    (parser, lines, flags) and next() returns Token objects, not
    (name, value) tuples, so both the construction and the loop test below
    presumably need updating before this can run.  Also the file handle is
    never closed explicitly.
    """
    f = file(filename).read()
    src = Source(f)
    token = src.next()
    while token != ("ENDMARKER", None) and token != (None, None):
        print token
        token = src.next()

if __name__ == '__main__':
    import sys
    tokenize_file(sys.argv[1])
| Python |
class SyntaxError(Exception):
    """Base class for exceptions raised by the parser."""

    def __init__(self, msg, lineno=0, offset=0, text=None, filename=None):
        self.msg = msg
        self.lineno = lineno
        self.offset = offset
        self.text = text
        self.filename = filename
        self.print_file_and_line = False

    def wrap_info(self, space, filename):
        """Wrap (msg, (filename, lineno, offset, text)) for app-level use."""
        location = space.newtuple([space.wrap(filename),
                                   space.wrap(self.lineno),
                                   space.wrap(self.offset),
                                   space.wrap(self.text)])
        return space.newtuple([space.wrap(self.msg), location])

    def __str__(self):
        details = (self.__class__.__name__, self.lineno,
                   self.offset, self.text)
        return "%s at pos (%d, %d) in %r" % details
class ASTError(Exception):
    """Raised when AST construction encounters an inconsistent node."""

    def __init__(self, msg, ast_node):
        self.msg = msg
        self.ast_node = ast_node
class TokenError(Exception):
    """Raised on an unexpected token; keeps the offending token list."""

    def __init__(self, msg, tokens):
        self.msg = msg
        self.tokens = tokens
| Python |
"""This is a lexer for a Python recursive descent parser
it obeys the TokenSource interface defined for the grammar
analyser in grammar.py
"""
from grammar import TokenSource, Token, AbstractContext
from ebnfgrammar import GRAMMAR_GRAMMAR as G
def match_symbol( input, start, stop ):
    """Scan a run of identifier characters in input[start:stop].

    Returns the index one past the last character belonging to the symbol
    (== start when no symbol character is found at `start`).
    """
    allowed = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    idx = start
    while idx < stop and input[idx] in allowed:
        idx += 1
    return idx
class GrammarSourceContext(AbstractContext):
    """Opaque backtracking context for GrammarSource: stores the character
    position and the (possibly pending) peeked token."""
    def __init__(self, pos, peek):
        self.pos = pos
        self.peek = peek
class GrammarSource(TokenSource):
    """Fully RPython - see targetebnflexer.py
    The grammar tokenizer

    It knows only 5 types of tokens:
    EOF: end of file
    SYMDEF: a symbol definition e.g. "file_input:"
    STRING: a simple string "'xxx'"
    SYMBOL: a rule symbol usually appearing right of a SYMDEF
    tokens: '[', ']', '(' ,')', '*', '+', '|'
    """
    def __init__(self, parser, inpstring):
        # TokenSource.__init__(self)
        self.parser = parser
        self.input = inpstring
        self.pos = 0                # current scan position in self.input
        self.begin = 0              # position of the start of the current line
        self._peeked = None         # 1-token lookahead buffer (see peek())
        self.current_line = 1

    def context(self):
        """returns an opaque context object, used to backtrack
        to a well known position in the parser"""
        return GrammarSourceContext( self.pos, self._peeked )

    def offset(self, ctx=None):
        """Returns the current parsing position from the start
        of the parsed text"""
        if ctx is None:
            return self.pos
        else:
            assert isinstance(ctx, GrammarSourceContext)
            return ctx.pos

    def restore(self, ctx):
        """restore the context provided by context()"""
        assert isinstance( ctx, GrammarSourceContext )
        self.pos = ctx.pos
        self._peeked = ctx.peek

    def current_linesource(self):
        """Return the text of the current line (up to the next newline)."""
        pos = idx = self.begin
        inp = self.input
        end = len(inp)
        while idx < end:
            chr = inp[idx]
            if chr == "\n":
                break
            idx += 1
        return self.input[pos:idx]

    def current_lineno(self):
        return self.current_line

    def skip_empty_lines(self, input, start, end ):
        """Skip whitespace, comments and blank lines; return the position of
        the first significant character (or `end`).  Keeps self.begin and
        self.current_line up to date while crossing newlines."""
        idx = start
        # assume beginning of a line
        while idx < end:
            chr = input[idx]
            if chr not in " \t#\n":
                break
            idx += 1
            if chr == "#":
                # skip to end of line
                while idx < end:
                    chr = input[idx]
                    idx += 1
                    if chr == "\n":
                        self.begin = idx
                        self.current_line += 1
                        break
                continue
            elif chr == "\n":
                self.begin = idx
                self.current_line += 1
        return idx

    def match_string( self, input, start, stop ):
        """Scan a single-quoted string starting at `start`; return the index
        one past the closing quote (or the embedded newline)."""
        if input[start] != "'":
            return start
        idx = start + 1
        while idx < stop:
            chr = input[idx]
            idx = idx + 1
            if chr == "'":
                break
            if chr == "\n":
                self.current_line += 1
                self.begin = idx
                break
        return idx

    def RaiseError( self, msg ):
        """Raise ValueError with line/position info and a 20-char excerpt."""
        errmsg = msg + " at line=%d" % self.current_line
        errmsg += " at pos=%d" % (self.pos-self.begin)
        # BUG FIX: the context quote was opened but never closed, producing
        # a lopsided message like  context='foo... ; close it.
        errmsg += " context='" + self.input[self.pos:self.pos+20] + "'"
        raise ValueError( errmsg )

    def next(self):
        """returns the next token"""
        # We only support 1-lookahead which
        # means backtracking more than one token
        # will re-tokenize the stream (but this is the
        # grammar lexer so we don't care really!)
        _p = self.parser
        if self._peeked is not None:
            peeked = self._peeked
            self._peeked = None
            return peeked

        pos = self.pos
        inp = self.input
        end = len(self.input)
        pos = self.skip_empty_lines(inp,pos,end)
        if pos == end:
            return _p.build_token( _p.EOF, None)

        # at this point nextchar is not a white space nor \n
        nextchr = inp[pos]
        if nextchr == "'":
            npos = self.match_string( inp, pos, end)
            # could get a string terminated by EOF here
            if npos == end and inp[end-1] != "'":
                self.RaiseError("Unterminated string")
            self.pos = npos
            _endpos = npos - 1
            assert _endpos >= 0
            # strip the surrounding quotes from the token value
            return _p.build_token( _p.TOK_STRING, inp[pos+1:_endpos])
        else:
            npos = match_symbol( inp, pos, end)
            if npos != pos:
                self.pos = npos
                # a trailing ':' turns a SYMBOL into a SYMDEF
                if npos != end and inp[npos] == ":":
                    self.pos += 1
                    return _p.build_token( _p.TOK_SYMDEF, inp[pos:npos])
                else:
                    return _p.build_token( _p.TOK_SYMBOL, inp[pos:npos])

        # we still have pos!=end here
        chr = inp[pos]
        if chr in "[]()*+|":
            self.pos = pos + 1
            return _p.build_token( _p.tok_values[chr], chr)
        self.RaiseError( "Unknown token" )

    def peek(self):
        """take a peek at the next token"""
        if self._peeked is not None:
            return self._peeked
        self._peeked = self.next()
        return self._peeked

    def debug(self, N=20):
        """A simple helper function returning the stream at the last
        parsed position"""
        return self.input[self.pos:self.pos+N]
# a simple target used to annotate/translate the tokenizer
def target_parse_input( txt ):
    """Tokenize *txt* completely and collect the tokens (annotation target).

    NOTE(review): looks stale -- GrammarSource now takes (parser, inpstring)
    and `EOF` is not defined at module level here; presumably this predates
    the parser refactoring.  Verify before use.
    """
    lst = []
    src = GrammarSource( txt )
    while 1:
        x = src.next()
        lst.append( x )
        if x.codename == EOF:
            break
    #return lst

if __name__ == "__main__":
    # Python 2 only (print statement); file handle is not closed explicitly.
    import sys
    f = file(sys.argv[-1])
    lst = target_parse_input( f.read() )
    for i in lst: print i
| Python |
#! /usr/bin/env python
# ______________________________________________________________________
"""Module pytokenize
THIS FILE WAS COPIED FROM pypy/module/parser/pytokenize.py AND ADAPTED
TO BE ANNOTABLE (Mainly made lists homogeneous)
This is a modified version of Ka-Ping Yee's tokenize module found in the
Python standard library.
The primary modification is the removal of the tokenizer's dependence on the
standard Python regular expression module, which is written in C. The regular
expressions have been replaced with hand built DFA's using the
basil.util.automata module.
$Id: pytokenize.py,v 1.3 2003/10/03 16:31:53 jriehl Exp $
"""
# ______________________________________________________________________
from __future__ import generators
from pypy.interpreter.pyparser import automata
__all__ = [ "tokenize" ]
# ______________________________________________________________________
# Automatically generated DFA's (with one or two hand tweeks):
# Hand-generated DFA that recognizes one "pseudo token" (number, name,
# operator, string opener, comment, NL, ...) starting at the scan position.
# pseudoStatesAccepts[i] tells whether state i is accepting; pseudoStates[i]
# maps an input character to the next state.
pseudoStatesAccepts = [True, True, True, True, True, True, True, True,
                       True, True, False, True, True, True, False, False,
                       False, False, True, False, False, True, True, False,
                       True, False, True, False, True, False, True, False,
                       False, False, True, False, False, False, True]

pseudoStates = [
    # state 0: dispatch on the first character
    {'\t': 0, '\n': 13, '\x0c': 0, '\r': 14, ' ': 0, '!': 10,
     '"': 16, '#': 18, '%': 12, '&': 12,
     "'": 15, '(': 13, ')': 13, '*': 7,
     '+': 12, ',': 13, '-': 12, '.': 6,
     '/': 11, '0': 4, '1': 5, '2': 5,
     '3': 5, '4': 5, '5': 5, '6': 5,
     '7': 5, '8': 5, '9': 5, ':': 13,
     ';': 13, '<': 9, '=': 12, '>': 8,
     '@': 13, 'A': 1,
     'B': 1, 'C': 1, 'D': 1, 'E': 1,
     'F': 1, 'G': 1, 'H': 1, 'I': 1,
     'J': 1, 'K': 1, 'L': 1, 'M': 1,
     'N': 1, 'O': 1, 'P': 1, 'Q': 1,
     'R': 2, 'S': 1, 'T': 1, 'U': 3,
     'V': 1, 'W': 1, 'X': 1, 'Y': 1,
     'Z': 1, '[': 13, '\\': 17, ']': 13,
     '^': 12, '_': 1, '`': 13, 'a': 1,
     'b': 1, 'c': 1, 'd': 1, 'e': 1,
     'f': 1, 'g': 1, 'h': 1, 'i': 1,
     'j': 1, 'k': 1, 'l': 1, 'm': 1,
     'n': 1, 'o': 1, 'p': 1, 'q': 1,
     'r': 2, 's': 1, 't': 1, 'u': 3,
     'v': 1, 'w': 1, 'x': 1, 'y': 1,
     'z': 1, '{': 13, '|': 12, '}': 13,
     '~': 13},
    # state 1: inside an ordinary identifier
    {'0': 1, '1': 1, '2': 1, '3': 1,
     '4': 1, '5': 1, '6': 1, '7': 1,
     '8': 1, '9': 1, 'A': 1, 'B': 1,
     'C': 1, 'D': 1, 'E': 1, 'F': 1,
     'G': 1, 'H': 1, 'I': 1, 'J': 1,
     'K': 1, 'L': 1, 'M': 1, 'N': 1,
     'O': 1, 'P': 1, 'Q': 1, 'R': 1,
     'S': 1, 'T': 1, 'U': 1, 'V': 1,
     'W': 1, 'X': 1, 'Y': 1, 'Z': 1,
     '_': 1, 'a': 1, 'b': 1, 'c': 1,
     'd': 1, 'e': 1, 'f': 1, 'g': 1,
     'h': 1, 'i': 1, 'j': 1, 'k': 1,
     'l': 1, 'm': 1, 'n': 1, 'o': 1,
     'p': 1, 'q': 1, 'r': 1, 's': 1,
     't': 1, 'u': 1, 'v': 1, 'w': 1,
     'x': 1, 'y': 1, 'z': 1},
    # state 2: after an 'r'/'R' prefix -- may open a raw string
    {'"': 16, "'": 15, '0': 1, '1': 1,
     '2': 1, '3': 1, '4': 1, '5': 1,
     '6': 1, '7': 1, '8': 1, '9': 1,
     'A': 1, 'B': 1, 'C': 1, 'D': 1,
     'E': 1, 'F': 1, 'G': 1, 'H': 1,
     'I': 1, 'J': 1, 'K': 1, 'L': 1,
     'M': 1, 'N': 1, 'O': 1, 'P': 1,
     'Q': 1, 'R': 1, 'S': 1, 'T': 1,
     'U': 1, 'V': 1, 'W': 1, 'X': 1,
     'Y': 1, 'Z': 1, '_': 1, 'a': 1,
     'b': 1, 'c': 1, 'd': 1, 'e': 1,
     'f': 1, 'g': 1, 'h': 1, 'i': 1,
     'j': 1, 'k': 1, 'l': 1, 'm': 1,
     'n': 1, 'o': 1, 'p': 1, 'q': 1,
     'r': 1, 's': 1, 't': 1, 'u': 1,
     'v': 1, 'w': 1, 'x': 1, 'y': 1,
     'z': 1},
    # state 3: after a 'u'/'U' prefix -- may open a unicode (raw) string
    {'"': 16, "'": 15, '0': 1, '1': 1,
     '2': 1, '3': 1, '4': 1, '5': 1,
     '6': 1, '7': 1, '8': 1, '9': 1,
     'A': 1, 'B': 1, 'C': 1, 'D': 1,
     'E': 1, 'F': 1, 'G': 1, 'H': 1,
     'I': 1, 'J': 1, 'K': 1, 'L': 1,
     'M': 1, 'N': 1, 'O': 1, 'P': 1,
     'Q': 1, 'R': 2, 'S': 1, 'T': 1,
     'U': 1, 'V': 1, 'W': 1, 'X': 1,
     'Y': 1, 'Z': 1, '_': 1, 'a': 1,
     'b': 1, 'c': 1, 'd': 1, 'e': 1,
     'f': 1, 'g': 1, 'h': 1, 'i': 1,
     'j': 1, 'k': 1, 'l': 1, 'm': 1,
     'n': 1, 'o': 1, 'p': 1, 'q': 1,
     'r': 2, 's': 1, 't': 1, 'u': 1,
     'v': 1, 'w': 1, 'x': 1, 'y': 1,
     'z': 1},
    # state 4: number starting with '0' (octal/hex/float/imag/long)
    {'.': 24, '0': 22, '1': 22, '2': 22,
     '3': 22, '4': 22, '5': 22, '6': 22,
     '7': 22, '8': 23, '9': 23, 'E': 25,
     'J': 13, 'L': 13, 'X': 21, 'e': 25,
     'j': 13, 'l': 13, 'x': 21},
    # state 5: decimal integer
    {'.': 24, '0': 5, '1': 5, '2': 5,
     '3': 5, '4': 5, '5': 5, '6': 5,
     '7': 5, '8': 5, '9': 5, 'E': 25,
     'J': 13, 'L': 13, 'e': 25, 'j': 13,
     'l': 13},
    # state 6: '.' seen -- digits make it a float
    {'0': 26, '1': 26, '2': 26, '3': 26,
     '4': 26, '5': 26, '6': 26, '7': 26,
     '8': 26, '9': 26},
    # states 7-12: operator continuations ('**', '>=', '<<', '//', ...)
    {'*': 12, '=': 13},
    {'=': 13, '>': 12},
    {'=': 13, '<': 12, '>': 13},
    {'=': 13},
    {'=': 13, '/': 12},
    {'=': 13},
    # state 13: generic final state
    {},
    {'\n': 13},
    # states 15/16: inside a single-quoted / double-quoted string opener
    {automata.DEFAULT: 19, '\n': 27, '\\': 29, "'": 28},
    {automata.DEFAULT: 20, '"': 30, '\n': 27, '\\': 31},
    # state 17: backslash (line continuation)
    {'\n': 13, '\r': 14},
    # state 18: inside a comment, up to end of line
    {automata.DEFAULT: 18, '\n': 27, '\r': 27},
    {automata.DEFAULT: 19, '\n': 27, '\\': 29, "'": 13},
    {automata.DEFAULT: 20, '"': 13, '\n': 27, '\\': 31},
    # state 21: hex digits after '0x'
    {'0': 21, '1': 21, '2': 21, '3': 21,
     '4': 21, '5': 21, '6': 21, '7': 21,
     '8': 21, '9': 21, 'A': 21, 'B': 21,
     'C': 21, 'D': 21, 'E': 21, 'F': 21,
     'L': 13, 'a': 21, 'b': 21, 'c': 21,
     'd': 21, 'e': 21, 'f': 21, 'l': 13},
    {'.': 24, '0': 22, '1': 22, '2': 22,
     '3': 22, '4': 22, '5': 22, '6': 22,
     '7': 22, '8': 23, '9': 23, 'E': 25,
     'J': 13, 'L': 13, 'e': 25, 'j': 13,
     'l': 13},
    {'.': 24, '0': 23, '1': 23, '2': 23,
     '3': 23, '4': 23, '5': 23, '6': 23,
     '7': 23, '8': 23, '9': 23, 'E': 25,
     'J': 13, 'e': 25, 'j': 13},
    # state 24: fractional part of a float
    {'0': 24, '1': 24, '2': 24, '3': 24,
     '4': 24, '5': 24, '6': 24, '7': 24,
     '8': 24, '9': 24, 'E': 32, 'J': 13,
     'e': 32, 'j': 13},
    # state 25: exponent marker seen, expect sign or digits
    {'+': 33, '-': 33, '0': 34, '1': 34,
     '2': 34, '3': 34, '4': 34, '5': 34,
     '6': 34, '7': 34, '8': 34, '9': 34},
    {'0': 26, '1': 26, '2': 26, '3': 26,
     '4': 26, '5': 26, '6': 26, '7': 26,
     '8': 26, '9': 26, 'E': 32, 'J': 13,
     'e': 32, 'j': 13},
    {},
    {"'": 13},
    # states 29/31: backslash escape inside a string
    {automata.DEFAULT: 35, '\n': 13, '\r': 14},
    {'"': 13},
    {automata.DEFAULT: 36, '\n': 13, '\r': 14},
    {'+': 37, '-': 37, '0': 38, '1': 38,
     '2': 38, '3': 38, '4': 38, '5': 38,
     '6': 38, '7': 38, '8': 38, '9': 38},
    {'0': 34, '1': 34, '2': 34, '3': 34,
     '4': 34, '5': 34, '6': 34, '7': 34,
     '8': 34, '9': 34},
    {'0': 34, '1': 34, '2': 34, '3': 34,
     '4': 34, '5': 34, '6': 34, '7': 34,
     '8': 34, '9': 34, 'J': 13, 'j': 13},
    {automata.DEFAULT: 35, '\n': 27, '\\': 29, "'": 13},
    {automata.DEFAULT: 36, '"': 13, '\n': 27, '\\': 31},
    {'0': 38, '1': 38, '2': 38, '3': 38,
     '4': 38, '5': 38, '6': 38, '7': 38,
     '8': 38, '9': 38},
    {'0': 38, '1': 38, '2': 38, '3': 38,
     '4': 38, '5': 38, '6': 38, '7': 38,
     '8': 38, '9': 38, 'J': 13, 'j': 13},
    ]

pseudoDFA = automata.DFA(pseudoStates, pseudoStatesAccepts)
# Non-greedy DFA that finds the closing '"""' of a triple-double-quoted
# string (state 2 absorbs a backslash escape; state 5 accepts).
double3StatesAccepts = [False, False, False, False, False, True]
double3States = [
    {automata.DEFAULT: 0, '"': 1, '\\': 2},
    {automata.DEFAULT: 4, '"': 3, '\\': 2},
    {automata.DEFAULT: 4},
    {automata.DEFAULT: 4, '"': 5, '\\': 2},
    {automata.DEFAULT: 4, '"': 1, '\\': 2},
    {automata.DEFAULT: 4, '"': 5, '\\': 2},
    ]
double3DFA = automata.NonGreedyDFA(double3States, double3StatesAccepts)
# Same as double3DFA but for the closing "'''" of a triple-single-quoted
# string.
single3StatesAccepts = [False, False, False, False, False, True]
single3States = [
    {automata.DEFAULT: 0, '\\': 2, "'": 1},
    {automata.DEFAULT: 4, '\\': 2, "'": 3},
    {automata.DEFAULT: 4},
    {automata.DEFAULT: 4, '\\': 2, "'": 5},
    {automata.DEFAULT: 4, '\\': 2, "'": 1},
    {automata.DEFAULT: 4, '\\': 2, "'": 5},
    ]
single3DFA = automata.NonGreedyDFA(single3States, single3StatesAccepts)
# DFA finding the closing quote of an ordinary single-quoted string
# (state 2 skips the character following a backslash).
singleStatesAccepts = [False, True, False]
singleStates = [
    {automata.DEFAULT: 0, '\\': 2, "'": 1},
    {},
    {automata.DEFAULT: 0},
    ]
singleDFA = automata.DFA(singleStates, singleStatesAccepts)

# Same for an ordinary double-quoted string.
doubleStatesAccepts = [False, True, False]
doubleStates = [
    {automata.DEFAULT: 0, '"': 1, '\\': 2},
    {},
    {automata.DEFAULT: 0},
    ]
doubleDFA = automata.DFA(doubleStates, doubleStatesAccepts)
# Map a string-opening token to the DFA that recognizes its closing quote.
# Bare prefixes (r/R/u/U) map to None: the real opener follows the prefix.
endDFAs = {"'" : singleDFA,
           '"' : doubleDFA,
           "r" : None,
           "R" : None,
           "u" : None,
           "U" : None}

# Register the triple-quote closers for every legal prefix combination.
for uniPrefix in ("", "u", "U"):
    for rawPrefix in ("", "r", "R"):
        prefix = uniPrefix + rawPrefix
        endDFAs[prefix + "'''"] = single3DFA
        endDFAs[prefix + '"""'] = double3DFA

# One-state DFA that consumes a run of spaces/tabs/formfeeds.
whiteSpaceStatesAccepts = [True]
whiteSpaceStates = [{'\t': 0, ' ': 0, '\x0c': 0}]
whiteSpaceDFA = automata.DFA(whiteSpaceStates, whiteSpaceStatesAccepts)
# ______________________________________________________________________
# COPIED:

# Every prefix/quote combination that opens a triple-quoted string, mapped
# to itself (the dict is used purely as a set with O(1) membership tests).
triple_quoted = {}
for _tok in ("'''", '"""',
             "r'''", 'r"""', "R'''", 'R"""',
             "u'''", 'u"""', "U'''", 'U"""',
             "ur'''", 'ur"""', "Ur'''", 'Ur"""',
             "uR'''", 'uR"""', "UR'''", 'UR"""'):
    triple_quoted[_tok] = _tok

# Likewise for the one- and two-character single-quote openers.
single_quoted = {}
for _tok in ("'", '"',
             "r'", 'r"', "R'", 'R"',
             "u'", 'u"', "U'", 'U"',
             "ur'", 'ur"', "Ur'", 'Ur"',
             "uR'", 'uR"', "UR'", 'UR"' ):
    single_quoted[_tok] = _tok

tabsize = 8

# PYPY MODIFICATION: removed TokenError class as it's not needed here
# PYPY MODIFICATION: removed StopTokenizing class as it's not needed here
# PYPY MODIFICATION: removed printtoken() as it's not needed here
# PYPY MODIFICATION: removed tokenize() as it's not needed here
# PYPY MODIFICATION: removed tokenize_loop() as it's not needed here
# PYPY MODIFICATION: removed generate_tokens() as it was copied / modified
#                    in pythonlexer.py
# PYPY MODIFICATION: removed main() as it's not needed here
| Python |
#! /usr/bin/env python
# ______________________________________________________________________
"""Module automata
THIS FILE WAS COPIED FROM pypy/module/parser/pytokenize.py AND ADAPTED
TO BE ANNOTABLE (Mainly made the DFA's __init__ accept two lists
instead of a unique nested one)
$Id: automata.py,v 1.2 2003/10/02 17:37:17 jriehl Exp $
"""
# ______________________________________________________________________
# Module level definitions
# PYPY Modification: removed the EMPTY class as it's not needed here
# PYPY Modification: we don't need a particuliar DEFAULT class here
# a simple None works fine.
# (Having a DefaultClass inheriting from str makes
# the annotator crash)
DEFAULT = "\00default" # XXX hack, the rtyper does not support dict of with str|None keys
# anyway using dicts doesn't seem the best final way to store these char indexed tables

# PYPY Modification : removed all automata functions (any, maybe,
#                     newArcPair, etc.)

class DFA:
    """Greedy deterministic finite automaton.

    `states` is a list of {char: next_state} dicts (a DEFAULT key is the
    wildcard arc); `accepts` is the parallel list of accepting flags.
    recognize() returns the end index of the longest match starting at
    `pos`, or -1 when nothing matches.
    """
    # ____________________________________________________________
    def __init__(self, states, accepts, start = 0):
        self.states = states
        self.accepts = accepts
        self.start = start

    # ____________________________________________________________
    def recognize(self, inVec, pos = 0): # greedy = True
        state = self.start
        idx = pos
        prev_accept = False
        limit = len(inVec)
        while idx < limit:
            ch = inVec[idx]
            transitions = self.states[state]
            accepting = self.accepts[state]
            if ch in transitions:
                state = transitions[ch]
            elif DEFAULT in transitions:
                state = transitions[DEFAULT]
            elif accepting:
                return idx
            elif prev_accept:
                # backtrack one step: we followed an arc into a dead end
                # from a state that was already accepting
                return idx - 1
            else:
                return -1
            prev_accept = accepting
            idx += 1
        # input exhausted: accept here, one step back, or fail
        if self.accepts[state]:
            return idx
        elif prev_accept:
            return idx - 1
        else:
            return -1
# ______________________________________________________________________
class NonGreedyDFA (DFA):
    """DFA variant that stops at the FIRST accepting state reached,
    instead of consuming the longest possible match."""

    def recognize (self, inVec, pos = 0):
        state = self.start
        idx = pos
        for ch in inVec[pos:]:
            transitions = self.states[state]
            # accept as soon as we are in an accepting state
            if self.accepts[state]:
                return idx
            elif ch in transitions:
                state = transitions[ch]
            elif DEFAULT in transitions:
                state = transitions[DEFAULT]
            else:
                return -1
            idx += 1
        return idx if self.accepts[state] else -1
# ______________________________________________________________________
# End of automata.py
| Python |
# This module contains the grammar parser
# and the symbol mappings
from grammar import Alternative, Sequence, Token, KleeneStar, \
GrammarElement, Parser
class GrammarParser(Parser):
    """Parser subclass dedicated to the grammar-of-grammars; it only exists
    so the meta-grammar gets its own token/symbol namespace."""
    pass

# Singleton instance populated by grammar_grammar() below.
GRAMMAR_GRAMMAR = GrammarParser()
def grammar_grammar():
    """
    (mostly because of g_add_token I suppose)
    Builds the grammar for the grammar file

    Here's the description of the grammar's grammar ::

      grammar: rule+
      rule: SYMDEF alternative

      alternative: sequence ( '|' sequence )+
      star: '*' | '+'
      sequence: (SYMBOL star? | STRING | option | group )+
      option: '[' alternative ']'
      group: '(' alternative ')' star?
    """
    p = GRAMMAR_GRAMMAR
    p.add_token('EOF','EOF')

    # star: '*' | '+'
    star          = p.Alternative_n( "star", [p.Token_n('TOK_STAR', '*'), p.Token_n('TOK_ADD', '+')] )
    star_opt      = p.KleeneStar_n ( "star_opt", 0, 1, rule=star )

    # rule: SYMBOL ':' alternative
    symbol        = p.Sequence_n(    "symbol", [p.Token_n('TOK_SYMBOL'), star_opt] )
    symboldef     = p.Token_n(       'TOK_SYMDEF' )
    # alternative's args are filled in below, once sequence exists
    alternative   = p.Sequence_n(    "alternative", [])
    rule          = p.Sequence_n(    "rule", [symboldef, alternative] )

    # grammar: rule+
    grammar       = p.KleeneStar_n(  "grammar", _min=1, rule=rule )

    # alternative: sequence ( '|' sequence )*
    sequence      = p.KleeneStar_n(  "sequence", 1 )
    seq_cont_list = p.Sequence_n(    "seq_cont_list", [p.Token_n('TOK_BAR', '|'), sequence] )
    sequence_cont = p.KleeneStar_n(  "sequence_cont",0, rule=seq_cont_list )

    alternative.args = [ sequence, sequence_cont ]

    # option: '[' alternative ']'
    option        = p.Sequence_n(    "option", [p.Token_n('TOK_LBRACKET', '['), alternative, p.Token_n('TOK_RBRACKET', ']')] )

    # group: '(' alternative ')'
    group         = p.Sequence_n(    "group",  [p.Token_n('TOK_LPAR', '('), alternative, p.Token_n('TOK_RPAR', ')'), star_opt] )

    # sequence: (SYMBOL | STRING | option | group )+
    string = p.Token_n('TOK_STRING')
    alt           = p.Alternative_n( "sequence_alt", [symbol, string, option, group] )
    sequence.args = [ alt ]

    p.root_rules['grammar'] = grammar
    p.build_first_sets()
    return p
# Build the meta-grammar once at import time, then re-export its symbols and
# tokens as attributes of GRAMMAR_GRAMMAR for convenient dotted access.
grammar_grammar()
for _sym, _value in GRAMMAR_GRAMMAR.symbols.items():
    assert not hasattr( GRAMMAR_GRAMMAR, _sym ), _sym
    setattr(GRAMMAR_GRAMMAR, _sym, _value )

for _sym, _value in GRAMMAR_GRAMMAR.tokens.items():
    # CONSISTENCY FIX: include _sym in the assertion message like the loop
    # above, so a clash is diagnosable instead of a bare AssertionError.
    assert not hasattr( GRAMMAR_GRAMMAR, _sym ), _sym
    setattr(GRAMMAR_GRAMMAR, _sym, _value )

# the builder function is not needed after import
del grammar_grammar
| Python |
from pypy.interpreter.typedef import TypeDef, GetSetProperty, interp_attrproperty
from pypy.interpreter.astcompiler import ast, consts
from pypy.interpreter.pyparser.error import SyntaxError
### Parsing utilites #################################################
def parse_except_clause(tokens):
    """parses 'except' [test [',' test]] ':' suite
    and returns a 4-tuple : (tokens_read, expr1, expr2, except_body)
    """
    lineno = tokens[0].lineno
    # Scan forward until the next clause starts ('except'/'else') or the
    # token list ends; everything before that belongs to this clause.
    total = len(tokens)
    clause_length = 1
    while clause_length < total:
        candidate = tokens[clause_length]
        if isinstance(candidate, TokenObject) and \
           candidate.get_value() in ('except', 'else'):
            break
        clause_length += 1
    if clause_length == 3:
        # 'except' ':' suite
        return (3, None, None, tokens[2])
    if clause_length == 4:
        # 'except' test ':' suite
        return (4, tokens[1], None, tokens[3])
    # 'except' test ',' target ':' suite
    return (6, tokens[1], to_lvalue(tokens[3], consts.OP_ASSIGN), tokens[5])
def parse_dotted_names(tokens, builder):
    """parses NAME('.' NAME)* and returns full dotted name

    this function doesn't assume that the <tokens> list ends after the
    last 'NAME' element
    """
    first = tokens[0]
    assert isinstance(first, TokenObject)
    name = first.get_value()
    total = len(tokens)
    index = 1
    # Walk DOT/NAME pairs; the for-range form is kept deliberately so that
    # `index` retains the last visited position when the loop runs out.
    for index in range(1, total, 2):
        dot_tok = tokens[index]
        assert isinstance(dot_tok, TokenObject)
        if dot_tok.name != builder.parser.tokens['DOT']:
            break
        name_tok = tokens[index + 1]
        assert isinstance(name_tok, TokenObject)
        name = name + '.' + name_tok.get_value()
    return (index, name)
def parse_argument(tokens, builder):
    """parses function call arguments

    Returns (arguments, stararg_token, dstararg_token) where arguments is a
    list of AST nodes (plain positional values and ast.Keyword entries) and
    the two tokens are the '*args' / '**kwargs' payloads, or None.
    """
    l = len(tokens)
    index = 0
    arguments = []
    last_token = None
    building_kw = False   # True right after an '=' -- next value is a kwarg
    kw_built = False
    stararg_token = None
    dstararg_token = None
    while index < l:
        cur_token = tokens[index]
        if not isinstance(cur_token, TokenObject):
            # a non-token entry is an already-built AST value node
            index += 1
            if not building_kw:
                arguments.append(cur_token)
            else:
                # fold the preceding Name plus this value into a Keyword
                last_token = arguments.pop()
                assert isinstance(last_token, ast.Name) # used by rtyper
                arguments.append(ast.Keyword(last_token.varname, cur_token, last_token.lineno))
                building_kw = False
                kw_built = True
            continue
        elif cur_token.name == builder.parser.tokens['COMMA']:
            index += 1
            continue
        elif cur_token.name == builder.parser.tokens['EQUAL']:
            index += 1
            building_kw = True
            continue
        elif cur_token.name == builder.parser.tokens['STAR'] or cur_token.name == builder.parser.tokens['DOUBLESTAR']:
            index += 1
            if cur_token.name == builder.parser.tokens['STAR']:
                stararg_token = tokens[index]
                index += 1
                if index >= l:
                    break
                # NOTE(review): this assumes the only thing that can follow
                # '*args' is ', **kwargs' -- verify against the grammar.
                index += 2 # Skip COMMA and DOUBLESTAR
            dstararg_token = tokens[index]
            break
        elif cur_token.get_value() == 'for':
            # generator expression argument: f(x for x in y)
            if len(arguments) != 1:
                raise SyntaxError("invalid syntax", cur_token.lineno,
                                  cur_token.col)
            expr = arguments[0]
            genexpr_for = parse_genexpr_for(tokens[index:])
            genexpr_for[0].is_outmost = True
            gexp = ast.GenExpr(ast.GenExprInner(expr, genexpr_for, expr.lineno), expr.lineno)
            arguments[0] = gexp
            break
    return arguments, stararg_token, dstararg_token
def parse_fpdef(tokens, index, builder):
    """fpdef: fpdef: NAME | '(' fplist ')'
    fplist: fpdef (',' fpdef)* [',']

    This intend to be a RPYTHON compliant implementation of _parse_fpdef,
    but it can't work with the default compiler.
    We switched to use astcompiler module now

    Returns (next_index, node) where node is an ast.AssName or, for a
    parenthesized/tuple parameter, an ast.AssTuple of the collected names.
    """
    nodes = []
    comma = False
    while True:
        token = tokens[index]
        index += 1
        assert isinstance(token, TokenObject)
        if token.name == builder.parser.tokens['LPAR']:       # nested item
            # recurse into the parenthesized sub-pattern
            index, node = parse_fpdef(tokens, index, builder)
        elif token.name == builder.parser.tokens['RPAR']:     # end of current nesting
            break
        else:                                                 # name
            val = token.get_value()
            node = ast.AssName(val, consts.OP_ASSIGN, token.lineno)
        nodes.append(node)

        # after each item expect either ',' (continue) or ')' (done)
        token = tokens[index]
        index += 1
        assert isinstance(token, TokenObject)
        if token.name == builder.parser.tokens['COMMA']:
            comma = True
        else:
            assert token.name == builder.parser.tokens['RPAR']
            break
    # a single name without a trailing comma is NOT a 1-tuple
    if len(nodes) == 1 and not comma:
        node = nodes[0]
    else:
        node = ast.AssTuple(nodes, token.lineno)
    return index, node
def parse_arglist(tokens, builder):
    """Parse a function-definition argument list.

    Returns (names, defaults, flags) where *names* are assignment nodes,
    *defaults* are the default-value expressions, and *flags* carries
    CO_VARARGS / CO_VARKEYWORDS.  Raises SyntaxError on malformed input.
    """
    l = len(tokens)
    index = 0
    defaults = []
    names = []
    flags = 0
    first_with_default = -1
    while index < l:
        cur_token = tokens[index]
        index += 1
        if not isinstance(cur_token, TokenObject):
            # a non-token is an already-built default-value expression
            # XXX: think of another way to write this test
            defaults.append(cur_token)
            if first_with_default == -1:
                first_with_default = len(names) - 1
        elif cur_token.name == builder.parser.tokens['COMMA']:
            # We could skip test COMMA by incrementing index cleverly
            # but we might do some experiment on the grammar at some point
            continue
        elif cur_token.name == builder.parser.tokens['LPAR']:
            # nested tuple parameter
            index, node = parse_fpdef(tokens, index, builder)
            names.append(node)
        elif cur_token.name == builder.parser.tokens['STAR'] or cur_token.name == builder.parser.tokens['DOUBLESTAR']:
            if cur_token.name == builder.parser.tokens['STAR']:
                cur_token = tokens[index]
                assert isinstance(cur_token, TokenObject)
                index += 1
                if cur_token.name == builder.parser.tokens['NAME']:
                    val = cur_token.get_value()
                    names.append(ast.AssName(val, consts.OP_ASSIGN))
                    flags |= consts.CO_VARARGS
                    index += 1
                    if index >= l:
                        break
                    else:
                        # still more tokens to read
                        cur_token = tokens[index]
                        index += 1
                else:
                    # bug fix: message read "incomplete varags"
                    raise SyntaxError("incomplete varargs", cur_token.lineno,
                                      cur_token.col)
            assert isinstance(cur_token, TokenObject)
            if cur_token.name != builder.parser.tokens['DOUBLESTAR']:
                raise SyntaxError("Unexpected token", cur_token.lineno,
                                  cur_token.col)
            cur_token = tokens[index]
            index += 1
            assert isinstance(cur_token, TokenObject)
            if cur_token.name == builder.parser.tokens['NAME']:
                val = cur_token.get_value()
                names.append(ast.AssName(val, consts.OP_ASSIGN))
                flags |= consts.CO_VARKEYWORDS
                index += 1
            else:
                # bug fix: message read "incomplete varags"
                raise SyntaxError("incomplete varargs", cur_token.lineno,
                                  cur_token.col)
            if index < l:
                # nothing may follow **kwargs
                token = tokens[index]
                raise SyntaxError("unexpected token", token.lineno,
                                  token.col)
        elif cur_token.name == builder.parser.tokens['NAME']:
            val = cur_token.get_value()
            names.append(ast.AssName(val, consts.OP_ASSIGN))
    if first_with_default != -1:
        # every parameter after the first defaulted one (except *args /
        # **kwargs) must also carry a default
        num_expected_with_default = len(names) - first_with_default
        if flags & consts.CO_VARKEYWORDS:
            num_expected_with_default -= 1
        if flags & consts.CO_VARARGS:
            num_expected_with_default -= 1
        if len(defaults) != num_expected_with_default:
            raise SyntaxError('non-default argument follows default argument',
                              tokens[0].lineno, tokens[0].col)
    return names, defaults, flags
def parse_listcomp(tokens, builder):
    """parses 'for j in k for i in j if i %2 == 0' and returns
    a list of ast.ListCompFor instances
    XXX: refactor with listmaker ?
    """
    list_fors = []
    ifs = []
    index = 0
    if tokens:
        lineno = tokens[0].lineno
    else:
        lineno = -1
    while index < len(tokens):
        token = tokens[index]
        assert isinstance(token, TokenObject)  # rtyper info + check
        if token.get_value() == 'for':
            index += 1  # skip 'for'
            ass_node = to_lvalue(tokens[index], consts.OP_ASSIGN)
            index += 2  # skip 'in'
            iterables = [tokens[index]]
            index += 1
            # collect comma-separated iterables; more than one becomes a Tuple
            while index < len(tokens):
                tok2 = tokens[index]
                if not isinstance(tok2, TokenObject):
                    break
                if tok2.name != builder.parser.tokens['COMMA']:
                    break
                iterables.append(tokens[index+1])
                index += 2
            if len(iterables) == 1:
                iterable = iterables[0]
            else:
                iterable = ast.Tuple(iterables, token.lineno)
            # gather the trailing 'if <cond>' clauses attached to this 'for'
            while index < len(tokens):
                token = tokens[index]
                assert isinstance(token, TokenObject)  # rtyper info
                if token.get_value() == 'if':
                    ifs.append(ast.ListCompIf(tokens[index+1], token.lineno))
                    index += 2
                else:
                    break
            list_fors.append(ast.ListCompFor(ass_node, iterable, ifs, lineno))
            ifs = []
        else:
            assert False, 'Unexpected token: expecting for in listcomp'
            #
            # Original implementation:
            #
            # if tokens[index].get_value() == 'for':
            #     index += 1 # skip 'for'
            #     ass_node = to_lvalue(tokens[index], consts.OP_ASSIGN)
            #     index += 2 # skip 'in'
            #     iterable = tokens[index]
            #     index += 1
            #     while index < len(tokens) and tokens[index].get_value() == 'if':
            #         ifs.append(ast.ListCompIf(tokens[index+1]))
            #         index += 2
            #     list_fors.append(ast.ListCompFor(ass_node, iterable, ifs))
            #     ifs = []
            # else:
            #     raise ValueError('Unexpected token: %s' % tokens[index])
    return list_fors
def parse_genexpr_for(tokens):
    """parses 'for j in k for i in j if i %2 == 0' and returns
    a list of ast.GenExprFor instances
    XXX: if RPYTHON supports to pass a class object to a function,
    we could refactor parse_listcomp and parse_genexpr_for,
    and call :
    - parse_listcomp(tokens, forclass=ast.GenExprFor, ifclass=...)
    or:
    - parse_listcomp(tokens, forclass=ast.ListCompFor, ifclass=...)
    """
    genexpr_fors = []
    ifs = []
    index = 0
    if tokens:
        lineno = tokens[0].lineno
    else:
        lineno = -1
    while index < len(tokens):
        token = tokens[index]
        assert isinstance(token, TokenObject)  # rtyper info + check
        if token.get_value() == 'for':
            index += 1  # skip 'for'
            ass_node = to_lvalue(tokens[index], consts.OP_ASSIGN)
            index += 2  # skip 'in'
            iterable = tokens[index]
            index += 1
            # gather the trailing 'if <cond>' clauses attached to this 'for'
            while index < len(tokens):
                token = tokens[index]
                assert isinstance(token, TokenObject)  # rtyper info
                if token.get_value() == 'if':
                    ifs.append(ast.GenExprIf(tokens[index+1], token.lineno))
                    index += 2
                else:
                    break
            genexpr_fors.append(ast.GenExprFor(ass_node, iterable, ifs, lineno))
            ifs = []
        else:
            raise SyntaxError('invalid syntax',
                              token.lineno, token.col)
    return genexpr_fors
def get_docstring(builder,stmt):
    """parses a Stmt node.

    If a docstring is found, the Discard node is **removed**
    from <stmt> and the docstring is returned.
    If no docstring is found, <stmt> is left unchanged
    and the wrapped none (builder.wrap_none()) is returned.
    """
    if not isinstance(stmt, ast.Stmt):
        return None
    doc = builder.wrap_none()
    if len(stmt.nodes):
        first_child = stmt.nodes[0]
        if isinstance(first_child, ast.Discard):
            expr = first_child.expr
            if builder.is_basestring_const(expr):
                # This *is* a docstring, remove it from stmt list
                assert isinstance(expr, ast.Const)  # rtyper hint
                del stmt.nodes[0]
                doc = expr.value
    return doc
def to_lvalue(ast_node, flags):
    """Convert an expression node into the matching assignment-target
    node (AssName / AssTuple / AssList / AssAttr, or the node itself
    for Subscript / Slice).

    *flags* is a consts.OP_* constant stored on the resulting node.
    Raises SyntaxError for expressions that cannot be assigned to.
    """
    lineno = ast_node.lineno
    if isinstance( ast_node, ast.Name ):
        return ast.AssName(ast_node.varname, flags, lineno)
        # return ast.AssName(ast_node.name, flags)
    elif isinstance(ast_node, ast.Tuple):
        nodes = []
        # FIXME: should ast_node.getChildren() but it's not annotable
        # because of flatten()
        for node in ast_node.nodes:
            nodes.append(to_lvalue(node, flags))
        return ast.AssTuple(nodes, lineno)
    elif isinstance(ast_node, ast.List):
        nodes = []
        # FIXME: should ast_node.getChildren() but it's not annotable
        # because of flatten()
        for node in ast_node.nodes:
            nodes.append(to_lvalue(node, flags))
        return ast.AssList(nodes, lineno)
    elif isinstance(ast_node, ast.Getattr):
        expr = ast_node.expr
        # redundant with the elif guard; kept as an rtyper annotation hint
        assert isinstance(ast_node, ast.Getattr)
        attrname = ast_node.attrname
        return ast.AssAttr(expr, attrname, flags, lineno)
    elif isinstance(ast_node, ast.Subscript):
        # Subscript/Slice nodes are reused in place, only re-flagged
        ast_node.flags = flags
        return ast_node
    elif isinstance(ast_node, ast.Slice):
        ast_node.flags = flags
        return ast_node
    else:
        if isinstance(ast_node, ast.GenExpr):
            raise SyntaxError("assign to generator expression not possible",
                              lineno, 0, '')
        elif isinstance(ast_node, ast.ListComp):
            raise SyntaxError("can't assign to list comprehension",
                              lineno, 0, '')
        elif isinstance(ast_node, ast.CallFunc):
            if flags == consts.OP_DELETE:
                raise SyntaxError("can't delete function call",
                                  lineno, 0, '')
            else:
                raise SyntaxError("can't assign to function call",
                                  lineno, 0, '')
        else:
            raise SyntaxError("can't assign to non-lvalue",
                              lineno, 0, '')
def is_augassign( ast_node ):
    """Return True if *ast_node* may appear as the target of an
    augmented assignment (Name, Slice, Subscript or Getattr)."""
    return (isinstance(ast_node, ast.Name) or
            isinstance(ast_node, ast.Slice) or
            isinstance(ast_node, ast.Subscript) or
            isinstance(ast_node, ast.Getattr))
def get_atoms(builder, nb):
    """Pop *nb* stack entries from *builder* and return the atoms in
    source order.  Rule markers (BaseRuleObject) are not atoms: each one
    extends the number of entries still to pop by its .count."""
    atoms = []
    remaining = nb
    while remaining > 0:
        entry = builder.pop()
        if isinstance(entry, BaseRuleObject):
            remaining += entry.count
        else:
            atoms.append(entry)
        remaining -= 1
    atoms.reverse()  # popped newest-first; callers want source order
    return atoms
def peek_atoms(builder, nb):
    """Non-destructive variant of get_atoms(): read *nb* entries off the
    top of builder.rule_stack without popping them."""
    atoms = []
    pos = len(builder.rule_stack) - 1
    remaining = nb
    while remaining > 0:
        assert pos >= 0
        entry = builder.rule_stack[pos]
        if isinstance(entry, BaseRuleObject):
            # marker covers .count more entries below it
            remaining += entry.count
        else:
            atoms.append(entry)
        remaining -= 1
        pos -= 1
    atoms.reverse()  # scanned top-down; callers want source order
    return atoms
#def eval_string(value):
# """temporary implementation
#
# FIXME: need to be finished (check compile.c (parsestr) and
# stringobject.c (PyString_DecodeEscape()) for complete implementation)
# """
# # return eval(value)
# if len(value) == 2:
# return ''
# result = ''
# length = len(value)
# quotetype = value[0]
# index = 1
# while index < length and value[index] == quotetype:
# index += 1
# if index == 6:
# # empty strings like """""" or ''''''
# return ''
# # XXX: is it RPYTHON to do this value[index:-index]
# chars = [char for char in value[index:len(value)-index]]
# result = ''.join(chars)
# result = result.replace('\\\\', '\\')
# d = {'\\b' : '\b', '\\f' : '\f', '\\t' : '\t', '\\n' : '\n',
# '\\r' : '\r', '\\v' : '\v', '\\a' : '\a',
# }
# for escaped, value in d.items():
# result = result.replace(escaped, value)
# return result
## misc utilities, especially for power: rule
def reduce_callfunc(obj, arglist):
    """generic factory for CallFunc nodes (obj(...) trailers)"""
    assert isinstance(arglist, ArglistObject)  # rtyper hint
    return ast.CallFunc(obj, arglist.arguments,
                        arglist.stararg, arglist.dstararg, arglist.lineno)
def reduce_subscript(obj, subscript):
    """generic factory for Subscript nodes (obj[...] trailers)"""
    assert isinstance(subscript, SubscriptObject)  # rtyper hint
    return ast.Subscript(obj, consts.OP_APPLY, subscript.value, subscript.lineno)
def reduce_slice(obj, sliceobj):
    """Generic factory for slice trailers: a plain start:end slice
    becomes ast.Slice, an extended slice becomes Subscript(Sliceobj)."""
    assert isinstance(sliceobj, SlicelistObject)
    lineno = sliceobj.lineno
    if sliceobj.fake_rulename == 'slice':
        # two-element form [start, end]
        return ast.Slice(obj, consts.OP_APPLY,
                         sliceobj.value[0], sliceobj.value[1], lineno)
    # 'sliceobj' form: wrap the element list in a Sliceobj node
    inner = ast.Sliceobj(sliceobj.value, lineno)
    return ast.Subscript(obj, consts.OP_APPLY, inner, lineno)
def parse_attraccess(tokens, builder):
    """parses token list like ['a', '.', 'b', '.', 'c', ...]
    and returns an ast node : ast.Getattr(Getattr(Name('a'), 'b'), 'c' ...)

    Trailer markers found in the list (ArglistObject, SubscriptObject,
    SlicelistObject) are reduced into CallFunc / Subscript / Slice nodes.
    """
    token = tokens[0]
    # XXX HACK for when parse_attraccess is called from build_decorator
    if isinstance(token, TokenObject):
        val = token.get_value()
        result = ast.Name(val, token.lineno)
    else:
        # already-built node (e.g. a parenthesized expression)
        result = token
    index = 1
    while index < len(tokens):
        token = tokens[index]
        if isinstance(token, TokenObject) and token.name == builder.parser.tokens['DOT']:
            # attribute access: consume DOT + NAME
            index += 1
            token = tokens[index]
            assert isinstance(token, TokenObject)
            result = ast.Getattr(result, token.get_value(), token.lineno)
        elif isinstance(token, ArglistObject):
            result = reduce_callfunc(result, token)
        elif isinstance(token, SubscriptObject):
            result = reduce_subscript(result, token)
        elif isinstance(token, SlicelistObject):
            result = reduce_slice(result, token)
        else:
            assert False, "Don't know how to handle index %s of %s" % (index, len(tokens))
        index += 1
    return result
## Stack elements definitions ###################################
class BaseRuleObject(ast.Node):
    """Base class for unnamed rules"""
    def __init__(self, count, lineno):
        # number of stack entries this marker covers (see get_atoms())
        self.count = count
        self.lineno = lineno # src.getline()
        self.col = 0  # src.getcol()
class RuleObject(BaseRuleObject):
    """A simple object used to wrap a rule or token"""
    def __init__(self, name, count, lineno, parser):
        BaseRuleObject.__init__(self, count, lineno)
        self.rulename = name
        self.parser = parser

    def __repr__(self):
        return "<Rule: %s/%d>" % (self.parser.symbol_repr(self.rulename),
                                  self.count)

    # str() and repr() are intentionally identical
    __str__ = __repr__
class TempRuleObject(BaseRuleObject):
    """used to keep track of how many items get_atom() should pop"""
    def __init__(self, name, count, lineno):
        BaseRuleObject.__init__(self, count, lineno)
        self.temp_rulename = name

    def __repr__(self):
        return "<Rule: %s/%d>" % (self.temp_rulename, self.count)

    # str() and repr() are intentionally identical
    __str__ = __repr__
class TokenObject(ast.Node):
    """A simple object used to wrap a rule or token"""
    def __init__(self, name, value, lineno, parser):
        self.name = name
        self.value = value
        self.count = 0
        self.col = 0  # src.getcol()
        self.lineno = lineno
        self.parser = parser

    def get_name(self):
        # prefer the rvalue table, fall back to tok_name, then the raw id
        fallback = self.parser.tok_name.get(self.name, str(self.name))
        return self.parser.tok_rvalues.get(self.name, fallback)

    def get_value(self):
        # normalize a missing value to the empty string
        if self.value is None:
            return ''
        return self.value

    def descr_fget_value(space, self):
        # app-level getter for the 'value' attribute (note the
        # (space, self) argument order used by GetSetProperty)
        return space.wrap(self.get_value())

    def __str__(self):
        return "<Token: (%s,%s)>" % (self.get_name(), self.value)

    def __repr__(self):
        return "<Token: (%r,%s)>" % (self.get_name(), self.value)
# app-level type definition exposing TokenObject as 'BuildToken'
TokenObject.typedef = TypeDef('BuildToken',
    name=interp_attrproperty('name', cls=TokenObject),
    lineno=interp_attrproperty('lineno', cls=TokenObject),
    value=GetSetProperty(TokenObject.descr_fget_value))
class ObjectAccessor(ast.Node):
    """base class for ArglistObject, SubscriptObject and SlicelistObject

    These are transient stack helpers describing a trailer (call
    arguments, subscript or slice); parse_attraccess() reduces them
    into real AST nodes.
    FIXME: think about a more appropriate name
    """
class ArglistObject(ObjectAccessor):
    """helper class to build function's arg list"""
    def __init__(self, arguments, stararg, dstararg, lineno):
        self.fake_rulename = 'arglist'
        self.arguments = arguments
        self.stararg = stararg
        self.dstararg = dstararg
        self.lineno = lineno

    def __str__(self):
        # bug fix: the old code formatted self.value, an attribute this
        # class never sets, so str()/repr() raised AttributeError
        return "<ArgList: (%s, %s, %s)>" % (self.arguments,
                                            self.stararg,
                                            self.dstararg)

    def __repr__(self):
        return "<ArgList: (%s, %s, %s)>" % (self.arguments,
                                            self.stararg,
                                            self.dstararg)
class SubscriptObject(ObjectAccessor):
    """helper class to build subscript list

    self.value represents the __getitem__ argument
    """
    def __init__(self, name, value, lineno):
        self.fake_rulename = name
        self.value = value
        self.lineno = lineno

    def __repr__(self):
        return "<SubscriptList: (%s)>" % self.value

    # str() and repr() are intentionally identical
    __str__ = __repr__
class SlicelistObject(ObjectAccessor):
    """helper class to build slice objects

    self.value is a list [start, end, step]
    self.fake_rulename can either be 'slice' or 'sliceobj' depending
    on if a step is specified or not (see Python's AST
    for more information on that)
    """
    def __init__(self, name, value, lineno):
        self.fake_rulename = name
        self.value = value
        self.lineno = lineno

    def __repr__(self):
        return "<SliceList: (%s)>" % self.value

    # str() and repr() are intentionally identical
    __str__ = __repr__
| Python |
from grammar import Token, GrammarProxy
from grammar import AbstractBuilder, AbstractContext
# ASCII code points delimiting identifier characters
ORDA = ord("A")
ORDZ = ord("Z")
ORDa = ord("a")
ORDz = ord("z")
ORD0 = ord("0")
ORD9 = ord("9")
ORD_ = ord("_")

def is_py_name(name):
    """Return True if *name* is a valid ASCII Python identifier:
    a letter or underscore followed by letters, digits or underscores."""
    if not name:
        return False
    first = ord(name[0])
    # leading character: letter or underscore only (no digit)
    if not (ORDA <= first <= ORDZ or ORDa <= first <= ORDz or first == ORD_):
        return False
    for ch in name[1:]:
        code = ord(ch)
        if not (ORDA <= code <= ORDZ or
                ORDa <= code <= ORDz or
                ORD0 <= code <= ORD9 or
                code == ORD_):
            return False
    return True
# Regex-escaped spellings of the punctuation/operator tokens accepted by
# the grammar reader.
# NOTE(review): a few entries look suspicious ('\n,', '', '\n\\)') --
# possibly extraction artifacts; confirm against the pristine source.
punct=['>=', '<>', '!=', '<', '>', '<=', '==', '\\*=',
       '//=', '%=', '^=', '<<=', '\\*\\*=', '\\', '=',
       '\\+=', '>>=', '=', '&=', '/=', '-=', '\n,', '^',
       '>>', '&', '\\+', '\\*', '-', '/', '\\.', '\\*\\*',
       '%', '<<', '//', '\\', '', '\n\\)', '\\(', ';', ':',
       '@', '\\[', '\\]', '`', '\\{', '\\}']
# token kinds emitted directly by the tokenizer
TERMINALS = ['NAME', 'NUMBER', 'STRING', 'NEWLINE', 'ENDMARKER',
             'INDENT', 'DEDENT' ]
class NameToken(Token):
    """A token that is not a keyword"""
    def __init__(self, parser, keywords=None):
        Token.__init__(self, parser, parser.tokens['NAME'])
        # shared, mutable keyword list; the grammar builder appends to it
        # while rules are being read
        self.keywords = keywords

    def match(self, source, builder, level=0):
        """Matches a token.
        the default implementation is to match any token whose type
        corresponds to the object's name. You can extend Token
        to match anything returned from the lexer. for exemple
        type, value = source.next()
        if type=="integer" and int(value)>=0:
            # found
        else:
            # error unknown or negative integer
        """
        ctx = source.context()
        tk = source.next()
        if tk.codename == self.codename:
            # XXX (adim): this is trunk's keyword management
            # if tk.value not in builder.keywords:
            if tk.value not in self.keywords:
                # a NAME that is not a keyword: accept it
                ret = builder.token( tk.codename, tk.value, source )
                return ret
        # no match: rewind the token source
        source.restore( ctx )
        return 0

    def match_token(self, builder, other):
        """special case of match token for tokens which are really keywords
        """
        if not isinstance(other, Token):
            raise RuntimeError("Unexpected token type")
        if other is self.parser.EmptyToken:
            return False
        if other.codename != self.codename:
            return False
        # XXX (adim): this is trunk's keyword management
        # if other.value in builder.keywords:
        if other.value in self.keywords:
            return False
        return True
class EBNFBuilderContext(AbstractContext):
    """Snapshot of EBNFBuilder state, used for backtracking/restore."""
    def __init__(self, stackpos, seqcounts, altcounts):
        # length of rule_stack at snapshot time
        self.stackpos = stackpos
        self.seqcounts = seqcounts
        self.altcounts = altcounts
class EBNFBuilder(AbstractBuilder):
    """Build a grammar tree

    Reads an EBNF grammar definition (parsed by *gram_parser*) and
    registers the resulting rules into *dest_parser*.  Rules under
    construction live on self.rule_stack; curseqcount/curaltcount track
    how many stack entries belong to the current sequence/alternative.
    """
    def __init__(self, gram_parser, dest_parser):
        AbstractBuilder.__init__(self, dest_parser)
        self.gram = gram_parser
        self.rule_stack = []
        self.seqcounts = []  # number of items in the current sequence
        self.altcounts = []  # number of sequence in the current alternative
        self.curaltcount = 0
        self.curseqcount = 0
        self.current_subrule = 0   # counter for anonymous sub-rule names
        self.current_rule = -1     # codename of the rule being defined
        self.current_rule_name = ""
        self.tokens = {}
        self.keywords = []
        NAME = dest_parser.add_token('NAME')
        # NAME = dest_parser.tokens['NAME']
        # NAME uses a special token class that rejects keywords
        self.tokens[NAME] = NameToken(dest_parser, keywords=self.keywords)

    def context(self):
        """Return a snapshot for backtracking (counter lists are shared)."""
        return EBNFBuilderContext(len(self.rule_stack), self.seqcounts, self.altcounts)

    def restore(self, ctx):
        """Roll back to a previously taken snapshot."""
        del self.rule_stack[ctx.stackpos:]
        self.seqcounts = ctx.seqcounts
        self.altcounts = ctx.altcounts

    def new_symbol(self):
        """Allocate and return a new (anonymous) grammar symbol whose
        name is based on the current grammar rule being parsed"""
        rule_name = ":" + self.current_rule_name + "_%d" % self.current_subrule
        self.current_subrule += 1
        name_id = self.parser.add_anon_symbol( rule_name )
        return name_id

    def new_rule(self, rule):
        """A simple helper method that registers a new rule as 'known'"""
        self.parser.all_rules.append(rule)
        return rule

    def resolve_rules(self):
        """Remove GrammarProxy objects"""
        to_be_deleted = {}
        for rule in self.parser.all_rules:
            # for i, arg in enumerate(rule.args):
            for i in range(len(rule.args)):
                arg = rule.args[i]
                if isinstance(arg, GrammarProxy):
                    real_rule = self.parser.root_rules[arg.codename]
                    if isinstance(real_rule, GrammarProxy):
                        # If we still have a GrammarProxy associated to this codename
                        # this means we have encountered a terminal symbol
                        to_be_deleted[ arg.codename ] = True
                        rule.args[i] = self.get_token( arg.codename )
                        #print arg, "-> Token(",arg.rule_name,")"
                    else:
                        #print arg, "->", real_rule
                        rule.args[i] = real_rule
        for codename in to_be_deleted.keys():
            del self.parser.root_rules[codename]

    def get_token(self, codename ):
        """Returns a new or existing Token"""
        if codename in self.tokens:
            return self.tokens[codename]
        token = self.tokens[codename] = self.parser.build_token(codename)
        return token

    def get_symbolcode(self, name):
        return self.parser.add_symbol( name )

    def get_rule( self, name ):
        """Return the rule (or token, or forward-reference proxy) for *name*."""
        if name in self.parser.tokens:
            codename = self.parser.tokens[name]
            return self.get_token( codename )
        codename = self.get_symbolcode( name )
        if codename in self.parser.root_rules:
            return self.parser.root_rules[codename]
        # not defined yet: install a proxy resolved later by resolve_rules()
        proxy = GrammarProxy( self.parser, name, codename )
        self.parser.root_rules[codename] = proxy
        return proxy

    def alternative(self, rule, source):
        return True

    def pop_rules( self, count ):
        """Pop and return the topmost *count* rules from the stack."""
        offset = len(self.rule_stack)-count
        assert offset>=0
        rules = self.rule_stack[offset:]
        del self.rule_stack[offset:]
        return rules

    def sequence(self, rule, source, elts_number):
        """Builder callback: a grammar construct was completely matched."""
        _rule = rule.codename
        if _rule == self.gram.sequence:
            # single-item sequences collapse to the item itself
            if self.curseqcount==1:
                self.curseqcount = 0
                self.curaltcount += 1
                return True
            rules = self.pop_rules(self.curseqcount)
            new_rule = self.parser.build_sequence( self.new_symbol(), rules )
            self.rule_stack.append( new_rule )
            self.curseqcount = 0
            self.curaltcount += 1
        elif _rule == self.gram.alternative:
            # single-sequence alternatives collapse to the sequence itself
            if self.curaltcount == 1:
                self.curaltcount = 0
                return True
            rules = self.pop_rules(self.curaltcount)
            new_rule = self.parser.build_alternative( self.new_symbol(), rules )
            self.rule_stack.append( new_rule )
            self.curaltcount = 0
        elif _rule == self.gram.group:
            self.curseqcount += 1
        elif _rule == self.gram.option:
            # pops the last alternative
            rules = self.pop_rules( 1 )
            new_rule = self.parser.build_kleenestar( self.new_symbol(), _min=0, _max=1, rule=rules[0] )
            self.rule_stack.append( new_rule )
            self.curseqcount += 1
        elif _rule == self.gram.rule:
            # a complete named rule: exactly one finished rule on the stack
            assert len(self.rule_stack)==1
            old_rule = self.rule_stack[0]
            del self.rule_stack[0]
            if isinstance(old_rule,Token):
                # Wrap a token into an alternative
                old_rule = self.parser.build_alternative( self.current_rule, [old_rule] )
            else:
                # Make sure we use the codename from the named rule
                old_rule.codename = self.current_rule
            self.parser.root_rules[self.current_rule] = old_rule
            self.current_subrule = 0
        return True

    def token(self, name, value, source):
        """Builder callback: a single grammar-definition token was read."""
        if name == self.gram.TOK_STRING:
            self.handle_TOK_STRING( name, value )
            self.curseqcount += 1
        elif name == self.gram.TOK_SYMDEF:
            # start of a new rule definition: "name:"
            self.current_rule = self.get_symbolcode( value )
            self.current_rule_name = value
        elif name == self.gram.TOK_SYMBOL:
            rule = self.get_rule( value )
            self.rule_stack.append( rule )
            self.curseqcount += 1
        elif name == self.gram.TOK_STAR:
            # postfix '*': wrap the topmost rule in a kleene star
            top = self.rule_stack[-1]
            rule = self.parser.build_kleenestar( self.new_symbol(), _min=0, rule=top)
            self.rule_stack[-1] = rule
        elif name == self.gram.TOK_ADD:
            # postfix '+': one-or-more repetition
            top = self.rule_stack[-1]
            rule = self.parser.build_kleenestar( self.new_symbol(), _min=1, rule=top)
            self.rule_stack[-1] = rule
        elif name == self.gram.TOK_BAR:
            assert self.curseqcount == 0
        elif name == self.gram.TOK_LPAR:
            # entering a group: save and reset the counters
            self.altcounts.append( self.curaltcount )
            self.seqcounts.append( self.curseqcount )
            self.curseqcount = 0
            self.curaltcount = 0
        elif name == self.gram.TOK_RPAR:
            assert self.curaltcount == 0
            self.curaltcount = self.altcounts.pop()
            self.curseqcount = self.seqcounts.pop()
        elif name == self.gram.TOK_LBRACKET:
            # entering an optional part: same counter discipline as '('
            self.altcounts.append( self.curaltcount )
            self.seqcounts.append( self.curseqcount )
            self.curseqcount = 0
            self.curaltcount = 0
        elif name == self.gram.TOK_RBRACKET:
            assert self.curaltcount == 0
            assert self.curseqcount == 0
            self.curaltcount = self.altcounts.pop()
            self.curseqcount = self.seqcounts.pop()
        return True

    def handle_TOK_STRING( self, name, value ):
        """Turn a quoted string from the grammar into a token rule."""
        if value in self.parser.tok_values:
            # punctuation
            tokencode = self.parser.tok_values[value]
            tok = self.parser.build_token( tokencode, None )
        else:
            if not is_py_name(value):
                raise RuntimeError("Unknown STRING value ('%s')" % value)
            # assume a keyword
            tok = self.parser.build_token( self.parser.tokens['NAME'], value)
            if value not in self.keywords:
                self.keywords.append(value)
        self.rule_stack.append(tok)
| Python |
#!/usr/bin/env python
"""This module loads the python Grammar (2.3 or 2.4) and builds
the parser for this grammar in the global PYTHON_PARSER
helper functions are provided that use the grammar to parse
using file_input, single_input and eval_input targets
"""
import sys
import os
from pypy.interpreter.error import OperationError, debug_print
from pypy.interpreter import gateway
from pypy.interpreter.pyparser.error import SyntaxError
from pypy.interpreter.pyparser.pythonlexer import Source, match_encoding_declaration
from pypy.interpreter.astcompiler.consts import CO_FUTURE_WITH_STATEMENT
import pypy.interpreter.pyparser.pysymbol as pysymbol
import pypy.interpreter.pyparser.pytoken as pytoken
import pypy.interpreter.pyparser.ebnfparse as ebnfparse
from pypy.interpreter.pyparser.ebnflexer import GrammarSource
from pypy.interpreter.pyparser.ebnfgrammar import GRAMMAR_GRAMMAR
import pypy.interpreter.pyparser.grammar as grammar
from pypy.interpreter.pyparser.pythonutil import build_parser_for_version, build_parser
# try:
from pypy.interpreter.pyparser import symbol
# except ImportError:
# # for standalone testing
# import symbol
from codeop import PyCF_DONT_IMPLY_DEDENT
# default grammar version used by make_pyparser()
ENABLE_GRAMMAR_VERSION = "2.4"
## files encoding management ############################################
# app-level helper: decode *text* with *encoding*, re-encode as UTF-8
_recode_to_utf8 = gateway.applevel(r'''
def _recode_to_utf8(text, encoding):
    return unicode(text, encoding).encode("utf-8")
''').interphook('_recode_to_utf8')
def recode_to_utf8(space, text, encoding):
    """Interp-level wrapper around the app-level recoder above."""
    return space.str_w(_recode_to_utf8(space, space.wrap(text),
                                       space.wrap(encoding)))
def _normalize_encoding(encoding):
"""returns normalized name for <encoding>
see dist/src/Parser/tokenizer.c 'get_normal_name()'
for implementation details / reference
NOTE: for now, parser.suite() raises a MemoryError when
a bad encoding is used. (SF bug #979739)
"""
if encoding is None:
return None
# lower() + '_' / '-' conversion
encoding = encoding.replace('_', '-').lower()
if encoding.startswith('utf-8'):
return 'utf-8'
for variant in ['latin-1', 'iso-latin-1', 'iso-8859-1']:
if encoding.startswith(variant):
return 'iso-8859-1'
return encoding
def _check_for_encoding(s):
    """Search the first two lines of *s* for a coding declaration
    (PEP 263) and return it, or None."""
    first_nl = s.find('\n')
    if first_nl < 0:
        # single-line source
        return _check_line_for_encoding(s)
    declared = _check_line_for_encoding(s[:first_nl])
    if declared:
        return declared
    # fall back to the second line
    second_nl = s.find('\n', first_nl + 1)
    if second_nl < 0:
        return _check_line_for_encoding(s[first_nl + 1:])
    return _check_line_for_encoding(s[first_nl + 1:second_nl])
def _check_line_for_encoding(line):
    """returns the declared encoding or None

    Only whitespace may precede the '#' that starts the declaration
    comment; any other character disqualifies the line.
    """
    i = 0
    for i in range(len(line)):
        if line[i] == '#':
            break
        if line[i] not in ' \t\014':
            return None
    # NOTE(review): if the line is all whitespace (no '#'), i points at the
    # last character here and the matcher sees a 1-char tail -- presumably
    # match_encoding_declaration rejects it; confirm.
    return match_encoding_declaration(line[i:])
## Python Source Parser ###################################################
class PythonParser(grammar.Parser):
    """Wrapper class for python grammar"""
    # parse mode -> grammar goal symbol
    targets = {
        'eval'   : "eval_input",
        'single' : "single_input",
        'exec'   : "file_input",
        }

    def __init__(self): # , predefined_symbols=None):
        grammar.Parser.__init__(self)
        pytoken.setup_tokens(self)
        # remember how many tokens were loaded
        self._basetokens_count = self._sym_count
        # if predefined_symbols:
        #     self.load_symbols(predefined_symbols)
        self.keywords = []

    def is_base_token(self, tokvalue):
        # True for anything outside the range of the initially loaded tokens
        return tokvalue < 0 or tokvalue >= self._basetokens_count

    def parse_source(self, textsrc, mode, builder, flags=0):
        """Parse a python source according to goal"""
        goal = self.targets[mode]
        # Detect source encoding.
        if textsrc[:3] == '\xEF\xBB\xBF':
            # UTF-8 BOM: strip it and force utf-8
            textsrc = textsrc[3:]
            enc = 'utf-8'
        else:
            enc = _normalize_encoding(_check_for_encoding(textsrc))
            if enc is not None and enc not in ('utf-8', 'iso-8859-1'):
                # anything else gets recoded to utf-8 before tokenizing
                textsrc = recode_to_utf8(builder.space, textsrc, enc)
        lines = [line + '\n' for line in textsrc.split('\n')]
        builder.source_encoding = enc
        if len(textsrc) and textsrc[-1] == '\n':
            # drop the spurious empty line created by the final split
            lines.pop()
            flags &= ~PyCF_DONT_IMPLY_DEDENT
        return self.parse_lines(lines, goal, builder, flags)

    def parse_lines(self, lines, goal, builder, flags=0):
        """Run the grammar's *goal* rule over *lines*, filling *builder*."""
        # builder.keywords = self.keywords.copy()
        # if flags & CO_FUTURE_WITH_STATEMENT:
        #     builder.enable_with()
        goalnumber = self.symbols[goal]
        target = self.root_rules[goalnumber]
        src = Source(self, lines, flags)
        if not target.match(src, builder):
            line, lineno = src.debug()
            # XXX needs better error messages
            raise SyntaxError("invalid syntax", lineno, -1, line)
            # return None
        return builder

    def update_rules_references(self):
        """update references to old rules"""
        # brute force algorithm
        for rule in self.all_rules:
            for i in range(len(rule.args)):
                arg = rule.args[i]
                if arg.codename in self.root_rules:
                    real_rule = self.root_rules[arg.codename]
                    # This rule has been updated
                    if real_rule is not rule.args[i]:
                        rule.args[i] = real_rule

    def insert_rule(self, ruledef):
        """parses <ruledef> and inserts corresponding rules in the parser"""
        # parse the ruledef(s)
        source = GrammarSource(GRAMMAR_GRAMMAR, ruledef)
        builder = ebnfparse.EBNFBuilder(GRAMMAR_GRAMMAR, dest_parser=self)
        GRAMMAR_GRAMMAR.root_rules['grammar'].match(source, builder)
        # remove proxy objects if any
        builder.resolve_rules()
        # update keywords
        self.keywords.extend(builder.keywords)
        # update old references in case an existing rule was modified
        self.update_rules_references()
        # recompute first sets
        self.build_first_sets()
def make_pyparser(version=ENABLE_GRAMMAR_VERSION):
    """Build a PythonParser loaded with the grammar for *version*."""
    parser = PythonParser()
    return build_parser_for_version(version, parser=parser)
# module-level singleton parser, built once at import time
PYTHON_PARSER = make_pyparser()
def translation_target(grammardef):
    """Build a fresh parser from *grammardef* and return 0.

    NOTE(review): the name suggests this is an RPython translation entry
    point exercising the grammar machinery -- confirm against callers.
    """
    parser = PythonParser() # predefined_symbols=symbol.sym_name)
    source = GrammarSource(GRAMMAR_GRAMMAR, grammardef)
    builder = ebnfparse.EBNFBuilder(GRAMMAR_GRAMMAR, dest_parser=parser)
    GRAMMAR_GRAMMAR.root_rules['grammar'].match(source, builder)
    builder.resolve_rules()
    parser.build_first_sets()
    parser.keywords = builder.keywords
    return 0
## XXX BROKEN
## def parse_grammar(space, w_src):
## """Loads the grammar using the 'dynamic' rpython parser"""
## src = space.str_w( w_src )
## ebnfbuilder = ebnfparse.parse_grammar_text( src )
## ebnfbuilder.resolve_rules()
## grammar.build_first_sets(ebnfbuilder.all_rules)
## return space.wrap( ebnfbuilder.root_rules )
def grammar_rules( space ):
    """Expose the python grammar rules as an app-level dict."""
    parser = make_pyparser()
    w_rules = space.newdict()
    for rulename, rule in parser.rules.iteritems():
        space.setitem(w_rules, space.wrap(rulename), space.wrap(rule))
    return w_rules
| Python |
#! /usr/bin/env python
# ______________________________________________________________________
"""Module pytokenize
THIS FILE WAS COPIED FROM pypy/module/parser/pytokenize.py AND ADAPTED
TO BE ANNOTABLE (Mainly made lists homogeneous)
This is a modified version of Ka-Ping Yee's tokenize module found in the
Python standard library.
The primary modification is the removal of the tokenizer's dependence on the
standard Python regular expression module, which is written in C. The regular
expressions have been replaced with hand built DFA's using the
basil.util.automata module.
$Id: pytokenize.py,v 1.3 2003/10/03 16:31:53 jriehl Exp $
"""
# ______________________________________________________________________
from __future__ import generators
from pypy.interpreter.pyparser import automata
__all__ = [ "tokenize" ]
# ______________________________________________________________________
# Automatically generated DFA's (with one or two hand tweeks):
# Hand-tweaked, generated DFA that recognizes one "pseudo token" (any of:
# whitespace, comment, name, number, string start, operator, NEWLINE, ...).
# Accepting-state flags, indexed by state number.
pseudoStatesAccepts = [True, True, True, True, True, True, True, True,
                       True, True, False, True, True, True, False, False,
                       False, False, True, False, False, True, True, False,
                       True, False, True, False, True, False, True, False,
                       False, False, True, False, False, False, True]
# Transition table: one {char: next-state} dict per state; state 0 is the
# start state.  automata.DEFAULT arcs match "any other character".
pseudoStates = [
    {'\t': 0, '\n': 13, '\x0c': 0, '\r': 14, ' ': 0, '!': 10,
     '"': 16, '#': 18, '%': 12, '&': 12,
     "'": 15, '(': 13, ')': 13, '*': 7,
     '+': 12, ',': 13, '-': 12, '.': 6,
     '/': 11, '0': 4, '1': 5, '2': 5,
     '3': 5, '4': 5, '5': 5, '6': 5,
     '7': 5, '8': 5, '9': 5, ':': 13,
     ';': 13, '<': 9, '=': 12, '>': 8,
     '@': 13, 'A': 1,
     'B': 1, 'C': 1, 'D': 1, 'E': 1,
     'F': 1, 'G': 1, 'H': 1, 'I': 1,
     'J': 1, 'K': 1, 'L': 1, 'M': 1,
     'N': 1, 'O': 1, 'P': 1, 'Q': 1,
     'R': 2, 'S': 1, 'T': 1, 'U': 3,
     'V': 1, 'W': 1, 'X': 1, 'Y': 1,
     'Z': 1, '[': 13, '\\': 17, ']': 13,
     '^': 12, '_': 1, '`': 13, 'a': 1,
     'b': 1, 'c': 1, 'd': 1, 'e': 1,
     'f': 1, 'g': 1, 'h': 1, 'i': 1,
     'j': 1, 'k': 1, 'l': 1, 'm': 1,
     'n': 1, 'o': 1, 'p': 1, 'q': 1,
     'r': 2, 's': 1, 't': 1, 'u': 3,
     'v': 1, 'w': 1, 'x': 1, 'y': 1,
     'z': 1, '{': 13, '|': 12, '}': 13,
     '~': 13},
    {'0': 1, '1': 1, '2': 1, '3': 1,
     '4': 1, '5': 1, '6': 1, '7': 1,
     '8': 1, '9': 1, 'A': 1, 'B': 1,
     'C': 1, 'D': 1, 'E': 1, 'F': 1,
     'G': 1, 'H': 1, 'I': 1, 'J': 1,
     'K': 1, 'L': 1, 'M': 1, 'N': 1,
     'O': 1, 'P': 1, 'Q': 1, 'R': 1,
     'S': 1, 'T': 1, 'U': 1, 'V': 1,
     'W': 1, 'X': 1, 'Y': 1, 'Z': 1,
     '_': 1, 'a': 1, 'b': 1, 'c': 1,
     'd': 1, 'e': 1, 'f': 1, 'g': 1,
     'h': 1, 'i': 1, 'j': 1, 'k': 1,
     'l': 1, 'm': 1, 'n': 1, 'o': 1,
     'p': 1, 'q': 1, 'r': 1, 's': 1,
     't': 1, 'u': 1, 'v': 1, 'w': 1,
     'x': 1, 'y': 1, 'z': 1},
    {'"': 16, "'": 15, '0': 1, '1': 1,
     '2': 1, '3': 1, '4': 1, '5': 1,
     '6': 1, '7': 1, '8': 1, '9': 1,
     'A': 1, 'B': 1, 'C': 1, 'D': 1,
     'E': 1, 'F': 1, 'G': 1, 'H': 1,
     'I': 1, 'J': 1, 'K': 1, 'L': 1,
     'M': 1, 'N': 1, 'O': 1, 'P': 1,
     'Q': 1, 'R': 1, 'S': 1, 'T': 1,
     'U': 1, 'V': 1, 'W': 1, 'X': 1,
     'Y': 1, 'Z': 1, '_': 1, 'a': 1,
     'b': 1, 'c': 1, 'd': 1, 'e': 1,
     'f': 1, 'g': 1, 'h': 1, 'i': 1,
     'j': 1, 'k': 1, 'l': 1, 'm': 1,
     'n': 1, 'o': 1, 'p': 1, 'q': 1,
     'r': 1, 's': 1, 't': 1, 'u': 1,
     'v': 1, 'w': 1, 'x': 1, 'y': 1,
     'z': 1},
    {'"': 16, "'": 15, '0': 1, '1': 1,
     '2': 1, '3': 1, '4': 1, '5': 1,
     '6': 1, '7': 1, '8': 1, '9': 1,
     'A': 1, 'B': 1, 'C': 1, 'D': 1,
     'E': 1, 'F': 1, 'G': 1, 'H': 1,
     'I': 1, 'J': 1, 'K': 1, 'L': 1,
     'M': 1, 'N': 1, 'O': 1, 'P': 1,
     'Q': 1, 'R': 2, 'S': 1, 'T': 1,
     'U': 1, 'V': 1, 'W': 1, 'X': 1,
     'Y': 1, 'Z': 1, '_': 1, 'a': 1,
     'b': 1, 'c': 1, 'd': 1, 'e': 1,
     'f': 1, 'g': 1, 'h': 1, 'i': 1,
     'j': 1, 'k': 1, 'l': 1, 'm': 1,
     'n': 1, 'o': 1, 'p': 1, 'q': 1,
     'r': 2, 's': 1, 't': 1, 'u': 1,
     'v': 1, 'w': 1, 'x': 1, 'y': 1,
     'z': 1},
    {'.': 24, '0': 22, '1': 22, '2': 22,
     '3': 22, '4': 22, '5': 22, '6': 22,
     '7': 22, '8': 23, '9': 23, 'E': 25,
     'J': 13, 'L': 13, 'X': 21, 'e': 25,
     'j': 13, 'l': 13, 'x': 21},
    {'.': 24, '0': 5, '1': 5, '2': 5,
     '3': 5, '4': 5, '5': 5, '6': 5,
     '7': 5, '8': 5, '9': 5, 'E': 25,
     'J': 13, 'L': 13, 'e': 25, 'j': 13,
     'l': 13},
    {'0': 26, '1': 26, '2': 26, '3': 26,
     '4': 26, '5': 26, '6': 26, '7': 26,
     '8': 26, '9': 26},
    {'*': 12, '=': 13},
    {'=': 13, '>': 12},
    {'=': 13, '<': 12, '>': 13},
    {'=': 13},
    {'=': 13, '/': 12},
    {'=': 13},
    {},
    {'\n': 13},
    {automata.DEFAULT: 19, '\n': 27, '\\': 29, "'": 28},
    {automata.DEFAULT: 20, '"': 30, '\n': 27, '\\': 31},
    {'\n': 13, '\r': 14},
    {automata.DEFAULT: 18, '\n': 27, '\r': 27},
    {automata.DEFAULT: 19, '\n': 27, '\\': 29, "'": 13},
    {automata.DEFAULT: 20, '"': 13, '\n': 27, '\\': 31},
    {'0': 21, '1': 21, '2': 21, '3': 21,
     '4': 21, '5': 21, '6': 21, '7': 21,
     '8': 21, '9': 21, 'A': 21, 'B': 21,
     'C': 21, 'D': 21, 'E': 21, 'F': 21,
     'L': 13, 'a': 21, 'b': 21, 'c': 21,
     'd': 21, 'e': 21, 'f': 21, 'l': 13},
    {'.': 24, '0': 22, '1': 22, '2': 22,
     '3': 22, '4': 22, '5': 22, '6': 22,
     '7': 22, '8': 23, '9': 23, 'E': 25,
     'J': 13, 'L': 13, 'e': 25, 'j': 13,
     'l': 13},
    {'.': 24, '0': 23, '1': 23, '2': 23,
     '3': 23, '4': 23, '5': 23, '6': 23,
     '7': 23, '8': 23, '9': 23, 'E': 25,
     'J': 13, 'e': 25, 'j': 13},
    {'0': 24, '1': 24, '2': 24, '3': 24,
     '4': 24, '5': 24, '6': 24, '7': 24,
     '8': 24, '9': 24, 'E': 32, 'J': 13,
     'e': 32, 'j': 13},
    {'+': 33, '-': 33, '0': 34, '1': 34,
     '2': 34, '3': 34, '4': 34, '5': 34,
     '6': 34, '7': 34, '8': 34, '9': 34},
    {'0': 26, '1': 26, '2': 26, '3': 26,
     '4': 26, '5': 26, '6': 26, '7': 26,
     '8': 26, '9': 26, 'E': 32, 'J': 13,
     'e': 32, 'j': 13},
    {},
    {"'": 13},
    {automata.DEFAULT: 35, '\n': 13, '\r': 14},
    {'"': 13},
    {automata.DEFAULT: 36, '\n': 13, '\r': 14},
    {'+': 37, '-': 37, '0': 38, '1': 38,
     '2': 38, '3': 38, '4': 38, '5': 38,
     '6': 38, '7': 38, '8': 38, '9': 38},
    {'0': 34, '1': 34, '2': 34, '3': 34,
     '4': 34, '5': 34, '6': 34, '7': 34,
     '8': 34, '9': 34},
    {'0': 34, '1': 34, '2': 34, '3': 34,
     '4': 34, '5': 34, '6': 34, '7': 34,
     '8': 34, '9': 34, 'J': 13, 'j': 13},
    {automata.DEFAULT: 35, '\n': 27, '\\': 29, "'": 13},
    {automata.DEFAULT: 36, '"': 13, '\n': 27, '\\': 31},
    {'0': 38, '1': 38, '2': 38, '3': 38,
     '4': 38, '5': 38, '6': 38, '7': 38,
     '8': 38, '9': 38},
    {'0': 38, '1': 38, '2': 38, '3': 38,
     '4': 38, '5': 38, '6': 38, '7': 38,
     '8': 38, '9': 38, 'J': 13, 'j': 13},
]
pseudoDFA = automata.DFA(pseudoStates, pseudoStatesAccepts)
# DFA for the body of a triple-double-quoted string: accepts just after a
# closing '"""' (state 5); state 2 is "just saw a backslash" (escape).
double3StatesAccepts = [False, False, False, False, False, True]
double3States = [
    {automata.DEFAULT: 0, '"': 1, '\\': 2},
    {automata.DEFAULT: 4, '"': 3, '\\': 2},
    {automata.DEFAULT: 4},
    {automata.DEFAULT: 4, '"': 5, '\\': 2},
    {automata.DEFAULT: 4, '"': 1, '\\': 2},
    {automata.DEFAULT: 4, '"': 5, '\\': 2},
]
double3DFA = automata.NonGreedyDFA(double3States, double3StatesAccepts)
# Same structure for triple-single-quoted (''') strings.
single3StatesAccepts = [False, False, False, False, False, True]
single3States = [
    {automata.DEFAULT: 0, '\\': 2, "'": 1},
    {automata.DEFAULT: 4, '\\': 2, "'": 3},
    {automata.DEFAULT: 4},
    {automata.DEFAULT: 4, '\\': 2, "'": 5},
    {automata.DEFAULT: 4, '\\': 2, "'": 1},
    {automata.DEFAULT: 4, '\\': 2, "'": 5},
]
single3DFA = automata.NonGreedyDFA(single3States, single3StatesAccepts)
# One-line string bodies: accept right after the closing quote (state 1);
# state 2 swallows the character following a backslash.
singleStatesAccepts = [False, True, False]
singleStates = [
    {automata.DEFAULT: 0, '\\': 2, "'": 1},
    {},
    {automata.DEFAULT: 0},
]
singleDFA = automata.DFA(singleStates, singleStatesAccepts)
doubleStatesAccepts = [False, True, False]
doubleStates = [
    {automata.DEFAULT: 0, '"': 1, '\\': 2},
    {},
    {automata.DEFAULT: 0},
]
doubleDFA = automata.DFA(doubleStates, doubleStatesAccepts)
# Map from string prefix+opening-quote to the DFA that finds the string's
# end.  Bare prefix keys ('r', 'u', ...) are placeholders (None).
endDFAs = {"'" : singleDFA,
           '"' : doubleDFA,
           "r" : None,
           "R" : None,
           "u" : None,
           "U" : None}
# Register every u/r prefix combination for triple-quoted strings.
for uniPrefix in ("", "u", "U"):
    for rawPrefix in ("", "r", "R"):
        prefix = uniPrefix + rawPrefix
        endDFAs[prefix + "'''"] = single3DFA
        endDFAs[prefix + '"""'] = double3DFA
# Single accepting state that loops on spaces, tabs and formfeeds.
whiteSpaceStatesAccepts = [True]
whiteSpaceStates = [{'\t': 0, ' ': 0, '\x0c': 0}]
whiteSpaceDFA = automata.DFA(whiteSpaceStates, whiteSpaceStatesAccepts)
# ______________________________________________________________________
# COPIED:
# Identity maps used as sets: every legal string-literal opener, keyed
# and valued by itself, for both triple-quoted and one-line strings.
triple_quoted = {}
single_quoted = {}
for _prefix in ("", "r", "R", "u", "U",
                "ur", "Ur", "uR", "UR"):
    for _quotes in ("'''", '"""'):
        _opener = _prefix + _quotes
        triple_quoted[_opener] = _opener
    for _quote in ("'", '"'):
        _opener = _prefix + _quote
        single_quoted[_opener] = _opener
tabsize = 8
# PYPY MODIFICATION: removed TokenError class as it's not needed here
# PYPY MODIFICATION: removed StopTokenizing class as it's not needed here
# PYPY MODIFICATION: removed printtoken() as it's not needed here
# PYPY MODIFICATION: removed tokenize() as it's not needed here
# PYPY MODIFICATION: removed tokenize_loop() as it's not needed here
# PYPY MODIFICATION: removed generate_tokens() as it was copied / modified
# in pythonlexer.py
# PYPY MODIFICATION: removed main() as it's not needed here
# ______________________________________________________________________
# End of pytokenize.py
| Python |
#! /usr/bin/env python
# ______________________________________________________________________
"""Module automata
THIS FILE WAS COPIED FROM pypy/module/parser/pytokenize.py AND ADAPTED
TO BE ANNOTABLE (Mainly made the DFA's __init__ accept two lists
instead of a unique nested one)
$Id: automata.py,v 1.2 2003/10/02 17:37:17 jriehl Exp $
"""
# ______________________________________________________________________
# Module level definitions
# PYPY Modification: removed the EMPTY class as it's not needed here
# PYPY Modification: we don't need a particular DEFAULT class here
# a simple None works fine.
# (Having a DefaultClass inheriting from str makes
# the annotator crash)
DEFAULT = "\00default" # XXX hack: the rtyper cannot handle dicts keyed by
# str|None, so this sentinel string stands in for "any other character".
# (Char-indexed dicts are probably not the best final representation.)

class DFA:
    """Greedy DFA: recognize() returns the end of the longest match."""

    def __init__(self, states, accepts, start = 0):
        # states: list of {char: next-state} arc maps, one per state
        # accepts: parallel list of booleans (is state accepting?)
        self.states = states
        self.accepts = accepts
        self.start = start

    def recognize(self, inVec, pos = 0): # greedy = True
        """Return the index just past the longest accepted prefix of
        inVec[pos:], or -1 when nothing matches."""
        state = self.start
        idx = pos
        was_accepting = False
        for ch in inVec[pos:]:
            arcs = self.states[state]
            accepting = self.accepts[state]
            if ch in arcs:
                state = arcs[ch]
            elif DEFAULT in arcs:
                state = arcs[DEFAULT]
            elif accepting:
                return idx
            elif was_accepting:
                # needed because of exception cases where there are
                # transitions into dead states: back up one character
                return idx - 1
            else:
                return -1
            was_accepting = accepting
            idx += 1
        # input exhausted: accept here, or one char back, or fail
        if self.accepts[state]:
            return idx
        elif was_accepting:
            return idx - 1
        else:
            return -1
# ______________________________________________________________________
class NonGreedyDFA (DFA):
    """DFA variant that stops at the FIRST accepting state reached,
    instead of searching for the longest match."""

    def recognize(self, inVec, pos = 0):
        """Return the index of the first accepting state reached while
        scanning inVec[pos:], or -1 when the DFA gets stuck."""
        state = self.start
        idx = pos
        for ch in inVec[pos:]:
            if self.accepts[state]:
                return idx
            arcs = self.states[state]
            if ch in arcs:
                state = arcs[ch]
            elif DEFAULT in arcs:
                state = arcs[DEFAULT]
            else:
                return -1
            idx += 1
        # input exhausted: succeed only if we ended on an accepting state
        if self.accepts[state]:
            return idx
        else:
            return -1
# ______________________________________________________________________
# End of automata.py
| Python |
"""miscelanneous utility functions
XXX: svn mv pythonutil.py gramtools.py / parsertools.py
"""
import sys
import os
import parser
from pypy.interpreter.pyparser.grammar import Parser
from pypy.interpreter.pyparser.pytoken import setup_tokens
from pypy.interpreter.pyparser.ebnfgrammar import GRAMMAR_GRAMMAR
from pypy.interpreter.pyparser.ebnflexer import GrammarSource
from pypy.interpreter.pyparser.ebnfparse import EBNFBuilder
from pypy.interpreter.pyparser.tuplebuilder import TupleBuilder
PYTHON_VERSION = ".".join([str(i) for i in sys.version_info[:2]])
def dirname(filename):
    """RPython-friendly dirname: everything up to and including the last
    os.sep, or '' when the path has no separator (avoids needing
    os.path.split to be rpython)."""
    cut = filename.rfind(os.sep) + 1
    assert cut >= 0   # annotator hint: the slice bound is non-negative
    return filename[:cut]
def get_grammar_file(version):
    """Return (path-to-grammar-file, version-string) for <version>.

    'native' maps to the running CPython's version, 'stable' to the
    bundled stablecompiler grammar; explicit versions are passed through.
    """
    if version == "native":
        _ver = PYTHON_VERSION
    elif version == "stable":
        _ver = "_stablecompiler"
    elif version in ("2.3","2.4","2.5a"):
        _ver = version
    else:
        raise ValueError('no such grammar version: %s' % version)
    # two nested os.path.join calls (instead of one star-args call) to
    # avoid "TyperError: can only iterate over tuples of length 1 for now"
    grammar_dir = dirname(__file__)
    grammar_path = os.path.join(grammar_dir,
                                os.path.join("data", "Grammar" + _ver))
    return grammar_path, _ver
def build_parser(gramfile, parser=None):
    """reads a (EBNF) grammar definition and builds a parser for it

    - gramfile: path of the EBNF grammar definition file
    - parser: parser object to fill in (a fresh Parser when None)
    Returns the fully initialized parser.
    """
    if parser is None:
        parser = Parser()
    setup_tokens(parser)
    # XXX: clean up object dependencies
    from pypy.rlib.streamio import open_file_as_stream
    stream = open_file_as_stream(gramfile)
    grammardef = stream.readall()
    stream.close()
    assert isinstance(grammardef, str)
    # feed the grammar text through the grammar-of-grammars
    source = GrammarSource(GRAMMAR_GRAMMAR, grammardef)
    builder = EBNFBuilder(GRAMMAR_GRAMMAR, dest_parser=parser)
    GRAMMAR_GRAMMAR.root_rules['grammar'].match(source, builder)
    builder.resolve_rules()
    parser.build_first_sets()
    parser.keywords = builder.keywords
    return parser
def build_parser_for_version(version, parser=None):
    """Locate the grammar file for <version> and build a parser from it."""
    grammar_path = get_grammar_file(version)[0]
    return build_parser(grammar_path, parser)
## XXX: the below code should probably go elsewhere
## convenience functions for computing AST objects using recparser
def ast_from_input(input, mode, transformer, parser):
    """converts a source input into an AST

    - input : the source to be converted
    - mode : 'exec', 'eval' or 'single'
    - transformer : the transformer instance to use to convert
                    the nested tuples into the AST
    XXX: transformer could be instantiated here but we don't want
         here to explicitly import compiler or stablecompiler or
         etc. This is to be fixed in a clean way
    """
    builder = TupleBuilder(parser, lineno=True)
    parser.parse_source(input, mode, builder)
    # top of the builder stack holds the whole parse result
    tuples = builder.stack[-1].as_tuple(True)
    return transformer.compile_node(tuples)
def pypy_parse(source, mode='exec', lineno=False):
    """Parse <source> with PyPy's own parser (the 'stable' grammar) and
    return CPython-parser-style nested tuples.

    - source: the program text
    - mode: 'exec', 'eval' or 'single'
    - lineno: when True, annotate the tuples with line numbers
    """
    # only make_pyparser is needed here; the previously imported
    # PythonParser and AstBuilder names were unused
    from pypy.interpreter.pyparser.pythonparse import make_pyparser
    parser = make_pyparser('stable')
    builder = TupleBuilder(parser)
    parser.parse_source(source, mode, builder)
    return builder.stack[-1].as_tuple(lineno)
def source2ast(source, mode='exec', version='2.4', space=None):
    """Parse <source> with the grammar for <version> and return the
    resulting AST (top of the AstBuilder's rule stack).

    - mode: 'exec', 'eval' or 'single'
    - space: object space handed to the AstBuilder (may be None)
    """
    # PythonParser was imported but never used; only make_pyparser is needed
    from pypy.interpreter.pyparser.pythonparse import make_pyparser
    from pypy.interpreter.pyparser.astbuilder import AstBuilder
    parser = make_pyparser(version)
    builder = AstBuilder(parser, space=space)
    parser.parse_source(source, mode, builder)
    return builder.rule_stack[-1]
## convenience functions around CPython's parser functions
def python_parsefile(filename, lineno=False):
    """parse <filename> using CPython's parser module and return nested tuples
    """
    # use open() instead of the deprecated file() builtin, and make sure
    # the file is closed even if read() raises
    pyf = open(filename)
    try:
        source = pyf.read()
    finally:
        pyf.close()
    return python_parse(source, 'exec', lineno)
def python_parse(source, mode='exec', lineno=False):
    """parse python source using CPython's parser module and return
    nested tuples
    """
    # pick the CPython parser entry point matching the compile mode
    if mode == 'eval':
        parse_fn = parser.expr
    else:
        parse_fn = parser.suite
    st = parse_fn(source)
    return parser.ast2tuple(st, line_info=lineno)
def pypy_parsefile(filename, lineno=False):
    """parse <filename> using PyPy's parser module and return the nested
    tuples produced by pypy_parse() (CPython parser-style tuples,
    optionally annotated with line numbers).

    NOTE(review): the old docstring described a three-element return
    value (encoding symbol, stack element, encoding string), but
    pypy_parse() returns only the nested tuples.
    """
    # use open() instead of the deprecated file() builtin, and make sure
    # the file is closed even if read() raises
    pyf = open(filename)
    try:
        source = pyf.read()
    finally:
        pyf.close()
    return pypy_parse(source, 'exec', lineno)
| Python |
from pypy.interpreter import gateway
from pypy.interpreter.error import OperationError
def parsestr(space, encoding, s):
    """Parse a Python string literal <s> (still carrying its quotes and
    any u/r prefix) into a wrapped string object, honouring the source
    <encoding> (None means plain pass-through)."""
    # compiler.transformer.Transformer.decode_literal depends on what
    # might seem like minor details of this function -- changes here
    # must be reflected there.
    # we use ps as "pointer to s"
    # q is the virtual last char index of the string
    ps = 0
    quote = s[ps]
    rawmode = False
    unicode = False
    # string decoration handling: consume the optional u/U and r/R prefixes
    o = ord(quote)
    isalpha = (o>=97 and o<=122) or (o>=65 and o<=90)
    if isalpha or quote == '_':
        if quote == 'u' or quote == 'U':
            ps += 1
            quote = s[ps]
            unicode = True
        if quote == 'r' or quote == 'R':
            ps += 1
            quote = s[ps]
            rawmode = True
    if quote != "'" and quote != '"':
        raise_app_valueerror(space,
                             'Internal error: parser passed unquoted literal')
    ps += 1
    q = len(s) - 1
    if s[q] != quote:
        raise_app_valueerror(space, 'Internal error: parser passed unmatched '
                                    'quotes in literal')
    if q-ps >= 4 and s[ps] == quote and s[ps+1] == quote:
        # triple quotes: skip the two extra quote chars on each side
        ps += 2
        if s[q-1] != quote or s[q-2] != quote:
            raise_app_valueerror(space, 'Internal error: parser passed '
                                        'unmatched triple quotes in literal')
        q -= 2
    if unicode: # XXX Py_UnicodeFlag is ignored for now
        if encoding is None or encoding == "iso-8859-1":
            # the content can be fed to the unicode-escape codec as-is
            buf = s
            bufp = ps
            bufq = q
            u = None
        else:
            # source is in some other encoding: re-encode non-ascii runs
            # as explicit \uHHLL escapes first.
            # "\XX" may become "\u005c\uHHLL" (12 bytes)
            lis = [] # using a list to assemble the value
            end = q
            while ps < end:
                if s[ps] == '\\':
                    lis.append(s[ps])
                    ps += 1
                    if ord(s[ps]) & 0x80:
                        # non-ascii follows the backslash: spell the
                        # backslash itself as "\" + "u005c"
                        lis.append("u005c")
                if ord(s[ps]) & 0x80: # XXX inefficient
                    # decode the non-ascii run and emit \uHHLL escapes
                    w, ps = decode_utf8(space, s, ps, end, "utf-16-be")
                    rn = len(w)
                    assert rn % 2 == 0   # utf-16-be yields byte pairs
                    for i in range(0, rn, 2):
                        lis.append('\\u')
                        lis.append(hexbyte(ord(w[i])))
                        lis.append(hexbyte(ord(w[i+1])))
                else:
                    lis.append(s[ps])
                    ps += 1
            buf = ''.join(lis)
            bufp = 0
            bufq = len(buf)
        assert 0 <= bufp <= bufq
        w_substr = space.wrap(buf[bufp : bufq])
        if rawmode:
            w_v = PyUnicode_DecodeRawUnicodeEscape(space, w_substr)
        else:
            w_v = PyUnicode_DecodeUnicodeEscape(space, w_substr)
        return w_v
    # plain (byte) string literal from here on
    need_encoding = (encoding is not None and
                     encoding != "utf-8" and encoding != "iso-8859-1")
    # XXX add strchr like interface to rtyper
    assert 0 <= ps <= q
    substr = s[ps : q]
    if rawmode or '\\' not in s[ps:]:
        # no escapes to process; maybe transcode utf-8 -> source encoding
        if need_encoding:
            w_u = PyUnicode_DecodeUTF8(space, space.wrap(substr))
            #w_v = space.wrap(space.unwrap(w_u).encode(encoding)) this works
            w_v = PyUnicode_AsEncodedString(space, w_u, space.wrap(encoding))
            return w_v
        else:
            return space.wrap(substr)
    enc = None
    if need_encoding:
        enc = encoding
    v = PyString_DecodeEscape(space, substr, unicode, enc)
    return space.wrap(v)
def hexbyte(val):
    """Return <val> (expected 0-255) as a two-character lowercase hex
    string, e.g. 5 -> '05', 255 -> 'ff'."""
    # '%02x' zero-pads to two digits, replacing the manual padding logic
    return "%02x" % val
def PyString_DecodeEscape(space, s, unicode, recode_encoding):
    """
    Unescape a backslash-escaped string. If unicode is non-zero,
    the string is a u-literal. If recode_encoding is non-zero,
    the string is UTF-8 encoded and should be re-encoded in the
    specified encoding.
    """
    pieces = []
    pos = 0
    length = len(s)
    while pos < length:
        if s[pos] != '\\':
            # ordinary character (possibly a UTF-8 byte run to recode)
            if recode_encoding and ord(s[pos]) & 0x80:
                w, pos = decode_utf8(space, s, pos, length, recode_encoding)
                pieces.append(w)
            else:
                pieces.append(s[pos])
                pos += 1
            continue
        pos += 1
        if pos == length:
            raise_app_valueerror(space, 'Trailing \\ in string')
        esc = s[pos]
        pos += 1
        # XXX This assumes ASCII!
        if esc == '\n':
            pass                     # escaped newline: line continuation
        elif esc == '\\':
            pieces.append('\\')
        elif esc == "'":
            pieces.append("'")
        elif esc == '"':
            pieces.append('"')
        elif esc == 'b':
            pieces.append("\010")
        elif esc == 'f':
            pieces.append('\014') # FF
        elif esc == 't':
            pieces.append('\t')
        elif esc == 'n':
            pieces.append('\n')
        elif esc == 'r':
            pieces.append('\r')
        elif esc == 'v':
            pieces.append('\013') # VT
        elif esc == 'a':
            pieces.append('\007') # BEL, not classic C
        elif '0' <= esc <= '7':
            # up to three octal digits
            code = ord(esc) - ord('0')
            if pos < length and '0' <= s[pos] <= '7':
                code = (code << 3) + ord(s[pos]) - ord('0')
                pos += 1
            if pos < length and '0' <= s[pos] <= '7':
                code = (code << 3) + ord(s[pos]) - ord('0')
                pos += 1
            pieces.append(chr(code))
        elif esc == 'x':
            # exactly two hex digits required
            if pos+2 <= length and isxdigit(s[pos]) and isxdigit(s[pos + 1]):
                pieces.append(chr(int(s[pos : pos + 2], 16)))
                pos += 2
            else:
                raise_app_valueerror(space, 'invalid \\x escape')
        # ignored replace and ignore for now
        elif unicode and (esc == 'u' or esc == 'U' or esc == 'N'):
            raise_app_valueerror(space, 'Unicode escapes not legal '
                                        'when Unicode disabled')
        else:
            # not a recognized escape: keep the backslash and reprocess
            # the following character in non-escape mode
            pieces.append('\\')
            pos -= 1
            assert pos >= 0
            continue
        # an arbitrary number of unescaped UTF-8 bytes may follow.
    return ''.join(pieces)
def isxdigit(ch):
    """True when <ch> is a single hexadecimal digit character."""
    return ('0' <= ch <= '9') or ('a' <= ch <= 'f') or ('A' <= ch <= 'F')
app = gateway.applevel(r'''
def PyUnicode_DecodeUnicodeEscape(data):
import _codecs
return _codecs.unicode_escape_decode(data)[0]
def PyUnicode_DecodeRawUnicodeEscape(data):
import _codecs
return _codecs.raw_unicode_escape_decode(data)[0]
def PyUnicode_DecodeUTF8(data):
import _codecs
return _codecs.utf_8_decode(data)[0]
def PyUnicode_AsEncodedString(data, encoding):
import _codecs
return _codecs.encode(data, encoding)
''')
PyUnicode_DecodeUnicodeEscape = app.interphook('PyUnicode_DecodeUnicodeEscape')
PyUnicode_DecodeRawUnicodeEscape = app.interphook('PyUnicode_DecodeRawUnicodeEscape')
PyUnicode_DecodeUTF8 = app.interphook('PyUnicode_DecodeUTF8')
PyUnicode_AsEncodedString = app.interphook('PyUnicode_AsEncodedString')
def decode_utf8(space, s, ps, end, encoding):
    """Decode the run of high-bit bytes starting at s[ps] as UTF-8,
    re-encode it in <encoding>, and return (encoded-bytes, new-position)."""
    assert ps >= 0
    pt = ps
    # while (s < end && *s != '\\') s++; */ /* inefficient for u".."
    # advance past the contiguous run of non-ascii bytes
    while ps < end and ord(s[ps]) & 0x80:
        ps += 1
    w_u = PyUnicode_DecodeUTF8(space, space.wrap(s[pt : ps]))
    w_v = PyUnicode_AsEncodedString(space, w_u, space.wrap(encoding))
    v = space.str_w(w_v)
    return v, ps
def raise_app_valueerror(space, msg):
    """Raise an application-level ValueError carrying <msg>."""
    raise OperationError(space.w_ValueError, space.wrap(msg))
| Python |
"""This module provides the astbuilder class which is to be used
by GrammarElements to directly build the AS during parsing
without going through the nested tuples step
"""
from grammar import BaseGrammarBuilder, AbstractContext
from pypy.interpreter.function import Function
from pypy.interpreter.astcompiler import ast, consts
# from pypy.interpreter.pyparser import pythonparse
#import pypy.interpreter.pyparser.pytoken as tok
from pypy.interpreter.pyparser.error import SyntaxError
from pypy.interpreter.pyparser.parsestring import parsestr
from pypy.interpreter.pyparser.pythonparse import ENABLE_GRAMMAR_VERSION
from pypy.interpreter.gateway import interp2app
from asthelper import *
## building functions helpers
## --------------------------
##
## Builder functions used to reduce the builder stack into appropriate
## AST nodes. All the builder functions have the same interface
##
## Naming convention:
## to provide a function handler for a grammar rule name yyy
## you should provide a build_yyy(builder, nb) function
## where builder is the AstBuilder instance used to build the
## ast tree and nb is the number of items this rule is reducing
##
## Example:
## for example if the rule
## term <- var ( '+' expr )*
## matches
## x + (2*y) + z
## build_term will be called with nb == 2
## and get_atoms(builder, nb) should return a list
## of 5 objects : Var TokenObject('+') Expr('2*y') TokenObject('+') Expr('z')
## where Var and Expr are AST subtrees and Token is a not yet
## reduced token
##
## ASTRULES is kept as a dictionary to be rpython compliant; this is the
## main reason why build_* functions are not methods of the AstBuilder class
##
def build_atom(builder, nb):
    """atom: reduce a parenthesized/bracketed/braced group, a name, a
    number, one or more adjacent string literals, or a backquote
    expression to the matching AST node and push it."""
    atoms = get_atoms(builder, nb)
    top = atoms[0]
    if isinstance(top, TokenObject):
        # assert isinstance(top, TokenObject) # rtyper
        if top.name == builder.parser.tokens['LPAR']:
            if len(atoms) == 2:
                # '(' ')' -> empty tuple
                builder.push(ast.Tuple([], top.lineno))
            else:
                builder.push( atoms[1] )
        elif top.name == builder.parser.tokens['LSQB']:
            if len(atoms) == 2:
                # '[' ']' -> empty list
                builder.push(ast.List([], top.lineno))
            else:
                list_node = atoms[1]
                list_node.lineno = top.lineno
                builder.push(list_node)
        elif top.name == builder.parser.tokens['LBRACE']:
            items = []
            for index in range(1, len(atoms)-1, 4):
                # a : b , c : d
                # ^ +1 +2 +3 +4
                items.append((atoms[index], atoms[index+2]))
            builder.push(ast.Dict(items, top.lineno))
        elif top.name == builder.parser.tokens['NAME']:
            val = top.get_value()
            builder.push( ast.Name(val, top.lineno) )
        elif top.name == builder.parser.tokens['NUMBER']:
            builder.push(ast.Const(builder.eval_number(top.get_value()), top.lineno))
        elif top.name == builder.parser.tokens['STRING']:
            # need to concatenate strings in atoms
            # (removed a dead local "s = ''" that was never used)
            if len(atoms) == 1:
                token = atoms[0]
                assert isinstance(token, TokenObject)
                builder.push(ast.Const(parsestr(builder.space, builder.source_encoding, token.get_value()), top.lineno))
            else:
                # several adjacent literals: join the parsed pieces
                space = builder.space
                empty = space.wrap('')
                accum = []
                for token in atoms:
                    assert isinstance(token, TokenObject)
                    accum.append(parsestr(builder.space, builder.source_encoding, token.get_value()))
                w_s = space.call_method(empty, 'join', space.newlist(accum))
                builder.push(ast.Const(w_s, top.lineno))
        elif top.name == builder.parser.tokens['BACKQUOTE']:
            builder.push(ast.Backquote(atoms[1], atoms[1].lineno))
        else:
            raise SyntaxError("unexpected tokens", top.lineno, top.col)
def slicecut(lst, first, endskip): # endskip is negative
    """Return lst[first:len(lst)+endskip], or [] when that span is empty."""
    stop = len(lst) + endskip
    if stop <= first:
        return []
    return lst[first:stop]
def build_power(builder, nb):
    """power: atom trailer* ['**' factor]"""
    atoms = get_atoms(builder, nb)
    if len(atoms) == 1:
        builder.push(atoms[0])
    else:
        lineno = atoms[0].lineno
        token = atoms[-2]
        # a trailing '**' means the last atom is the exponent
        if isinstance(token, TokenObject) and token.name == builder.parser.tokens['DOUBLESTAR']:
            obj = parse_attraccess(slicecut(atoms, 0, -2), builder)
            builder.push(ast.Power( obj, atoms[-1], lineno))
        else:
            # atom followed only by attribute/call/subscript trailers
            obj = parse_attraccess(atoms, builder)
            builder.push(obj)
def build_factor(builder, nb):
    """factor: ('+'|'-'|'~') factor | power

    Reduces a unary operator application to the matching AST node.
    """
    atoms = get_atoms(builder, nb)
    if len(atoms) == 1:
        builder.push( atoms[0] )
    elif len(atoms) == 2:
        token = atoms[0]
        lineno = token.lineno
        if isinstance(token, TokenObject):
            # use an elif chain: the token matches at most one operator,
            # so there is no reason to re-test after a match
            if token.name == builder.parser.tokens['PLUS']:
                builder.push( ast.UnaryAdd( atoms[1], lineno) )
            elif token.name == builder.parser.tokens['MINUS']:
                builder.push( ast.UnarySub( atoms[1], lineno) )
            elif token.name == builder.parser.tokens['TILDE']:
                builder.push( ast.Invert( atoms[1], lineno) )
def build_term(builder, nb):
    """term: factor (('*'|'/'|'%'|'//') factor)*

    Folds the operand/operator sequence left-to-right into nested
    binary AST nodes."""
    atoms = get_atoms(builder, nb)
    tokens = builder.parser.tokens
    result = atoms[0]
    for idx in range(2, len(atoms), 2):
        op_token = atoms[idx-1]
        assert isinstance(op_token, TokenObject)
        rhs = atoms[idx]
        if op_token.name == tokens['STAR']:
            result = ast.Mul( result, rhs, result.lineno )
        elif op_token.name == tokens['SLASH']:
            result = ast.Div( result, rhs, result.lineno )
        elif op_token.name == tokens['PERCENT']:
            result = ast.Mod( result, rhs, result.lineno )
        elif op_token.name == tokens['DOUBLESLASH']:
            result = ast.FloorDiv( result, rhs, result.lineno )
        else:
            raise SyntaxError("unexpected token", op_token.lineno, op_token.col)
    builder.push( result )
def build_arith_expr(builder, nb):
    """arith_expr: term (('+'|'-') term)* -- left-associative fold"""
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    left = atoms[0]
    for i in range(2,l,2):
        right = atoms[i]
        op_node = atoms[i-1]
        assert isinstance(op_node, TokenObject)
        # note: each node's lineno is taken from the current left operand
        if op_node.name == builder.parser.tokens['PLUS']:
            left = ast.Add( left, right, left.lineno)
        elif op_node.name == builder.parser.tokens['MINUS']:
            left = ast.Sub( left, right, left.lineno)
        else:
            token = atoms[i-1]
            raise SyntaxError("unexpected token", token.lineno, token.col)
    builder.push( left )
def build_shift_expr(builder, nb):
    """shift_expr: arith_expr (('<<'|'>>') arith_expr)* -- left fold"""
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    left = atoms[0]
    # unlike build_arith_expr, the first operand's lineno is reused for
    # every node in the chain
    lineno = left.lineno
    for i in range(2,l,2):
        right = atoms[i]
        op_node = atoms[i-1]
        assert isinstance(op_node, TokenObject)
        if op_node.name == builder.parser.tokens['LEFTSHIFT']:
            left = ast.LeftShift( left, right, lineno )
        elif op_node.name == builder.parser.tokens['RIGHTSHIFT']:
            left = ast.RightShift( left, right, lineno )
        else:
            token = atoms[i-1]
            raise SyntaxError("unexpected token", token.lineno, token.col)
    builder.push(left)
def build_binary_expr(builder, nb, OP):
    """Generic reduction for n-ary operator rules (and/or/&/^/|/tuple):
    collects every other atom (the operands) into one OP([...]) node."""
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    if l==1:
        builder.push(atoms[0])
        return
    # Here, len(atoms) >= 2
    items = []
    # Apparently, lineno should be set to the line where
    # the first OP occurs
    lineno = atoms[1].lineno
    for i in range(0,l,2): # this is atoms not 1
        items.append(atoms[i])
    builder.push(OP(items, lineno))
    return
def build_and_expr(builder, nb):
    """and_expr: shift_expr ('&' shift_expr)*"""
    return build_binary_expr(builder, nb, ast.Bitand)
def build_xor_expr(builder, nb):
    """xor_expr: and_expr ('^' and_expr)*"""
    return build_binary_expr(builder, nb, ast.Bitxor)
def build_expr(builder, nb):
    """expr: xor_expr ('|' xor_expr)*"""
    return build_binary_expr(builder, nb, ast.Bitor)
def build_comparison(builder, nb):
    """comparison: expr (comp_op expr)*

    Reduces a (possibly chained) comparison into one ast.Compare node.
    """
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    if l == 1:
        builder.push( atoms[0] )
        return
    # a < b < c is translated into:
    # Compare(Name('a'), [('<', Name(b)), ('<', Name(c))])
    # (removed the unused local 'left_token' and the redundant else
    # branch after the early return)
    ops = []
    for i in range(1, l, 2):
        # if tok.name isn't in rpunct, then it should be
        # 'is', 'is not', 'not' or 'not in' => tok.get_value()
        token = atoms[i]
        assert isinstance(token, TokenObject)
        op_name = builder.parser.tok_rvalues.get(token.name, token.get_value())
        ops.append((op_name, atoms[i+1]))
    builder.push(ast.Compare(atoms[0], ops, atoms[0].lineno))
def build_comp_op(builder, nb):
    """comp_op reducing has 2 different cases:
     1. There's only one token to reduce => nothing to
        do, just re-push it on the stack
     2. Two tokens to reduce => it's either 'not in' or 'is not',
        so we need to find out which one it is, and re-push a
        single token

    Note: reducing comp_op is needed because reducing comparison
        rules is much easier when we can assume the comparison
        operator is one and only one token on the stack (which
        is not the case, by default, with 'not in' and 'is not')
    """
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    # l==1 means '<', '>', '<=', etc.
    if l == 1:
        builder.push(atoms[0])
    # l==2 means 'not in' or 'is not'
    elif l == 2:
        token = atoms[0]
        lineno = token.lineno
        assert isinstance(token, TokenObject)
        # the leading token decides which two-word operator this is
        if token.get_value() == 'not':
            builder.push(TokenObject(builder.parser.tokens['NAME'], 'not in', lineno, builder.parser))
        else:
            builder.push(TokenObject(builder.parser.tokens['NAME'], 'is not', lineno, builder.parser))
    else:
        assert False, "TODO" # uh ?
def build_or_test(builder, nb):
    """or_test: and_test ('or' and_test)*"""
    # NOTE: this function was defined twice with identical bodies; the
    # redundant duplicate definition has been removed.
    return build_binary_expr(builder, nb, ast.Or)
def build_and_test(builder, nb):
    """and_test: not_test ('and' not_test)*"""
    return build_binary_expr(builder, nb, ast.And)
def build_not_test(builder, nb):
    """not_test: 'not' not_test | comparison"""
    atoms = get_atoms(builder, nb)
    count = len(atoms)
    if count == 1:
        builder.push(atoms[0])
    elif count == 2:
        # 'not' <expr>
        builder.push(ast.Not(atoms[1], atoms[1].lineno))
    else:
        assert False, "not_test implementation incomplete in not_test"
def build_test(builder, nb):
    """test: or_test ['if' or_test 'else' test] | lambdef"""
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    if l == 1:
        builder.push(atoms[0])
    elif l == 5 and atoms[1].get_value() == 'if':
        # conditional expression:  X if COND else Y
        builder.push(
            ast.CondExpr(atoms[2], atoms[0], atoms[4], atoms[1].lineno))
    else:
        # fallback: reduce as an 'or' chain (mirrors build_binary_expr)
        lineno = atoms[1].lineno
        items = []
        for i in range(0,l,2): # this is atoms not 1
            items.append(atoms[i])
        builder.push(ast.Or(items, lineno))
# Note: we do not include a build_old_test() because it does not need to do
# anything.
def build_testlist(builder, nb):
    """testlist: test (',' test)* [','] -- reduced as an ast.Tuple"""
    return build_binary_expr(builder, nb, ast.Tuple)
def build_expr_stmt(builder, nb):
    """expr_stmt: testlist (augassign testlist | ('=' testlist)*)
    """
    atoms = get_atoms(builder, nb)
    if atoms:
        lineno = atoms[0].lineno
    else:
        lineno = -1
    l = len(atoms)
    if l==1:
        # bare expression: its value is discarded
        builder.push(ast.Discard(atoms[0], lineno))
        return
    op = atoms[1]
    assert isinstance(op, TokenObject)
    if op.name == builder.parser.tokens['EQUAL']:
        # chained assignment:  a = b = ... = rvalue
        # (removed a stray 'pass' statement left after the push)
        nodes = []
        for i in range(0,l-2,2):
            lvalue = to_lvalue(atoms[i], consts.OP_ASSIGN)
            nodes.append(lvalue)
        rvalue = atoms[-1]
        builder.push( ast.Assign(nodes, rvalue, lineno) )
    else:
        # augmented assignment: exactly  lvalue OP= rvalue
        assert l==3
        lvalue = atoms[0]
        if isinstance(lvalue, ast.GenExpr) or isinstance(lvalue, ast.Tuple):
            raise SyntaxError("augmented assign to tuple literal or generator expression not possible",
                              lineno, 0, "")
        # (op was already asserted to be a TokenObject above; the
        # duplicated assert has been removed)
        builder.push(ast.AugAssign(lvalue, op.get_name(), atoms[2], lineno))
def return_one(builder, nb):
    """Reduction handler for rules that simply pass their single child up."""
    atoms = get_atoms(builder, nb)
    assert len(atoms) == 1, "missing one node in stack"
    builder.push(atoms[0])
def build_simple_stmt(builder, nb):
    """simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE"""
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    nodes = []
    if atoms:
        lineno = atoms[0].lineno
    else:
        lineno = -1
    for n in range(0,l,2):
        node = atoms[n]
        if isinstance(node, TokenObject) and node.name == builder.parser.tokens['NEWLINE']:
            # the trailing NEWLINE token becomes a no-op Discard(None)
            nodes.append(ast.Discard(ast.Const(builder.wrap_none()), node.lineno))
        else:
            nodes.append(node)
    builder.push(ast.Stmt(nodes, lineno))
def build_return_stmt(builder, nb):
    """return_stmt: 'return' [testlist]"""
    atoms = get_atoms(builder, nb)
    lineno = atoms[0].lineno
    count = len(atoms)
    if count > 2:
        assert False, "return several stmts not implemented"
    if count == 2:
        builder.push(ast.Return(atoms[1], lineno))
    else:
        # bare 'return'
        builder.push(ast.Return(None, lineno))
def build_file_input(builder, nb):
    """file_input: (NEWLINE | stmt)* ENDMARKER

    Flattens every statement into one ast.Stmt and wraps it (with its
    docstring) into the ast.Module root node.
    """
    stmts = []
    atoms = get_atoms(builder, nb)
    if atoms:
        lineno = atoms[0].lineno
    else:
        lineno = -1
    for node in atoms:
        if isinstance(node, ast.Stmt):
            # inline nested statement lists
            stmts.extend(node.nodes)
        elif isinstance(node, TokenObject) and node.name == builder.parser.tokens['ENDMARKER']:
            # XXX Can't we just remove the last element of the list ?
            break
        elif isinstance(node, TokenObject) and node.name == builder.parser.tokens['NEWLINE']:
            # blank lines contribute nothing
            continue
        else:
            stmts.append(node)
    main_stmt = ast.Stmt(stmts, lineno)
    doc = get_docstring(builder,main_stmt)
    return builder.push(ast.Module(doc, main_stmt, lineno))
def build_eval_input(builder, nb):
    """eval_input: testlist NEWLINE* ENDMARKER

    Wraps the first expression into an ast.Expression root node (used for
    eval()-style compilation).
    """
    atoms = get_atoms(builder, nb)
    assert len(atoms)>=1
    return builder.push(ast.Expression(atoms[0]))
def build_single_input(builder, nb):
    """single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE

    Wraps the (single) statement into an ast.Module, as used by the
    interactive interpreter.
    """
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    if l == 1 or l==2:
        atom0 = atoms[0]
        if isinstance(atom0, TokenObject) and atom0.name == builder.parser.tokens['NEWLINE']:
            # empty input line: represented as an empty Stmt
            # atom0 = ast.Pass(atom0.lineno) # break test_astcompiler
            atom0 = ast.Stmt([], atom0.lineno) # break test_astbuilder
        elif not isinstance(atom0, ast.Stmt):
            # promote a lone statement node to a Stmt list
            atom0 = ast.Stmt([atom0], atom0.lineno)
        builder.push(ast.Module(builder.space.w_None, atom0, atom0.lineno))
    else:
        assert False, "Forbidden path"
def build_testlist_gexp(builder, nb):
    """testlist_gexp: test ( gen_for | (',' test)* [','] )

    Pushes either the single expression, a GenExpr (for 'x for x in y'),
    a folded Const tuple when all elements are constants, or a Tuple.
    """
    atoms = get_atoms(builder, nb)
    if atoms:
        lineno = atoms[0].lineno
    else:
        lineno = -1
    l = len(atoms)
    if l == 1:
        builder.push(atoms[0])
        return
    items = []
    token = atoms[1]
    if isinstance(token, TokenObject) and token.name == builder.parser.tokens['COMMA']:
        for i in range(0, l, 2): # this is atoms not 1
            items.append(atoms[i])
    else:
        # genfor: 'i for i in j'
        # GenExpr(GenExprInner(Name('i'), [GenExprFor(AssName('i', 'OP_ASSIGN'), Name('j'), [])])))]))
        expr = atoms[0]
        genexpr_for = parse_genexpr_for(atoms[1:])
        genexpr_for[0].is_outmost = True
        builder.push(ast.GenExpr(ast.GenExprInner(expr, genexpr_for, lineno), lineno))
        return
    # constant-fold a tuple made only of Const nodes into a single Const
    isConst = True
    values = []
    for item in items:
        if isinstance(item, ast.Const):
            values.append(item.value)
        else:
            isConst = False
            break
    if isConst:
        builder.push(ast.Const(builder.space.newtuple(values), lineno))
    else:
        builder.push(ast.Tuple(items, lineno))
    return
def build_lambdef(builder, nb):
    """lambdef: 'lambda' [varargslist] ':' test"""
    atoms = get_atoms(builder, nb)
    lineno = atoms[0].lineno
    code = atoms[-1]
    # strip the leading 'lambda' and the trailing ':' <test> before parsing args
    names, defaults, flags = parse_arglist(slicecut(atoms, 1, -2), builder)
    builder.push(ast.Lambda(names, defaults, flags, code, lineno))
def build_trailer(builder, nb):
    """trailer: '(' ')' | '(' arglist ')' | '[' subscriptlist ']' | '.' NAME

    Pushes an intermediate ArglistObject / SubscriptObject /
    TempRuleObject that build_power later consumes.
    """
    atoms = get_atoms(builder, nb)
    first_token = atoms[0]
    # Case 1 : '(' ...
    if isinstance(first_token, TokenObject) and first_token.name == builder.parser.tokens['LPAR']:
        if len(atoms) == 2: # and atoms[1].token == builder.parser.tokens['RPAR']:
            # empty call: '(' ')'
            builder.push(ArglistObject([], None, None, first_token.lineno))
        elif len(atoms) == 3: # '(' Arglist ')'
            # push arglist on the stack
            builder.push(atoms[1])
    elif isinstance(first_token, TokenObject) and first_token.name == builder.parser.tokens['LSQB']:
        # Case 2 : '[' subscriptlist ']'
        if len(atoms) == 3 and isinstance(atoms[1], SlicelistObject):
            builder.push(atoms[1])
        else:
            # atoms is a list of, alternatively, values and comma tokens,
            # with '[' and ']' tokens at the end
            subs = []
            for index in range(1, len(atoms)-1, 2):
                atom = atoms[index]
                if isinstance(atom, SlicelistObject):
                    # turn the raw slice values into an ast.Sliceobj;
                    # a plain 'slice' has no step, so only two values
                    num_slicevals = 3
                    slicevals = []
                    if atom.fake_rulename == 'slice':
                        num_slicevals = 2
                    for val in atom.value[:num_slicevals]:
                        if val is None:
                            slicevals.append(ast.Const(builder.wrap_none(), atom.lineno))
                        else:
                            slicevals.append(val)
                    subs.append(ast.Sliceobj(slicevals, atom.lineno))
                else:
                    subs.append(atom)
            if len(atoms) > 3: # at least one comma
                sub = ast.Tuple(subs, first_token.lineno)
            else:
                [sub] = subs
            builder.push(SubscriptObject('subscript', sub, first_token.lineno))
    elif len(atoms) == 2:
        # Attribute access: '.' NAME
        builder.push(atoms[0])
        builder.push(atoms[1])
        builder.push(TempRuleObject('pending-attr-access', 2, first_token.lineno))
    else:
        assert False, "Trailer reducing implementation incomplete !"
def build_arglist(builder, nb):
    """
    arglist: (argument ',')* ( '*' test [',' '**' test] |
                               '**' test |
                                 argument |
                                 [argument ','] )

    Delegates the parsing to parse_argument and pushes an ArglistObject
    (positional/keyword args, *arg, **kwarg).
    """
    atoms = get_atoms(builder, nb)
    arguments, stararg, dstararg = parse_argument(atoms, builder)
    if atoms:
        lineno = atoms[0].lineno
    else:
        lineno = -1
    builder.push(ArglistObject(arguments, stararg, dstararg, lineno))
def build_subscript(builder, nb):
    """'.' '.' '.' | [test] ':' [test] [':' [test]] | test"""
    atoms = get_atoms(builder, nb)
    token = atoms[0]
    lineno = token.lineno
    if isinstance(token, TokenObject) and token.name == builder.parser.tokens['DOT']:
        # Ellipsis:
        builder.push(ast.Ellipsis(lineno))
    elif len(atoms) == 1:
        if isinstance(token, TokenObject) and token.name == builder.parser.tokens['COLON']:
            # the full-slice '[:]' case
            sliceinfos = [None, None, None]
            builder.push(SlicelistObject('slice', sliceinfos, lineno))
        else:
            # test
            builder.push(token)
    else: # elif len(atoms) > 1:
        # distribute the values around the ':' separators into
        # [start, stop, step]; missing parts stay None
        sliceinfos = [None, None, None]
        infosindex = 0
        for token in atoms:
            if isinstance(token, TokenObject) and token.name == builder.parser.tokens['COLON']:
                infosindex += 1
            else:
                sliceinfos[infosindex] = token
        if infosindex == 2:
            # two colons were seen: extended slice with an explicit step
            sliceobj_infos = []
            for value in sliceinfos:
                if value is None:
                    sliceobj_infos.append(ast.Const(builder.wrap_none(), lineno))
                else:
                    sliceobj_infos.append(value)
            builder.push(SlicelistObject('sliceobj', sliceobj_infos, lineno))
        else:
            builder.push(SlicelistObject('slice', sliceinfos, lineno))
def build_listmaker(builder, nb):
    """listmaker: test ( list_for | (',' test)* [','] )"""
    atoms = get_atoms(builder, nb)
    if len(atoms) >= 2:
        token = atoms[1]
        lineno = token.lineno
        if isinstance(token, TokenObject):
            if token.get_value() == 'for':
                # list comp
                expr = atoms[0]
                list_for = parse_listcomp(atoms[1:], builder)
                builder.push(ast.ListComp(expr, list_for, lineno))
                return
    # regular list building (like in [1, 2, 3,])
    index = 0
    nodes = []
    while index < len(atoms):
        nodes.append(atoms[index])
        index += 2 # skip comas
    if atoms:
        lineno = atoms[0].lineno
    else:
        lineno = -1
    builder.push(ast.List(nodes, lineno))
def build_decorator(builder, nb):
    """decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE

    Pushes the decorator expression (attribute access and/or call) built
    by parse_attraccess.
    """
    atoms = get_atoms(builder, nb)
    nodes = []
    # remove '@', '(' and ')' from atoms and use parse_attraccess
    for token in atoms[1:]:
        if isinstance(token, TokenObject) and (
            token.name == builder.parser.tokens['LPAR']
            or token.name == builder.parser.tokens['RPAR']
            or token.name == builder.parser.tokens['NEWLINE']):
            # skip those ones
            continue
        else:
            nodes.append(token)
    obj = parse_attraccess(nodes, builder)
    builder.push(obj)
def build_funcdef(builder, nb):
    """funcdef: [decorators] 'def' NAME parameters ':' suite

    Pushes an ast.Function node (with an optional ast.Decorators node).
    """
    atoms = get_atoms(builder, nb)
    lineno = atoms[0].lineno
    # collect the leading decorators: everything before the 'def' token
    index = 0
    decorators = []
    while index < len(atoms):
        atom = atoms[index]
        if isinstance(atom, TokenObject) and atom.get_value() == 'def':
            break
        decorators.append(atom)
        index += 1
    decorator_node = None
    if decorators:
        decorator_node = ast.Decorators(decorators, lineno)
    # drop the decorators so atoms starts at the 'def' token
    atoms = atoms[index:]
    funcname_token = atoms[1]
    assert isinstance(funcname_token, TokenObject)
    lineno = funcname_token.lineno
    funcname = funcname_token.get_value()
    assert funcname is not None
    # the parameter list sits between '(' and ')':
    # strip 'def' NAME '(' at the front and ')' ':' suite at the back
    names, default, flags = parse_arglist(slicecut(atoms, 3, -3), builder)
    code = atoms[-1]
    doc = get_docstring(builder, code)
    builder.push(ast.Function(decorator_node, funcname, names, default, flags, doc, code, lineno))
def build_classdef(builder, nb):
    """classdef: 'class' NAME ['(' testlist ')'] ':' suite"""
    atoms = get_atoms(builder, nb)
    lineno = atoms[0].lineno
    l = len(atoms)
    classname_token = atoms[1]
    assert isinstance(classname_token, TokenObject)
    classname = classname_token.get_value()
    if l == 4:
        # no base classes: 'class' NAME ':' suite
        basenames = []
        body = atoms[3]
    else:
        # with bases: 'class' NAME '(' testlist ')' ':' suite
        assert l == 7
        basenames = []
        body = atoms[6]
        base = atoms[3]
        if isinstance(base, ast.Tuple):
            # several bases were parsed into a Tuple; unpack them
            for node in base.nodes:
                basenames.append(node)
        else:
            basenames.append(base)
    doc = get_docstring(builder,body)
    builder.push(ast.Class(classname, basenames, doc, body, lineno))
def build_suite(builder, nb):
    """suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT"""
    atoms = get_atoms(builder, nb)
    if len(atoms) == 1:
        # simple_stmt: already a Stmt node
        builder.push(atoms[0])
    elif len(atoms) == 4:
        # Only one statement for (stmt+)
        stmt = atoms[2]
        if not isinstance(stmt, ast.Stmt):
            stmt = ast.Stmt([stmt], atoms[0].lineno)
        builder.push(stmt)
    else:
        # several statements
        stmts = []
        # strip NEWLINE INDENT at the front and DEDENT at the back
        nodes = slicecut(atoms, 2,-1)
        for node in nodes:
            if isinstance(node, ast.Stmt):
                stmts.extend(node.nodes)
            else:
                stmts.append(node)
        builder.push(ast.Stmt(stmts, atoms[0].lineno))
def build_if_stmt(builder, nb):
    """
    if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
    """
    atoms = get_atoms(builder, nb)
    tests = []
    # the mandatory 'if' clause: (condition, body)
    tests.append((atoms[1], atoms[3]))
    index = 4
    else_ = None
    while index < len(atoms):
        cur_token = atoms[index]
        assert isinstance(cur_token, TokenObject) # rtyper
        if cur_token.get_value() == 'elif':
            # 'elif' test ':' suite spans four atoms
            tests.append((atoms[index+1], atoms[index+3]))
            index += 4
        else: # cur_token.get_value() == 'else'
            else_ = atoms[index+2]
            break # break is not necessary
    builder.push(ast.If(tests, else_, atoms[0].lineno))
def build_pass_stmt(builder, nb):
    """pass_stmt: 'pass'"""
    atoms = get_atoms(builder, nb)
    assert len(atoms) == 1
    builder.push(ast.Pass(atoms[0].lineno))
def build_break_stmt(builder, nb):
    """break_stmt: 'break'"""
    atoms = get_atoms(builder, nb)
    assert len(atoms) == 1
    builder.push(ast.Break(atoms[0].lineno))
def build_for_stmt(builder, nb):
    """for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]"""
    atoms = get_atoms(builder, nb)
    else_ = None
    # skip 'for'
    assign = to_lvalue(atoms[1], consts.OP_ASSIGN)
    # skip 'in'
    iterable = atoms[3]
    # skip ':'
    body = atoms[5]
    # if there is a "else" statement
    if len(atoms) > 6:
        # skip 'else' and ':'
        else_ = atoms[8]
    builder.push(ast.For(assign, iterable, body, else_, atoms[0].lineno))
def build_exprlist(builder, nb):
    """exprlist: expr (',' expr)* [',']

    A single expr is pushed unchanged; several exprs become an ast.Tuple.
    An all-constant tuple is folded into a single Const node, mirroring
    build_testlist_gexp.
    """
    atoms = get_atoms(builder, nb)
    if len(atoms) <= 2:
        builder.push(atoms[0])
    else:
        names = []
        values = []
        isConst = True
        # atoms alternate expr, ',', expr, ',', ...
        for index in range(0, len(atoms), 2):
            item = atoms[index]
            names.append(item)
            if isinstance(item, ast.Const):
                # collect the wrapped constant value, not the Const AST
                # node itself (consistent with build_testlist_gexp)
                values.append(item.value)
            else:
                isConst = False
        if isConst:
            builder.push(ast.Const(builder.space.newtuple(values), atoms[0].lineno))
        else:
            builder.push(ast.Tuple(names, atoms[0].lineno))
def build_while_stmt(builder, nb):
    """while_stmt: 'while' test ':' suite ['else' ':' suite]"""
    atoms = get_atoms(builder, nb)
    else_ = None
    # skip 'while'
    test = atoms[1]
    # skip ':'
    body = atoms[3]
    # if there is a "else" statement
    if len(atoms) > 4:
        # skip 'else' and ':'
        else_ = atoms[6]
    builder.push(ast.While(test, body, else_, atoms[0].lineno))
def build_with_stmt(builder, nb):
    """with_stmt: 'with' test [ NAME expr ] ':' suite"""
    atoms = get_atoms(builder, nb)
    # skip 'with'
    test = atoms[1]
    if len(atoms) == 4:
        # no 'as' clause: 'with' test ':' suite
        body = atoms[3]
        var = None
    # if there is an "as" clause
    else:
        # NOTE(review): atoms[2] is presumably the 'as' NAME token; only
        # its type is checked here, not its value
        token = atoms[2]
        assert isinstance(token, TokenObject)
        varexpr = atoms[3]
        var = to_lvalue(varexpr, consts.OP_ASSIGN)
        body = atoms[5]
    builder.push(ast.With(test, body, var, atoms[0].lineno))
def build_import_name(builder, nb):
    """import_name: 'import' dotted_as_names
    dotted_as_names: dotted_as_name (',' dotted_as_name)*
    dotted_as_name: dotted_name [NAME NAME]
    dotted_name: NAME ('.' NAME)*
    written in an unfolded way:
    'import' NAME(.NAME)* [NAME NAME], (NAME(.NAME)* [NAME NAME],)*

    Pushes an ast.Import with a list of (dotted_name, as_name_or_None)
    pairs.
    XXX: refactor build_import_name and build_import_from
    """
    atoms = get_atoms(builder, nb)
    index = 1 # skip 'import'
    l = len(atoms)
    names = []
    while index < l:
        as_name = None
        # dotted name (a.b.c)
        incr, name = parse_dotted_names(atoms[index:], builder)
        index += incr
        # 'as' value
        if index < l:
            token = atoms[index]
            assert isinstance(token, TokenObject)
            if token.get_value() == 'as':
                token = atoms[index+1]
                assert isinstance(token, TokenObject)
                as_name = token.get_value()
                index += 2
        names.append((name, as_name))
        # move forward until next ','
        # XXX: what is it supposed to do ?
        while index<l:
            atom = atoms[index]
            # for atom in atoms[index:]:
            if isinstance(atom, TokenObject) and atom.name == builder.parser.tokens['COMMA']:
                break
            index += 1
##         while index < l and isinstance(atoms[index], TokenObject) and \
##                 atoms[index].name != builder.parser.tokens['COMMA']:
##             index += 1
        # skip the ',' itself
        index += 1
    builder.push(ast.Import(names, atoms[0].lineno))
def build_import_from(builder, nb):
    """
    import_from: 'from' dotted_name 'import' ('*' | '(' import_as_names [','] ')' | import_as_names)
    import_as_names: import_as_name (',' import_as_name)*
    import_as_name: NAME [NAME NAME]

    Pushes an ast.From node; also enables 'with'-statement support as a
    side effect when 'from __future__ import with_statement' is seen.
    """
    atoms = get_atoms(builder, nb)
    index = 1
    incr, from_name = parse_dotted_names(atoms[index:], builder)
    index += (incr + 1) # skip 'import'
    token = atoms[index]
    assert isinstance(token, TokenObject) # XXX
    if token.name == builder.parser.tokens['STAR']:
        # 'from x import *'
        names = [('*', None)]
    else:
        if token.name == builder.parser.tokens['LPAR']:
            # mutli-line imports
            tokens = slicecut( atoms, index+1, -1 )
        else:
            tokens = atoms[index:]
        index = 0
        l = len(tokens)
        names = []
        # collect (name, as_name_or_None) pairs, skipping ',' separators
        while index < l:
            token = tokens[index]
            assert isinstance(token, TokenObject)
            name = token.get_value()
            as_name = None
            index += 1
            if index < l:
                token = tokens[index]
                assert isinstance(token, TokenObject)
                if token.get_value() == 'as':
                    token = tokens[index+1]
                    assert isinstance(token, TokenObject)
                    as_name = token.get_value()
                    index += 2
            names.append((name, as_name))
            if index < l: # case ','
                index += 1
    if from_name == '__future__':
        for name, asname in names:
            if name == 'with_statement':
                # found from __future__ import with_statement
                if not builder.with_enabled:
                    builder.enable_with()
                #raise pythonparse.AlternateGrammarException()
    builder.push(ast.From(from_name, names, atoms[0].lineno))
def build_future_import_feature(builder, nb):
    """
    future_import_feature: NAME [('as'|NAME) NAME]
    Enables python language future imports. Called once per feature imported,
    no matter how you got to this one particular feature.

    Looks the feature up in the (applevel) __future__ module to get its
    compiler flag, then notifies the parser. Uses peek_atoms, so the
    atoms stay on the stack for the enclosing import rule.
    """
    atoms = peek_atoms(builder, nb)
    feature_name = atoms[0].get_value()
    assert type(feature_name) is str
    space = builder.space
    # returns the feature's compiler_flag, or 0 for an unknown feature
    w_feature_code = space.appexec([space.wrap(feature_name)],
        """(feature):
            import __future__ as f
            feature = getattr(f, feature, None)
            return feature and feature.compiler_flag or 0
        """)
    # We will call a method on the parser (the method exists only in unit
    # tests).
    builder.parser.add_production(space.unwrap(w_feature_code))
def build_yield_stmt(builder, nb):
    """yield_stmt: 'yield' testlist"""
    atoms = get_atoms(builder, nb)
    lineno = atoms[0].lineno
    builder.push(ast.Yield(atoms[1], lineno))
def build_continue_stmt(builder, nb):
    """continue_stmt: 'continue'"""
    atoms = get_atoms(builder, nb)
    lineno = atoms[0].lineno
    builder.push(ast.Continue(lineno))
def build_del_stmt(builder, nb):
    """del_stmt: 'del' exprlist -- converts the target to a delete lvalue."""
    atoms = get_atoms(builder, nb)
    target = to_lvalue(atoms[1], consts.OP_DELETE)
    builder.push(target)
def build_assert_stmt(builder, nb):
    """assert_stmt: 'assert' test [',' test]"""
    atoms = get_atoms(builder, nb)
    # the optional second test is the failure message
    if len(atoms) == 4:
        fail = atoms[3]
    else:
        fail = None
    test = atoms[1]
    builder.push(ast.Assert(test, fail, atoms[0].lineno))
def build_exec_stmt(builder, nb):
    """exec_stmt: 'exec' expr ['in' test [',' test]]"""
    atoms = get_atoms(builder, nb)
    expr = atoms[1]
    loc = None
    glob = None
    if len(atoms) > 2:
        # skip 'in'
        loc = atoms[3]
        if len(atoms) > 4:
            # skip ','
            glob = atoms[5]
    builder.push(ast.Exec(expr, loc, glob, atoms[0].lineno))
def build_print_stmt(builder, nb):
    """
    print_stmt: 'print' ( '>>' test [ (',' test)+ [','] ] | [ test (',' test)* [','] ] )
    """
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    items = []
    dest = None
    start = 1
    if l > 1:
        token = atoms[1]
        if isinstance(token, TokenObject) and token.name == builder.parser.tokens['RIGHTSHIFT']:
            # 'print >> dest, ...' form
            dest = atoms[2]
            # skip following comma
            start = 4
    # collect the values to print, skipping ',' separators
    for index in range(start, l, 2):
        items.append(atoms[index])
    last_token = atoms[-1]
    if isinstance(last_token, TokenObject) and last_token.name == builder.parser.tokens['COMMA']:
        # trailing comma suppresses the newline: Print, not Printnl
        builder.push(ast.Print(items, dest, atoms[0].lineno))
    else:
        builder.push(ast.Printnl(items, dest, atoms[0].lineno))
def build_global_stmt(builder, nb):
    """global_stmt: 'global' NAME (',' NAME)*"""
    atoms = get_atoms(builder, nb)
    names = []
    # skip 'global' and the ',' separators
    for index in range(1, len(atoms), 2):
        token = atoms[index]
        assert isinstance(token, TokenObject)
        names.append(token.get_value())
    builder.push(ast.Global(names, atoms[0].lineno))
def build_raise_stmt(builder, nb):
    """raise_stmt: 'raise' [test [',' test [',' test]]]"""
    atoms = get_atoms(builder, nb)
    l = len(atoms)
    # exception type, value and traceback; all optional
    expr1 = None
    expr2 = None
    expr3 = None
    if l >= 2:
        expr1 = atoms[1]
        if l >= 4:
            expr2 = atoms[3]
            if l == 6:
                expr3 = atoms[5]
    builder.push(ast.Raise(expr1, expr2, expr3, atoms[0].lineno))
def build_try_stmt(builder, nb):
    """
    try_stmt: ('try' ':' suite (except_clause ':' suite)+ #diagram:break
               ['else' ':' suite] | 'try' ':' suite 'finally' ':' suite)
    # NB compile.c makes sure that the default except clause is last
    except_clause: 'except' [test [',' test]]

    Pushes either an ast.TryFinally or an ast.TryExcept node.
    """
    atoms = get_atoms(builder, nb)
    handlers = []
    l = len(atoms)
    else_ = None
    body = atoms[2]
    token = atoms[3]
    assert isinstance(token, TokenObject)
    if token.get_value() == 'finally':
        builder.push(ast.TryFinally(body, atoms[5], atoms[0].lineno))
    else: # token.get_value() == 'except'
        # collect every (type, target, body) except clause
        index = 3
        token = atoms[index]
        while isinstance(token, TokenObject) and token.get_value() == 'except':
            tokens_read, expr1, expr2, except_body = parse_except_clause(atoms[index:])
            handlers.append((expr1, expr2, except_body))
            index += tokens_read
            if index < l:
                token = atoms[index]
            else:
                break
        if index < l:
            # whatever follows the except clauses must be the 'else' part
            token = atoms[index]
            assert isinstance(token, TokenObject)
            assert token.get_value() == 'else'
            else_ = atoms[index+2] # skip ':'
        builder.push(ast.TryExcept(body, handlers, else_, atoms[0].lineno))
# Default table mapping grammar rule names to the builder functions above;
# used by AstBuilder as its per-instance rule table.
ASTRULES_Template = {
    'atom' : build_atom,
    'power' : build_power,
    'factor' : build_factor,
    'term' : build_term,
    'arith_expr' : build_arith_expr,
    'shift_expr' : build_shift_expr,
    'and_expr' : build_and_expr,
    'xor_expr' : build_xor_expr,
    'expr' : build_expr,
    'comparison' : build_comparison,
    'comp_op' : build_comp_op,
    'or_test' : build_or_test,
    'and_test' : build_and_test,
    'not_test' : build_not_test,
    'test' : build_test,
    'testlist' : build_testlist,
    'expr_stmt' : build_expr_stmt,
    'small_stmt' : return_one,
    'simple_stmt' : build_simple_stmt,
    'single_input' : build_single_input,
    'file_input' : build_file_input,
    'testlist_gexp' : build_testlist_gexp,
    'lambdef' : build_lambdef,
    'old_lambdef' : build_lambdef,
    'trailer' : build_trailer,
    'arglist' : build_arglist,
    'subscript' : build_subscript,
    'listmaker' : build_listmaker,
    'funcdef' : build_funcdef,
    'classdef' : build_classdef,
    'return_stmt' : build_return_stmt,
    'suite' : build_suite,
    'if_stmt' : build_if_stmt,
    'pass_stmt' : build_pass_stmt,
    'break_stmt' : build_break_stmt,
    'for_stmt' : build_for_stmt,
    'while_stmt' : build_while_stmt,
    'import_name' : build_import_name,
    'import_from' : build_import_from,
    'yield_stmt' : build_yield_stmt,
    'continue_stmt' : build_continue_stmt,
    'del_stmt' : build_del_stmt,
    'assert_stmt' : build_assert_stmt,
    'exec_stmt' : build_exec_stmt,
    'print_stmt' : build_print_stmt,
    'global_stmt' : build_global_stmt,
    'raise_stmt' : build_raise_stmt,
    'try_stmt' : build_try_stmt,
    'exprlist' : build_exprlist,
    'decorator' : build_decorator,
    'eval_input' : build_eval_input,
    'with_stmt' : build_with_stmt,
    }
class AstBuilderContext(AbstractContext):
    """specific context management for AstBuilder

    Only the depth of the rule stack is recorded; AstBuilder.restore()
    truncates the stack back to that depth instead of keeping a copy.
    """
    def __init__(self, rule_stack):
        #self.rule_stack = list(rule_stack)
        # storing the length is enough (and cheaper) for restore()
        self.d = len(rule_stack)
class AstBuilder(BaseGrammarBuilder):
    """A builder that directly produces the AST while parsing.

    Each completed grammar rule pops its component "atoms" off
    rule_stack and pushes back a single AST node; at the end of a parse
    the stack holds the root node.
    """
    def __init__(self, parser, debug=0, space=None,
                 grammar_version=ENABLE_GRAMMAR_VERSION):
        BaseGrammarBuilder.__init__(self, parser, debug)
        self.rule_stack = []
        self.space = space
        self.source_encoding = None
        self.with_enabled = False
        # Work on a copy: updating the shared ASTRULES_Template dict in
        # place would leak the 2.5-only rules into every other AstBuilder
        # instance, including ones built for an older grammar version.
        self.build_rules = ASTRULES_Template.copy()
        self.user_build_rules = {}
        if grammar_version >= "2.5":
            self.build_rules.update({
                'future_import_feature': build_future_import_feature,
                'import_from_future': build_import_from,
                })

    def enable_with(self):
        """Turn on 'with'-statement support (idempotent)."""
        if self.with_enabled:
            return
        self.with_enabled = True
        # XXX
        # self.keywords.update({'with':None, 'as': None})

    def context(self):
        """Snapshot the current stack depth for a later restore()."""
        return AstBuilderContext(self.rule_stack)

    def restore(self, ctx):
        """Truncate the stack back to the depth recorded by context()."""
        assert isinstance(ctx, AstBuilderContext)
        assert len(self.rule_stack) >= ctx.d
        del self.rule_stack[ctx.d:]

    def pop(self):
        return self.rule_stack.pop(-1)

    def push(self, obj):
        self.rule_stack.append(obj)

    def push_tok(self, name, value, src ):
        self.push( TokenObject( name, value, src._token_lnum, self.parser ) )

    def push_rule(self, name, count, src ):
        self.push( RuleObject( name, count, src._token_lnum, self.parser ) )

    def alternative( self, rule, source ):
        # Do nothing, keep rule on top of the stack
        if rule.is_root():
            rulename = self.parser.sym_name[rule.codename]
            # user defined (applevel) function takes precedence over the
            # built-in rule table
            w_func = self.user_build_rules.get(rulename, None)
            if w_func:
                w_items = self.space.newlist( [self.space.wrap( it ) for it in get_atoms(self, 1)] )
                w_astnode = self.space.call_function(w_func, w_items)
                astnode = self.space.interp_w(ast.Node, w_astnode, can_be_None=False)
                self.push(astnode)
            else:
                builder_func = self.build_rules.get(rulename, None)
                if builder_func:
                    builder_func(self, 1)
                else:
                    self.push_rule(rule.codename, 1, source)
        else:
            self.push_rule(rule.codename, 1, source)
        return True

    def sequence(self, rule, source, elts_number):
        """Reduce a completed sequence of elts_number elements."""
        if rule.is_root():
            rulename = self.parser.sym_name[rule.codename]
            # user defined (applevel) function takes precedence over the
            # built-in rule table
            w_func = self.user_build_rules.get(rulename, None)
            if w_func:
                w_items = self.space.newlist( [self.space.wrap( it ) for it in get_atoms(self, elts_number)] )
                w_astnode = self.space.call_function(w_func, w_items)
                astnode = self.space.interp_w(ast.Node, w_astnode, can_be_None=False)
                self.push(astnode)
            else:
                builder_func = self.build_rules.get(rulename, None)
                if builder_func:
                    builder_func(self, elts_number)
                else:
                    self.push_rule(rule.codename, elts_number, source)
        else:
            self.push_rule(rule.codename, elts_number, source)
        return True

    def token(self, name, value, source):
        self.push_tok(name, value, source)
        return True

    def eval_number(self, value):
        """temporary implementation
        eval_number intends to replace number = eval(value) ; return number
        """
        space = self.space
        # pick the base from the literal's prefix
        base = 10
        if value.startswith("0x") or value.startswith("0X"):
            base = 16
        elif value.startswith("0"):
            base = 8
        if value.endswith('l') or value.endswith('L'):
            l = space.builtin.get('long')
            return space.call_function(l, space.wrap(value), space.wrap(base))
        if value.endswith('j') or value.endswith('J'):
            c = space.builtin.get('complex')
            return space.call_function(c, space.wrap(value))
        try:
            i = space.builtin.get('int')
            return space.call_function(i, space.wrap(value), space.wrap(base))
        except:
            # not a valid int literal (e.g. it has a '.'): fall back to float
            f = space.builtin.get('float')
            return space.call_function(f, space.wrap(value))

    def is_basestring_const(self, expr):
        """True if expr is a Const node wrapping a str/unicode value."""
        if not isinstance(expr, ast.Const):
            return False
        space = self.space
        return space.is_true(space.isinstance(expr.value,space.w_basestring))

    def wrap_string(self, obj):
        # without an object space (unit tests), return the plain value
        if self.space:
            return self.space.wrap(obj)
        else:
            return obj

    def wrap_none(self):
        if self.space:
            return self.space.w_None
        else:
            return None
def show_stack(before, after):
"""debugging helper function"""
size1 = len(before)
size2 = len(after)
for i in range(max(size1, size2)):
if i< size1:
obj1 = str(before[i])
else:
obj1 = "-"
if i< size2:
obj2 = str(after[i])
else:
obj2 = "-"
print "% 3d | %30s | %30s" % (i, obj1, obj2)
| Python |
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, call this function from python_grammar()
# and call PyPy.
# Non-terminal symbol numbers for the Python grammar; values start at 256
# so they never collide with terminal token numbers (layout matches
# CPython's `symbol` module).
single_input = 256
file_input = 257
eval_input = 258
decorator = 259
decorators = 260
funcdef = 261
parameters = 262
varargslist = 263
fpdef = 264
fplist = 265
stmt = 266
simple_stmt = 267
small_stmt = 268
expr_stmt = 269
augassign = 270
print_stmt = 271
del_stmt = 272
pass_stmt = 273
flow_stmt = 274
break_stmt = 275
continue_stmt = 276
return_stmt = 277
yield_stmt = 278
raise_stmt = 279
import_stmt = 280
import_name = 281
import_from = 282
import_as_name = 283
dotted_as_name = 284
import_as_names = 285
dotted_as_names = 286
dotted_name = 287
global_stmt = 288
exec_stmt = 289
assert_stmt = 290
compound_stmt = 291
if_stmt = 292
while_stmt = 293
for_stmt = 294
try_stmt = 295
except_clause = 296
suite = 297
test = 298
and_test = 299
not_test = 300
comparison = 301
comp_op = 302
expr = 303
xor_expr = 304
and_expr = 305
shift_expr = 306
arith_expr = 307
term = 308
factor = 309
power = 310
atom = 311
listmaker = 312
testlist_gexp = 313
lambdef = 314
trailer = 315
subscriptlist = 316
subscript = 317
sliceop = 318
exprlist = 319
testlist = 320
testlist_safe = 321
dictmaker = 322
classdef = 323
arglist = 324
argument = 325
list_iter = 326
list_for = 327
list_if = 328
gen_iter = 329
gen_for = 330
gen_if = 331
testlist1 = 332
encoding_decl = 333
# Generate sym_name
# Reverse mapping (symbol number -> symbol name) built by scanning the
# module's integer globals above.
sym_name = {}
for _name, _value in globals().items():
    if type(_value) is type(0):
        sym_name[_value] = _name
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.