code
stringlengths
1
1.72M
language
stringclasses
1 value
"""Fixer for sys.exc_{type, value, traceback} sys.exc_type -> sys.exc_info()[0] sys.exc_value -> sys.exc_info()[1] sys.exc_traceback -> sys.exc_info()[2] """ # By Jeff Balogh and Benjamin Peterson # Local imports from .. import fixer_base from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms class FixSysExc(fixer_base.BaseFix): BM_compatible = True # This order matches the ordering of sys.exc_info(). exc_info = ["exc_type", "exc_value", "exc_traceback"] PATTERN = """ power< 'sys' trailer< dot='.' attribute=(%s) > > """ % '|'.join("'%s'" % e for e in exc_info) def transform(self, node, results): sys_attr = results["attribute"][0] index = Number(self.exc_info.index(sys_attr.value)) call = Call(Name("exc_info"), prefix=sys_attr.prefix) attr = Attr(Name("sys"), call) attr[1].children[0].prefix = results["dot"].prefix attr.append(Subscript(index)) return Node(syms.power, attr, prefix=node.prefix)
Python
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for removing uses of the types module.

These work for only the known names in the types module.  The forms above
can include types. or not.  ie, It is assumed the module is imported either as:

    import types
    from types import ... # either * or specific types

The import statements are not modified.

There should be another fixer that handles at least the following constants:

   type([]) -> list
   type(()) -> tuple
   type('') -> str

"""

# Local imports
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name

# Old types-module attribute name -> modern replacement expression.
_TYPE_MAPPING = {
        'BooleanType' : 'bool',
        'BufferType' : 'memoryview',
        'ClassType' : 'type',
        'ComplexType' : 'complex',
        'DictType': 'dict',
        'DictionaryType' : 'dict',
        'EllipsisType' : 'type(Ellipsis)',
        #'FileType' : 'io.IOBase',
        'FloatType': 'float',
        'IntType': 'int',
        'ListType': 'list',
        'LongType': 'int',
        'ObjectType' : 'object',
        'NoneType': 'type(None)',
        'NotImplementedType' : 'type(NotImplemented)',
        'SliceType' : 'slice',
        'StringType': 'bytes', # XXX ?
        'StringTypes' : 'str', # XXX ?
        'TupleType': 'tuple',
        'TypeType' : 'type',
        'UnicodeType': 'str',
        'XRangeType' : 'range',
    }

# One alternative pattern per known types-module attribute.
_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]


class FixTypes(fixer_base.BaseFix):
    """Replace types.<Name> with the equivalent builtin expression."""

    BM_compatible = True
    PATTERN = '|'.join(_pats)

    def transform(self, node, results):
        replacement = _TYPE_MAPPING.get(results["name"].value)
        if replacement is None:
            return None
        return Name(replacement, prefix=node.prefix)
Python
"""Remove __future__ imports from __future__ import foo is replaced with an empty line. """ # Author: Christian Heimes # Local imports from .. import fixer_base from ..fixer_util import BlankLine class FixFuture(fixer_base.BaseFix): BM_compatible = True PATTERN = """import_from< 'from' module_name="__future__" 'import' any >""" # This should be run last -- some things check for the import run_order = 10 def transform(self, node, results): new = BlankLine() new.prefix = node.prefix return new
Python
"""Fix bound method attributes (method.im_? -> method.__?__). """ # Author: Christian Heimes # Local imports from .. import fixer_base from ..fixer_util import Name MAP = { "im_func" : "__func__", "im_self" : "__self__", "im_class" : "__self__.__class__" } class FixMethodattrs(fixer_base.BaseFix): BM_compatible = True PATTERN = """ power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* > """ def transform(self, node, results): attr = results["attr"][0] new = MAP[attr.value] attr.replace(Name(new, prefix=attr.prefix))
Python
"""Fixer that addes parentheses where they are required This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``.""" # By Taek Joo Kim and Benjamin Peterson # Local imports from .. import fixer_base from ..fixer_util import LParen, RParen # XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2] class FixParen(fixer_base.BaseFix): BM_compatible = True PATTERN = """ atom< ('[' | '(') (listmaker< any comp_for< 'for' NAME 'in' target=testlist_safe< any (',' any)+ [','] > [any] > > | testlist_gexp< any comp_for< 'for' NAME 'in' target=testlist_safe< any (',' any)+ [','] > [any] > >) (']' | ')') > """ def transform(self, node, results): target = results["target"] lparen = LParen() lparen.prefix = target.prefix target.prefix = "" # Make it hug the parentheses target.insert_child(0, lparen) target.append_child(RParen())
Python
"""Fixer for it.next() -> next(it), per PEP 3114.""" # Author: Collin Winter # Things that currently aren't covered: # - listcomp "next" names aren't warned # - "with" statement targets aren't checked # Local imports from ..pgen2 import token from ..pygram import python_symbols as syms from .. import fixer_base from ..fixer_util import Name, Call, find_binding bind_warning = "Calls to builtin next() possibly shadowed by global binding" class FixNext(fixer_base.BaseFix): BM_compatible = True PATTERN = """ power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > | power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > | classdef< 'class' any+ ':' suite< any* funcdef< 'def' name='next' parameters< '(' NAME ')' > any+ > any* > > | global=global_stmt< 'global' any* 'next' any* > """ order = "pre" # Pre-order tree traversal def start_tree(self, tree, filename): super(FixNext, self).start_tree(tree, filename) n = find_binding('next', tree) if n: self.warning(n, bind_warning) self.shadowed_next = True else: self.shadowed_next = False def transform(self, node, results): assert results base = results.get("base") attr = results.get("attr") name = results.get("name") if base: if self.shadowed_next: attr.replace(Name("__next__", prefix=attr.prefix)) else: base = [n.clone() for n in base] base[0].prefix = "" node.replace(Call(Name("next", prefix=node.prefix), base)) elif name: n = Name("__next__", prefix=name.prefix) name.replace(n) elif attr: # We don't do this transformation if we're assigning to "x.next". # Unfortunately, it doesn't seem possible to do this in PATTERN, # so it's being done here. if is_assign_target(node): head = results["head"] if "".join([str(n) for n in head]).strip() == '__builtin__': self.warning(node, bind_warning) return attr.replace(Name("__next__")) elif "global" in results: self.warning(node, bind_warning) self.shadowed_next = True ### The following functions help test if node is part of an assignment ### target. 
def is_assign_target(node):
    """Return True if `node` sits on the left-hand side of an assignment."""
    assign = find_assign(node)
    if assign is None:
        return False
    for child in assign.children:
        # Anything to the right of the first '=' is not a target.
        if child.type == token.EQUAL:
            return False
        if is_subtree(child, node):
            return True
    return False


def find_assign(node):
    """Walk up from `node` to the nearest enclosing expr_stmt, if any."""
    current = node
    while True:
        if current.type == syms.expr_stmt:
            return current
        if current.type == syms.simple_stmt or current.parent is None:
            return None
        current = current.parent


def is_subtree(root, node):
    """Return True if `node` is `root` or appears anywhere below it."""
    return root == node or any(is_subtree(child, node)
                               for child in root.children)
Python
"""Fixer that changes raw_input(...) into input(...).""" # Author: Andre Roberge # Local imports from .. import fixer_base from ..fixer_util import Name class FixRawInput(fixer_base.BaseFix): BM_compatible = True PATTERN = """ power< name='raw_input' trailer< '(' [any] ')' > any* > """ def transform(self, node, results): name = results["name"] name.replace(Name("input", prefix=name.prefix))
Python
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for exec.

This converts usages of the exec statement into calls to a built-in
exec() function.

exec code in ns1, ns2 -> exec(code, ns1, ns2)
"""

# Local imports
from .. import pytree
from .. import fixer_base
from ..fixer_util import Comma, Name, Call


class FixExec(fixer_base.BaseFix):
    """Convert an exec statement into a call of the exec() builtin."""

    BM_compatible = True

    PATTERN = """
    exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >
    |
    exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any >
    """

    def transform(self, node, results):
        """Build exec(code[, globals[, locals]]) from the matched statement.

        `a` is the code expression; `b` and `c` are the optional globals
        and locals namespaces from the `in` clause.
        """
        assert results
        # Note: the unused local `syms = self.syms` from the original
        # implementation has been removed.
        a = results["a"]
        b = results.get("b")
        c = results.get("c")
        args = [a.clone()]
        args[0].prefix = ""  # the code expression hugs the opening paren
        if b is not None:
            args.extend([Comma(), b.clone()])
        if c is not None:
            args.extend([Comma(), c.clone()])

        return Call(Name("exec"), args, prefix=node.prefix)
Python
"""Fixer for operator.{isCallable,sequenceIncludes} operator.isCallable(obj) -> hasattr(obj, '__call__') operator.sequenceIncludes(obj) -> operator.contains(obj) """ # Local imports from .. import fixer_base from ..fixer_util import Call, Name, String class FixOperator(fixer_base.BaseFix): BM_compatible = True methods = "method=('isCallable'|'sequenceIncludes')" func = "'(' func=any ')'" PATTERN = """ power< module='operator' trailer< '.' %(methods)s > trailer< %(func)s > > | power< %(methods)s trailer< %(func)s > > """ % dict(methods=methods, func=func) def transform(self, node, results): method = results["method"][0] if method.value == "sequenceIncludes": if "module" not in results: # operator may not be in scope, so we can't make a change. self.warning(node, "You should use operator.contains here.") else: method.value = "contains" method.changed() elif method.value == "isCallable": if "module" not in results: self.warning(node, "You should use hasattr(%s, '__call__') here." % results["func"].value) else: func = results["func"] args = [func.clone(), String(", "), String("'__call__'")] return Call(Name("hasattr"), args, prefix=node.prefix)
Python
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for callable().

This converts callable(obj) into isinstance(obj, collections.Callable), adding a
collections import if needed."""

# Local imports
from lib2to3 import fixer_base
from lib2to3.fixer_util import Call, Name, String, Attr, touch_import


class FixCallable(fixer_base.BaseFix):
    """Rewrite callable(x) as isinstance(x, collections.Callable)."""

    BM_compatible = True

    # Ignore callable(*args) or use of keywords.
    # Either could be a hint that the builtin callable() is not being used.
    PATTERN = """
    power< 'callable'
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) func=any
                      | func=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any*
    >
    """

    def transform(self, node, results):
        target = results['func']

        # Ensure `collections` is imported at the top of the module.
        touch_import(None, 'collections', node=node)

        call_args = [target.clone(), String(', ')]
        call_args.extend(Attr(Name('collections'), Name('Callable')))
        return Call(Name('isinstance'), call_args, prefix=node.prefix)
Python
"""Fix function attribute names (f.func_x -> f.__x__).""" # Author: Collin Winter # Local imports from .. import fixer_base from ..fixer_util import Name class FixFuncattrs(fixer_base.BaseFix): BM_compatible = True PATTERN = """ power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals' | 'func_name' | 'func_defaults' | 'func_code' | 'func_dict') > any* > """ def transform(self, node, results): attr = results["attr"][0] attr.replace(Name(("__%s__" % attr.value[5:]), prefix=attr.prefix))
Python
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there
exists a 'from future_builtins import map' statement in the top-level
namespace.

As a special case, map(None, X) is changed into list(X).  (This is
necessary because the semantics are changed in this case -- the new
map(None, X) is equivalent to [(x,) for x in X].)

We avoid the transformation (except for the special case mentioned
above) if the map() call is directly contained in iter(<>), list(<>),
tuple(<>), sorted(<>), ...join(<>), or for V in <>:.

NOTE: This is still not correct if the original code was depending on
map(F, X, Y, ...) to go on until the longest argument is exhausted,
substituting None for missing values -- like zip(), it now stops as
soon as the shortest argument is exhausted.
"""

# Local imports
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, ListComp, in_special_context
from ..pygram import python_symbols as syms


class FixMap(fixer_base.ConditionalFix):
    BM_compatible = True

    # Three alternatives: map(None, X); map(lambda ...: ..., it); generic map.
    PATTERN = """
    map_none=power<
        'map'
        trailer< '(' arglist< 'None' ',' arg=any [','] > ')' >
    >
    |
    map_lambda=power<
        'map'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
    >
    |
    power<
        'map' trailer< '(' [arglist=any] ')' >
    >
    """

    # Skip the whole fixer when the file does `from future_builtins import map`.
    skip_on = 'future_builtins.map'

    def transform(self, node, results):
        """Rewrite the matched map() call; may return None to leave it alone."""
        if self.should_skip(node):
            return

        if node.parent.type == syms.simple_stmt:
            # A bare `map(...)` statement: its return value is discarded, so
            # wrapping in list() only forces evaluation -- warn the user.
            self.warning(node, "You should use a for loop here")
            new = node.clone()
            new.prefix = ""
            new = Call(Name("list"), [new])
        elif "map_lambda" in results:
            # map(lambda p: expr, it) -> [expr for p in it]
            new = ListComp(results["xp"].clone(),
                           results["fp"].clone(),
                           results["it"].clone())
        else:
            if "map_none" in results:
                # map(None, X) with a single argument -> list(X)
                new = results["arg"].clone()
            else:
                if "arglist" in results:
                    args = results["arglist"]
                    # map(None, X, Y, ...) cannot be converted faithfully.
                    if args.type == syms.arglist and \
                       args.children[0].type == token.NAME and \
                       args.children[0].value == "None":
                        self.warning(node, "cannot convert map(None, ...) "
                                     "with multiple arguments because map() "
                                     "now truncates to the shortest sequence")
                        return
                if in_special_context(node):
                    # Already consumed lazily (iter/list/for/...): no wrap.
                    return None
                new = node.clone()
            new.prefix = ""
            new = Call(Name("list"), [new])
        new.prefix = node.prefix
        return new
Python
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that transforms `xyzzy` into repr(xyzzy)."""

# Local imports
from .. import fixer_base
from ..fixer_util import Call, Name, parenthesize


class FixRepr(fixer_base.BaseFix):
    """Rewrite backtick repr syntax as a call to repr()."""

    BM_compatible = True
    PATTERN = """
              atom < '`' expr=any '`' >
              """

    def transform(self, node, results):
        inner = results["expr"].clone()

        # A bare expression list (`a, b`) must be parenthesized so that
        # repr() receives a single tuple argument.
        if inner.type == self.syms.testlist1:
            inner = parenthesize(inner)
        return Call(Name("repr"), [inner], prefix=node.prefix)
Python
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Refactoring framework. Used as a main program, this can refactor any number of files and/or recursively descend down directories. Imported as a module, this provides infrastructure to write your own refactoring tool. """ from __future__ import with_statement __author__ = "Guido van Rossum <guido@python.org>" # Python imports import os import sys import logging import operator import collections import io from itertools import chain # Local imports from .pgen2 import driver, tokenize, token from .fixer_util import find_root from . import pytree, pygram from . import btm_utils as bu from . import btm_matcher as bm def get_all_fix_names(fixer_pkg, remove_prefix=True): """Return a sorted list of all available fix names in the given package.""" pkg = __import__(fixer_pkg, [], [], ["*"]) fixer_dir = os.path.dirname(pkg.__file__) fix_names = [] for name in sorted(os.listdir(fixer_dir)): if name.startswith("fix_") and name.endswith(".py"): if remove_prefix: name = name[4:] fix_names.append(name[:-3]) return fix_names class _EveryNode(Exception): pass def _get_head_types(pat): """ Accepts a pytree Pattern Node and returns a set of the pattern types which will match first. """ if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)): # NodePatters must either have no type and no content # or a type and content -- so they don't get any farther # Always return leafs if pat.type is None: raise _EveryNode return set([pat.type]) if isinstance(pat, pytree.NegatedPattern): if pat.content: return _get_head_types(pat.content) raise _EveryNode # Negated Patterns don't have a type if isinstance(pat, pytree.WildcardPattern): # Recurse on each node in content r = set() for p in pat.content: for x in p: r.update(_get_head_types(x)) return r raise Exception("Oh no! 
I don't understand pattern %s" %(pat)) def _get_headnode_dict(fixer_list): """ Accepts a list of fixers and returns a dictionary of head node type --> fixer list. """ head_nodes = collections.defaultdict(list) every = [] for fixer in fixer_list: if fixer.pattern: try: heads = _get_head_types(fixer.pattern) except _EveryNode: every.append(fixer) else: for node_type in heads: head_nodes[node_type].append(fixer) else: if fixer._accept_type is not None: head_nodes[fixer._accept_type].append(fixer) else: every.append(fixer) for node_type in chain(pygram.python_grammar.symbol2number.values(), pygram.python_grammar.tokens): head_nodes[node_type].extend(every) return dict(head_nodes) def get_fixers_from_package(pkg_name): """ Return the fully qualified names for fixers in the package pkg_name. """ return [pkg_name + "." + fix_name for fix_name in get_all_fix_names(pkg_name, False)] def _identity(obj): return obj if sys.version_info < (3, 0): import codecs _open_with_encoding = codecs.open # codecs.open doesn't translate newlines sadly. 
def _from_system_newlines(input): return input.replace("\r\n", "\n") def _to_system_newlines(input): if os.linesep != "\n": return input.replace("\n", os.linesep) else: return input else: _open_with_encoding = open _from_system_newlines = _identity _to_system_newlines = _identity def _detect_future_features(source): have_docstring = False gen = tokenize.generate_tokens(io.StringIO(source).readline) def advance(): tok = next(gen) return tok[0], tok[1] ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT)) features = set() try: while True: tp, value = advance() if tp in ignore: continue elif tp == token.STRING: if have_docstring: break have_docstring = True elif tp == token.NAME and value == "from": tp, value = advance() if tp != token.NAME or value != "__future__": break tp, value = advance() if tp != token.NAME or value != "import": break tp, value = advance() if tp == token.OP and value == "(": tp, value = advance() while tp == token.NAME: features.add(value) tp, value = advance() if tp != token.OP or value != ",": break tp, value = advance() else: break except StopIteration: pass return frozenset(features) class FixerError(Exception): """A fixer could not be loaded.""" class RefactoringTool(object): _default_options = {"print_function" : False} CLASS_PREFIX = "Fix" # The prefix for fixer classes FILE_PREFIX = "fix_" # The prefix for modules with a fixer within def __init__(self, fixer_names, options=None, explicit=None): """Initializer. Args: fixer_names: a list of fixers to import options: an dict with configuration. explicit: a list of fixers to run even if they are explicit. 
""" self.fixers = fixer_names self.explicit = explicit or [] self.options = self._default_options.copy() if options is not None: self.options.update(options) if self.options["print_function"]: self.grammar = pygram.python_grammar_no_print_statement else: self.grammar = pygram.python_grammar self.errors = [] self.logger = logging.getLogger("RefactoringTool") self.fixer_log = [] self.wrote = False self.driver = driver.Driver(self.grammar, convert=pytree.convert, logger=self.logger) self.pre_order, self.post_order = self.get_fixers() self.pre_order_heads = _get_headnode_dict(self.pre_order) self.post_order_heads = _get_headnode_dict(self.post_order) self.files = [] # List of files that were or should be modified self.BM = bm.BottomMatcher() self.BM_incompatible_pre_order = [] self.BM_incompatible_post_order = [] for fixer in chain(self.post_order, self.pre_order): if fixer.BM_compatible: self.BM.add_fixer(fixer) # remove fixers that will be handled by the bottom-up # matcher elif fixer in self.pre_order: self.BM_incompatible_pre_order.append(fixer) elif fixer in self.post_order: self.BM_incompatible_post_order.append(fixer) def get_fixers(self): """Inspects the options to load the requested patterns and handlers. Returns: (pre_order, post_order), where pre_order is the list of fixers that want a pre-order AST traversal, and post_order is the list that want post-order traversal. 
""" pre_order_fixers = [] post_order_fixers = [] for fix_mod_path in self.fixers: mod = __import__(fix_mod_path, {}, {}, ["*"]) fix_name = fix_mod_path.rsplit(".", 1)[-1] if fix_name.startswith(self.FILE_PREFIX): fix_name = fix_name[len(self.FILE_PREFIX):] parts = fix_name.split("_") class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts]) try: fix_class = getattr(mod, class_name) except AttributeError: raise FixerError("Can't find %s.%s" % (fix_name, class_name)) fixer = fix_class(self.options, self.fixer_log) if fixer.explicit and self.explicit is not True and \ fix_mod_path not in self.explicit: self.log_message("Skipping implicit fixer: %s", fix_name) continue self.log_debug("Adding transformation: %s", fix_name) if fixer.order == "pre": pre_order_fixers.append(fixer) elif fixer.order == "post": post_order_fixers.append(fixer) else: raise FixerError("Illegal fixer order: %r" % fixer.order) key_func = operator.attrgetter("run_order") pre_order_fixers.sort(key=key_func) post_order_fixers.sort(key=key_func) return (pre_order_fixers, post_order_fixers) def log_error(self, msg, *args, **kwds): """Called when an error occurs.""" raise def log_message(self, msg, *args): """Hook to log a message.""" if args: msg = msg % args self.logger.info(msg) def log_debug(self, msg, *args): if args: msg = msg % args self.logger.debug(msg) def print_output(self, old_text, new_text, filename, equal): """Called with the old version, new version, and filename of a refactored file.""" pass def refactor(self, items, write=False, doctests_only=False): """Refactor a list of files and directories.""" for dir_or_file in items: if os.path.isdir(dir_or_file): self.refactor_dir(dir_or_file, write, doctests_only) else: self.refactor_file(dir_or_file, write, doctests_only) def refactor_dir(self, dir_name, write=False, doctests_only=False): """Descends down a directory and refactor every Python file found. Python files are assumed to have a .py extension. 
Files and subdirectories starting with '.' are skipped. """ for dirpath, dirnames, filenames in os.walk(dir_name): self.log_debug("Descending into %s", dirpath) dirnames.sort() filenames.sort() for name in filenames: if not name.startswith(".") and \ os.path.splitext(name)[1].endswith("py"): fullname = os.path.join(dirpath, name) self.refactor_file(fullname, write, doctests_only) # Modify dirnames in-place to remove subdirs with leading dots dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")] def _read_python_source(self, filename): """ Do our best to decode a Python source file correctly. """ try: f = open(filename, "rb") except IOError as err: self.log_error("Can't open %s: %s", filename, err) return None, None try: encoding = tokenize.detect_encoding(f.readline)[0] finally: f.close() with _open_with_encoding(filename, "r", encoding=encoding) as f: return _from_system_newlines(f.read()), encoding def refactor_file(self, filename, write=False, doctests_only=False): """Refactors a file.""" input, encoding = self._read_python_source(filename) if input is None: # Reading the file failed. return input += "\n" # Silence certain parse errors if doctests_only: self.log_debug("Refactoring doctests in %s", filename) output = self.refactor_docstring(input, filename) if output != input: self.processed_file(output, filename, input, write, encoding) else: self.log_debug("No doctest changes in %s", filename) else: tree = self.refactor_string(input, filename) if tree and tree.was_changed: # The [:-1] is to take off the \n we added earlier self.processed_file(str(tree)[:-1], filename, write=write, encoding=encoding) else: self.log_debug("No changes in %s", filename) def refactor_string(self, data, name): """Refactor a given input string. Args: data: a string holding the code to be refactored. name: a human-readable name for use in error/log messages. Returns: An AST corresponding to the refactored input stream; None if there were errors during the parse. 
""" features = _detect_future_features(data) if "print_function" in features: self.driver.grammar = pygram.python_grammar_no_print_statement try: tree = self.driver.parse_string(data) except Exception as err: self.log_error("Can't parse %s: %s: %s", name, err.__class__.__name__, err) return finally: self.driver.grammar = self.grammar tree.future_features = features self.log_debug("Refactoring %s", name) self.refactor_tree(tree, name) return tree def refactor_stdin(self, doctests_only=False): input = sys.stdin.read() if doctests_only: self.log_debug("Refactoring doctests in stdin") output = self.refactor_docstring(input, "<stdin>") if output != input: self.processed_file(output, "<stdin>", input) else: self.log_debug("No doctest changes in stdin") else: tree = self.refactor_string(input, "<stdin>") if tree and tree.was_changed: self.processed_file(str(tree), "<stdin>", input) else: self.log_debug("No changes in stdin") def refactor_tree(self, tree, name): """Refactors a parse tree (modifying the tree in place). For compatible patterns the bottom matcher module is used. Otherwise the tree is traversed node-to-node for matches. Args: tree: a pytree.Node instance representing the root of the tree to be refactored. name: a human-readable name for this tree. Returns: True if the tree was modified, False otherwise. 
""" for fixer in chain(self.pre_order, self.post_order): fixer.start_tree(tree, name) # obtain a set of candidate nodes match_set = self.BM.run(tree.leaves()) while any(list(match_set.values())): for fixer in self.BM.fixers: if fixer in match_set.keys() and match_set[fixer]: #sort by depth; apply fixers from bottom(of the AST) to top match_set[fixer].sort(key=pytree.Base.depth, reverse=True) if fixer.keep_line_order: #some fixers(eg fix_imports) must be applied #with the original file's line order match_set[fixer].sort(key=pytree.Base.get_lineno) for node in list(match_set[fixer]): if node in match_set[fixer]: match_set[fixer].remove(node) try: find_root(node) except AssertionError: # this node has been cut off from a # previous transformation ; skip continue if fixer in node.fixers_applied: # do not apply the same fixer again continue results = fixer.match(node) if results: new = fixer.transform(node, results) if new is not None: node.replace(new) #new.fixers_applied.append(fixer) for node in new.post_order(): # do not apply the fixer again to # this or any subnode node.fixers_applied.append(fixer) # update the original match set for # the added code new_matches = self.BM.run(new.leaves()) for fxr in new_matches.keys(): if not fxr in list(match_set.keys()): match_set[fxr]=[] match_set[fxr].extend(new_matches[fxr]) #use traditional matching for the incompatible fixers self.traverse_by(self.BM_incompatible_pre_order, tree.pre_order()) self.traverse_by(self.BM_incompatible_post_order, tree.post_order()) for fixer in chain(self.pre_order, self.post_order): fixer.finish_tree(tree, name) return tree.was_changed def traverse_by(self, fixers, traversal): """Traverse an AST, applying a set of fixers to each node. This is a helper method for refactor_tree(). Args: fixers: a list of fixer instances. traversal: a generator that yields AST nodes. 
Returns: None """ if not fixers: return fixers_dict = _get_headnode_dict(fixers) for node in traversal: for fixer in fixers_dict[node.type]: results = fixer.match(node) if results: new = fixer.transform(node, results) if new is not None: node.replace(new) node = new def processed_file(self, new_text, filename, old_text=None, write=False, encoding=None): """ Called when a file has been refactored, and there are changes. """ self.files.append(filename) if old_text is None: old_text = self._read_python_source(filename)[0] if old_text is None: return equal = old_text == new_text self.print_output(old_text, new_text, filename, equal) if equal: self.log_debug("No changes to %s", filename) return if write: self.write_file(new_text, filename, old_text, encoding) else: self.log_debug("Not writing changes to %s", filename) def write_file(self, new_text, filename, old_text, encoding=None): """Writes a string to a file. It first shows a unified diff between the old text and the new text, and then rewrites the file; the latter is only done if the write option is set. """ try: f = _open_with_encoding(filename, "w", encoding=encoding) except os.error as err: self.log_error("Can't create %s: %s", filename, err) return try: f.write(_to_system_newlines(new_text)) except os.error as err: self.log_error("Can't write %s: %s", filename, err) finally: f.close() self.log_debug("Wrote changes to %s", filename) self.wrote = True PS1 = ">>> " PS2 = "... " def refactor_docstring(self, input, filename): """Refactors a docstring, looking for doctests. This returns a modified version of the input string. It looks for doctests, which start with a ">>>" prompt, and may be continued with "..." prompts, as long as the "..." is indented the same as the ">>>". (Unfortunately we can't use the doctest module's parser, since, like most parsers, it is not geared towards preserving the original source.) 
""" result = [] block = None block_lineno = None indent = None lineno = 0 for line in input.splitlines(True): lineno += 1 if line.lstrip().startswith(self.PS1): if block is not None: result.extend(self.refactor_doctest(block, block_lineno, indent, filename)) block_lineno = lineno block = [line] i = line.find(self.PS1) indent = line[:i] elif (indent is not None and (line.startswith(indent + self.PS2) or line == indent + self.PS2.rstrip() + "\n")): block.append(line) else: if block is not None: result.extend(self.refactor_doctest(block, block_lineno, indent, filename)) block = None indent = None result.append(line) if block is not None: result.extend(self.refactor_doctest(block, block_lineno, indent, filename)) return "".join(result) def refactor_doctest(self, block, lineno, indent, filename): """Refactors one doctest. A doctest is given as a block of lines, the first of which starts with ">>>" (possibly indented), while the remaining lines start with "..." (identically indented). """ try: tree = self.parse_block(block, lineno, indent) except Exception as err: if self.log.isEnabledFor(logging.DEBUG): for line in block: self.log_debug("Source: %s", line.rstrip("\n")) self.log_error("Can't parse docstring in %s line %s: %s: %s", filename, lineno, err.__class__.__name__, err) return block if self.refactor_tree(tree, filename): new = str(tree).splitlines(True) # Undo the adjustment of the line numbers in wrap_toks() below. 
clipped, new = new[:lineno-1], new[lineno-1:] assert clipped == ["\n"] * (lineno-1), clipped if not new[-1].endswith("\n"): new[-1] += "\n" block = [indent + self.PS1 + new.pop(0)] if new: block += [indent + self.PS2 + line for line in new] return block def summarize(self): if self.wrote: were = "were" else: were = "need to be" if not self.files: self.log_message("No files %s modified.", were) else: self.log_message("Files that %s modified:", were) for file in self.files: self.log_message(file) if self.fixer_log: self.log_message("Warnings/messages while refactoring:") for message in self.fixer_log: self.log_message(message) if self.errors: if len(self.errors) == 1: self.log_message("There was 1 error:") else: self.log_message("There were %d errors:", len(self.errors)) for msg, args, kwds in self.errors: self.log_message(msg, *args, **kwds) def parse_block(self, block, lineno, indent): """Parses a block into a tree. This is necessary to get correct line number / offset information in the parser diagnostics and embedded into the parse tree. """ tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent)) tree.future_features = frozenset() return tree def wrap_toks(self, block, lineno, indent): """Wraps a tokenize stream to systematically modify start/end.""" tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__) for type, value, (line0, col0), (line1, col1), line_text in tokens: line0 += lineno - 1 line1 += lineno - 1 # Don't bother updating the columns; this is too complicated # since line_text would also have to be updated and it would # still break for tokens spanning lines. Let the user guess # that the column numbers for doctests are relative to the # end of the prompt string (PS1 or PS2). yield type, value, (line0, col0), (line1, col1), line_text def gen_lines(self, block, indent): """Generates lines as expected by tokenize from a list of lines. This strips the first len(indent + self.PS1) characters off each line. 
""" prefix1 = indent + self.PS1 prefix2 = indent + self.PS2 prefix = prefix1 for line in block: if line.startswith(prefix): yield line[len(prefix):] elif line == prefix.rstrip() + "\n": yield "\n" else: raise AssertionError("line=%r, prefix=%r" % (line, prefix)) prefix = prefix2 while True: yield "" class MultiprocessingUnsupported(Exception): pass class MultiprocessRefactoringTool(RefactoringTool): def __init__(self, *args, **kwargs): super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs) self.queue = None self.output_lock = None def refactor(self, items, write=False, doctests_only=False, num_processes=1): if num_processes == 1: return super(MultiprocessRefactoringTool, self).refactor( items, write, doctests_only) try: import multiprocessing except ImportError: raise MultiprocessingUnsupported if self.queue is not None: raise RuntimeError("already doing multiple processes") self.queue = multiprocessing.JoinableQueue() self.output_lock = multiprocessing.Lock() processes = [multiprocessing.Process(target=self._child) for i in range(num_processes)] try: for p in processes: p.start() super(MultiprocessRefactoringTool, self).refactor(items, write, doctests_only) finally: self.queue.join() for i in range(num_processes): self.queue.put(None) for p in processes: if p.is_alive(): p.join() self.queue = None def _child(self): task = self.queue.get() while task is not None: args, kwargs = task try: super(MultiprocessRefactoringTool, self).refactor_file( *args, **kwargs) finally: self.queue.task_done() task = self.queue.get() def refactor_file(self, *args, **kwargs): if self.queue is not None: self.queue.put((args, kwargs)) else: return super(MultiprocessRefactoringTool, self).refactor_file( *args, **kwargs)
Python
# Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. # NOTE: When you run this test as a script from the command line, you # get warnings about certain hex/oct constants. Since those are # issued by the parser, you can't suppress them by adding a # filterwarnings() call to this module. Therefore, to shut up the # regression test, the filterwarnings() call has been added to # regrtest.py. from test.test_support import run_unittest, check_syntax_error import unittest import sys # testing import * from sys import * class TokenTests(unittest.TestCase): def testBackslash(self): # Backslash means line continuation: x = 1 \ + 1 self.assertEquals(x, 2, 'backslash for line continuation') # Backslash does not means continuation in comments :\ x = 0 self.assertEquals(x, 0, 'backslash ending comment') def testPlainIntegers(self): self.assertEquals(0xff, 255) self.assertEquals(0377, 255) self.assertEquals(2147483647, 017777777777) # "0x" is not a valid literal self.assertRaises(SyntaxError, eval, "0x") from sys import maxint if maxint == 2147483647: self.assertEquals(-2147483647-1, -020000000000) # XXX -2147483648 self.assert_(037777777777 > 0) self.assert_(0xffffffff > 0) for s in '2147483648', '040000000000', '0x100000000': try: x = eval(s) except OverflowError: self.fail("OverflowError on huge integer literal %r" % s) elif maxint == 9223372036854775807: self.assertEquals(-9223372036854775807-1, -01000000000000000000000) self.assert_(01777777777777777777777 > 0) self.assert_(0xffffffffffffffff > 0) for s in '9223372036854775808', '02000000000000000000000', \ '0x10000000000000000': try: x = eval(s) except OverflowError: self.fail("OverflowError on huge integer literal %r" % s) else: self.fail('Weird maxint value %r' % maxint) def testLongIntegers(self): x = 0L x = 0l x = 0xffffffffffffffffL x = 0xffffffffffffffffl x = 077777777777777777L x = 077777777777777777l x = 123456789012345678901234567890L x = 123456789012345678901234567890l def 
testFloats(self): x = 3.14 x = 314. x = 0.314 # XXX x = 000.314 x = .314 x = 3e14 x = 3E14 x = 3e-14 x = 3e+14 x = 3.e14 x = .3e14 x = 3.1e4 def testStringLiterals(self): x = ''; y = ""; self.assert_(len(x) == 0 and x == y) x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) x = "doesn't \"shrink\" does it" y = 'doesn\'t "shrink" does it' self.assert_(len(x) == 24 and x == y) x = "does \"shrink\" doesn't it" y = 'does "shrink" doesn\'t it' self.assert_(len(x) == 24 and x == y) x = """ The "quick" brown fox jumps over the 'lazy' dog. """ y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' self.assertEquals(x, y) y = ''' The "quick" brown fox jumps over the 'lazy' dog. ''' self.assertEquals(x, y) y = "\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the 'lazy' dog.\n\ " self.assertEquals(x, y) y = '\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the \'lazy\' dog.\n\ ' self.assertEquals(x, y) class GrammarTests(unittest.TestCase): # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE # XXX can't test in a script -- this rule is only used when interactive # file_input: (NEWLINE | stmt)* ENDMARKER # Being tested as this very moment this very module # expr_input: testlist NEWLINE # XXX Hard to test -- used only in calls to input() def testEvalInput(self): # testlist ENDMARKER x = eval('1, 0 or 1') def testFuncdef(self): ### 'def' NAME parameters ':' suite ### parameters: '(' [varargslist] ')' ### varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' ('**'|'*' '*') NAME] ### | ('**'|'*' '*') NAME) ### | fpdef ['=' test] (',' fpdef ['=' test])* [','] ### fpdef: NAME | '(' fplist ')' ### fplist: fpdef (',' fpdef)* [','] ### arglist: (argument ',')* (argument | *' test [',' '**' test] | '**' test) ### argument: [test '='] test # Really [keyword '='] test def f1(): pass f1() f1(*()) f1(*(), **{}) def f2(one_argument): pass def f3(two, arguments): pass def f4(two, 
(compound, (argument, list))): pass def f5((compound, first), two): pass self.assertEquals(f2.func_code.co_varnames, ('one_argument',)) self.assertEquals(f3.func_code.co_varnames, ('two', 'arguments')) if sys.platform.startswith('java'): self.assertEquals(f4.func_code.co_varnames, ('two', '(compound, (argument, list))', 'compound', 'argument', 'list',)) self.assertEquals(f5.func_code.co_varnames, ('(compound, first)', 'two', 'compound', 'first')) else: self.assertEquals(f4.func_code.co_varnames, ('two', '.1', 'compound', 'argument', 'list')) self.assertEquals(f5.func_code.co_varnames, ('.0', 'two', 'compound', 'first')) def a1(one_arg,): pass def a2(two, args,): pass def v0(*rest): pass def v1(a, *rest): pass def v2(a, b, *rest): pass def v3(a, (b, c), *rest): return a, b, c, rest f1() f2(1) f2(1,) f3(1, 2) f3(1, 2,) f4(1, (2, (3, 4))) v0() v0(1) v0(1,) v0(1,2) v0(1,2,3,4,5,6,7,8,9,0) v1(1) v1(1,) v1(1,2) v1(1,2,3) v1(1,2,3,4,5,6,7,8,9,0) v2(1,2) v2(1,2,3) v2(1,2,3,4) v2(1,2,3,4,5,6,7,8,9,0) v3(1,(2,3)) v3(1,(2,3),4) v3(1,(2,3),4,5,6,7,8,9,0) # ceval unpacks the formal arguments into the first argcount names; # thus, the names nested inside tuples must appear after these names. 
if sys.platform.startswith('java'): self.assertEquals(v3.func_code.co_varnames, ('a', '(b, c)', 'rest', 'b', 'c')) else: self.assertEquals(v3.func_code.co_varnames, ('a', '.1', 'rest', 'b', 'c')) self.assertEquals(v3(1, (2, 3), 4), (1, 2, 3, (4,))) def d01(a=1): pass d01() d01(1) d01(*(1,)) d01(**{'a':2}) def d11(a, b=1): pass d11(1) d11(1, 2) d11(1, **{'b':2}) def d21(a, b, c=1): pass d21(1, 2) d21(1, 2, 3) d21(*(1, 2, 3)) d21(1, *(2, 3)) d21(1, 2, *(3,)) d21(1, 2, **{'c':3}) def d02(a=1, b=2): pass d02() d02(1) d02(1, 2) d02(*(1, 2)) d02(1, *(2,)) d02(1, **{'b':2}) d02(**{'a': 1, 'b': 2}) def d12(a, b=1, c=2): pass d12(1) d12(1, 2) d12(1, 2, 3) def d22(a, b, c=1, d=2): pass d22(1, 2) d22(1, 2, 3) d22(1, 2, 3, 4) def d01v(a=1, *rest): pass d01v() d01v(1) d01v(1, 2) d01v(*(1, 2, 3, 4)) d01v(*(1,)) d01v(**{'a':2}) def d11v(a, b=1, *rest): pass d11v(1) d11v(1, 2) d11v(1, 2, 3) def d21v(a, b, c=1, *rest): pass d21v(1, 2) d21v(1, 2, 3) d21v(1, 2, 3, 4) d21v(*(1, 2, 3, 4)) d21v(1, 2, **{'c': 3}) def d02v(a=1, b=2, *rest): pass d02v() d02v(1) d02v(1, 2) d02v(1, 2, 3) d02v(1, *(2, 3, 4)) d02v(**{'a': 1, 'b': 2}) def d12v(a, b=1, c=2, *rest): pass d12v(1) d12v(1, 2) d12v(1, 2, 3) d12v(1, 2, 3, 4) d12v(*(1, 2, 3, 4)) d12v(1, 2, *(3, 4, 5)) d12v(1, *(2,), **{'c': 3}) def d22v(a, b, c=1, d=2, *rest): pass d22v(1, 2) d22v(1, 2, 3) d22v(1, 2, 3, 4) d22v(1, 2, 3, 4, 5) d22v(*(1, 2, 3, 4)) d22v(1, 2, *(3, 4, 5)) d22v(1, *(2, 3), **{'d': 4}) def d31v((x)): pass d31v(1) def d32v((x,)): pass d32v((1,)) # keyword arguments after *arglist def f(*args, **kwargs): return args, kwargs self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4), {'x':2, 'y':5})) self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)") self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)") # Check ast errors in *args and *kwargs check_syntax_error(self, "f(*g(1=2))") check_syntax_error(self, "f(**g(1=2))") def testLambdef(self): ### lambdef: 'lambda' [varargslist] ':' test l1 = lambda : 0 
self.assertEquals(l1(), 0) l2 = lambda : a[d] # XXX just testing the expression l3 = lambda : [2 < x for x in [-1, 3, 0L]] self.assertEquals(l3(), [0, 1, 0]) l4 = lambda x = lambda y = lambda z=1 : z : y() : x() self.assertEquals(l4(), 1) l5 = lambda x, y, z=2: x + y + z self.assertEquals(l5(1, 2), 5) self.assertEquals(l5(1, 2, 3), 6) check_syntax_error(self, "lambda x: x = 2") check_syntax_error(self, "lambda (None,): None") ### stmt: simple_stmt | compound_stmt # Tested below def testSimpleStmt(self): ### simple_stmt: small_stmt (';' small_stmt)* [';'] x = 1; pass; del x def foo(): # verify statments that end with semi-colons x = 1; pass; del x; foo() ### small_stmt: expr_stmt | print_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt | exec_stmt # Tested below def testExprStmt(self): # (exprlist '=')* exprlist 1 1, 2, 3 x = 1 x = 1, 2, 3 x = y = z = 1, 2, 3 x, y, z = 1, 2, 3 abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4) check_syntax_error(self, "x + 1 = 1") check_syntax_error(self, "a + 1 = b + 2") def testPrintStmt(self): # 'print' (test ',')* [test] import StringIO # Can't test printing to real stdout without comparing output # which is not available in unittest. 
save_stdout = sys.stdout sys.stdout = StringIO.StringIO() print 1, 2, 3 print 1, 2, 3, print print 0 or 1, 0 or 1, print 0 or 1 # 'print' '>>' test ',' print >> sys.stdout, 1, 2, 3 print >> sys.stdout, 1, 2, 3, print >> sys.stdout print >> sys.stdout, 0 or 1, 0 or 1, print >> sys.stdout, 0 or 1 # test printing to an instance class Gulp: def write(self, msg): pass gulp = Gulp() print >> gulp, 1, 2, 3 print >> gulp, 1, 2, 3, print >> gulp print >> gulp, 0 or 1, 0 or 1, print >> gulp, 0 or 1 # test print >> None def driver(): oldstdout = sys.stdout sys.stdout = Gulp() try: tellme(Gulp()) tellme() finally: sys.stdout = oldstdout # we should see this once def tellme(file=sys.stdout): print >> file, 'hello world' driver() # we should not see this at all def tellme(file=None): print >> file, 'goodbye universe' driver() self.assertEqual(sys.stdout.getvalue(), '''\ 1 2 3 1 2 3 1 1 1 1 2 3 1 2 3 1 1 1 hello world ''') sys.stdout = save_stdout # syntax errors check_syntax_error(self, 'print ,') check_syntax_error(self, 'print >> x,') def testDelStmt(self): # 'del' exprlist abc = [1,2,3] x, y, z = abc xyz = x, y, z del abc del x, y, (z, xyz) def testPassStmt(self): # 'pass' pass # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt # Tested below def testBreakStmt(self): # 'break' while 1: break def testContinueStmt(self): # 'continue' i = 1 while i: i = 0; continue msg = "" while not msg: msg = "ok" try: continue msg = "continue failed to continue inside try" except: msg = "continue inside try called except block" if msg != "ok": self.fail(msg) msg = "" while not msg: msg = "finally block not called" try: continue finally: msg = "ok" if msg != "ok": self.fail(msg) def test_break_continue_loop(self): # This test warrants an explanation. It is a test specifically for SF bugs # #463359 and #462937. 
The bug is that a 'break' statement executed or # exception raised inside a try/except inside a loop, *after* a continue # statement has been executed in that loop, will cause the wrong number of # arguments to be popped off the stack and the instruction pointer reset to # a very small number (usually 0.) Because of this, the following test # *must* written as a function, and the tracking vars *must* be function # arguments with default values. Otherwise, the test will loop and loop. def test_inner(extra_burning_oil = 1, count=0): big_hippo = 2 while big_hippo: count += 1 try: if extra_burning_oil and big_hippo == 1: extra_burning_oil -= 1 break big_hippo -= 1 continue except: raise if count > 2 or big_hippo <> 1: self.fail("continue then break in try/except in loop broken!") test_inner() def testReturn(self): # 'return' [testlist] def g1(): return def g2(): return 1 g1() x = g2() check_syntax_error(self, "class foo:return 1") def testYield(self): check_syntax_error(self, "class foo:yield 1") def testRaise(self): # 'raise' test [',' test] try: raise RuntimeError, 'just testing' except RuntimeError: pass try: raise KeyboardInterrupt except KeyboardInterrupt: pass def testImport(self): # 'import' dotted_as_names import sys import time, sys # 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names) from time import time from time import (time) # not testable inside a function, but already done at top of the module # from sys import * from sys import path, argv from sys import (path, argv) from sys import (path, argv,) def testGlobal(self): # 'global' NAME (',' NAME)* global a global a, b global one, two, three, four, five, six, seven, eight, nine, ten def testExec(self): # 'exec' expr ['in' expr [',' expr]] z = None del z exec 'z=1+1\n' if z != 2: self.fail('exec \'z=1+1\'\\n') del z exec 'z=1+1' if z != 2: self.fail('exec \'z=1+1\'') z = None del z import types if hasattr(types, "UnicodeType"): exec r"""if 1: exec u'z=1+1\n' if z != 2: 
self.fail('exec u\'z=1+1\'\\n') del z exec u'z=1+1' if z != 2: self.fail('exec u\'z=1+1\'')""" g = {} exec 'z = 1' in g if g.has_key('__builtins__'): del g['__builtins__'] if g != {'z': 1}: self.fail('exec \'z = 1\' in g') g = {} l = {} import warnings warnings.filterwarnings("ignore", "global statement", module="<string>") exec 'global a; a = 1; b = 2' in g, l if g.has_key('__builtins__'): del g['__builtins__'] if l.has_key('__builtins__'): del l['__builtins__'] if (g, l) != ({'a':1}, {'b':2}): self.fail('exec ... in g (%s), l (%s)' %(g,l)) def testAssert(self): # assert_stmt: 'assert' test [',' test] assert 1 assert 1, 1 assert lambda x:x assert 1, lambda x:x+1 try: assert 0, "msg" except AssertionError, e: self.assertEquals(e.args[0], "msg") else: if __debug__: self.fail("AssertionError not raised by assert 0") ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef # Tested below def testIf(self): # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] if 1: pass if 1: pass else: pass if 0: pass elif 0: pass if 0: pass elif 0: pass elif 0: pass elif 0: pass else: pass def testWhile(self): # 'while' test ':' suite ['else' ':' suite] while 0: pass while 0: pass else: pass # Issue1920: "while 0" is optimized away, # ensure that the "else" clause is still present. 
x = 0 while 0: x = 1 else: x = 2 self.assertEquals(x, 2) def testFor(self): # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite] for i in 1, 2, 3: pass for i, j, k in (): pass else: pass class Squares: def __init__(self, max): self.max = max self.sofar = [] def __len__(self): return len(self.sofar) def __getitem__(self, i): if not 0 <= i < self.max: raise IndexError n = len(self.sofar) while n <= i: self.sofar.append(n*n) n = n+1 return self.sofar[i] n = 0 for x in Squares(10): n = n+x if n != 285: self.fail('for over growing sequence') result = [] for x, in [(1,), (2,), (3,)]: result.append(x) self.assertEqual(result, [1, 2, 3]) def testTry(self): ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] ### | 'try' ':' suite 'finally' ':' suite ### except_clause: 'except' [expr [('as' | ',') expr]] try: 1/0 except ZeroDivisionError: pass else: pass try: 1/0 except EOFError: pass except TypeError as msg: pass except RuntimeError, msg: pass except: pass else: pass try: 1/0 except (EOFError, TypeError, ZeroDivisionError): pass try: 1/0 except (EOFError, TypeError, ZeroDivisionError), msg: pass try: pass finally: pass def testSuite(self): # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT if 1: pass if 1: pass if 1: # # # pass pass # pass # def testTest(self): ### and_test ('or' and_test)* ### and_test: not_test ('and' not_test)* ### not_test: 'not' not_test | comparison if not 1: pass if 1 and 1: pass if 1 or 1: pass if not not not 1: pass if not 1 and 1 and 1: pass if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass def testComparison(self): ### comparison: expr (comp_op expr)* ### comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' if 1: pass x = (1 == 1) if 1 == 1: pass if 1 != 1: pass if 1 <> 1: pass if 1 < 1: pass if 1 > 1: pass if 1 <= 1: pass if 1 >= 1: pass if 1 is 1: pass if 1 is not 1: pass if 1 in (): pass if 1 not in (): pass if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass 
def testBinaryMaskOps(self): x = 1 & 1 x = 1 ^ 1 x = 1 | 1 def testShiftOps(self): x = 1 << 1 x = 1 >> 1 x = 1 << 1 >> 1 def testAdditiveOps(self): x = 1 x = 1 + 1 x = 1 - 1 - 1 x = 1 - 1 + 1 - 1 + 1 def testMultiplicativeOps(self): x = 1 * 1 x = 1 / 1 x = 1 % 1 x = 1 / 1 * 1 % 1 def testUnaryOps(self): x = +1 x = -1 x = ~1 x = ~1 ^ 1 & 1 | 1 & 1 ^ -1 x = -1*1/1 + 1*1 - ---1*1 def testSelectors(self): ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME ### subscript: expr | [expr] ':' [expr] import sys, time c = sys.path[0] x = time.time() x = sys.modules['time'].time() a = '01234' c = a[0] c = a[-1] s = a[0:5] s = a[:5] s = a[0:] s = a[:] s = a[-5:] s = a[:-1] s = a[-4:-3] # A rough test of SF bug 1333982. http://python.org/sf/1333982 # The testing here is fairly incomplete. # Test cases should include: commas with 1 and 2 colons d = {} d[1] = 1 d[1,] = 2 d[1,2] = 3 d[1,2,3] = 4 L = list(d) L.sort() self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]') def testAtoms(self): ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING ### dictmaker: test ':' test (',' test ':' test)* [','] x = (1) x = (1 or 2 or 3) x = (1 or 2 or 3, 2, 3) x = [] x = [1] x = [1 or 2 or 3] x = [1 or 2 or 3, 2, 3] x = [] x = {} x = {'one': 1} x = {'one': 1,} x = {'one' or 'two': 1 or 2} x = {'one': 1, 'two': 2} x = {'one': 1, 'two': 2,} x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6} x = `x` x = `1 or 2 or 3` self.assertEqual(`1,2`, '(1, 2)') x = x x = 'x' x = 123 ### exprlist: expr (',' expr)* [','] ### testlist: test (',' test)* [','] # These have been exercised enough above def testClassdef(self): # 'class' NAME ['(' [testlist] ')'] ':' suite class B: pass class B2(): pass class C1(B): pass class C2(B): pass class D(C1, C2, B): pass class C: def meth1(self): pass def meth2(self, arg): pass def meth3(self, a1, a2): pass # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE # decorators: 
decorator+ # decorated: decorators (classdef | funcdef) def class_decorator(x): x.decorated = True return x @class_decorator class G: pass self.assertEqual(G.decorated, True) def testListcomps(self): # list comprehension tests nums = [1, 2, 3, 4, 5] strs = ["Apple", "Banana", "Coconut"] spcs = [" Apple", " Banana ", "Coco nut "] self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut']) self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15]) self.assertEqual([x for x in nums if x > 2], [3, 4, 5]) self.assertEqual([(i, s) for i in nums for s in strs], [(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'), (2, 'Apple'), (2, 'Banana'), (2, 'Coconut'), (3, 'Apple'), (3, 'Banana'), (3, 'Coconut'), (4, 'Apple'), (4, 'Banana'), (4, 'Coconut'), (5, 'Apple'), (5, 'Banana'), (5, 'Coconut')]) self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]], [(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'), (3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'), (5, 'Banana'), (5, 'Coconut')]) self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)], [[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]]) def test_in_func(l): return [None < x < 3 for x in l if x > 2] self.assertEqual(test_in_func(nums), [False, False, False]) def test_nested_front(): self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]], [[1, 2], [3, 4], [5, 6]]) test_nested_front() check_syntax_error(self, "[i, s for i in nums for s in strs]") check_syntax_error(self, "[x if y]") suppliers = [ (1, "Boeing"), (2, "Ford"), (3, "Macdonalds") ] parts = [ (10, "Airliner"), (20, "Engine"), (30, "Cheeseburger") ] suppart = [ (1, 10), (1, 20), (2, 20), (3, 30) ] x = [ (sname, pname) for (sno, sname) in suppliers for (pno, pname) in parts for (sp_sno, sp_pno) in suppart if sno == sp_sno and pno == sp_pno ] self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'), ('Macdonalds', 'Cheeseburger')]) def 
testGenexps(self): # generator expression tests g = ([x for x in range(10)] for x in range(1)) self.assertEqual(g.next(), [x for x in range(10)]) try: g.next() self.fail('should produce StopIteration exception') except StopIteration: pass a = 1 try: g = (a for d in a) g.next() self.fail('should produce TypeError') except TypeError: pass self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd']) self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy']) a = [x for x in range(10)] b = (x for x in (y for y in a)) self.assertEqual(sum(b), sum([x for x in range(10)])) self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)])) self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2])) self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)])) self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0) check_syntax_error(self, "foo(x for x in range(10), 100)") check_syntax_error(self, "foo(100, x for x in range(10))") def testComprehensionSpecials(self): # test for outmost iterable precomputation x = 10; g = (i for i in range(x)); x = 5 self.assertEqual(len(list(g)), 10) # This should hold, since we're only precomputing outmost iterable. x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x)) x = 5; t = True; self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g)) # Grammar allows multiple adjacent 'if's in listcomps and genexps, # even though it's silly. Make sure it works (ifelse broke this.) 
self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7]) self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7]) # verify unpacking single element tuples in listcomp/genexp. self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6]) self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9]) def test_with_statement(self): class manager(object): def __enter__(self): return (1, 2) def __exit__(self, *args): pass with manager(): pass with manager() as x: pass with manager() as (x, y): pass with manager(), manager(): pass with manager() as x, manager() as y: pass with manager() as x, manager(): pass def testIfElseExpr(self): # Test ifelse expressions in various cases def _checkeval(msg, ret): "helper to check that evaluation of expressions is done correctly" print x return ret self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True]) self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True]) self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5) self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5) self.assertEqual((5 and 6 if 0 else 1), 1) self.assertEqual(((5 and 6) if 0 else 1), 1) self.assertEqual((5 and (6 if 1 else 1)), 6) self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3) self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1) self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5) self.assertEqual((not 5 if 1 else 1), False) self.assertEqual((not 5 if 0 else 1), 1) self.assertEqual((6 + 1 if 1 else 2), 7) self.assertEqual((6 - 1 if 1 else 2), 5) self.assertEqual((6 * 2 if 1 else 4), 12) self.assertEqual((6 / 2 if 1 else 3), 3) self.assertEqual((6 < 4 if 0 else 2), 2) def test_main(): run_unittest(TokenTests, GrammarTests) if __name__ == '__main__': test_main()
Python
#!/usr/bin/env python # -*- coding: utf-8 -*- print u'ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ' def f(x): print '%s\t-> α(%2i):%s β(%s)'
Python
#!/usr/bin/env python # -*- coding: utf-8 -*- print u'ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ' def f(x): print '%s\t-> α(%2i):%s β(%s)'
Python
# Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. # NOTE: When you run this test as a script from the command line, you # get warnings about certain hex/oct constants. Since those are # issued by the parser, you can't suppress them by adding a # filterwarnings() call to this module. Therefore, to shut up the # regression test, the filterwarnings() call has been added to # regrtest.py. from test.support import run_unittest, check_syntax_error import unittest import sys # testing import * from sys import * class TokenTests(unittest.TestCase): def testBackslash(self): # Backslash means line continuation: x = 1 \ + 1 self.assertEquals(x, 2, 'backslash for line continuation') # Backslash does not means continuation in comments :\ x = 0 self.assertEquals(x, 0, 'backslash ending comment') def testPlainIntegers(self): self.assertEquals(type(000), type(0)) self.assertEquals(0xff, 255) self.assertEquals(0o377, 255) self.assertEquals(2147483647, 0o17777777777) self.assertEquals(0b1001, 9) # "0x" is not a valid literal self.assertRaises(SyntaxError, eval, "0x") from sys import maxsize if maxsize == 2147483647: self.assertEquals(-2147483647-1, -0o20000000000) # XXX -2147483648 self.assert_(0o37777777777 > 0) self.assert_(0xffffffff > 0) self.assert_(0b1111111111111111111111111111111 > 0) for s in ('2147483648', '0o40000000000', '0x100000000', '0b10000000000000000000000000000000'): try: x = eval(s) except OverflowError: self.fail("OverflowError on huge integer literal %r" % s) elif maxsize == 9223372036854775807: self.assertEquals(-9223372036854775807-1, -0o1000000000000000000000) self.assert_(0o1777777777777777777777 > 0) self.assert_(0xffffffffffffffff > 0) self.assert_(0b11111111111111111111111111111111111111111111111111111111111111 > 0) for s in '9223372036854775808', '0o2000000000000000000000', \ '0x10000000000000000', \ '0b100000000000000000000000000000000000000000000000000000000000000': try: x = eval(s) except OverflowError: 
self.fail("OverflowError on huge integer literal %r" % s) else: self.fail('Weird maxsize value %r' % maxsize) def testLongIntegers(self): x = 0 x = 0xffffffffffffffff x = 0Xffffffffffffffff x = 0o77777777777777777 x = 0O77777777777777777 x = 123456789012345678901234567890 x = 0b100000000000000000000000000000000000000000000000000000000000000000000 x = 0B111111111111111111111111111111111111111111111111111111111111111111111 def testFloats(self): x = 3.14 x = 314. x = 0.314 # XXX x = 000.314 x = .314 x = 3e14 x = 3E14 x = 3e-14 x = 3e+14 x = 3.e14 x = .3e14 x = 3.1e4 def testStringLiterals(self): x = ''; y = ""; self.assert_(len(x) == 0 and x == y) x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) x = "doesn't \"shrink\" does it" y = 'doesn\'t "shrink" does it' self.assert_(len(x) == 24 and x == y) x = "does \"shrink\" doesn't it" y = 'does "shrink" doesn\'t it' self.assert_(len(x) == 24 and x == y) x = """ The "quick" brown fox jumps over the 'lazy' dog. """ y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' self.assertEquals(x, y) y = ''' The "quick" brown fox jumps over the 'lazy' dog. ''' self.assertEquals(x, y) y = "\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the 'lazy' dog.\n\ " self.assertEquals(x, y) y = '\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the \'lazy\' dog.\n\ ' self.assertEquals(x, y) def testEllipsis(self): x = ... self.assert_(x is Ellipsis) self.assertRaises(SyntaxError, eval, ".. 
.") class GrammarTests(unittest.TestCase): # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE # XXX can't test in a script -- this rule is only used when interactive # file_input: (NEWLINE | stmt)* ENDMARKER # Being tested as this very moment this very module # expr_input: testlist NEWLINE # XXX Hard to test -- used only in calls to input() def testEvalInput(self): # testlist ENDMARKER x = eval('1, 0 or 1') def testFuncdef(self): ### [decorators] 'def' NAME parameters ['->' test] ':' suite ### decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE ### decorators: decorator+ ### parameters: '(' [typedargslist] ')' ### typedargslist: ((tfpdef ['=' test] ',')* ### ('*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef) ### | tfpdef ['=' test] (',' tfpdef ['=' test])* [',']) ### tfpdef: NAME [':' test] ### varargslist: ((vfpdef ['=' test] ',')* ### ('*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef) ### | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) ### vfpdef: NAME def f1(): pass f1() f1(*()) f1(*(), **{}) def f2(one_argument): pass def f3(two, arguments): pass self.assertEquals(f2.__code__.co_varnames, ('one_argument',)) self.assertEquals(f3.__code__.co_varnames, ('two', 'arguments')) def a1(one_arg,): pass def a2(two, args,): pass def v0(*rest): pass def v1(a, *rest): pass def v2(a, b, *rest): pass f1() f2(1) f2(1,) f3(1, 2) f3(1, 2,) v0() v0(1) v0(1,) v0(1,2) v0(1,2,3,4,5,6,7,8,9,0) v1(1) v1(1,) v1(1,2) v1(1,2,3) v1(1,2,3,4,5,6,7,8,9,0) v2(1,2) v2(1,2,3) v2(1,2,3,4) v2(1,2,3,4,5,6,7,8,9,0) def d01(a=1): pass d01() d01(1) d01(*(1,)) d01(**{'a':2}) def d11(a, b=1): pass d11(1) d11(1, 2) d11(1, **{'b':2}) def d21(a, b, c=1): pass d21(1, 2) d21(1, 2, 3) d21(*(1, 2, 3)) d21(1, *(2, 3)) d21(1, 2, *(3,)) d21(1, 2, **{'c':3}) def d02(a=1, b=2): pass d02() d02(1) d02(1, 2) d02(*(1, 2)) d02(1, *(2,)) d02(1, **{'b':2}) d02(**{'a': 1, 'b': 2}) def d12(a, b=1, c=2): pass d12(1) d12(1, 2) d12(1, 2, 3) def d22(a, b, c=1, d=2): 
pass d22(1, 2) d22(1, 2, 3) d22(1, 2, 3, 4) def d01v(a=1, *rest): pass d01v() d01v(1) d01v(1, 2) d01v(*(1, 2, 3, 4)) d01v(*(1,)) d01v(**{'a':2}) def d11v(a, b=1, *rest): pass d11v(1) d11v(1, 2) d11v(1, 2, 3) def d21v(a, b, c=1, *rest): pass d21v(1, 2) d21v(1, 2, 3) d21v(1, 2, 3, 4) d21v(*(1, 2, 3, 4)) d21v(1, 2, **{'c': 3}) def d02v(a=1, b=2, *rest): pass d02v() d02v(1) d02v(1, 2) d02v(1, 2, 3) d02v(1, *(2, 3, 4)) d02v(**{'a': 1, 'b': 2}) def d12v(a, b=1, c=2, *rest): pass d12v(1) d12v(1, 2) d12v(1, 2, 3) d12v(1, 2, 3, 4) d12v(*(1, 2, 3, 4)) d12v(1, 2, *(3, 4, 5)) d12v(1, *(2,), **{'c': 3}) def d22v(a, b, c=1, d=2, *rest): pass d22v(1, 2) d22v(1, 2, 3) d22v(1, 2, 3, 4) d22v(1, 2, 3, 4, 5) d22v(*(1, 2, 3, 4)) d22v(1, 2, *(3, 4, 5)) d22v(1, *(2, 3), **{'d': 4}) # keyword argument type tests try: str('x', **{b'foo':1 }) except TypeError: pass else: self.fail('Bytes should not work as keyword argument names') # keyword only argument tests def pos0key1(*, key): return key pos0key1(key=100) def pos2key2(p1, p2, *, k1, k2=100): return p1,p2,k1,k2 pos2key2(1, 2, k1=100) pos2key2(1, 2, k1=100, k2=200) pos2key2(1, 2, k2=100, k1=200) def pos2key2dict(p1, p2, *, k1=100, k2, **kwarg): return p1,p2,k1,k2,kwarg pos2key2dict(1,2,k2=100,tokwarg1=100,tokwarg2=200) pos2key2dict(1,2,tokwarg1=100,tokwarg2=200, k2=100) # keyword arguments after *arglist def f(*args, **kwargs): return args, kwargs self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4), {'x':2, 'y':5})) self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)") self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)") # argument annotation tests def f(x) -> list: pass self.assertEquals(f.__annotations__, {'return': list}) def f(x:int): pass self.assertEquals(f.__annotations__, {'x': int}) def f(*x:str): pass self.assertEquals(f.__annotations__, {'x': str}) def f(**x:float): pass self.assertEquals(f.__annotations__, {'x': float}) def f(x, y:1+2): pass self.assertEquals(f.__annotations__, {'y': 3}) def f(a, b:1, c:2, d): 
pass self.assertEquals(f.__annotations__, {'b': 1, 'c': 2}) def f(a, b:1, c:2, d, e:3=4, f=5, *g:6): pass self.assertEquals(f.__annotations__, {'b': 1, 'c': 2, 'e': 3, 'g': 6}) def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass self.assertEquals(f.__annotations__, {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9, 'k': 11, 'return': 12}) # Check for SF Bug #1697248 - mixing decorators and a return annotation def null(x): return x @null def f(x) -> list: pass self.assertEquals(f.__annotations__, {'return': list}) # test MAKE_CLOSURE with a variety of oparg's closure = 1 def f(): return closure def f(x=1): return closure def f(*, k=1): return closure def f() -> int: return closure # Check ast errors in *args and *kwargs check_syntax_error(self, "f(*g(1=2))") check_syntax_error(self, "f(**g(1=2))") def testLambdef(self): ### lambdef: 'lambda' [varargslist] ':' test l1 = lambda : 0 self.assertEquals(l1(), 0) l2 = lambda : a[d] # XXX just testing the expression l3 = lambda : [2 < x for x in [-1, 3, 0]] self.assertEquals(l3(), [0, 1, 0]) l4 = lambda x = lambda y = lambda z=1 : z : y() : x() self.assertEquals(l4(), 1) l5 = lambda x, y, z=2: x + y + z self.assertEquals(l5(1, 2), 5) self.assertEquals(l5(1, 2, 3), 6) check_syntax_error(self, "lambda x: x = 2") check_syntax_error(self, "lambda (None,): None") l6 = lambda x, y, *, k=20: x+y+k self.assertEquals(l6(1,2), 1+2+20) self.assertEquals(l6(1,2,k=10), 1+2+10) ### stmt: simple_stmt | compound_stmt # Tested below def testSimpleStmt(self): ### simple_stmt: small_stmt (';' small_stmt)* [';'] x = 1; pass; del x def foo(): # verify statments that end with semi-colons x = 1; pass; del x; foo() ### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt # Tested below def testExprStmt(self): # (exprlist '=')* exprlist 1 1, 2, 3 x = 1 x = 1, 2, 3 x = y = z = 1, 2, 3 x, y, z = 1, 2, 3 abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4) check_syntax_error(self, "x + 1 = 
1") check_syntax_error(self, "a + 1 = b + 2") def testDelStmt(self): # 'del' exprlist abc = [1,2,3] x, y, z = abc xyz = x, y, z del abc del x, y, (z, xyz) def testPassStmt(self): # 'pass' pass # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt # Tested below def testBreakStmt(self): # 'break' while 1: break def testContinueStmt(self): # 'continue' i = 1 while i: i = 0; continue msg = "" while not msg: msg = "ok" try: continue msg = "continue failed to continue inside try" except: msg = "continue inside try called except block" if msg != "ok": self.fail(msg) msg = "" while not msg: msg = "finally block not called" try: continue finally: msg = "ok" if msg != "ok": self.fail(msg) def test_break_continue_loop(self): # This test warrants an explanation. It is a test specifically for SF bugs # #463359 and #462937. The bug is that a 'break' statement executed or # exception raised inside a try/except inside a loop, *after* a continue # statement has been executed in that loop, will cause the wrong number of # arguments to be popped off the stack and the instruction pointer reset to # a very small number (usually 0.) Because of this, the following test # *must* written as a function, and the tracking vars *must* be function # arguments with default values. Otherwise, the test will loop and loop. 
def test_inner(extra_burning_oil = 1, count=0): big_hippo = 2 while big_hippo: count += 1 try: if extra_burning_oil and big_hippo == 1: extra_burning_oil -= 1 break big_hippo -= 1 continue except: raise if count > 2 or big_hippo != 1: self.fail("continue then break in try/except in loop broken!") test_inner() def testReturn(self): # 'return' [testlist] def g1(): return def g2(): return 1 g1() x = g2() check_syntax_error(self, "class foo:return 1") def testYield(self): check_syntax_error(self, "class foo:yield 1") def testRaise(self): # 'raise' test [',' test] try: raise RuntimeError('just testing') except RuntimeError: pass try: raise KeyboardInterrupt except KeyboardInterrupt: pass def testImport(self): # 'import' dotted_as_names import sys import time, sys # 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names) from time import time from time import (time) # not testable inside a function, but already done at top of the module # from sys import * from sys import path, argv from sys import (path, argv) from sys import (path, argv,) def testGlobal(self): # 'global' NAME (',' NAME)* global a global a, b global one, two, three, four, five, six, seven, eight, nine, ten def testNonlocal(self): # 'nonlocal' NAME (',' NAME)* x = 0 y = 0 def f(): nonlocal x nonlocal x, y def testAssert(self): # assert_stmt: 'assert' test [',' test] assert 1 assert 1, 1 assert lambda x:x assert 1, lambda x:x+1 try: assert 0, "msg" except AssertionError as e: self.assertEquals(e.args[0], "msg") else: if __debug__: self.fail("AssertionError not raised by assert 0") ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef # Tested below def testIf(self): # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] if 1: pass if 1: pass else: pass if 0: pass elif 0: pass if 0: pass elif 0: pass elif 0: pass elif 0: pass else: pass def testWhile(self): # 'while' test ':' suite ['else' ':' suite] while 0: pass while 0: pass else: pass # 
Issue1920: "while 0" is optimized away, # ensure that the "else" clause is still present. x = 0 while 0: x = 1 else: x = 2 self.assertEquals(x, 2) def testFor(self): # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite] for i in 1, 2, 3: pass for i, j, k in (): pass else: pass class Squares: def __init__(self, max): self.max = max self.sofar = [] def __len__(self): return len(self.sofar) def __getitem__(self, i): if not 0 <= i < self.max: raise IndexError n = len(self.sofar) while n <= i: self.sofar.append(n*n) n = n+1 return self.sofar[i] n = 0 for x in Squares(10): n = n+x if n != 285: self.fail('for over growing sequence') result = [] for x, in [(1,), (2,), (3,)]: result.append(x) self.assertEqual(result, [1, 2, 3]) def testTry(self): ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] ### | 'try' ':' suite 'finally' ':' suite ### except_clause: 'except' [expr ['as' expr]] try: 1/0 except ZeroDivisionError: pass else: pass try: 1/0 except EOFError: pass except TypeError as msg: pass except RuntimeError as msg: pass except: pass else: pass try: 1/0 except (EOFError, TypeError, ZeroDivisionError): pass try: 1/0 except (EOFError, TypeError, ZeroDivisionError) as msg: pass try: pass finally: pass def testSuite(self): # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT if 1: pass if 1: pass if 1: # # # pass pass # pass # def testTest(self): ### and_test ('or' and_test)* ### and_test: not_test ('and' not_test)* ### not_test: 'not' not_test | comparison if not 1: pass if 1 and 1: pass if 1 or 1: pass if not not not 1: pass if not 1 and 1 and 1: pass if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass def testComparison(self): ### comparison: expr (comp_op expr)* ### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not' if 1: pass x = (1 == 1) if 1 == 1: pass if 1 != 1: pass if 1 < 1: pass if 1 > 1: pass if 1 <= 1: pass if 1 >= 1: pass if 1 is 1: pass if 1 is not 1: pass if 1 in (): pass if 1 not in (): pass if 1 < 1 
> 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass def testBinaryMaskOps(self): x = 1 & 1 x = 1 ^ 1 x = 1 | 1 def testShiftOps(self): x = 1 << 1 x = 1 >> 1 x = 1 << 1 >> 1 def testAdditiveOps(self): x = 1 x = 1 + 1 x = 1 - 1 - 1 x = 1 - 1 + 1 - 1 + 1 def testMultiplicativeOps(self): x = 1 * 1 x = 1 / 1 x = 1 % 1 x = 1 / 1 * 1 % 1 def testUnaryOps(self): x = +1 x = -1 x = ~1 x = ~1 ^ 1 & 1 | 1 & 1 ^ -1 x = -1*1/1 + 1*1 - ---1*1 def testSelectors(self): ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME ### subscript: expr | [expr] ':' [expr] import sys, time c = sys.path[0] x = time.time() x = sys.modules['time'].time() a = '01234' c = a[0] c = a[-1] s = a[0:5] s = a[:5] s = a[0:] s = a[:] s = a[-5:] s = a[:-1] s = a[-4:-3] # A rough test of SF bug 1333982. http://python.org/sf/1333982 # The testing here is fairly incomplete. # Test cases should include: commas with 1 and 2 colons d = {} d[1] = 1 d[1,] = 2 d[1,2] = 3 d[1,2,3] = 4 L = list(d) L.sort(key=lambda x: x if isinstance(x, tuple) else ()) self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]') def testAtoms(self): ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictsetmaker] '}' | NAME | NUMBER | STRING ### dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [',']) x = (1) x = (1 or 2 or 3) x = (1 or 2 or 3, 2, 3) x = [] x = [1] x = [1 or 2 or 3] x = [1 or 2 or 3, 2, 3] x = [] x = {} x = {'one': 1} x = {'one': 1,} x = {'one' or 'two': 1 or 2} x = {'one': 1, 'two': 2} x = {'one': 1, 'two': 2,} x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6} x = {'one'} x = {'one', 1,} x = {'one', 'two', 'three'} x = {2, 3, 4,} x = x x = 'x' x = 123 ### exprlist: expr (',' expr)* [','] ### testlist: test (',' test)* [','] # These have been exercised enough above def testClassdef(self): # 'class' NAME ['(' [testlist] ')'] ':' suite class B: pass class B2(): pass class C1(B): pass class C2(B): pass class D(C1, C2, B): pass class C: def meth1(self): pass 
def meth2(self, arg): pass def meth3(self, a1, a2): pass # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE # decorators: decorator+ # decorated: decorators (classdef | funcdef) def class_decorator(x): return x @class_decorator class G: pass def testDictcomps(self): # dictorsetmaker: ( (test ':' test (comp_for | # (',' test ':' test)* [','])) | # (test (comp_for | (',' test)* [','])) ) nums = [1, 2, 3] self.assertEqual({i:i+1 for i in nums}, {1: 2, 2: 3, 3: 4}) def testListcomps(self): # list comprehension tests nums = [1, 2, 3, 4, 5] strs = ["Apple", "Banana", "Coconut"] spcs = [" Apple", " Banana ", "Coco nut "] self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut']) self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15]) self.assertEqual([x for x in nums if x > 2], [3, 4, 5]) self.assertEqual([(i, s) for i in nums for s in strs], [(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'), (2, 'Apple'), (2, 'Banana'), (2, 'Coconut'), (3, 'Apple'), (3, 'Banana'), (3, 'Coconut'), (4, 'Apple'), (4, 'Banana'), (4, 'Coconut'), (5, 'Apple'), (5, 'Banana'), (5, 'Coconut')]) self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]], [(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'), (3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'), (5, 'Banana'), (5, 'Coconut')]) self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)], [[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]]) def test_in_func(l): return [0 < x < 3 for x in l if x > 2] self.assertEqual(test_in_func(nums), [False, False, False]) def test_nested_front(): self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]], [[1, 2], [3, 4], [5, 6]]) test_nested_front() check_syntax_error(self, "[i, s for i in nums for s in strs]") check_syntax_error(self, "[x if y]") suppliers = [ (1, "Boeing"), (2, "Ford"), (3, "Macdonalds") ] parts = [ (10, "Airliner"), (20, "Engine"), (30, "Cheeseburger") ] suppart = [ (1, 10), (1, 20), (2, 
20), (3, 30) ] x = [ (sname, pname) for (sno, sname) in suppliers for (pno, pname) in parts for (sp_sno, sp_pno) in suppart if sno == sp_sno and pno == sp_pno ] self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'), ('Macdonalds', 'Cheeseburger')]) def testGenexps(self): # generator expression tests g = ([x for x in range(10)] for x in range(1)) self.assertEqual(next(g), [x for x in range(10)]) try: next(g) self.fail('should produce StopIteration exception') except StopIteration: pass a = 1 try: g = (a for d in a) next(g) self.fail('should produce TypeError') except TypeError: pass self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd']) self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy']) a = [x for x in range(10)] b = (x for x in (y for y in a)) self.assertEqual(sum(b), sum([x for x in range(10)])) self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)])) self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2])) self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)])) self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0) check_syntax_error(self, "foo(x for x in range(10), 100)") check_syntax_error(self, "foo(100, x for x in range(10))") def testComprehensionSpecials(self): # test for outmost iterable precomputation x = 10; g = (i for i in range(x)); x = 5 self.assertEqual(len(list(g)), 10) # This should hold, since we're only precomputing outmost iterable. 
x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x)) x = 5; t = True; self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g)) # Grammar allows multiple adjacent 'if's in listcomps and genexps, # even though it's silly. Make sure it works (ifelse broke this.) self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7]) self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7]) # verify unpacking single element tuples in listcomp/genexp. self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6]) self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9]) def test_with_statement(self): class manager(object): def __enter__(self): return (1, 2) def __exit__(self, *args): pass with manager(): pass with manager() as x: pass with manager() as (x, y): pass with manager(), manager(): pass with manager() as x, manager() as y: pass with manager() as x, manager(): pass def testIfElseExpr(self): # Test ifelse expressions in various cases def _checkeval(msg, ret): "helper to check that evaluation of expressions is done correctly" print(x) return ret # the next line is not allowed anymore #self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True]) self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True]) self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5) self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5) self.assertEqual((5 and 6 if 0 else 1), 1) self.assertEqual(((5 and 6) if 0 else 1), 1) self.assertEqual((5 and (6 if 1 else 1)), 6) self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3) self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1) self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5) self.assertEqual((not 5 if 1 else 1), False) self.assertEqual((not 5 if 0 else 1), 
1) self.assertEqual((6 + 1 if 1 else 2), 7) self.assertEqual((6 - 1 if 1 else 2), 5) self.assertEqual((6 * 2 if 1 else 4), 12) self.assertEqual((6 / 2 if 1 else 3), 3) self.assertEqual((6 < 4 if 0 else 2), 2) def test_main(): run_unittest(TokenTests, GrammarTests) if __name__ == '__main__': test_main()
Python
# coding: utf-8 print "BOM BOOM!"
Python
print "hi" print "Like bad Windows newlines?"
Python
# This file is used to verify that 2to3 falls back to a slower, iterative pattern matching # scheme in the event that the faster recursive system fails due to infinite recursion. from ctypes import * STRING = c_char_p OSUnknownByteOrder = 0 UIT_PROMPT = 1 P_PGID = 2 P_PID = 1 UIT_ERROR = 5 UIT_INFO = 4 UIT_NONE = 0 P_ALL = 0 UIT_VERIFY = 2 OSBigEndian = 2 UIT_BOOLEAN = 3 OSLittleEndian = 1 __darwin_nl_item = c_int __darwin_wctrans_t = c_int __darwin_wctype_t = c_ulong __int8_t = c_byte __uint8_t = c_ubyte __int16_t = c_short __uint16_t = c_ushort __int32_t = c_int __uint32_t = c_uint __int64_t = c_longlong __uint64_t = c_ulonglong __darwin_intptr_t = c_long __darwin_natural_t = c_uint __darwin_ct_rune_t = c_int class __mbstate_t(Union): pass __mbstate_t._pack_ = 4 __mbstate_t._fields_ = [ ('__mbstate8', c_char * 128), ('_mbstateL', c_longlong), ] assert sizeof(__mbstate_t) == 128, sizeof(__mbstate_t) assert alignment(__mbstate_t) == 4, alignment(__mbstate_t) __darwin_mbstate_t = __mbstate_t __darwin_ptrdiff_t = c_int __darwin_size_t = c_ulong __darwin_va_list = STRING __darwin_wchar_t = c_int __darwin_rune_t = __darwin_wchar_t __darwin_wint_t = c_int __darwin_clock_t = c_ulong __darwin_socklen_t = __uint32_t __darwin_ssize_t = c_long __darwin_time_t = c_long sig_atomic_t = c_int class sigcontext(Structure): pass sigcontext._fields_ = [ ('sc_onstack', c_int), ('sc_mask', c_int), ('sc_eax', c_uint), ('sc_ebx', c_uint), ('sc_ecx', c_uint), ('sc_edx', c_uint), ('sc_edi', c_uint), ('sc_esi', c_uint), ('sc_ebp', c_uint), ('sc_esp', c_uint), ('sc_ss', c_uint), ('sc_eflags', c_uint), ('sc_eip', c_uint), ('sc_cs', c_uint), ('sc_ds', c_uint), ('sc_es', c_uint), ('sc_fs', c_uint), ('sc_gs', c_uint), ] assert sizeof(sigcontext) == 72, sizeof(sigcontext) assert alignment(sigcontext) == 4, alignment(sigcontext) u_int8_t = c_ubyte u_int16_t = c_ushort u_int32_t = c_uint u_int64_t = c_ulonglong int32_t = c_int register_t = int32_t user_addr_t = u_int64_t user_size_t = u_int64_t 
int64_t = c_longlong user_ssize_t = int64_t user_long_t = int64_t user_ulong_t = u_int64_t user_time_t = int64_t syscall_arg_t = u_int64_t # values for unnamed enumeration class aes_key_st(Structure): pass aes_key_st._fields_ = [ ('rd_key', c_ulong * 60), ('rounds', c_int), ] assert sizeof(aes_key_st) == 244, sizeof(aes_key_st) assert alignment(aes_key_st) == 4, alignment(aes_key_st) AES_KEY = aes_key_st class asn1_ctx_st(Structure): pass asn1_ctx_st._fields_ = [ ('p', POINTER(c_ubyte)), ('eos', c_int), ('error', c_int), ('inf', c_int), ('tag', c_int), ('xclass', c_int), ('slen', c_long), ('max', POINTER(c_ubyte)), ('q', POINTER(c_ubyte)), ('pp', POINTER(POINTER(c_ubyte))), ('line', c_int), ] assert sizeof(asn1_ctx_st) == 44, sizeof(asn1_ctx_st) assert alignment(asn1_ctx_st) == 4, alignment(asn1_ctx_st) ASN1_CTX = asn1_ctx_st class asn1_object_st(Structure): pass asn1_object_st._fields_ = [ ('sn', STRING), ('ln', STRING), ('nid', c_int), ('length', c_int), ('data', POINTER(c_ubyte)), ('flags', c_int), ] assert sizeof(asn1_object_st) == 24, sizeof(asn1_object_st) assert alignment(asn1_object_st) == 4, alignment(asn1_object_st) ASN1_OBJECT = asn1_object_st class asn1_string_st(Structure): pass asn1_string_st._fields_ = [ ('length', c_int), ('type', c_int), ('data', POINTER(c_ubyte)), ('flags', c_long), ] assert sizeof(asn1_string_st) == 16, sizeof(asn1_string_st) assert alignment(asn1_string_st) == 4, alignment(asn1_string_st) ASN1_STRING = asn1_string_st class ASN1_ENCODING_st(Structure): pass ASN1_ENCODING_st._fields_ = [ ('enc', POINTER(c_ubyte)), ('len', c_long), ('modified', c_int), ] assert sizeof(ASN1_ENCODING_st) == 12, sizeof(ASN1_ENCODING_st) assert alignment(ASN1_ENCODING_st) == 4, alignment(ASN1_ENCODING_st) ASN1_ENCODING = ASN1_ENCODING_st class asn1_string_table_st(Structure): pass asn1_string_table_st._fields_ = [ ('nid', c_int), ('minsize', c_long), ('maxsize', c_long), ('mask', c_ulong), ('flags', c_ulong), ] assert sizeof(asn1_string_table_st) == 
20, sizeof(asn1_string_table_st) assert alignment(asn1_string_table_st) == 4, alignment(asn1_string_table_st) ASN1_STRING_TABLE = asn1_string_table_st class ASN1_TEMPLATE_st(Structure): pass ASN1_TEMPLATE_st._fields_ = [ ] ASN1_TEMPLATE = ASN1_TEMPLATE_st class ASN1_ITEM_st(Structure): pass ASN1_ITEM = ASN1_ITEM_st ASN1_ITEM_st._fields_ = [ ] class ASN1_TLC_st(Structure): pass ASN1_TLC = ASN1_TLC_st ASN1_TLC_st._fields_ = [ ] class ASN1_VALUE_st(Structure): pass ASN1_VALUE_st._fields_ = [ ] ASN1_VALUE = ASN1_VALUE_st ASN1_ITEM_EXP = ASN1_ITEM class asn1_type_st(Structure): pass class N12asn1_type_st4DOLLAR_11E(Union): pass ASN1_BOOLEAN = c_int ASN1_INTEGER = asn1_string_st ASN1_ENUMERATED = asn1_string_st ASN1_BIT_STRING = asn1_string_st ASN1_OCTET_STRING = asn1_string_st ASN1_PRINTABLESTRING = asn1_string_st ASN1_T61STRING = asn1_string_st ASN1_IA5STRING = asn1_string_st ASN1_GENERALSTRING = asn1_string_st ASN1_BMPSTRING = asn1_string_st ASN1_UNIVERSALSTRING = asn1_string_st ASN1_UTCTIME = asn1_string_st ASN1_GENERALIZEDTIME = asn1_string_st ASN1_VISIBLESTRING = asn1_string_st ASN1_UTF8STRING = asn1_string_st N12asn1_type_st4DOLLAR_11E._fields_ = [ ('ptr', STRING), ('boolean', ASN1_BOOLEAN), ('asn1_string', POINTER(ASN1_STRING)), ('object', POINTER(ASN1_OBJECT)), ('integer', POINTER(ASN1_INTEGER)), ('enumerated', POINTER(ASN1_ENUMERATED)), ('bit_string', POINTER(ASN1_BIT_STRING)), ('octet_string', POINTER(ASN1_OCTET_STRING)), ('printablestring', POINTER(ASN1_PRINTABLESTRING)), ('t61string', POINTER(ASN1_T61STRING)), ('ia5string', POINTER(ASN1_IA5STRING)), ('generalstring', POINTER(ASN1_GENERALSTRING)), ('bmpstring', POINTER(ASN1_BMPSTRING)), ('universalstring', POINTER(ASN1_UNIVERSALSTRING)), ('utctime', POINTER(ASN1_UTCTIME)), ('generalizedtime', POINTER(ASN1_GENERALIZEDTIME)), ('visiblestring', POINTER(ASN1_VISIBLESTRING)), ('utf8string', POINTER(ASN1_UTF8STRING)), ('set', POINTER(ASN1_STRING)), ('sequence', POINTER(ASN1_STRING)), ] assert 
sizeof(N12asn1_type_st4DOLLAR_11E) == 4, sizeof(N12asn1_type_st4DOLLAR_11E) assert alignment(N12asn1_type_st4DOLLAR_11E) == 4, alignment(N12asn1_type_st4DOLLAR_11E) asn1_type_st._fields_ = [ ('type', c_int), ('value', N12asn1_type_st4DOLLAR_11E), ] assert sizeof(asn1_type_st) == 8, sizeof(asn1_type_st) assert alignment(asn1_type_st) == 4, alignment(asn1_type_st) ASN1_TYPE = asn1_type_st class asn1_method_st(Structure): pass asn1_method_st._fields_ = [ ('i2d', CFUNCTYPE(c_int)), ('d2i', CFUNCTYPE(STRING)), ('create', CFUNCTYPE(STRING)), ('destroy', CFUNCTYPE(None)), ] assert sizeof(asn1_method_st) == 16, sizeof(asn1_method_st) assert alignment(asn1_method_st) == 4, alignment(asn1_method_st) ASN1_METHOD = asn1_method_st class asn1_header_st(Structure): pass asn1_header_st._fields_ = [ ('header', POINTER(ASN1_OCTET_STRING)), ('data', STRING), ('meth', POINTER(ASN1_METHOD)), ] assert sizeof(asn1_header_st) == 12, sizeof(asn1_header_st) assert alignment(asn1_header_st) == 4, alignment(asn1_header_st) ASN1_HEADER = asn1_header_st class BIT_STRING_BITNAME_st(Structure): pass BIT_STRING_BITNAME_st._fields_ = [ ('bitnum', c_int), ('lname', STRING), ('sname', STRING), ] assert sizeof(BIT_STRING_BITNAME_st) == 12, sizeof(BIT_STRING_BITNAME_st) assert alignment(BIT_STRING_BITNAME_st) == 4, alignment(BIT_STRING_BITNAME_st) BIT_STRING_BITNAME = BIT_STRING_BITNAME_st class bio_st(Structure): pass BIO = bio_st bio_info_cb = CFUNCTYPE(None, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long) class bio_method_st(Structure): pass bio_method_st._fields_ = [ ('type', c_int), ('name', STRING), ('bwrite', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), ('bread', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), ('bputs', CFUNCTYPE(c_int, POINTER(BIO), STRING)), ('bgets', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), ('ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, c_long, c_void_p)), ('create', CFUNCTYPE(c_int, POINTER(BIO))), ('destroy', CFUNCTYPE(c_int, POINTER(BIO))), 
('callback_ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, POINTER(bio_info_cb))), ] assert sizeof(bio_method_st) == 40, sizeof(bio_method_st) assert alignment(bio_method_st) == 4, alignment(bio_method_st) BIO_METHOD = bio_method_st class crypto_ex_data_st(Structure): pass class stack_st(Structure): pass STACK = stack_st crypto_ex_data_st._fields_ = [ ('sk', POINTER(STACK)), ('dummy', c_int), ] assert sizeof(crypto_ex_data_st) == 8, sizeof(crypto_ex_data_st) assert alignment(crypto_ex_data_st) == 4, alignment(crypto_ex_data_st) CRYPTO_EX_DATA = crypto_ex_data_st bio_st._fields_ = [ ('method', POINTER(BIO_METHOD)), ('callback', CFUNCTYPE(c_long, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)), ('cb_arg', STRING), ('init', c_int), ('shutdown', c_int), ('flags', c_int), ('retry_reason', c_int), ('num', c_int), ('ptr', c_void_p), ('next_bio', POINTER(bio_st)), ('prev_bio', POINTER(bio_st)), ('references', c_int), ('num_read', c_ulong), ('num_write', c_ulong), ('ex_data', CRYPTO_EX_DATA), ] assert sizeof(bio_st) == 64, sizeof(bio_st) assert alignment(bio_st) == 4, alignment(bio_st) class bio_f_buffer_ctx_struct(Structure): pass bio_f_buffer_ctx_struct._fields_ = [ ('ibuf_size', c_int), ('obuf_size', c_int), ('ibuf', STRING), ('ibuf_len', c_int), ('ibuf_off', c_int), ('obuf', STRING), ('obuf_len', c_int), ('obuf_off', c_int), ] assert sizeof(bio_f_buffer_ctx_struct) == 32, sizeof(bio_f_buffer_ctx_struct) assert alignment(bio_f_buffer_ctx_struct) == 4, alignment(bio_f_buffer_ctx_struct) BIO_F_BUFFER_CTX = bio_f_buffer_ctx_struct class hostent(Structure): pass hostent._fields_ = [ ] class bf_key_st(Structure): pass bf_key_st._fields_ = [ ('P', c_uint * 18), ('S', c_uint * 1024), ] assert sizeof(bf_key_st) == 4168, sizeof(bf_key_st) assert alignment(bf_key_st) == 4, alignment(bf_key_st) BF_KEY = bf_key_st class bignum_st(Structure): pass bignum_st._fields_ = [ ('d', POINTER(c_ulong)), ('top', c_int), ('dmax', c_int), ('neg', c_int), ('flags', c_int), ] assert 
sizeof(bignum_st) == 20, sizeof(bignum_st) assert alignment(bignum_st) == 4, alignment(bignum_st) BIGNUM = bignum_st class bignum_ctx(Structure): pass bignum_ctx._fields_ = [ ] BN_CTX = bignum_ctx class bn_blinding_st(Structure): pass bn_blinding_st._fields_ = [ ('init', c_int), ('A', POINTER(BIGNUM)), ('Ai', POINTER(BIGNUM)), ('mod', POINTER(BIGNUM)), ('thread_id', c_ulong), ] assert sizeof(bn_blinding_st) == 20, sizeof(bn_blinding_st) assert alignment(bn_blinding_st) == 4, alignment(bn_blinding_st) BN_BLINDING = bn_blinding_st class bn_mont_ctx_st(Structure): pass bn_mont_ctx_st._fields_ = [ ('ri', c_int), ('RR', BIGNUM), ('N', BIGNUM), ('Ni', BIGNUM), ('n0', c_ulong), ('flags', c_int), ] assert sizeof(bn_mont_ctx_st) == 72, sizeof(bn_mont_ctx_st) assert alignment(bn_mont_ctx_st) == 4, alignment(bn_mont_ctx_st) BN_MONT_CTX = bn_mont_ctx_st class bn_recp_ctx_st(Structure): pass bn_recp_ctx_st._fields_ = [ ('N', BIGNUM), ('Nr', BIGNUM), ('num_bits', c_int), ('shift', c_int), ('flags', c_int), ] assert sizeof(bn_recp_ctx_st) == 52, sizeof(bn_recp_ctx_st) assert alignment(bn_recp_ctx_st) == 4, alignment(bn_recp_ctx_st) BN_RECP_CTX = bn_recp_ctx_st class buf_mem_st(Structure): pass buf_mem_st._fields_ = [ ('length', c_int), ('data', STRING), ('max', c_int), ] assert sizeof(buf_mem_st) == 12, sizeof(buf_mem_st) assert alignment(buf_mem_st) == 4, alignment(buf_mem_st) BUF_MEM = buf_mem_st class cast_key_st(Structure): pass cast_key_st._fields_ = [ ('data', c_ulong * 32), ('short_key', c_int), ] assert sizeof(cast_key_st) == 132, sizeof(cast_key_st) assert alignment(cast_key_st) == 4, alignment(cast_key_st) CAST_KEY = cast_key_st class comp_method_st(Structure): pass comp_method_st._fields_ = [ ('type', c_int), ('name', STRING), ('init', CFUNCTYPE(c_int)), ('finish', CFUNCTYPE(None)), ('compress', CFUNCTYPE(c_int)), ('expand', CFUNCTYPE(c_int)), ('ctrl', CFUNCTYPE(c_long)), ('callback_ctrl', CFUNCTYPE(c_long)), ] assert sizeof(comp_method_st) == 32, 
sizeof(comp_method_st) assert alignment(comp_method_st) == 4, alignment(comp_method_st) COMP_METHOD = comp_method_st class comp_ctx_st(Structure): pass comp_ctx_st._fields_ = [ ('meth', POINTER(COMP_METHOD)), ('compress_in', c_ulong), ('compress_out', c_ulong), ('expand_in', c_ulong), ('expand_out', c_ulong), ('ex_data', CRYPTO_EX_DATA), ] assert sizeof(comp_ctx_st) == 28, sizeof(comp_ctx_st) assert alignment(comp_ctx_st) == 4, alignment(comp_ctx_st) COMP_CTX = comp_ctx_st class CRYPTO_dynlock_value(Structure): pass CRYPTO_dynlock_value._fields_ = [ ] class CRYPTO_dynlock(Structure): pass CRYPTO_dynlock._fields_ = [ ('references', c_int), ('data', POINTER(CRYPTO_dynlock_value)), ] assert sizeof(CRYPTO_dynlock) == 8, sizeof(CRYPTO_dynlock) assert alignment(CRYPTO_dynlock) == 4, alignment(CRYPTO_dynlock) BIO_dummy = bio_st CRYPTO_EX_new = CFUNCTYPE(c_int, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p) CRYPTO_EX_free = CFUNCTYPE(None, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p) CRYPTO_EX_dup = CFUNCTYPE(c_int, POINTER(CRYPTO_EX_DATA), POINTER(CRYPTO_EX_DATA), c_void_p, c_int, c_long, c_void_p) class crypto_ex_data_func_st(Structure): pass crypto_ex_data_func_st._fields_ = [ ('argl', c_long), ('argp', c_void_p), ('new_func', POINTER(CRYPTO_EX_new)), ('free_func', POINTER(CRYPTO_EX_free)), ('dup_func', POINTER(CRYPTO_EX_dup)), ] assert sizeof(crypto_ex_data_func_st) == 20, sizeof(crypto_ex_data_func_st) assert alignment(crypto_ex_data_func_st) == 4, alignment(crypto_ex_data_func_st) CRYPTO_EX_DATA_FUNCS = crypto_ex_data_func_st class st_CRYPTO_EX_DATA_IMPL(Structure): pass CRYPTO_EX_DATA_IMPL = st_CRYPTO_EX_DATA_IMPL st_CRYPTO_EX_DATA_IMPL._fields_ = [ ] CRYPTO_MEM_LEAK_CB = CFUNCTYPE(c_void_p, c_ulong, STRING, c_int, c_int, c_void_p) DES_cblock = c_ubyte * 8 const_DES_cblock = c_ubyte * 8 class DES_ks(Structure): pass class N6DES_ks3DOLLAR_9E(Union): pass N6DES_ks3DOLLAR_9E._fields_ = [ ('cblock', DES_cblock), 
('deslong', c_ulong * 2), ]
assert sizeof(N6DES_ks3DOLLAR_9E) == 8, sizeof(N6DES_ks3DOLLAR_9E)
assert alignment(N6DES_ks3DOLLAR_9E) == 4, alignment(N6DES_ks3DOLLAR_9E)
# NOTE(review): this module appears to be machine-generated ctypes bindings for
# OpenSSL headers. The sizeof/alignment asserts pin the ABI of the platform the
# bindings were generated on (pointers are 4 bytes per the asserts, i.e. a
# 32-bit build -- they will fail elsewhere); do not hand-edit field lists.

# DES key schedule: 16 rounds of the anonymous union declared just above.
DES_ks._fields_ = [
    ('ks', N6DES_ks3DOLLAR_9E * 16),
]
assert sizeof(DES_ks) == 128, sizeof(DES_ks)
assert alignment(DES_ks) == 4, alignment(DES_ks)
DES_key_schedule = DES_ks

# Legacy ("_ossl_old_") libdes-compatible DES types.
_ossl_old_des_cblock = c_ubyte * 8
class _ossl_old_des_ks_struct(Structure):
    pass
class N23_ossl_old_des_ks_struct4DOLLAR_10E(Union):
    pass
N23_ossl_old_des_ks_struct4DOLLAR_10E._fields_ = [
    ('_', _ossl_old_des_cblock),
    ('pad', c_ulong * 2),
]
assert sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 8, sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E)
assert alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 4, alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E)
_ossl_old_des_ks_struct._fields_ = [
    ('ks', N23_ossl_old_des_ks_struct4DOLLAR_10E),
]
assert sizeof(_ossl_old_des_ks_struct) == 8, sizeof(_ossl_old_des_ks_struct)
assert alignment(_ossl_old_des_ks_struct) == 4, alignment(_ossl_old_des_ks_struct)
_ossl_old_des_key_schedule = _ossl_old_des_ks_struct * 16

# Diffie-Hellman: dh_st (the DH key) and its method table dh_method.
class dh_st(Structure):
    pass
DH = dh_st
class dh_method(Structure):
    pass
dh_method._fields_ = [
    ('name', STRING),
    ('generate_key', CFUNCTYPE(c_int, POINTER(DH))),
    ('compute_key', CFUNCTYPE(c_int, POINTER(c_ubyte), POINTER(BIGNUM), POINTER(DH))),
    ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DH), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
    ('init', CFUNCTYPE(c_int, POINTER(DH))),
    ('finish', CFUNCTYPE(c_int, POINTER(DH))),
    ('flags', c_int),
    ('app_data', STRING),
]
assert sizeof(dh_method) == 32, sizeof(dh_method)
assert alignment(dh_method) == 4, alignment(dh_method)
DH_METHOD = dh_method
# engine_st is forward-declared here; its (empty) field list is set later.
class engine_st(Structure):
    pass
ENGINE = engine_st
dh_st._fields_ = [
    ('pad', c_int),
    ('version', c_int),
    ('p', POINTER(BIGNUM)),
    ('g', POINTER(BIGNUM)),
    ('length', c_long),
    ('pub_key', POINTER(BIGNUM)),
    ('priv_key', POINTER(BIGNUM)),
    ('flags', c_int),
    ('method_mont_p', STRING),
    ('q', POINTER(BIGNUM)),
    ('j', POINTER(BIGNUM)),
    ('seed', POINTER(c_ubyte)),
    ('seedlen', c_int),
    ('counter', POINTER(BIGNUM)),
    ('references', c_int),
    ('ex_data', CRYPTO_EX_DATA),
    ('meth', POINTER(DH_METHOD)),
    ('engine', POINTER(ENGINE)),
]
assert sizeof(dh_st) == 76, sizeof(dh_st)
assert alignment(dh_st) == 4, alignment(dh_st)

# DSA: signature pair, method table, and the dsa_st key structure.
class dsa_st(Structure):
    pass
DSA = dsa_st
class DSA_SIG_st(Structure):
    pass
DSA_SIG_st._fields_ = [
    ('r', POINTER(BIGNUM)),
    ('s', POINTER(BIGNUM)),
]
assert sizeof(DSA_SIG_st) == 8, sizeof(DSA_SIG_st)
assert alignment(DSA_SIG_st) == 4, alignment(DSA_SIG_st)
DSA_SIG = DSA_SIG_st
class dsa_method(Structure):
    pass
dsa_method._fields_ = [
    ('name', STRING),
    ('dsa_do_sign', CFUNCTYPE(POINTER(DSA_SIG), POINTER(c_ubyte), c_int, POINTER(DSA))),
    ('dsa_sign_setup', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BN_CTX), POINTER(POINTER(BIGNUM)), POINTER(POINTER(BIGNUM)))),
    ('dsa_do_verify', CFUNCTYPE(c_int, POINTER(c_ubyte), c_int, POINTER(DSA_SIG), POINTER(DSA))),
    ('dsa_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
    ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
    ('init', CFUNCTYPE(c_int, POINTER(DSA))),
    ('finish', CFUNCTYPE(c_int, POINTER(DSA))),
    ('flags', c_int),
    ('app_data', STRING),
]
assert sizeof(dsa_method) == 40, sizeof(dsa_method)
assert alignment(dsa_method) == 4, alignment(dsa_method)
DSA_METHOD = dsa_method
dsa_st._fields_ = [
    ('pad', c_int),
    ('version', c_long),
    ('write_params', c_int),
    ('p', POINTER(BIGNUM)),
    ('q', POINTER(BIGNUM)),
    ('g', POINTER(BIGNUM)),
    ('pub_key', POINTER(BIGNUM)),
    ('priv_key', POINTER(BIGNUM)),
    ('kinv', POINTER(BIGNUM)),
    ('r', POINTER(BIGNUM)),
    ('flags', c_int),
    ('method_mont_p', STRING),
    ('references', c_int),
    ('ex_data', CRYPTO_EX_DATA),
    ('meth', POINTER(DSA_METHOD)),
    ('engine', POINTER(ENGINE)),
]
assert sizeof(dsa_st) == 68, sizeof(dsa_st)
assert alignment(dsa_st) == 4, alignment(dsa_st)

# EVP_PKEY: generic key container; 'pkey' is a union over rsa/dsa/dh pointers.
class evp_pkey_st(Structure):
    pass
class N11evp_pkey_st4DOLLAR_12E(Union):
    pass
class rsa_st(Structure):
    pass
N11evp_pkey_st4DOLLAR_12E._fields_ = [
    ('ptr', STRING),
    ('rsa', POINTER(rsa_st)),
    ('dsa', POINTER(dsa_st)),
    ('dh', POINTER(dh_st)),
]
assert sizeof(N11evp_pkey_st4DOLLAR_12E) == 4, sizeof(N11evp_pkey_st4DOLLAR_12E)
assert alignment(N11evp_pkey_st4DOLLAR_12E) == 4, alignment(N11evp_pkey_st4DOLLAR_12E)
evp_pkey_st._fields_ = [
    ('type', c_int),
    ('save_type', c_int),
    ('references', c_int),
    ('pkey', N11evp_pkey_st4DOLLAR_12E),
    ('save_parameters', c_int),
    ('attributes', POINTER(STACK)),
]
assert sizeof(evp_pkey_st) == 24, sizeof(evp_pkey_st)
assert alignment(evp_pkey_st) == 4, alignment(evp_pkey_st)

# EVP message digest: method table (env_md_st) and context (env_md_ctx_st).
class env_md_st(Structure):
    pass
class env_md_ctx_st(Structure):
    pass
EVP_MD_CTX = env_md_ctx_st
env_md_st._fields_ = [
    ('type', c_int),
    ('pkey_type', c_int),
    ('md_size', c_int),
    ('flags', c_ulong),
    ('init', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))),
    ('update', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), c_void_p, c_ulong)),
    ('final', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(c_ubyte))),
    ('copy', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(EVP_MD_CTX))),
    ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))),
    ('sign', CFUNCTYPE(c_int)),
    ('verify', CFUNCTYPE(c_int)),
    ('required_pkey_type', c_int * 5),
    ('block_size', c_int),
    ('ctx_size', c_int),
]
assert sizeof(env_md_st) == 72, sizeof(env_md_st)
assert alignment(env_md_st) == 4, alignment(env_md_st)
EVP_MD = env_md_st
env_md_ctx_st._fields_ = [
    ('digest', POINTER(EVP_MD)),
    ('engine', POINTER(ENGINE)),
    ('flags', c_ulong),
    ('md_data', c_void_p),
]
assert sizeof(env_md_ctx_st) == 16, sizeof(env_md_ctx_st)
assert alignment(env_md_ctx_st) == 4, alignment(env_md_ctx_st)

# EVP cipher: method table (evp_cipher_st), info, and context.
class evp_cipher_st(Structure):
    pass
class evp_cipher_ctx_st(Structure):
    pass
EVP_CIPHER_CTX = evp_cipher_ctx_st
evp_cipher_st._fields_ = [
    ('nid', c_int),
    ('block_size', c_int),
    ('key_len', c_int),
    ('iv_len', c_int),
    ('flags', c_ulong),
    ('init', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_int)),
    ('do_cipher', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_uint)),
    ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX))),
    ('ctx_size', c_int),
    ('set_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))),
    ('get_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))),
    ('ctrl', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), c_int, c_int, c_void_p)),
    ('app_data', c_void_p),
]
assert sizeof(evp_cipher_st) == 52, sizeof(evp_cipher_st)
assert alignment(evp_cipher_st) == 4, alignment(evp_cipher_st)
class evp_cipher_info_st(Structure):
    pass
EVP_CIPHER = evp_cipher_st
evp_cipher_info_st._fields_ = [
    ('cipher', POINTER(EVP_CIPHER)),
    ('iv', c_ubyte * 16),
]
assert sizeof(evp_cipher_info_st) == 20, sizeof(evp_cipher_info_st)
assert alignment(evp_cipher_info_st) == 4, alignment(evp_cipher_info_st)
EVP_CIPHER_INFO = evp_cipher_info_st
evp_cipher_ctx_st._fields_ = [
    ('cipher', POINTER(EVP_CIPHER)),
    ('engine', POINTER(ENGINE)),
    ('encrypt', c_int),
    ('buf_len', c_int),
    ('oiv', c_ubyte * 16),
    ('iv', c_ubyte * 16),
    ('buf', c_ubyte * 32),
    ('num', c_int),
    ('app_data', c_void_p),
    ('key_len', c_int),
    ('flags', c_ulong),
    ('cipher_data', c_void_p),
    ('final_used', c_int),
    ('block_mask', c_int),
    ('final', c_ubyte * 32),
]
assert sizeof(evp_cipher_ctx_st) == 140, sizeof(evp_cipher_ctx_st)
assert alignment(evp_cipher_ctx_st) == 4, alignment(evp_cipher_ctx_st)

# EVP encode context (aliased to EVP_ENCODE_CTX further down).
class evp_Encode_Ctx_st(Structure):
    pass
evp_Encode_Ctx_st._fields_ = [
    ('num', c_int),
    ('length', c_int),
    ('enc_data', c_ubyte * 80),
    ('line_num', c_int),
    ('expect_nl', c_int),
]
assert sizeof(evp_Encode_Ctx_st) == 96, sizeof(evp_Encode_Ctx_st)
assert alignment(evp_Encode_Ctx_st) == 4, alignment(evp_Encode_Ctx_st)
EVP_ENCODE_CTX = evp_Encode_Ctx_st
# PBE key-generation callback signature.
EVP_PBE_KEYGEN = CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), STRING, c_int, POINTER(ASN1_TYPE), POINTER(EVP_CIPHER), POINTER(EVP_MD), c_int)

# lhash: hash-table node, callback signatures, and table (with statistics).
class lhash_node_st(Structure):
    pass
lhash_node_st._fields_ = [
    ('data', c_void_p),
    ('next', POINTER(lhash_node_st)),
    ('hash', c_ulong),
]
assert sizeof(lhash_node_st) == 12, sizeof(lhash_node_st)
assert alignment(lhash_node_st) == 4, alignment(lhash_node_st)
LHASH_NODE = lhash_node_st
LHASH_COMP_FN_TYPE = CFUNCTYPE(c_int, c_void_p, c_void_p)
LHASH_HASH_FN_TYPE = CFUNCTYPE(c_ulong, c_void_p)
LHASH_DOALL_FN_TYPE = CFUNCTYPE(None, c_void_p)
LHASH_DOALL_ARG_FN_TYPE = CFUNCTYPE(None, c_void_p, c_void_p)
class lhash_st(Structure):
    pass
lhash_st._fields_ = [
    ('b', POINTER(POINTER(LHASH_NODE))),
    ('comp', LHASH_COMP_FN_TYPE),
    ('hash', LHASH_HASH_FN_TYPE),
    ('num_nodes', c_uint),
    ('num_alloc_nodes', c_uint),
    ('p', c_uint),
    ('pmax', c_uint),
    ('up_load', c_ulong),
    ('down_load', c_ulong),
    ('num_items', c_ulong),
    ('num_expands', c_ulong),
    ('num_expand_reallocs', c_ulong),
    ('num_contracts', c_ulong),
    ('num_contract_reallocs', c_ulong),
    ('num_hash_calls', c_ulong),
    ('num_comp_calls', c_ulong),
    ('num_insert', c_ulong),
    ('num_replace', c_ulong),
    ('num_delete', c_ulong),
    ('num_no_delete', c_ulong),
    ('num_retrieve', c_ulong),
    ('num_retrieve_miss', c_ulong),
    ('num_hash_comps', c_ulong),
    ('error', c_int),
]
assert sizeof(lhash_st) == 96, sizeof(lhash_st)
assert alignment(lhash_st) == 4, alignment(lhash_st)
LHASH = lhash_st

# Digest contexts: MD2, MD4, MD5, MDC2.
class MD2state_st(Structure):
    pass
MD2state_st._fields_ = [
    ('num', c_int),
    ('data', c_ubyte * 16),
    ('cksm', c_uint * 16),
    ('state', c_uint * 16),
]
assert sizeof(MD2state_st) == 148, sizeof(MD2state_st)
assert alignment(MD2state_st) == 4, alignment(MD2state_st)
MD2_CTX = MD2state_st
class MD4state_st(Structure):
    pass
MD4state_st._fields_ = [
    ('A', c_uint),
    ('B', c_uint),
    ('C', c_uint),
    ('D', c_uint),
    ('Nl', c_uint),
    ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(MD4state_st) == 92, sizeof(MD4state_st)
assert alignment(MD4state_st) == 4, alignment(MD4state_st)
MD4_CTX = MD4state_st
class MD5state_st(Structure):
    pass
MD5state_st._fields_ = [
    ('A', c_uint),
    ('B', c_uint),
    ('C', c_uint),
    ('D', c_uint),
    ('Nl', c_uint),
    ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(MD5state_st) == 92, sizeof(MD5state_st)
assert alignment(MD5state_st) == 4, alignment(MD5state_st)
MD5_CTX = MD5state_st
class mdc2_ctx_st(Structure):
    pass
mdc2_ctx_st._fields_ = [
    ('num', c_int),
    ('data', c_ubyte * 8),
    ('h', DES_cblock),
    ('hh', DES_cblock),
    ('pad_type', c_int),
]
assert sizeof(mdc2_ctx_st) == 32, sizeof(mdc2_ctx_st)
assert alignment(mdc2_ctx_st) == 4, alignment(mdc2_ctx_st)
MDC2_CTX = mdc2_ctx_st

# Object-name table entry.
class obj_name_st(Structure):
    pass
obj_name_st._fields_ = [
    ('type', c_int),
    ('alias', c_int),
    ('name', STRING),
    ('data', STRING),
]
assert sizeof(obj_name_st) == 16, sizeof(obj_name_st)
assert alignment(obj_name_st) == 4, alignment(obj_name_st)
OBJ_NAME = obj_name_st

# ASN.1/EVP aliases and forward declarations of X.509 structures whose
# field lists are assigned later in the file.
ASN1_TIME = asn1_string_st
ASN1_NULL = c_int
EVP_PKEY = evp_pkey_st
class x509_st(Structure):
    pass
X509 = x509_st
class X509_algor_st(Structure):
    pass
X509_ALGOR = X509_algor_st
class X509_crl_st(Structure):
    pass
X509_CRL = X509_crl_st
class X509_name_st(Structure):
    pass
X509_NAME = X509_name_st
class x509_store_st(Structure):
    pass
X509_STORE = x509_store_st
class x509_store_ctx_st(Structure):
    pass
X509_STORE_CTX = x509_store_ctx_st
# engine_st is left opaque (no fields exposed through these bindings).
engine_st._fields_ = [
]

# PEM structures.
class PEM_Encode_Seal_st(Structure):
    pass
PEM_Encode_Seal_st._fields_ = [
    ('encode', EVP_ENCODE_CTX),
    ('md', EVP_MD_CTX),
    ('cipher', EVP_CIPHER_CTX),
]
assert sizeof(PEM_Encode_Seal_st) == 252, sizeof(PEM_Encode_Seal_st)
assert alignment(PEM_Encode_Seal_st) == 4, alignment(PEM_Encode_Seal_st)
PEM_ENCODE_SEAL_CTX = PEM_Encode_Seal_st
class pem_recip_st(Structure):
    pass
pem_recip_st._fields_ = [
    ('name', STRING),
    ('dn', POINTER(X509_NAME)),
    ('cipher', c_int),
    ('key_enc', c_int),
]
assert sizeof(pem_recip_st) == 16, sizeof(pem_recip_st)
assert alignment(pem_recip_st) == 4, alignment(pem_recip_st)
PEM_USER = pem_recip_st
class pem_ctx_st(Structure):
    pass
class N10pem_ctx_st4DOLLAR_16E(Structure):
    pass
N10pem_ctx_st4DOLLAR_16E._fields_ = [
    ('version', c_int),
    ('mode', c_int),
]
assert sizeof(N10pem_ctx_st4DOLLAR_16E) == 8, sizeof(N10pem_ctx_st4DOLLAR_16E)
assert alignment(N10pem_ctx_st4DOLLAR_16E) == 4, alignment(N10pem_ctx_st4DOLLAR_16E)
class N10pem_ctx_st4DOLLAR_17E(Structure):
    pass
N10pem_ctx_st4DOLLAR_17E._fields_ = [
    ('cipher', c_int),
]
assert sizeof(N10pem_ctx_st4DOLLAR_17E) == 4, sizeof(N10pem_ctx_st4DOLLAR_17E)
assert alignment(N10pem_ctx_st4DOLLAR_17E) == 4, alignment(N10pem_ctx_st4DOLLAR_17E)
pem_ctx_st._fields_ = [
    ('type', c_int),
    ('proc_type', N10pem_ctx_st4DOLLAR_16E),
    ('domain', STRING),
    ('DEK_info', N10pem_ctx_st4DOLLAR_17E),
    ('originator', POINTER(PEM_USER)),
    ('num_recipient', c_int),
    ('recipient', POINTER(POINTER(PEM_USER))),
    ('x509_chain', POINTER(STACK)),
    ('md', POINTER(EVP_MD)),
    ('md_enc', c_int),
    ('md_len', c_int),
    ('md_data', STRING),
    ('dec', POINTER(EVP_CIPHER)),
    ('key_len', c_int),
    ('key', POINTER(c_ubyte)),
    ('data_enc', c_int),
    ('data_len', c_int),
    ('data', POINTER(c_ubyte)),
]
assert sizeof(pem_ctx_st) == 76, sizeof(pem_ctx_st)
assert alignment(pem_ctx_st) == 4, alignment(pem_ctx_st)
PEM_CTX = pem_ctx_st
pem_password_cb = CFUNCTYPE(c_int, STRING, c_int, c_int, c_void_p)

# PKCS#7 content structures.
class pkcs7_issuer_and_serial_st(Structure):
    pass
pkcs7_issuer_and_serial_st._fields_ = [
    ('issuer', POINTER(X509_NAME)),
    ('serial', POINTER(ASN1_INTEGER)),
]
assert sizeof(pkcs7_issuer_and_serial_st) == 8, sizeof(pkcs7_issuer_and_serial_st)
assert alignment(pkcs7_issuer_and_serial_st) == 4, alignment(pkcs7_issuer_and_serial_st)
PKCS7_ISSUER_AND_SERIAL = pkcs7_issuer_and_serial_st
class pkcs7_signer_info_st(Structure):
    pass
pkcs7_signer_info_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)),
    ('digest_alg', POINTER(X509_ALGOR)),
    ('auth_attr', POINTER(STACK)),
    ('digest_enc_alg', POINTER(X509_ALGOR)),
    ('enc_digest', POINTER(ASN1_OCTET_STRING)),
    ('unauth_attr', POINTER(STACK)),
    ('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(pkcs7_signer_info_st) == 32, sizeof(pkcs7_signer_info_st)
assert alignment(pkcs7_signer_info_st) == 4, alignment(pkcs7_signer_info_st)
PKCS7_SIGNER_INFO = pkcs7_signer_info_st
class pkcs7_recip_info_st(Structure):
    pass
pkcs7_recip_info_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)),
    ('key_enc_algor', POINTER(X509_ALGOR)),
    ('enc_key', POINTER(ASN1_OCTET_STRING)),
    ('cert', POINTER(X509)),
]
assert sizeof(pkcs7_recip_info_st) == 20, sizeof(pkcs7_recip_info_st)
assert alignment(pkcs7_recip_info_st) == 4, alignment(pkcs7_recip_info_st)
PKCS7_RECIP_INFO = pkcs7_recip_info_st
class pkcs7_signed_st(Structure):
    pass
class pkcs7_st(Structure):
    pass
pkcs7_signed_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('md_algs', POINTER(STACK)),
    ('cert', POINTER(STACK)),
    ('crl', POINTER(STACK)),
    ('signer_info', POINTER(STACK)),
    ('contents', POINTER(pkcs7_st)),
]
assert sizeof(pkcs7_signed_st) == 24, sizeof(pkcs7_signed_st)
assert alignment(pkcs7_signed_st) == 4, alignment(pkcs7_signed_st)
PKCS7_SIGNED = pkcs7_signed_st
class pkcs7_enc_content_st(Structure):
    pass
pkcs7_enc_content_st._fields_ = [
    ('content_type', POINTER(ASN1_OBJECT)),
    ('algorithm', POINTER(X509_ALGOR)),
    ('enc_data', POINTER(ASN1_OCTET_STRING)),
    ('cipher', POINTER(EVP_CIPHER)),
]
assert sizeof(pkcs7_enc_content_st) == 16, sizeof(pkcs7_enc_content_st)
assert alignment(pkcs7_enc_content_st) == 4, alignment(pkcs7_enc_content_st)
PKCS7_ENC_CONTENT = pkcs7_enc_content_st
class pkcs7_enveloped_st(Structure):
    pass
pkcs7_enveloped_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('recipientinfo', POINTER(STACK)),
    ('enc_data', POINTER(PKCS7_ENC_CONTENT)),
]
assert sizeof(pkcs7_enveloped_st) == 12, sizeof(pkcs7_enveloped_st)
assert alignment(pkcs7_enveloped_st) == 4, alignment(pkcs7_enveloped_st)
PKCS7_ENVELOPE = pkcs7_enveloped_st
class pkcs7_signedandenveloped_st(Structure):
    pass
pkcs7_signedandenveloped_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('md_algs', POINTER(STACK)),
    ('cert', POINTER(STACK)),
    ('crl', POINTER(STACK)),
    ('signer_info', POINTER(STACK)),
    ('enc_data', POINTER(PKCS7_ENC_CONTENT)),
    ('recipientinfo', POINTER(STACK)),
]
assert sizeof(pkcs7_signedandenveloped_st) == 28, sizeof(pkcs7_signedandenveloped_st)
assert alignment(pkcs7_signedandenveloped_st) == 4, alignment(pkcs7_signedandenveloped_st)
PKCS7_SIGN_ENVELOPE = pkcs7_signedandenveloped_st
class pkcs7_digest_st(Structure):
    pass
pkcs7_digest_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('md', POINTER(X509_ALGOR)),
    ('contents', POINTER(pkcs7_st)),
    ('digest', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(pkcs7_digest_st) == 16, sizeof(pkcs7_digest_st)
assert alignment(pkcs7_digest_st) == 4, alignment(pkcs7_digest_st)
PKCS7_DIGEST = pkcs7_digest_st
class pkcs7_encrypted_st(Structure):
    pass
pkcs7_encrypted_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('enc_data', POINTER(PKCS7_ENC_CONTENT)),
]
assert sizeof(pkcs7_encrypted_st) == 8, sizeof(pkcs7_encrypted_st)
assert alignment(pkcs7_encrypted_st) == 4, alignment(pkcs7_encrypted_st)
PKCS7_ENCRYPT = pkcs7_encrypted_st
# Union of the possible PKCS#7 content payloads.
class N8pkcs7_st4DOLLAR_15E(Union):
    pass
N8pkcs7_st4DOLLAR_15E._fields_ = [
    ('ptr', STRING),
    ('data', POINTER(ASN1_OCTET_STRING)),
    ('sign', POINTER(PKCS7_SIGNED)),
    ('enveloped', POINTER(PKCS7_ENVELOPE)),
    ('signed_and_enveloped', POINTER(PKCS7_SIGN_ENVELOPE)),
    ('digest', POINTER(PKCS7_DIGEST)),
    ('encrypted', POINTER(PKCS7_ENCRYPT)),
    ('other', POINTER(ASN1_TYPE)),
]
assert sizeof(N8pkcs7_st4DOLLAR_15E) == 4, sizeof(N8pkcs7_st4DOLLAR_15E)
assert alignment(N8pkcs7_st4DOLLAR_15E) == 4, alignment(N8pkcs7_st4DOLLAR_15E)
pkcs7_st._fields_ = [
    ('asn1', POINTER(c_ubyte)),
    ('length', c_long),
    ('state', c_int),
    ('detached', c_int),
    ('type', POINTER(ASN1_OBJECT)),
    ('d', N8pkcs7_st4DOLLAR_15E),
]
assert sizeof(pkcs7_st) == 24, sizeof(pkcs7_st)
assert alignment(pkcs7_st) == 4, alignment(pkcs7_st)
PKCS7 = pkcs7_st

# RC2/RC4/RC5 key schedules. Note RC4's alignment is 1 (byte-packed).
class rc2_key_st(Structure):
    pass
rc2_key_st._fields_ = [
    ('data', c_uint * 64),
]
assert sizeof(rc2_key_st) == 256, sizeof(rc2_key_st)
assert alignment(rc2_key_st) == 4, alignment(rc2_key_st)
RC2_KEY = rc2_key_st
class rc4_key_st(Structure):
    pass
rc4_key_st._fields_ = [
    ('x', c_ubyte),
    ('y', c_ubyte),
    ('data', c_ubyte * 256),
]
assert sizeof(rc4_key_st) == 258, sizeof(rc4_key_st)
assert alignment(rc4_key_st) == 1, alignment(rc4_key_st)
RC4_KEY = rc4_key_st
class rc5_key_st(Structure):
    pass
rc5_key_st._fields_ = [
    ('rounds', c_int),
    ('data', c_ulong * 34),
]
assert sizeof(rc5_key_st) == 140, sizeof(rc5_key_st)
assert alignment(rc5_key_st) == 4, alignment(rc5_key_st)
RC5_32_KEY = rc5_key_st

# RIPEMD-160 digest context.
class RIPEMD160state_st(Structure):
    pass
RIPEMD160state_st._fields_ = [
    ('A', c_uint),
    ('B', c_uint),
    ('C', c_uint),
    ('D', c_uint),
    ('E', c_uint),
    ('Nl', c_uint),
    ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(RIPEMD160state_st) == 96, sizeof(RIPEMD160state_st)
assert alignment(RIPEMD160state_st) == 4, alignment(RIPEMD160state_st)
RIPEMD160_CTX = RIPEMD160state_st

# RSA: method table (rsa_meth_st) and key structure (rsa_st, forward-declared
# earlier for the EVP_PKEY union).
RSA = rsa_st
class rsa_meth_st(Structure):
    pass
rsa_meth_st._fields_ = [
    ('name', STRING),
    ('rsa_pub_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_pub_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_priv_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_priv_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(RSA))),
    ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
    ('init', CFUNCTYPE(c_int, POINTER(RSA))),
    ('finish', CFUNCTYPE(c_int, POINTER(RSA))),
    ('flags', c_int),
    ('app_data', STRING),
    ('rsa_sign', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), POINTER(c_uint), POINTER(RSA))),
    ('rsa_verify', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), c_uint, POINTER(RSA))),
]
assert sizeof(rsa_meth_st) == 52, sizeof(rsa_meth_st)
assert alignment(rsa_meth_st) == 4, alignment(rsa_meth_st)
RSA_METHOD = rsa_meth_st
rsa_st._fields_ = [
    ('pad', c_int),
    ('version', c_long),
    ('meth', POINTER(RSA_METHOD)),
    ('engine', POINTER(ENGINE)),
    ('n', POINTER(BIGNUM)),
    ('e', POINTER(BIGNUM)),
    ('d', POINTER(BIGNUM)),
    ('p', POINTER(BIGNUM)),
    ('q', POINTER(BIGNUM)),
    ('dmp1', POINTER(BIGNUM)),
    ('dmq1', POINTER(BIGNUM)),
    ('iqmp', POINTER(BIGNUM)),
    ('ex_data', CRYPTO_EX_DATA),
    ('references', c_int),
    ('flags', c_int),
    ('_method_mod_n', POINTER(BN_MONT_CTX)),
    ('_method_mod_p', POINTER(BN_MONT_CTX)),
    ('_method_mod_q', POINTER(BN_MONT_CTX)),
    ('bignum_data', STRING),
    ('blinding', POINTER(BN_BLINDING)),
]
assert sizeof(rsa_st) == 84, sizeof(rsa_st)
assert alignment(rsa_st) == 4, alignment(rsa_st)
openssl_fptr = CFUNCTYPE(None)

# SHA digest context.
class SHAstate_st(Structure):
    pass
SHAstate_st._fields_ = [
    ('h0', c_uint),
    ('h1', c_uint),
    ('h2', c_uint),
    ('h3', c_uint),
    ('h4', c_uint),
    ('Nl', c_uint),
    ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(SHAstate_st) == 96, sizeof(SHAstate_st)
assert alignment(SHAstate_st) == 4, alignment(SHAstate_st)
SHA_CTX = SHAstate_st

# SSL: forward-declared connection struct and the cipher description.
class ssl_st(Structure):
    pass
ssl_crock_st = POINTER(ssl_st)
class ssl_cipher_st(Structure):
    pass
ssl_cipher_st._fields_ = [
    ('valid', c_int),
    ('name', STRING),
    ('id', c_ulong),
    ('algorithms', c_ulong),
    ('algo_strength', c_ulong),
    ('algorithm2', c_ulong),
    ('strength_bits', c_int),
    ('alg_bits', c_int),
    ('mask', c_ulong),
    ('mask_strength', c_ulong),
]
assert sizeof(ssl_cipher_st) == 40, sizeof(ssl_cipher_st)
assert alignment(ssl_cipher_st) == 4, alignment(ssl_cipher_st)
SSL_CIPHER = ssl_cipher_st
SSL = ssl_st
class ssl_ctx_st(Structure):
    pass
SSL_CTX = ssl_ctx_st

# SSL method dispatch table (per-protocol-version function pointers).
class ssl_method_st(Structure):
    pass
class ssl3_enc_method(Structure):
    pass
ssl_method_st._fields_ = [
    ('version', c_int),
    ('ssl_new', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_clear', CFUNCTYPE(None, POINTER(SSL))),
    ('ssl_free', CFUNCTYPE(None, POINTER(SSL))),
    ('ssl_accept', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_connect', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_read', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
    ('ssl_peek', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
    ('ssl_write', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
    ('ssl_shutdown', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_renegotiate', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_renegotiate_check', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, c_long, c_void_p)),
    ('ssl_ctx_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, c_long, c_void_p)),
    ('get_cipher_by_char', CFUNCTYPE(POINTER(SSL_CIPHER), POINTER(c_ubyte))),
    ('put_cipher_by_char', CFUNCTYPE(c_int, POINTER(SSL_CIPHER), POINTER(c_ubyte))),
    ('ssl_pending', CFUNCTYPE(c_int, POINTER(SSL))),
    ('num_ciphers', CFUNCTYPE(c_int)),
    ('get_cipher', CFUNCTYPE(POINTER(SSL_CIPHER), c_uint)),
    ('get_ssl_method', CFUNCTYPE(POINTER(ssl_method_st), c_int)),
    ('get_timeout', CFUNCTYPE(c_long)),
    ('ssl3_enc', POINTER(ssl3_enc_method)),
    ('ssl_version', CFUNCTYPE(c_int)),
    ('ssl_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, CFUNCTYPE(None))),
    ('ssl_ctx_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, CFUNCTYPE(None))),
]
assert sizeof(ssl_method_st) == 100, sizeof(ssl_method_st)
assert alignment(ssl_method_st) == 4, alignment(ssl_method_st)
# ssl3_enc_method is left opaque in these bindings.
ssl3_enc_method._fields_ = [
]
SSL_METHOD = ssl_method_st

# SSL session (resumable handshake state).
class ssl_session_st(Structure):
    pass
class sess_cert_st(Structure):
    pass
ssl_session_st._fields_ = [
    ('ssl_version', c_int),
    ('key_arg_length', c_uint),
    ('key_arg', c_ubyte * 8),
    ('master_key_length', c_int),
    ('master_key', c_ubyte * 48),
    ('session_id_length', c_uint),
    ('session_id', c_ubyte * 32),
    ('sid_ctx_length', c_uint),
    ('sid_ctx', c_ubyte * 32),
    ('not_resumable', c_int),
    ('sess_cert', POINTER(sess_cert_st)),
    ('peer', POINTER(X509)),
    ('verify_result', c_long),
    ('references', c_int),
    ('timeout', c_long),
    ('time', c_long),
    ('compress_meth', c_int),
    ('cipher', POINTER(SSL_CIPHER)),
    ('cipher_id', c_ulong),
    ('ciphers', POINTER(STACK)),
    ('ex_data', CRYPTO_EX_DATA),
    ('prev', POINTER(ssl_session_st)),
    ('next', POINTER(ssl_session_st)),
]
assert sizeof(ssl_session_st) == 200, sizeof(ssl_session_st)
assert alignment(ssl_session_st) == 4, alignment(ssl_session_st)
# sess_cert_st is left opaque in these bindings.
sess_cert_st._fields_ = [
]
SSL_SESSION = ssl_session_st
GEN_SESSION_CB = CFUNCTYPE(c_int, POINTER(SSL), POINTER(c_ubyte), POINTER(c_uint))

# SSL compression method entry.
class ssl_comp_st(Structure):
    pass
ssl_comp_st._fields_ = [
    ('id', c_int),
    ('name', STRING),
    ('method', POINTER(COMP_METHOD)),
]
assert sizeof(ssl_comp_st) == 12, sizeof(ssl_comp_st)
assert alignment(ssl_comp_st) == 4, alignment(ssl_comp_st)
SSL_COMP = ssl_comp_st

# Anonymous 'stats' sub-struct of ssl_ctx_st (session-cache counters).
class N10ssl_ctx_st4DOLLAR_18E(Structure):
    pass
N10ssl_ctx_st4DOLLAR_18E._fields_ = [
    ('sess_connect', c_int),
    ('sess_connect_renegotiate', c_int),
    ('sess_connect_good', c_int),
    ('sess_accept', c_int),
    ('sess_accept_renegotiate', c_int),
    ('sess_accept_good', c_int),
    ('sess_miss', c_int),
    ('sess_timeout', c_int),
    ('sess_cache_full', c_int),
    ('sess_hit', c_int),
    ('sess_cb_hit', c_int),
]
assert sizeof(N10ssl_ctx_st4DOLLAR_18E) == 44, sizeof(N10ssl_ctx_st4DOLLAR_18E)
assert alignment(N10ssl_ctx_st4DOLLAR_18E) == 4, alignment(N10ssl_ctx_st4DOLLAR_18E)
class cert_st(Structure):
    pass

# SSL context: shared configuration for SSL connections.
ssl_ctx_st._fields_ = [
    ('method', POINTER(SSL_METHOD)),
    ('cipher_list', POINTER(STACK)),
    ('cipher_list_by_id', POINTER(STACK)),
    ('cert_store', POINTER(x509_store_st)),
    ('sessions', POINTER(lhash_st)),
    ('session_cache_size', c_ulong),
    ('session_cache_head', POINTER(ssl_session_st)),
    ('session_cache_tail', POINTER(ssl_session_st)),
    ('session_cache_mode', c_int),
    ('session_timeout', c_long),
    ('new_session_cb', CFUNCTYPE(c_int, POINTER(ssl_st), POINTER(SSL_SESSION))),
    ('remove_session_cb', CFUNCTYPE(None, POINTER(ssl_ctx_st), POINTER(SSL_SESSION))),
    ('get_session_cb', CFUNCTYPE(POINTER(SSL_SESSION), POINTER(ssl_st), POINTER(c_ubyte), c_int, POINTER(c_int))),
    ('stats', N10ssl_ctx_st4DOLLAR_18E),
    ('references', c_int),
    ('app_verify_callback', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), c_void_p)),
    ('app_verify_arg', c_void_p),
    ('default_passwd_callback', POINTER(pem_password_cb)),
    ('default_passwd_callback_userdata', c_void_p),
    ('client_cert_cb', CFUNCTYPE(c_int, POINTER(SSL), POINTER(POINTER(X509)), POINTER(POINTER(EVP_PKEY)))),
    ('ex_data', CRYPTO_EX_DATA),
    ('rsa_md5', POINTER(EVP_MD)),
    ('md5', POINTER(EVP_MD)),
    ('sha1', POINTER(EVP_MD)),
    ('extra_certs', POINTER(STACK)),
    ('comp_methods', POINTER(STACK)),
    ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)),
    ('client_CA', POINTER(STACK)),
    ('options', c_ulong),
    ('mode', c_ulong),
    ('max_cert_list', c_long),
    ('cert', POINTER(cert_st)),
    ('read_ahead', c_int),
    ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)),
    ('msg_callback_arg', c_void_p),
    ('verify_mode', c_int),
    ('verify_depth', c_int),
    ('sid_ctx_length', c_uint),
    ('sid_ctx', c_ubyte * 32),
    ('default_verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
    ('generate_session_id', GEN_SESSION_CB),
    ('purpose', c_int),
    ('trust', c_int),
    ('quiet_shutdown', c_int),
]
assert sizeof(ssl_ctx_st) == 248, sizeof(ssl_ctx_st)
assert alignment(ssl_ctx_st) == 4, alignment(ssl_ctx_st)
# cert_st is left opaque in these bindings.
cert_st._fields_ = [
]

# The SSL connection object itself.
class ssl2_state_st(Structure):
    pass
class ssl3_state_st(Structure):
    pass
ssl_st._fields_ = [
    ('version', c_int),
    ('type', c_int),
    ('method', POINTER(SSL_METHOD)),
    ('rbio', POINTER(BIO)),
    ('wbio', POINTER(BIO)),
    ('bbio', POINTER(BIO)),
    ('rwstate', c_int),
    ('in_handshake', c_int),
    ('handshake_func', CFUNCTYPE(c_int)),
    ('server', c_int),
    ('new_session', c_int),
    ('quiet_shutdown', c_int),
    ('shutdown', c_int),
    ('state', c_int),
    ('rstate', c_int),
    ('init_buf', POINTER(BUF_MEM)),
    ('init_msg', c_void_p),
    ('init_num', c_int),
    ('init_off', c_int),
    ('packet', POINTER(c_ubyte)),
    ('packet_length', c_uint),
    ('s2', POINTER(ssl2_state_st)),
    ('s3', POINTER(ssl3_state_st)),
    ('read_ahead', c_int),
    ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)),
    ('msg_callback_arg', c_void_p),
    ('hit', c_int),
    ('purpose', c_int),
    ('trust', c_int),
    ('cipher_list', POINTER(STACK)),
    ('cipher_list_by_id', POINTER(STACK)),
    ('enc_read_ctx', POINTER(EVP_CIPHER_CTX)),
    ('read_hash', POINTER(EVP_MD)),
    ('expand', POINTER(COMP_CTX)),
    ('enc_write_ctx', POINTER(EVP_CIPHER_CTX)),
    ('write_hash', POINTER(EVP_MD)),
    ('compress', POINTER(COMP_CTX)),
    ('cert', POINTER(cert_st)),
    ('sid_ctx_length', c_uint),
    ('sid_ctx', c_ubyte * 32),
    ('session', POINTER(SSL_SESSION)),
    ('generate_session_id', GEN_SESSION_CB),
    ('verify_mode', c_int),
    ('verify_depth', c_int),
    ('verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
    ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)),
    ('error', c_int),
    ('error_code', c_int),
    ('ctx', POINTER(SSL_CTX)),
    ('debug', c_int),
    ('verify_result', c_long),
    ('ex_data', CRYPTO_EX_DATA),
    ('client_CA', POINTER(STACK)),
    ('references', c_int),
    ('options', c_ulong),
    ('mode', c_ulong),
    ('max_cert_list', c_long),
    ('first_packet', c_int),
    ('client_version', c_int),
]
assert sizeof(ssl_st) == 268, sizeof(ssl_st)
assert alignment(ssl_st) == 4, alignment(ssl_st)

# SSLv2 protocol state ('tmp' sub-struct first, then the state itself).
class N13ssl2_state_st4DOLLAR_19E(Structure):
    pass
N13ssl2_state_st4DOLLAR_19E._fields_ = [
    ('conn_id_length', c_uint),
    ('cert_type', c_uint),
    ('cert_length', c_uint),
    ('csl', c_uint),
    ('clear', c_uint),
    ('enc', c_uint),
    ('ccl', c_ubyte * 32),
    ('cipher_spec_length', c_uint),
    ('session_id_length', c_uint),
    ('clen', c_uint),
    ('rlen', c_uint),
]
assert sizeof(N13ssl2_state_st4DOLLAR_19E) == 72, sizeof(N13ssl2_state_st4DOLLAR_19E)
assert alignment(N13ssl2_state_st4DOLLAR_19E) == 4, alignment(N13ssl2_state_st4DOLLAR_19E)
ssl2_state_st._fields_ = [
    ('three_byte_header', c_int),
    ('clear_text', c_int),
    ('escape', c_int),
    ('ssl2_rollback', c_int),
    ('wnum', c_uint),
    ('wpend_tot', c_int),
    ('wpend_buf', POINTER(c_ubyte)),
    ('wpend_off', c_int),
    ('wpend_len', c_int),
    ('wpend_ret', c_int),
    ('rbuf_left', c_int),
    ('rbuf_offs', c_int),
    ('rbuf', POINTER(c_ubyte)),
    ('wbuf', POINTER(c_ubyte)),
    ('write_ptr', POINTER(c_ubyte)),
    ('padding', c_uint),
    ('rlength', c_uint),
    ('ract_data_length', c_int),
    ('wlength', c_uint),
    ('wact_data_length', c_int),
    ('ract_data', POINTER(c_ubyte)),
    ('wact_data', POINTER(c_ubyte)),
    ('mac_data', POINTER(c_ubyte)),
    ('read_key', POINTER(c_ubyte)),
    ('write_key', POINTER(c_ubyte)),
    ('challenge_length', c_uint),
    ('challenge', c_ubyte * 32),
    ('conn_id_length', c_uint),
    ('conn_id', c_ubyte * 16),
    ('key_material_length', c_uint),
    ('key_material', c_ubyte * 48),
    ('read_sequence', c_ulong),
    ('write_sequence', c_ulong),
    ('tmp', N13ssl2_state_st4DOLLAR_19E),
]
assert sizeof(ssl2_state_st) == 288, sizeof(ssl2_state_st)
assert alignment(ssl2_state_st) == 4, alignment(ssl2_state_st)
SSL2_STATE = ssl2_state_st

# SSLv3 record and buffer.
class ssl3_record_st(Structure):
    pass
ssl3_record_st._fields_ = [
    ('type', c_int),
    ('length', c_uint),
    ('off', c_uint),
    ('data', POINTER(c_ubyte)),
    ('input', POINTER(c_ubyte)),
    ('comp', POINTER(c_ubyte)),
]
assert sizeof(ssl3_record_st) == 24, sizeof(ssl3_record_st)
assert alignment(ssl3_record_st) == 4, alignment(ssl3_record_st)
SSL3_RECORD = ssl3_record_st
class ssl3_buffer_st(Structure):
    pass
# size_t alias comes from the Darwin typedef imported earlier in the file.
size_t = __darwin_size_t
ssl3_buffer_st._fields_ = [
    ('buf', POINTER(c_ubyte)),
    ('len', size_t),
    ('offset', c_int),
    ('left', c_int),
]
assert sizeof(ssl3_buffer_st) == 16, sizeof(ssl3_buffer_st)
assert alignment(ssl3_buffer_st) == 4, alignment(ssl3_buffer_st)
SSL3_BUFFER = ssl3_buffer_st

# SSLv3 state ('tmp' handshake sub-struct first, then the state itself).
class N13ssl3_state_st4DOLLAR_20E(Structure):
    pass
N13ssl3_state_st4DOLLAR_20E._fields_ = [
    ('cert_verify_md', c_ubyte * 72),
    ('finish_md', c_ubyte * 72),
    ('finish_md_len', c_int),
    ('peer_finish_md', c_ubyte * 72),
    ('peer_finish_md_len', c_int),
    ('message_size', c_ulong),
    ('message_type', c_int),
    ('new_cipher', POINTER(SSL_CIPHER)),
    ('dh', POINTER(DH)),
    ('next_state', c_int),
    ('reuse_message', c_int),
    ('cert_req', c_int),
    ('ctype_num', c_int),
    ('ctype', c_char * 7),
    ('ca_names', POINTER(STACK)),
    ('use_rsa_tmp', c_int),
    ('key_block_length', c_int),
    ('key_block', POINTER(c_ubyte)),
    ('new_sym_enc', POINTER(EVP_CIPHER)),
    ('new_hash', POINTER(EVP_MD)),
    ('new_compression', POINTER(SSL_COMP)),
    ('cert_request', c_int),
]
assert sizeof(N13ssl3_state_st4DOLLAR_20E) == 296, sizeof(N13ssl3_state_st4DOLLAR_20E)
assert alignment(N13ssl3_state_st4DOLLAR_20E) == 4, alignment(N13ssl3_state_st4DOLLAR_20E)
ssl3_state_st._fields_ = [
    ('flags', c_long),
    ('delay_buf_pop_ret', c_int),
    ('read_sequence', c_ubyte * 8),
    ('read_mac_secret', c_ubyte * 36),
    ('write_sequence', c_ubyte * 8),
    ('write_mac_secret', c_ubyte * 36),
    ('server_random', c_ubyte * 32),
    ('client_random', c_ubyte * 32),
    ('need_empty_fragments', c_int),
    ('empty_fragment_done', c_int),
    ('rbuf', SSL3_BUFFER),
    ('wbuf', SSL3_BUFFER),
    ('rrec', SSL3_RECORD),
    ('wrec', SSL3_RECORD),
    ('alert_fragment', c_ubyte * 2),
    ('alert_fragment_len', c_uint),
    ('handshake_fragment', c_ubyte * 4),
    ('handshake_fragment_len', c_uint),
    ('wnum', c_uint),
    ('wpend_tot', c_int),
    ('wpend_type', c_int),
    ('wpend_ret', c_int),
    ('wpend_buf', POINTER(c_ubyte)),
    ('finish_dgst1', EVP_MD_CTX),
    ('finish_dgst2', EVP_MD_CTX),
    ('change_cipher_spec', c_int),
    ('warn_alert', c_int),
    ('fatal_alert', c_int),
    ('alert_dispatch', c_int),
    ('send_alert', c_ubyte * 2),
    ('renegotiate', c_int),
    ('total_renegotiations', c_int),
    ('num_renegotiations', c_int),
    ('in_read_app_data', c_int),
    ('tmp', N13ssl3_state_st4DOLLAR_20E),
]
assert sizeof(ssl3_state_st) == 648, sizeof(ssl3_state_st)
assert alignment(ssl3_state_st) == 4, alignment(ssl3_state_st)
SSL3_STATE = ssl3_state_st

# Generic OpenSSL STACK (stack_st was forward-declared earlier in the file).
stack_st._fields_ = [
    ('num', c_int),
    ('data', POINTER(STRING)),
    ('sorted', c_int),
    ('num_alloc', c_int),
    ('comp', CFUNCTYPE(c_int, POINTER(STRING), POINTER(STRING))),
]
assert sizeof(stack_st) == 20, sizeof(stack_st)
assert alignment(stack_st) == 4, alignment(stack_st)

# UI structures, all left opaque in these bindings.
class ui_st(Structure):
    pass
ui_st._fields_ = [
]
UI = ui_st
class ui_method_st(Structure):
    pass
ui_method_st._fields_ = [
]
UI_METHOD = ui_method_st
class ui_string_st(Structure):
    pass
ui_string_st._fields_ = [
]
UI_STRING = ui_string_st

# values for enumeration 'UI_string_types'
UI_string_types = c_int # enum

# X.509 object-registry entry.
class X509_objects_st(Structure):
    pass
X509_objects_st._fields_ = [
    ('nid', c_int),
    ('a2i', CFUNCTYPE(c_int)),
    ('i2a', CFUNCTYPE(c_int)),
]
assert sizeof(X509_objects_st) == 12, sizeof(X509_objects_st)
assert alignment(X509_objects_st) == 4, alignment(X509_objects_st)
X509_OBJECTS = X509_objects_st

# Field lists for the X.509 structures forward-declared earlier.
X509_algor_st._fields_ = [
    ('algorithm', POINTER(ASN1_OBJECT)),
    ('parameter', POINTER(ASN1_TYPE)),
]
assert sizeof(X509_algor_st) == 8, sizeof(X509_algor_st)
assert alignment(X509_algor_st) == 4, alignment(X509_algor_st)
class X509_val_st(Structure):
    pass
X509_val_st._fields_ = [
    ('notBefore', POINTER(ASN1_TIME)),
    ('notAfter', POINTER(ASN1_TIME)),
]
assert sizeof(X509_val_st) == 8, sizeof(X509_val_st)
assert alignment(X509_val_st) == 4, alignment(X509_val_st)
X509_VAL = X509_val_st
class X509_pubkey_st(Structure):
    pass
X509_pubkey_st._fields_ = [
    ('algor', POINTER(X509_ALGOR)),
    ('public_key', POINTER(ASN1_BIT_STRING)),
    ('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(X509_pubkey_st) == 12, sizeof(X509_pubkey_st)
assert alignment(X509_pubkey_st) == 4, alignment(X509_pubkey_st)
X509_PUBKEY = X509_pubkey_st
class X509_sig_st(Structure):
    pass
X509_sig_st._fields_ = [
    ('algor', POINTER(X509_ALGOR)),
    ('digest', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(X509_sig_st) == 8, sizeof(X509_sig_st)
assert alignment(X509_sig_st) == 4, alignment(X509_sig_st)
X509_SIG = X509_sig_st
class X509_name_entry_st(Structure):
    pass
# X.509 distinguished-name entries and the name container.
X509_name_entry_st._fields_ = [
    ('object', POINTER(ASN1_OBJECT)),
    ('value', POINTER(ASN1_STRING)),
    ('set', c_int),
    ('size', c_int),
]
assert sizeof(X509_name_entry_st) == 16, sizeof(X509_name_entry_st)
assert alignment(X509_name_entry_st) == 4, alignment(X509_name_entry_st)
X509_NAME_ENTRY = X509_name_entry_st
X509_name_st._fields_ = [
    ('entries', POINTER(STACK)),
    ('modified', c_int),
    ('bytes', POINTER(BUF_MEM)),
    ('hash', c_ulong),
]
assert sizeof(X509_name_st) == 16, sizeof(X509_name_st)
assert alignment(X509_name_st) == 4, alignment(X509_name_st)

# X.509 extension.
class X509_extension_st(Structure):
    pass
X509_extension_st._fields_ = [
    ('object', POINTER(ASN1_OBJECT)),
    ('critical', ASN1_BOOLEAN),
    ('value', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(X509_extension_st) == 12, sizeof(X509_extension_st)
assert alignment(X509_extension_st) == 4, alignment(X509_extension_st)
X509_EXTENSION = X509_extension_st

# X.509 attribute: 'value' is a union; 'single' selects set vs. single form.
class x509_attributes_st(Structure):
    pass
class N18x509_attributes_st4DOLLAR_13E(Union):
    pass
N18x509_attributes_st4DOLLAR_13E._fields_ = [
    ('ptr', STRING),
    ('set', POINTER(STACK)),
    ('single', POINTER(ASN1_TYPE)),
]
assert sizeof(N18x509_attributes_st4DOLLAR_13E) == 4, sizeof(N18x509_attributes_st4DOLLAR_13E)
assert alignment(N18x509_attributes_st4DOLLAR_13E) == 4, alignment(N18x509_attributes_st4DOLLAR_13E)
x509_attributes_st._fields_ = [
    ('object', POINTER(ASN1_OBJECT)),
    ('single', c_int),
    ('value', N18x509_attributes_st4DOLLAR_13E),
]
assert sizeof(x509_attributes_st) == 12, sizeof(x509_attributes_st)
assert alignment(x509_attributes_st) == 4, alignment(x509_attributes_st)
X509_ATTRIBUTE = x509_attributes_st

# Certificate-request info block.
class X509_req_info_st(Structure):
    pass
X509_req_info_st._fields_ = [
    ('enc', ASN1_ENCODING),
    ('version', POINTER(ASN1_INTEGER)),
    ('subject', POINTER(X509_NAME)),
    ('pubkey', POINTER(X509_PUBKEY)),
    ('attributes', POINTER(STACK)),
]
assert sizeof(X509_req_info_st) == 28, sizeof(X509_req_info_st)
assert alignment(X509_req_info_st) == 4, alignment(X509_req_info_st)
X509_REQ_INFO = X509_req_info_st class X509_req_st(Structure): pass X509_req_st._fields_ = [ ('req_info', POINTER(X509_REQ_INFO)), ('sig_alg', POINTER(X509_ALGOR)), ('signature', POINTER(ASN1_BIT_STRING)), ('references', c_int), ] assert sizeof(X509_req_st) == 16, sizeof(X509_req_st) assert alignment(X509_req_st) == 4, alignment(X509_req_st) X509_REQ = X509_req_st class x509_cinf_st(Structure): pass x509_cinf_st._fields_ = [ ('version', POINTER(ASN1_INTEGER)), ('serialNumber', POINTER(ASN1_INTEGER)), ('signature', POINTER(X509_ALGOR)), ('issuer', POINTER(X509_NAME)), ('validity', POINTER(X509_VAL)), ('subject', POINTER(X509_NAME)), ('key', POINTER(X509_PUBKEY)), ('issuerUID', POINTER(ASN1_BIT_STRING)), ('subjectUID', POINTER(ASN1_BIT_STRING)), ('extensions', POINTER(STACK)), ] assert sizeof(x509_cinf_st) == 40, sizeof(x509_cinf_st) assert alignment(x509_cinf_st) == 4, alignment(x509_cinf_st) X509_CINF = x509_cinf_st class x509_cert_aux_st(Structure): pass x509_cert_aux_st._fields_ = [ ('trust', POINTER(STACK)), ('reject', POINTER(STACK)), ('alias', POINTER(ASN1_UTF8STRING)), ('keyid', POINTER(ASN1_OCTET_STRING)), ('other', POINTER(STACK)), ] assert sizeof(x509_cert_aux_st) == 20, sizeof(x509_cert_aux_st) assert alignment(x509_cert_aux_st) == 4, alignment(x509_cert_aux_st) X509_CERT_AUX = x509_cert_aux_st class AUTHORITY_KEYID_st(Structure): pass x509_st._fields_ = [ ('cert_info', POINTER(X509_CINF)), ('sig_alg', POINTER(X509_ALGOR)), ('signature', POINTER(ASN1_BIT_STRING)), ('valid', c_int), ('references', c_int), ('name', STRING), ('ex_data', CRYPTO_EX_DATA), ('ex_pathlen', c_long), ('ex_flags', c_ulong), ('ex_kusage', c_ulong), ('ex_xkusage', c_ulong), ('ex_nscert', c_ulong), ('skid', POINTER(ASN1_OCTET_STRING)), ('akid', POINTER(AUTHORITY_KEYID_st)), ('sha1_hash', c_ubyte * 20), ('aux', POINTER(X509_CERT_AUX)), ] assert sizeof(x509_st) == 84, sizeof(x509_st) assert alignment(x509_st) == 4, alignment(x509_st) AUTHORITY_KEYID_st._fields_ = [ ] class 
x509_trust_st(Structure): pass x509_trust_st._fields_ = [ ('trust', c_int), ('flags', c_int), ('check_trust', CFUNCTYPE(c_int, POINTER(x509_trust_st), POINTER(X509), c_int)), ('name', STRING), ('arg1', c_int), ('arg2', c_void_p), ] assert sizeof(x509_trust_st) == 24, sizeof(x509_trust_st) assert alignment(x509_trust_st) == 4, alignment(x509_trust_st) X509_TRUST = x509_trust_st class X509_revoked_st(Structure): pass X509_revoked_st._fields_ = [ ('serialNumber', POINTER(ASN1_INTEGER)), ('revocationDate', POINTER(ASN1_TIME)), ('extensions', POINTER(STACK)), ('sequence', c_int), ] assert sizeof(X509_revoked_st) == 16, sizeof(X509_revoked_st) assert alignment(X509_revoked_st) == 4, alignment(X509_revoked_st) X509_REVOKED = X509_revoked_st class X509_crl_info_st(Structure): pass X509_crl_info_st._fields_ = [ ('version', POINTER(ASN1_INTEGER)), ('sig_alg', POINTER(X509_ALGOR)), ('issuer', POINTER(X509_NAME)), ('lastUpdate', POINTER(ASN1_TIME)), ('nextUpdate', POINTER(ASN1_TIME)), ('revoked', POINTER(STACK)), ('extensions', POINTER(STACK)), ('enc', ASN1_ENCODING), ] assert sizeof(X509_crl_info_st) == 40, sizeof(X509_crl_info_st) assert alignment(X509_crl_info_st) == 4, alignment(X509_crl_info_st) X509_CRL_INFO = X509_crl_info_st X509_crl_st._fields_ = [ ('crl', POINTER(X509_CRL_INFO)), ('sig_alg', POINTER(X509_ALGOR)), ('signature', POINTER(ASN1_BIT_STRING)), ('references', c_int), ] assert sizeof(X509_crl_st) == 16, sizeof(X509_crl_st) assert alignment(X509_crl_st) == 4, alignment(X509_crl_st) class private_key_st(Structure): pass private_key_st._fields_ = [ ('version', c_int), ('enc_algor', POINTER(X509_ALGOR)), ('enc_pkey', POINTER(ASN1_OCTET_STRING)), ('dec_pkey', POINTER(EVP_PKEY)), ('key_length', c_int), ('key_data', STRING), ('key_free', c_int), ('cipher', EVP_CIPHER_INFO), ('references', c_int), ] assert sizeof(private_key_st) == 52, sizeof(private_key_st) assert alignment(private_key_st) == 4, alignment(private_key_st) X509_PKEY = private_key_st class 
X509_info_st(Structure): pass X509_info_st._fields_ = [ ('x509', POINTER(X509)), ('crl', POINTER(X509_CRL)), ('x_pkey', POINTER(X509_PKEY)), ('enc_cipher', EVP_CIPHER_INFO), ('enc_len', c_int), ('enc_data', STRING), ('references', c_int), ] assert sizeof(X509_info_st) == 44, sizeof(X509_info_st) assert alignment(X509_info_st) == 4, alignment(X509_info_st) X509_INFO = X509_info_st class Netscape_spkac_st(Structure): pass Netscape_spkac_st._fields_ = [ ('pubkey', POINTER(X509_PUBKEY)), ('challenge', POINTER(ASN1_IA5STRING)), ] assert sizeof(Netscape_spkac_st) == 8, sizeof(Netscape_spkac_st) assert alignment(Netscape_spkac_st) == 4, alignment(Netscape_spkac_st) NETSCAPE_SPKAC = Netscape_spkac_st class Netscape_spki_st(Structure): pass Netscape_spki_st._fields_ = [ ('spkac', POINTER(NETSCAPE_SPKAC)), ('sig_algor', POINTER(X509_ALGOR)), ('signature', POINTER(ASN1_BIT_STRING)), ] assert sizeof(Netscape_spki_st) == 12, sizeof(Netscape_spki_st) assert alignment(Netscape_spki_st) == 4, alignment(Netscape_spki_st) NETSCAPE_SPKI = Netscape_spki_st class Netscape_certificate_sequence(Structure): pass Netscape_certificate_sequence._fields_ = [ ('type', POINTER(ASN1_OBJECT)), ('certs', POINTER(STACK)), ] assert sizeof(Netscape_certificate_sequence) == 8, sizeof(Netscape_certificate_sequence) assert alignment(Netscape_certificate_sequence) == 4, alignment(Netscape_certificate_sequence) NETSCAPE_CERT_SEQUENCE = Netscape_certificate_sequence class PBEPARAM_st(Structure): pass PBEPARAM_st._fields_ = [ ('salt', POINTER(ASN1_OCTET_STRING)), ('iter', POINTER(ASN1_INTEGER)), ] assert sizeof(PBEPARAM_st) == 8, sizeof(PBEPARAM_st) assert alignment(PBEPARAM_st) == 4, alignment(PBEPARAM_st) PBEPARAM = PBEPARAM_st class PBE2PARAM_st(Structure): pass PBE2PARAM_st._fields_ = [ ('keyfunc', POINTER(X509_ALGOR)), ('encryption', POINTER(X509_ALGOR)), ] assert sizeof(PBE2PARAM_st) == 8, sizeof(PBE2PARAM_st) assert alignment(PBE2PARAM_st) == 4, alignment(PBE2PARAM_st) PBE2PARAM = PBE2PARAM_st class 
PBKDF2PARAM_st(Structure): pass PBKDF2PARAM_st._fields_ = [ ('salt', POINTER(ASN1_TYPE)), ('iter', POINTER(ASN1_INTEGER)), ('keylength', POINTER(ASN1_INTEGER)), ('prf', POINTER(X509_ALGOR)), ] assert sizeof(PBKDF2PARAM_st) == 16, sizeof(PBKDF2PARAM_st) assert alignment(PBKDF2PARAM_st) == 4, alignment(PBKDF2PARAM_st) PBKDF2PARAM = PBKDF2PARAM_st class pkcs8_priv_key_info_st(Structure): pass pkcs8_priv_key_info_st._fields_ = [ ('broken', c_int), ('version', POINTER(ASN1_INTEGER)), ('pkeyalg', POINTER(X509_ALGOR)), ('pkey', POINTER(ASN1_TYPE)), ('attributes', POINTER(STACK)), ] assert sizeof(pkcs8_priv_key_info_st) == 20, sizeof(pkcs8_priv_key_info_st) assert alignment(pkcs8_priv_key_info_st) == 4, alignment(pkcs8_priv_key_info_st) PKCS8_PRIV_KEY_INFO = pkcs8_priv_key_info_st class x509_hash_dir_st(Structure): pass x509_hash_dir_st._fields_ = [ ('num_dirs', c_int), ('dirs', POINTER(STRING)), ('dirs_type', POINTER(c_int)), ('num_dirs_alloced', c_int), ] assert sizeof(x509_hash_dir_st) == 16, sizeof(x509_hash_dir_st) assert alignment(x509_hash_dir_st) == 4, alignment(x509_hash_dir_st) X509_HASH_DIR_CTX = x509_hash_dir_st class x509_file_st(Structure): pass x509_file_st._fields_ = [ ('num_paths', c_int), ('num_alloced', c_int), ('paths', POINTER(STRING)), ('path_type', POINTER(c_int)), ] assert sizeof(x509_file_st) == 16, sizeof(x509_file_st) assert alignment(x509_file_st) == 4, alignment(x509_file_st) X509_CERT_FILE_CTX = x509_file_st class x509_object_st(Structure): pass class N14x509_object_st4DOLLAR_14E(Union): pass N14x509_object_st4DOLLAR_14E._fields_ = [ ('ptr', STRING), ('x509', POINTER(X509)), ('crl', POINTER(X509_CRL)), ('pkey', POINTER(EVP_PKEY)), ] assert sizeof(N14x509_object_st4DOLLAR_14E) == 4, sizeof(N14x509_object_st4DOLLAR_14E) assert alignment(N14x509_object_st4DOLLAR_14E) == 4, alignment(N14x509_object_st4DOLLAR_14E) x509_object_st._fields_ = [ ('type', c_int), ('data', N14x509_object_st4DOLLAR_14E), ] assert sizeof(x509_object_st) == 8, 
sizeof(x509_object_st) assert alignment(x509_object_st) == 4, alignment(x509_object_st) X509_OBJECT = x509_object_st class x509_lookup_st(Structure): pass X509_LOOKUP = x509_lookup_st class x509_lookup_method_st(Structure): pass x509_lookup_method_st._fields_ = [ ('name', STRING), ('new_item', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), ('free', CFUNCTYPE(None, POINTER(X509_LOOKUP))), ('init', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), ('shutdown', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), ('ctrl', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_long, POINTER(STRING))), ('get_by_subject', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(X509_OBJECT))), ('get_by_issuer_serial', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(ASN1_INTEGER), POINTER(X509_OBJECT))), ('get_by_fingerprint', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(c_ubyte), c_int, POINTER(X509_OBJECT))), ('get_by_alias', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_int, POINTER(X509_OBJECT))), ] assert sizeof(x509_lookup_method_st) == 40, sizeof(x509_lookup_method_st) assert alignment(x509_lookup_method_st) == 4, alignment(x509_lookup_method_st) X509_LOOKUP_METHOD = x509_lookup_method_st x509_store_st._fields_ = [ ('cache', c_int), ('objs', POINTER(STACK)), ('get_cert_methods', POINTER(STACK)), ('flags', c_ulong), ('purpose', c_int), ('trust', c_int), ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))), ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))), ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))), ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))), ('cert_crl', CFUNCTYPE(c_int, 
POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))), ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('ex_data', CRYPTO_EX_DATA), ('references', c_int), ('depth', c_int), ] assert sizeof(x509_store_st) == 76, sizeof(x509_store_st) assert alignment(x509_store_st) == 4, alignment(x509_store_st) x509_lookup_st._fields_ = [ ('init', c_int), ('skip', c_int), ('method', POINTER(X509_LOOKUP_METHOD)), ('method_data', STRING), ('store_ctx', POINTER(X509_STORE)), ] assert sizeof(x509_lookup_st) == 20, sizeof(x509_lookup_st) assert alignment(x509_lookup_st) == 4, alignment(x509_lookup_st) time_t = __darwin_time_t x509_store_ctx_st._fields_ = [ ('ctx', POINTER(X509_STORE)), ('current_method', c_int), ('cert', POINTER(X509)), ('untrusted', POINTER(STACK)), ('purpose', c_int), ('trust', c_int), ('check_time', time_t), ('flags', c_ulong), ('other_ctx', c_void_p), ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))), ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))), ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))), ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))), ('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))), ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('depth', c_int), ('valid', c_int), ('last_untrusted', c_int), ('chain', POINTER(STACK)), ('error_depth', c_int), ('error', c_int), ('current_cert', POINTER(X509)), ('current_issuer', POINTER(X509)), ('current_crl', POINTER(X509_CRL)), ('ex_data', CRYPTO_EX_DATA), ] assert sizeof(x509_store_ctx_st) == 116, sizeof(x509_store_ctx_st) assert alignment(x509_store_ctx_st) == 4, alignment(x509_store_ctx_st) va_list = 
__darwin_va_list __darwin_off_t = __int64_t fpos_t = __darwin_off_t class __sbuf(Structure): pass __sbuf._fields_ = [ ('_base', POINTER(c_ubyte)), ('_size', c_int), ] assert sizeof(__sbuf) == 8, sizeof(__sbuf) assert alignment(__sbuf) == 4, alignment(__sbuf) class __sFILEX(Structure): pass __sFILEX._fields_ = [ ] class __sFILE(Structure): pass __sFILE._pack_ = 4 __sFILE._fields_ = [ ('_p', POINTER(c_ubyte)), ('_r', c_int), ('_w', c_int), ('_flags', c_short), ('_file', c_short), ('_bf', __sbuf), ('_lbfsize', c_int), ('_cookie', c_void_p), ('_close', CFUNCTYPE(c_int, c_void_p)), ('_read', CFUNCTYPE(c_int, c_void_p, STRING, c_int)), ('_seek', CFUNCTYPE(fpos_t, c_void_p, c_longlong, c_int)), ('_write', CFUNCTYPE(c_int, c_void_p, STRING, c_int)), ('_ub', __sbuf), ('_extra', POINTER(__sFILEX)), ('_ur', c_int), ('_ubuf', c_ubyte * 3), ('_nbuf', c_ubyte * 1), ('_lb', __sbuf), ('_blksize', c_int), ('_offset', fpos_t), ] assert sizeof(__sFILE) == 88, sizeof(__sFILE) assert alignment(__sFILE) == 4, alignment(__sFILE) FILE = __sFILE ct_rune_t = __darwin_ct_rune_t rune_t = __darwin_rune_t class div_t(Structure): pass div_t._fields_ = [ ('quot', c_int), ('rem', c_int), ] assert sizeof(div_t) == 8, sizeof(div_t) assert alignment(div_t) == 4, alignment(div_t) class ldiv_t(Structure): pass ldiv_t._fields_ = [ ('quot', c_long), ('rem', c_long), ] assert sizeof(ldiv_t) == 8, sizeof(ldiv_t) assert alignment(ldiv_t) == 4, alignment(ldiv_t) class lldiv_t(Structure): pass lldiv_t._pack_ = 4 lldiv_t._fields_ = [ ('quot', c_longlong), ('rem', c_longlong), ] assert sizeof(lldiv_t) == 16, sizeof(lldiv_t) assert alignment(lldiv_t) == 4, alignment(lldiv_t) __darwin_dev_t = __int32_t dev_t = __darwin_dev_t __darwin_mode_t = __uint16_t mode_t = __darwin_mode_t class mcontext(Structure): pass mcontext._fields_ = [ ] class mcontext64(Structure): pass mcontext64._fields_ = [ ] class __darwin_pthread_handler_rec(Structure): pass __darwin_pthread_handler_rec._fields_ = [ ('__routine', CFUNCTYPE(None, 
c_void_p)), ('__arg', c_void_p), ('__next', POINTER(__darwin_pthread_handler_rec)), ] assert sizeof(__darwin_pthread_handler_rec) == 12, sizeof(__darwin_pthread_handler_rec) assert alignment(__darwin_pthread_handler_rec) == 4, alignment(__darwin_pthread_handler_rec) class _opaque_pthread_attr_t(Structure): pass _opaque_pthread_attr_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 36), ] assert sizeof(_opaque_pthread_attr_t) == 40, sizeof(_opaque_pthread_attr_t) assert alignment(_opaque_pthread_attr_t) == 4, alignment(_opaque_pthread_attr_t) class _opaque_pthread_cond_t(Structure): pass _opaque_pthread_cond_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 24), ] assert sizeof(_opaque_pthread_cond_t) == 28, sizeof(_opaque_pthread_cond_t) assert alignment(_opaque_pthread_cond_t) == 4, alignment(_opaque_pthread_cond_t) class _opaque_pthread_condattr_t(Structure): pass _opaque_pthread_condattr_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 4), ] assert sizeof(_opaque_pthread_condattr_t) == 8, sizeof(_opaque_pthread_condattr_t) assert alignment(_opaque_pthread_condattr_t) == 4, alignment(_opaque_pthread_condattr_t) class _opaque_pthread_mutex_t(Structure): pass _opaque_pthread_mutex_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 40), ] assert sizeof(_opaque_pthread_mutex_t) == 44, sizeof(_opaque_pthread_mutex_t) assert alignment(_opaque_pthread_mutex_t) == 4, alignment(_opaque_pthread_mutex_t) class _opaque_pthread_mutexattr_t(Structure): pass _opaque_pthread_mutexattr_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 8), ] assert sizeof(_opaque_pthread_mutexattr_t) == 12, sizeof(_opaque_pthread_mutexattr_t) assert alignment(_opaque_pthread_mutexattr_t) == 4, alignment(_opaque_pthread_mutexattr_t) class _opaque_pthread_once_t(Structure): pass _opaque_pthread_once_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 4), ] assert sizeof(_opaque_pthread_once_t) == 8, sizeof(_opaque_pthread_once_t) assert 
alignment(_opaque_pthread_once_t) == 4, alignment(_opaque_pthread_once_t) class _opaque_pthread_rwlock_t(Structure): pass _opaque_pthread_rwlock_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 124), ] assert sizeof(_opaque_pthread_rwlock_t) == 128, sizeof(_opaque_pthread_rwlock_t) assert alignment(_opaque_pthread_rwlock_t) == 4, alignment(_opaque_pthread_rwlock_t) class _opaque_pthread_rwlockattr_t(Structure): pass _opaque_pthread_rwlockattr_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 12), ] assert sizeof(_opaque_pthread_rwlockattr_t) == 16, sizeof(_opaque_pthread_rwlockattr_t) assert alignment(_opaque_pthread_rwlockattr_t) == 4, alignment(_opaque_pthread_rwlockattr_t) class _opaque_pthread_t(Structure): pass _opaque_pthread_t._fields_ = [ ('__sig', c_long), ('__cleanup_stack', POINTER(__darwin_pthread_handler_rec)), ('__opaque', c_char * 596), ] assert sizeof(_opaque_pthread_t) == 604, sizeof(_opaque_pthread_t) assert alignment(_opaque_pthread_t) == 4, alignment(_opaque_pthread_t) __darwin_blkcnt_t = __int64_t __darwin_blksize_t = __int32_t __darwin_fsblkcnt_t = c_uint __darwin_fsfilcnt_t = c_uint __darwin_gid_t = __uint32_t __darwin_id_t = __uint32_t __darwin_ino_t = __uint32_t __darwin_mach_port_name_t = __darwin_natural_t __darwin_mach_port_t = __darwin_mach_port_name_t __darwin_mcontext_t = POINTER(mcontext) __darwin_mcontext64_t = POINTER(mcontext64) __darwin_pid_t = __int32_t __darwin_pthread_attr_t = _opaque_pthread_attr_t __darwin_pthread_cond_t = _opaque_pthread_cond_t __darwin_pthread_condattr_t = _opaque_pthread_condattr_t __darwin_pthread_key_t = c_ulong __darwin_pthread_mutex_t = _opaque_pthread_mutex_t __darwin_pthread_mutexattr_t = _opaque_pthread_mutexattr_t __darwin_pthread_once_t = _opaque_pthread_once_t __darwin_pthread_rwlock_t = _opaque_pthread_rwlock_t __darwin_pthread_rwlockattr_t = _opaque_pthread_rwlockattr_t __darwin_pthread_t = POINTER(_opaque_pthread_t) __darwin_sigset_t = __uint32_t __darwin_suseconds_t = __int32_t 
__darwin_uid_t = __uint32_t __darwin_useconds_t = __uint32_t __darwin_uuid_t = c_ubyte * 16 class sigaltstack(Structure): pass sigaltstack._fields_ = [ ('ss_sp', c_void_p), ('ss_size', __darwin_size_t), ('ss_flags', c_int), ] assert sizeof(sigaltstack) == 12, sizeof(sigaltstack) assert alignment(sigaltstack) == 4, alignment(sigaltstack) __darwin_stack_t = sigaltstack class ucontext(Structure): pass ucontext._fields_ = [ ('uc_onstack', c_int), ('uc_sigmask', __darwin_sigset_t), ('uc_stack', __darwin_stack_t), ('uc_link', POINTER(ucontext)), ('uc_mcsize', __darwin_size_t), ('uc_mcontext', __darwin_mcontext_t), ] assert sizeof(ucontext) == 32, sizeof(ucontext) assert alignment(ucontext) == 4, alignment(ucontext) __darwin_ucontext_t = ucontext class ucontext64(Structure): pass ucontext64._fields_ = [ ('uc_onstack', c_int), ('uc_sigmask', __darwin_sigset_t), ('uc_stack', __darwin_stack_t), ('uc_link', POINTER(ucontext64)), ('uc_mcsize', __darwin_size_t), ('uc_mcontext64', __darwin_mcontext64_t), ] assert sizeof(ucontext64) == 32, sizeof(ucontext64) assert alignment(ucontext64) == 4, alignment(ucontext64) __darwin_ucontext64_t = ucontext64 class timeval(Structure): pass timeval._fields_ = [ ('tv_sec', __darwin_time_t), ('tv_usec', __darwin_suseconds_t), ] assert sizeof(timeval) == 8, sizeof(timeval) assert alignment(timeval) == 4, alignment(timeval) rlim_t = __int64_t class rusage(Structure): pass rusage._fields_ = [ ('ru_utime', timeval), ('ru_stime', timeval), ('ru_maxrss', c_long), ('ru_ixrss', c_long), ('ru_idrss', c_long), ('ru_isrss', c_long), ('ru_minflt', c_long), ('ru_majflt', c_long), ('ru_nswap', c_long), ('ru_inblock', c_long), ('ru_oublock', c_long), ('ru_msgsnd', c_long), ('ru_msgrcv', c_long), ('ru_nsignals', c_long), ('ru_nvcsw', c_long), ('ru_nivcsw', c_long), ] assert sizeof(rusage) == 72, sizeof(rusage) assert alignment(rusage) == 4, alignment(rusage) class rlimit(Structure): pass rlimit._pack_ = 4 rlimit._fields_ = [ ('rlim_cur', rlim_t), ('rlim_max', 
rlim_t), ] assert sizeof(rlimit) == 16, sizeof(rlimit) assert alignment(rlimit) == 4, alignment(rlimit) mcontext_t = __darwin_mcontext_t mcontext64_t = __darwin_mcontext64_t pthread_attr_t = __darwin_pthread_attr_t sigset_t = __darwin_sigset_t ucontext_t = __darwin_ucontext_t ucontext64_t = __darwin_ucontext64_t uid_t = __darwin_uid_t class sigval(Union): pass sigval._fields_ = [ ('sival_int', c_int), ('sival_ptr', c_void_p), ] assert sizeof(sigval) == 4, sizeof(sigval) assert alignment(sigval) == 4, alignment(sigval) class sigevent(Structure): pass sigevent._fields_ = [ ('sigev_notify', c_int), ('sigev_signo', c_int), ('sigev_value', sigval), ('sigev_notify_function', CFUNCTYPE(None, sigval)), ('sigev_notify_attributes', POINTER(pthread_attr_t)), ] assert sizeof(sigevent) == 20, sizeof(sigevent) assert alignment(sigevent) == 4, alignment(sigevent) class __siginfo(Structure): pass pid_t = __darwin_pid_t __siginfo._fields_ = [ ('si_signo', c_int), ('si_errno', c_int), ('si_code', c_int), ('si_pid', pid_t), ('si_uid', uid_t), ('si_status', c_int), ('si_addr', c_void_p), ('si_value', sigval), ('si_band', c_long), ('pad', c_ulong * 7), ] assert sizeof(__siginfo) == 64, sizeof(__siginfo) assert alignment(__siginfo) == 4, alignment(__siginfo) siginfo_t = __siginfo class __sigaction_u(Union): pass __sigaction_u._fields_ = [ ('__sa_handler', CFUNCTYPE(None, c_int)), ('__sa_sigaction', CFUNCTYPE(None, c_int, POINTER(__siginfo), c_void_p)), ] assert sizeof(__sigaction_u) == 4, sizeof(__sigaction_u) assert alignment(__sigaction_u) == 4, alignment(__sigaction_u) class __sigaction(Structure): pass __sigaction._fields_ = [ ('__sigaction_u', __sigaction_u), ('sa_tramp', CFUNCTYPE(None, c_void_p, c_int, c_int, POINTER(siginfo_t), c_void_p)), ('sa_mask', sigset_t), ('sa_flags', c_int), ] assert sizeof(__sigaction) == 16, sizeof(__sigaction) assert alignment(__sigaction) == 4, alignment(__sigaction) class sigaction(Structure): pass sigaction._fields_ = [ ('__sigaction_u', 
__sigaction_u), ('sa_mask', sigset_t), ('sa_flags', c_int), ] assert sizeof(sigaction) == 12, sizeof(sigaction) assert alignment(sigaction) == 4, alignment(sigaction) sig_t = CFUNCTYPE(None, c_int) stack_t = __darwin_stack_t class sigvec(Structure): pass sigvec._fields_ = [ ('sv_handler', CFUNCTYPE(None, c_int)), ('sv_mask', c_int), ('sv_flags', c_int), ] assert sizeof(sigvec) == 12, sizeof(sigvec) assert alignment(sigvec) == 4, alignment(sigvec) class sigstack(Structure): pass sigstack._fields_ = [ ('ss_sp', STRING), ('ss_onstack', c_int), ] assert sizeof(sigstack) == 8, sizeof(sigstack) assert alignment(sigstack) == 4, alignment(sigstack) u_char = c_ubyte u_short = c_ushort u_int = c_uint u_long = c_ulong ushort = c_ushort uint = c_uint u_quad_t = u_int64_t quad_t = int64_t qaddr_t = POINTER(quad_t) caddr_t = STRING daddr_t = int32_t fixpt_t = u_int32_t blkcnt_t = __darwin_blkcnt_t blksize_t = __darwin_blksize_t gid_t = __darwin_gid_t in_addr_t = __uint32_t in_port_t = __uint16_t ino_t = __darwin_ino_t key_t = __int32_t nlink_t = __uint16_t off_t = __darwin_off_t segsz_t = int32_t swblk_t = int32_t clock_t = __darwin_clock_t ssize_t = __darwin_ssize_t useconds_t = __darwin_useconds_t suseconds_t = __darwin_suseconds_t fd_mask = __int32_t class fd_set(Structure): pass fd_set._fields_ = [ ('fds_bits', __int32_t * 32), ] assert sizeof(fd_set) == 128, sizeof(fd_set) assert alignment(fd_set) == 4, alignment(fd_set) pthread_cond_t = __darwin_pthread_cond_t pthread_condattr_t = __darwin_pthread_condattr_t pthread_mutex_t = __darwin_pthread_mutex_t pthread_mutexattr_t = __darwin_pthread_mutexattr_t pthread_once_t = __darwin_pthread_once_t pthread_rwlock_t = __darwin_pthread_rwlock_t pthread_rwlockattr_t = __darwin_pthread_rwlockattr_t pthread_t = __darwin_pthread_t pthread_key_t = __darwin_pthread_key_t fsblkcnt_t = __darwin_fsblkcnt_t fsfilcnt_t = __darwin_fsfilcnt_t # values for enumeration 'idtype_t' idtype_t = c_int # enum id_t = __darwin_id_t class wait(Union): pass 
class N4wait3DOLLAR_3E(Structure): pass N4wait3DOLLAR_3E._fields_ = [ ('w_Termsig', c_uint, 7), ('w_Coredump', c_uint, 1), ('w_Retcode', c_uint, 8), ('w_Filler', c_uint, 16), ] assert sizeof(N4wait3DOLLAR_3E) == 4, sizeof(N4wait3DOLLAR_3E) assert alignment(N4wait3DOLLAR_3E) == 4, alignment(N4wait3DOLLAR_3E) class N4wait3DOLLAR_4E(Structure): pass N4wait3DOLLAR_4E._fields_ = [ ('w_Stopval', c_uint, 8), ('w_Stopsig', c_uint, 8), ('w_Filler', c_uint, 16), ] assert sizeof(N4wait3DOLLAR_4E) == 4, sizeof(N4wait3DOLLAR_4E) assert alignment(N4wait3DOLLAR_4E) == 4, alignment(N4wait3DOLLAR_4E) wait._fields_ = [ ('w_status', c_int), ('w_T', N4wait3DOLLAR_3E), ('w_S', N4wait3DOLLAR_4E), ] assert sizeof(wait) == 4, sizeof(wait) assert alignment(wait) == 4, alignment(wait) class timespec(Structure): pass timespec._fields_ = [ ('tv_sec', time_t), ('tv_nsec', c_long), ] assert sizeof(timespec) == 8, sizeof(timespec) assert alignment(timespec) == 4, alignment(timespec) class tm(Structure): pass tm._fields_ = [ ('tm_sec', c_int), ('tm_min', c_int), ('tm_hour', c_int), ('tm_mday', c_int), ('tm_mon', c_int), ('tm_year', c_int), ('tm_wday', c_int), ('tm_yday', c_int), ('tm_isdst', c_int), ('tm_gmtoff', c_long), ('tm_zone', STRING), ] assert sizeof(tm) == 44, sizeof(tm) assert alignment(tm) == 4, alignment(tm) __gnuc_va_list = STRING ptrdiff_t = c_int int8_t = c_byte int16_t = c_short uint8_t = c_ubyte uint16_t = c_ushort uint32_t = c_uint uint64_t = c_ulonglong int_least8_t = int8_t int_least16_t = int16_t int_least32_t = int32_t int_least64_t = int64_t uint_least8_t = uint8_t uint_least16_t = uint16_t uint_least32_t = uint32_t uint_least64_t = uint64_t int_fast8_t = int8_t int_fast16_t = int16_t int_fast32_t = int32_t int_fast64_t = int64_t uint_fast8_t = uint8_t uint_fast16_t = uint16_t uint_fast32_t = uint32_t uint_fast64_t = uint64_t intptr_t = c_long uintptr_t = c_ulong intmax_t = c_longlong uintmax_t = c_ulonglong __all__ = ['ENGINE', 'pkcs7_enc_content_st', '__int16_t', 
'X509_REVOKED', 'SSL_CTX', 'UIT_BOOLEAN', '__darwin_time_t', 'ucontext64_t', 'int_fast32_t', 'pem_ctx_st', 'uint8_t', 'fpos_t', 'X509', 'COMP_CTX', 'tm', 'N10pem_ctx_st4DOLLAR_17E', 'swblk_t', 'ASN1_TEMPLATE', '__darwin_pthread_t', 'fixpt_t', 'BIO_METHOD', 'ASN1_PRINTABLESTRING', 'EVP_ENCODE_CTX', 'dh_method', 'bio_f_buffer_ctx_struct', 'in_port_t', 'X509_SIG', '__darwin_ssize_t', '__darwin_sigset_t', 'wait', 'uint_fast16_t', 'N12asn1_type_st4DOLLAR_11E', 'uint_least8_t', 'pthread_rwlock_t', 'ASN1_IA5STRING', 'fsfilcnt_t', 'ucontext', '__uint64_t', 'timespec', 'x509_cinf_st', 'COMP_METHOD', 'MD5_CTX', 'buf_mem_st', 'ASN1_ENCODING_st', 'PBEPARAM', 'X509_NAME_ENTRY', '__darwin_va_list', 'ucontext_t', 'lhash_st', 'N4wait3DOLLAR_4E', '__darwin_uuid_t', '_ossl_old_des_ks_struct', 'id_t', 'ASN1_BIT_STRING', 'va_list', '__darwin_wchar_t', 'pthread_key_t', 'pkcs7_signer_info_st', 'ASN1_METHOD', 'DSA_SIG', 'DSA', 'UIT_NONE', 'pthread_t', '__darwin_useconds_t', 'uint_fast8_t', 'UI_STRING', 'DES_cblock', '__darwin_mcontext64_t', 'rlim_t', 'PEM_Encode_Seal_st', 'SHAstate_st', 'u_quad_t', 'openssl_fptr', '_opaque_pthread_rwlockattr_t', 'N18x509_attributes_st4DOLLAR_13E', '__darwin_pthread_rwlock_t', 'daddr_t', 'ui_string_st', 'x509_file_st', 'X509_req_info_st', 'int_least64_t', 'evp_Encode_Ctx_st', 'X509_OBJECTS', 'CRYPTO_EX_DATA', '__int8_t', 'AUTHORITY_KEYID_st', '_opaque_pthread_attr_t', 'sigstack', 'EVP_CIPHER_CTX', 'X509_extension_st', 'pid_t', 'RSA_METHOD', 'PEM_USER', 'pem_recip_st', 'env_md_ctx_st', 'rc5_key_st', 'ui_st', 'X509_PUBKEY', 'u_int8_t', 'ASN1_ITEM_st', 'pkcs7_recip_info_st', 'ssl2_state_st', 'off_t', 'N10ssl_ctx_st4DOLLAR_18E', 'crypto_ex_data_st', 'ui_method_st', '__darwin_pthread_rwlockattr_t', 'CRYPTO_EX_dup', '__darwin_ino_t', '__sFILE', 'OSUnknownByteOrder', 'BN_MONT_CTX', 'ASN1_NULL', 'time_t', 'CRYPTO_EX_new', 'asn1_type_st', 'CRYPTO_EX_DATA_FUNCS', 'user_time_t', 'BIGNUM', 'pthread_rwlockattr_t', 'ASN1_VALUE_st', 'DH_METHOD', '__darwin_off_t', 
'_opaque_pthread_t', 'bn_blinding_st', 'RSA', 'ssize_t', 'mcontext64_t', 'user_long_t', 'fsblkcnt_t', 'cert_st', '__darwin_pthread_condattr_t', 'X509_PKEY', '__darwin_id_t', '__darwin_nl_item', 'SSL2_STATE', 'FILE', 'pthread_mutexattr_t', 'size_t', '_ossl_old_des_key_schedule', 'pkcs7_issuer_and_serial_st', 'sigval', 'CRYPTO_MEM_LEAK_CB', 'X509_NAME', 'blkcnt_t', 'uint_least16_t', '__darwin_dev_t', 'evp_cipher_info_st', 'BN_BLINDING', 'ssl3_state_st', 'uint_least64_t', 'user_addr_t', 'DES_key_schedule', 'RIPEMD160_CTX', 'u_char', 'X509_algor_st', 'uid_t', 'sess_cert_st', 'u_int64_t', 'u_int16_t', 'sigset_t', '__darwin_ptrdiff_t', 'ASN1_CTX', 'STACK', '__int32_t', 'UI_METHOD', 'NETSCAPE_SPKI', 'UIT_PROMPT', 'st_CRYPTO_EX_DATA_IMPL', 'cast_key_st', 'X509_HASH_DIR_CTX', 'sigevent', 'user_ssize_t', 'clock_t', 'aes_key_st', '__darwin_socklen_t', '__darwin_intptr_t', 'int_fast64_t', 'asn1_string_table_st', 'uint_fast32_t', 'ASN1_VISIBLESTRING', 'DSA_SIG_st', 'obj_name_st', 'X509_LOOKUP_METHOD', 'u_int32_t', 'EVP_CIPHER_INFO', '__gnuc_va_list', 'AES_KEY', 'PKCS7_ISSUER_AND_SERIAL', 'BN_CTX', '__darwin_blkcnt_t', 'key_t', 'SHA_CTX', 'pkcs7_signed_st', 'SSL', 'N10pem_ctx_st4DOLLAR_16E', 'pthread_attr_t', 'EVP_MD', 'uint', 'ASN1_BOOLEAN', 'ino_t', '__darwin_clock_t', 'ASN1_OCTET_STRING', 'asn1_ctx_st', 'BIO_F_BUFFER_CTX', 'bn_mont_ctx_st', 'X509_REQ_INFO', 'PEM_CTX', 'sigvec', '__darwin_pthread_mutexattr_t', 'x509_attributes_st', 'stack_t', '__darwin_mode_t', '__mbstate_t', 'asn1_object_st', 'ASN1_ENCODING', '__uint8_t', 'LHASH_NODE', 'PKCS7_SIGNER_INFO', 'asn1_method_st', 'stack_st', 'bio_info_cb', 'div_t', 'UIT_VERIFY', 'PBEPARAM_st', 'N4wait3DOLLAR_3E', 'quad_t', '__siginfo', '__darwin_mbstate_t', 'rsa_st', 'ASN1_UNIVERSALSTRING', 'uint64_t', 'ssl_comp_st', 'X509_OBJECT', 'pthread_cond_t', 'DH', '__darwin_wctype_t', 'PKCS7_ENVELOPE', 'ASN1_TLC_st', 'sig_atomic_t', 'BIO', 'nlink_t', 'BUF_MEM', 'SSL3_RECORD', 'bio_method_st', 'timeval', 'UI_string_types', 'BIO_dummy', 
'ssl_ctx_st', 'NETSCAPE_CERT_SEQUENCE', 'BIT_STRING_BITNAME_st', '__darwin_pthread_attr_t', 'int8_t', '__darwin_wint_t', 'OBJ_NAME', 'PKCS8_PRIV_KEY_INFO', 'PBE2PARAM_st', 'LHASH_DOALL_FN_TYPE', 'x509_st', 'X509_VAL', 'dev_t', 'ASN1_TEMPLATE_st', 'MD5state_st', '__uint16_t', 'LHASH_DOALL_ARG_FN_TYPE', 'mdc2_ctx_st', 'SSL3_STATE', 'ssl3_buffer_st', 'ASN1_ITEM_EXP', '_opaque_pthread_condattr_t', 'mode_t', 'ASN1_VALUE', 'qaddr_t', '__darwin_gid_t', 'EVP_PKEY', 'CRYPTO_EX_free', '_ossl_old_des_cblock', 'X509_INFO', 'asn1_string_st', 'intptr_t', 'UIT_INFO', 'int_fast8_t', 'sigaltstack', 'env_md_st', 'LHASH', '__darwin_ucontext_t', 'PKCS7_SIGN_ENVELOPE', '__darwin_mcontext_t', 'ct_rune_t', 'MD2_CTX', 'pthread_once_t', 'SSL3_BUFFER', 'fd_mask', 'ASN1_TYPE', 'PKCS7_SIGNED', 'ssl3_record_st', 'BF_KEY', 'MD4state_st', 'MD4_CTX', 'int16_t', 'SSL_CIPHER', 'rune_t', 'X509_TRUST', 'siginfo_t', 'X509_STORE', '__sbuf', 'X509_STORE_CTX', '__darwin_blksize_t', 'ldiv_t', 'ASN1_TIME', 'SSL_METHOD', 'X509_LOOKUP', 'Netscape_spki_st', 'P_PID', 'sigaction', 'sig_t', 'hostent', 'x509_cert_aux_st', '_opaque_pthread_cond_t', 'segsz_t', 'ushort', '__darwin_ct_rune_t', 'fd_set', 'BN_RECP_CTX', 'x509_lookup_st', 'uint16_t', 'pkcs7_st', 'asn1_header_st', '__darwin_pthread_key_t', 'x509_trust_st', '__darwin_pthread_handler_rec', 'int32_t', 'X509_CRL_INFO', 'N11evp_pkey_st4DOLLAR_12E', 'MDC2_CTX', 'N23_ossl_old_des_ks_struct4DOLLAR_10E', 'ASN1_HEADER', 'X509_crl_info_st', 'LHASH_HASH_FN_TYPE', '_opaque_pthread_mutexattr_t', 'ssl_st', 'N8pkcs7_st4DOLLAR_15E', 'evp_pkey_st', 'pkcs7_signedandenveloped_st', '__darwin_mach_port_t', 'EVP_PBE_KEYGEN', '_opaque_pthread_mutex_t', 'ASN1_UTCTIME', 'mcontext', 'crypto_ex_data_func_st', 'u_long', 'PBKDF2PARAM_st', 'rc4_key_st', 'DSA_METHOD', 'EVP_CIPHER', 'BIT_STRING_BITNAME', 'PKCS7_RECIP_INFO', 'ssl3_enc_method', 'X509_CERT_AUX', 'uintmax_t', 'int_fast16_t', 'RC5_32_KEY', 'ucontext64', 'ASN1_INTEGER', 'u_short', 'N14x509_object_st4DOLLAR_14E', 'mcontext64', 
'X509_sig_st', 'ASN1_GENERALSTRING', 'PKCS7', '__sFILEX', 'X509_name_entry_st', 'ssl_session_st', 'caddr_t', 'bignum_st', 'X509_CINF', '__darwin_pthread_cond_t', 'ASN1_TLC', 'PKCS7_ENCRYPT', 'NETSCAPE_SPKAC', 'Netscape_spkac_st', 'idtype_t', 'UIT_ERROR', 'uint_fast64_t', 'in_addr_t', 'pthread_mutex_t', '__int64_t', 'ASN1_BMPSTRING', 'uint32_t', 'PEM_ENCODE_SEAL_CTX', 'suseconds_t', 'ASN1_OBJECT', 'X509_val_st', 'private_key_st', 'CRYPTO_dynlock', 'X509_objects_st', 'CRYPTO_EX_DATA_IMPL', 'pthread_condattr_t', 'PKCS7_DIGEST', 'uint_least32_t', 'ASN1_STRING', '__uint32_t', 'P_PGID', 'rsa_meth_st', 'X509_crl_st', 'RC2_KEY', '__darwin_fsfilcnt_t', 'X509_revoked_st', 'PBE2PARAM', 'blksize_t', 'Netscape_certificate_sequence', 'ssl_cipher_st', 'bignum_ctx', 'register_t', 'ASN1_UTF8STRING', 'pkcs7_encrypted_st', 'RC4_KEY', '__darwin_ucontext64_t', 'N13ssl2_state_st4DOLLAR_19E', 'bn_recp_ctx_st', 'CAST_KEY', 'X509_ATTRIBUTE', '__darwin_suseconds_t', '__sigaction', 'user_ulong_t', 'syscall_arg_t', 'evp_cipher_ctx_st', 'X509_ALGOR', 'mcontext_t', 'const_DES_cblock', '__darwin_fsblkcnt_t', 'dsa_st', 'int_least8_t', 'MD2state_st', 'X509_EXTENSION', 'GEN_SESSION_CB', 'int_least16_t', '__darwin_wctrans_t', 'PBKDF2PARAM', 'x509_lookup_method_st', 'pem_password_cb', 'X509_info_st', 'x509_store_st', '__darwin_natural_t', 'X509_pubkey_st', 'pkcs7_digest_st', '__darwin_size_t', 'ASN1_STRING_TABLE', 'OSLittleEndian', 'RIPEMD160state_st', 'pkcs7_enveloped_st', 'UI', 'ptrdiff_t', 'X509_REQ', 'CRYPTO_dynlock_value', 'X509_req_st', 'x509_store_ctx_st', 'N13ssl3_state_st4DOLLAR_20E', 'lhash_node_st', '__darwin_pthread_mutex_t', 'LHASH_COMP_FN_TYPE', '__darwin_rune_t', 'rlimit', '__darwin_pthread_once_t', 'OSBigEndian', 'uintptr_t', '__darwin_uid_t', 'u_int', 'ASN1_T61STRING', 'gid_t', 'ssl_method_st', 'ASN1_ITEM', 'ASN1_ENUMERATED', '_opaque_pthread_rwlock_t', 'pkcs8_priv_key_info_st', 'intmax_t', 'sigcontext', 'X509_CRL', 'rc2_key_st', 'engine_st', 'x509_object_st', 
'_opaque_pthread_once_t', 'DES_ks', 'SSL_COMP', 'dsa_method', 'int64_t', 'bio_st', 'bf_key_st', 'ASN1_GENERALIZEDTIME', 'PKCS7_ENC_CONTENT', '__darwin_pid_t', 'lldiv_t', 'comp_method_st', 'EVP_MD_CTX', 'evp_cipher_st', 'X509_name_st', 'x509_hash_dir_st', '__darwin_mach_port_name_t', 'useconds_t', 'user_size_t', 'SSL_SESSION', 'rusage', 'ssl_crock_st', 'int_least32_t', '__sigaction_u', 'dh_st', 'P_ALL', '__darwin_stack_t', 'N6DES_ks3DOLLAR_9E', 'comp_ctx_st', 'X509_CERT_FILE_CTX']
Python
def parrot():
    """Fixture target: a no-op function whose name the test fixers rewrite."""
    return None
Python
# This module is intentionally empty: fetching its fixer class must raise an AttributeError.
Python
from lib2to3.fixer_base import BaseFix
from lib2to3.fixer_util import Name


class FixParrot(BaseFix):
    """Rename every function called 'parrot' to 'cheese'."""

    PATTERN = """funcdef < 'def' name='parrot' any* >"""

    def transform(self, node, results):
        # Swap the matched name leaf for a fresh one, carrying over the
        # original whitespace prefix so surrounding formatting survives.
        matched = results["name"]
        matched.replace(Name("cheese", matched.prefix))
Python
from lib2to3.fixer_base import BaseFix


class FixPreorder(BaseFix):
    """Fixture fixer that requests pre-order traversal but matches nothing."""

    order = "pre"

    def match(self, node):
        # Decline every node: this fixer only exercises traversal order.
        return False
Python
from lib2to3.fixer_base import BaseFix


class FixLast(BaseFix):
    """Fixture fixer scheduled late (high run_order); never matches."""

    run_order = 10

    def match(self, node):
        # Always decline: only the ordering attribute matters to the tests.
        return False
Python
from lib2to3.fixer_base import BaseFix


class FixFirst(BaseFix):
    """Fixture fixer scheduled early (low run_order); never matches."""

    run_order = 1

    def match(self, node):
        # Always decline: only the ordering attribute matters to the tests.
        return False
Python
from lib2to3.fixer_base import BaseFix


class FixExplicit(BaseFix):
    """Fixture fixer that must be requested explicitly; it never matches.

    BUG FIX: match() now takes the ``node`` argument that the refactoring
    engine passes to every fixer's match(); the previous zero-argument
    signature raised TypeError whenever the fixer was actually invoked.
    """

    explicit = True

    def match(self, node):
        return False
Python
from lib2to3.fixer_base import BaseFix


class FixBadOrder(BaseFix):
    """Fixture fixer with an invalid traversal order.

    Deliberately not "pre"/"post" so the refactoring engine's
    order validation can be exercised.
    """

    order = "crazy"
Python
#!/usr/bin/env python3
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Main program for testing the infrastructure."""

__author__ = "Guido van Rossum <guido@python.org>"

# Support imports (need to be imported first)
from . import support

# Python imports
import os
import sys
import logging

# Local imports
from .. import pytree
# BUG FIX: the old absolute ``import pgen2`` / ``from pgen2 import driver``
# cannot resolve under Python 3's package layout (pgen2 lives next to this
# package's parent, as the ``from .. import pytree`` line shows).
from ..pgen2 import driver
from ..pgen2 import parse as pgen2_parse

logging.basicConfig()


def main():
    """Parse example.py, then (given any argv flag) re-parse every loaded
    module and every top-level .py file on sys.path, diffing each tree's
    reconstruction against its source and reporting mismatches."""
    gr = driver.load_grammar("Grammar.txt")
    dr = driver.Driver(gr, convert=pytree.convert)

    fn = "example.py"
    tree = dr.parse_file(fn, debug=True)
    if not diff(fn, tree):
        print("No diffs.")
    if not sys.argv[1:]:
        return  # Pass a dummy argument to run the complete test suite below

    problems = []

    # Process every imported module
    for name in sys.modules:
        mod = sys.modules[name]
        if mod is None or not hasattr(mod, "__file__"):
            continue
        fn = mod.__file__
        if fn.endswith(".pyc"):
            fn = fn[:-1]
        if not fn.endswith(".py"):
            continue
        print("Parsing", fn, file=sys.stderr)
        tree = dr.parse_file(fn, debug=True)
        if diff(fn, tree):
            problems.append(fn)

    # Process every single module on sys.path (but not in packages)
    for dir in sys.path:
        try:
            names = os.listdir(dir)
        except OSError:
            continue
        print("Scanning", dir, "...", file=sys.stderr)
        for name in names:
            if not name.endswith(".py"):
                continue
            print("Parsing", name, file=sys.stderr)
            fn = os.path.join(dir, name)
            try:
                tree = dr.parse_file(fn, debug=True)
            except pgen2_parse.ParseError as err:
                print("ParseError:", err)
            else:
                if diff(fn, tree):
                    problems.append(fn)

    # Show summary of problem files
    if not problems:
        print("No problems.  Congratulations!")
    else:
        print("Problems in following files:")
        for fn in problems:
            print("***", fn)


def diff(fn, tree):
    """Write the reconstructed tree to a scratch file and shell out to
    ``diff`` against the original; return diff's exit status (0 means
    identical). The scratch file is always removed."""
    with open("@", "w") as f:
        f.write(str(tree))
    try:
        return os.system("diff -u %s @" % fn)
    finally:
        os.remove("@")


if __name__ == "__main__":
    main()
Python
"""Support code for test_*.py files""" # Author: Collin Winter # Python imports import unittest import sys import os import os.path import re from textwrap import dedent # Local imports from lib2to3 import pytree, refactor from lib2to3.pgen2 import driver test_dir = os.path.dirname(__file__) proj_dir = os.path.normpath(os.path.join(test_dir, "..")) grammar_path = os.path.join(test_dir, "..", "Grammar.txt") grammar = driver.load_grammar(grammar_path) driver = driver.Driver(grammar, convert=pytree.convert) def parse_string(string): return driver.parse_string(reformat(string), debug=True) def run_all_tests(test_mod=None, tests=None): if tests is None: tests = unittest.TestLoader().loadTestsFromModule(test_mod) unittest.TextTestRunner(verbosity=2).run(tests) def reformat(string): return dedent(string) + "\n\n" def get_refactorer(fixer_pkg="lib2to3", fixers=None, options=None): """ A convenience function for creating a RefactoringTool for tests. fixers is a list of fixers for the RefactoringTool to use. By default "lib2to3.fixes.*" is used. options is an optional dictionary of options to be passed to the RefactoringTool. """ if fixers is not None: fixers = [fixer_pkg + ".fixes.fix_" + fix for fix in fixers] else: fixers = refactor.get_fixers_from_package(fixer_pkg + ".fixes") options = options or {} return refactor.RefactoringTool(fixers, options, explicit=True) def all_project_files(): for dirpath, dirnames, filenames in os.walk(proj_dir): for filename in filenames: if filename.endswith(".py"): yield os.path.join(dirpath, filename) TestCase = unittest.TestCase
Python
#!/usr/bin/env python3
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Main program for testing the infrastructure."""

__author__ = "Guido van Rossum <guido@python.org>"

# Support imports (need to be imported first)
from . import support

# Python imports
import os
import sys
import logging

# Local imports
from .. import pytree
# BUG FIX: the old absolute ``import pgen2`` / ``from pgen2 import driver``
# cannot resolve under Python 3's package layout (pgen2 lives next to this
# package's parent, as the ``from .. import pytree`` line shows).
from ..pgen2 import driver
from ..pgen2 import parse as pgen2_parse

logging.basicConfig()


def main():
    """Parse example.py, then (given any argv flag) re-parse every loaded
    module and every top-level .py file on sys.path, diffing each tree's
    reconstruction against its source and reporting mismatches."""
    gr = driver.load_grammar("Grammar.txt")
    dr = driver.Driver(gr, convert=pytree.convert)

    fn = "example.py"
    tree = dr.parse_file(fn, debug=True)
    if not diff(fn, tree):
        print("No diffs.")
    if not sys.argv[1:]:
        return  # Pass a dummy argument to run the complete test suite below

    problems = []

    # Process every imported module
    for name in sys.modules:
        mod = sys.modules[name]
        if mod is None or not hasattr(mod, "__file__"):
            continue
        fn = mod.__file__
        if fn.endswith(".pyc"):
            fn = fn[:-1]
        if not fn.endswith(".py"):
            continue
        print("Parsing", fn, file=sys.stderr)
        tree = dr.parse_file(fn, debug=True)
        if diff(fn, tree):
            problems.append(fn)

    # Process every single module on sys.path (but not in packages)
    for dir in sys.path:
        try:
            names = os.listdir(dir)
        except OSError:
            continue
        print("Scanning", dir, "...", file=sys.stderr)
        for name in names:
            if not name.endswith(".py"):
                continue
            print("Parsing", name, file=sys.stderr)
            fn = os.path.join(dir, name)
            try:
                tree = dr.parse_file(fn, debug=True)
            except pgen2_parse.ParseError as err:
                print("ParseError:", err)
            else:
                if diff(fn, tree):
                    problems.append(fn)

    # Show summary of problem files
    if not problems:
        print("No problems.  Congratulations!")
    else:
        print("Problems in following files:")
        for fn in problems:
            print("***", fn)


def diff(fn, tree):
    """Write the reconstructed tree to a scratch file and shell out to
    ``diff`` against the original; return diff's exit status (0 means
    identical). The scratch file is always removed."""
    with open("@", "w") as f:
        f.write(str(tree))
    try:
        return os.system("diff -u %s @" % fn)
    finally:
        os.remove("@")


if __name__ == "__main__":
    main()
Python
"""Make tests/ into a package. This allows us to "import tests" and have tests.all_tests be a TestSuite representing all test cases from all test_*.py files in tests/.""" # Author: Collin Winter import os import os.path import unittest import types from . import support all_tests = unittest.TestSuite() tests_dir = os.path.join(os.path.dirname(__file__), '..', 'tests') tests = [t[0:-3] for t in os.listdir(tests_dir) if t.startswith('test_') and t.endswith('.py')] loader = unittest.TestLoader() for t in tests: __import__("",globals(),locals(),[t],level=1) mod = globals()[t] all_tests.addTests(loader.loadTestsFromModule(mod))
Python
#empty
Python
from __future__ import with_statement from flask import Flask, request, session, g, redirect, url_for, abort, jsonify import sqlite3 from contextlib import closing import os #config DATABASE = './data/notel.db' DEBUG = True SECRET_KEY = 'notel_dev' app = Flask(__name__) app.config.from_object(__name__) app.config.from_envvar('NOTEL_SETTINGS', silent=True) def init_db(): """ Junwoo Kim:kim1035 | Last Updated:03/28/12 this function is used to initialize the db connection after the db has been changed.""" with closing(connect_db()) as db: with app.open_resource('schema.sql') as f: db.cursor().executescript(f.read()) db.commit() def connect_db(): """ Junwoo Kim:kim1035 | Last Updated:03/28/12 this function will be called when there has been a request asking for data in sqlite database. This gives you the connection to the database.""" return sqlite3.connect(app.config['DATABASE']) def query_db(query, args=(), one=False): """Junwoo Kim:kim1035 | Last Updated : 03/29/12 this function is a helper function to make quering easier query : sqlite3 query one : indicated whether we expect only one result or not """ cur = g.db.execute(query, args) rv = [dict((cur.description[idx][0], value) for idx, value in enumerate(row)) for row in cur.fetchall()] return (rv[0] if rv else None) if one else rv @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': user = query_db('SELECT * FROM users WHERE userid = ?', [request.form['userid']], one = True) print request.form['userid'], user['pass'], request.form['pass'] if user is None: return jsonify(accepted='false', reason='id not found') elif request.form['pass'] != user['pass']: print 'login attempt with wrong password' return jsonify(accepted='false', reason="wrong pass") elif request.form['pass'] == user['pass']: return jsonify(accepted='true', classes=[]) #returning array -> jsonify(items=[x for x in list]) elif request.method == 'GET': return 'login page' @app.route('/create_user', methods=['GET', 
'POST']) def create_user(): if request.method == 'POST': pass elif request.method == 'GET': return 'id creation page' #defines the behavior of the server before and after the request @app.before_request def before_request(): g.db = connect_db() @app.teardown_request def teardown_request(exception): g.db.close() #makes the app run by defalt when 'python notel_server.py' is typed if __name__ == '__main__': port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
Python
import os
import sys
import warnings
import opcode  # opcode is not a virtualenv module, so we can use it to find the stdlib

# Important! To work on pypy, this must be a module that resides in the
# lib-python/modified-x.y.z directory

dirname = os.path.dirname

distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
    # BUG FIX: the %s placeholder was never substituted, so the warning
    # printed a literal "%s" instead of the offending path.
    warnings.warn(
        "The virtualenv distutils package at %s appears to be in the same location as the system distutils?"
        % distutils_path)
else:
    # Shadow the real distutils: put the stdlib package first on our
    # __path__ and execute its __init__ in this module's namespace.
    __path__.insert(0, distutils_path)
    exec(open(os.path.join(distutils_path, '__init__.py')).read())

try:
    import dist
    import sysconfig
except ImportError:
    from distutils import dist, sysconfig

# Python 2/3 compatibility: basestring only exists on Python 2.
try:
    basestring
except NameError:
    basestring = str

## patch build_ext (distutils doesn't know how to get the libs directory
## path on windows - it hardcodes the paths around the patched sys.prefix)

if sys.platform == 'win32':
    from distutils.command.build_ext import build_ext as old_build_ext

    class build_ext(old_build_ext):
        def finalize_options(self):
            if self.library_dirs is None:
                self.library_dirs = []
            elif isinstance(self.library_dirs, basestring):
                self.library_dirs = self.library_dirs.split(os.pathsep)
            # Point at the real interpreter's import libraries, not the venv's.
            self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
            old_build_ext.finalize_options(self)

    from distutils.command import build_ext as build_ext_module
    build_ext_module.build_ext = build_ext

## distutils.dist patches:

old_find_config_files = dist.Distribution.find_config_files


def find_config_files(self):
    """Replacement for Distribution.find_config_files that prefers the
    per-user config found under the virtualenv's sys.prefix."""
    found = old_find_config_files(self)
    system_distutils = os.path.join(distutils_path, 'distutils.cfg')
    #if os.path.exists(system_distutils):
    #    found.insert(0, system_distutils)
    # What to call the per-user config file
    if os.name == 'posix':
        user_filename = ".pydistutils.cfg"
    else:
        user_filename = "pydistutils.cfg"
    user_filename = os.path.join(sys.prefix, user_filename)
    if os.path.isfile(user_filename):
        # Drop any other pydistutils.cfg so the venv-local one wins.
        for item in list(found):
            if item.endswith('pydistutils.cfg'):
                found.remove(item)
        found.append(user_filename)
    return found


dist.Distribution.find_config_files = find_config_files

## distutils.sysconfig patches:

old_get_python_inc = sysconfig.get_python_inc


def sysconfig_get_python_inc(plat_specific=0, prefix=None):
    # Default to the real (pre-virtualenv) prefix for header lookups.
    if prefix is None:
        prefix = sys.real_prefix
    return old_get_python_inc(plat_specific, prefix)


sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
sysconfig.get_python_inc = sysconfig_get_python_inc

old_get_python_lib = sysconfig.get_python_lib


def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
    # Only the standard library lives in the real prefix; site-packages
    # must keep resolving inside the virtualenv.
    if standard_lib and prefix is None:
        prefix = sys.real_prefix
    return old_get_python_lib(plat_specific, standard_lib, prefix)


sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
sysconfig.get_python_lib = sysconfig_get_python_lib

old_get_config_vars = sysconfig.get_config_vars


def sysconfig_get_config_vars(*args):
    real_vars = old_get_config_vars(*args)
    if sys.platform == 'win32':
        lib_dir = os.path.join(sys.real_prefix, "libs")
        if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
            real_vars['LIBDIR'] = lib_dir  # asked for all
        elif isinstance(real_vars, list) and 'LIBDIR' in args:
            real_vars = real_vars + [lib_dir]  # asked for list
    return real_vars


sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
sysconfig.get_config_vars = sysconfig_get_config_vars
Python
"""Append module search paths for third-party packages to sys.path. **************************************************************** * This module is automatically imported during initialization. * **************************************************************** In earlier versions of Python (up to 1.5a3), scripts or modules that needed to use site-specific modules would place ``import site'' somewhere near the top of their code. Because of the automatic import, this is no longer necessary (but code that does it still works). This will append site-specific paths to the module search path. On Unix, it starts with sys.prefix and sys.exec_prefix (if different) and appends lib/python<version>/site-packages as well as lib/site-python. It also supports the Debian convention of lib/python<version>/dist-packages. On other platforms (mainly Mac and Windows), it uses just sys.prefix (and sys.exec_prefix, if different, but this is unlikely). The resulting directories, if they exist, are appended to sys.path, and also inspected for path configuration files. FOR DEBIAN, this sys.path is augmented with directories in /usr/local. Local addons go into /usr/local/lib/python<version>/site-packages (resp. /usr/local/lib/site-python), Debian addons install into /usr/{lib,share}/python<version>/dist-packages. A path configuration file is a file whose name has the form <package>.pth; its contents are additional directories (one per line) to be added to sys.path. Non-existing directories (or non-directories) are never added to sys.path; no directory is added to sys.path more than once. Blank lines and lines beginning with '#' are skipped. Lines starting with 'import' are executed. For example, suppose sys.prefix and sys.exec_prefix are set to /usr/local and there is a directory /usr/local/lib/python2.X/site-packages with three subdirectories, foo, bar and spam, and two path configuration files, foo.pth and bar.pth. 
Assume foo.pth contains the following: # foo package configuration foo bar bletch and bar.pth contains: # bar package configuration bar Then the following directories are added to sys.path, in this order: /usr/local/lib/python2.X/site-packages/bar /usr/local/lib/python2.X/site-packages/foo Note that bletch is omitted because it doesn't exist; bar precedes foo because bar.pth comes alphabetically before foo.pth; and spam is omitted because it is not mentioned in either path configuration file. After these path manipulations, an attempt is made to import a module named sitecustomize, which can perform arbitrary additional site-specific customizations. If this import fails with an ImportError exception, it is silently ignored. """ import sys import os try: import __builtin__ as builtins except ImportError: import builtins try: set except NameError: from sets import Set as set # Prefixes for site-packages; add additional prefixes like /usr/local here PREFIXES = [sys.prefix, sys.exec_prefix] # Enable per user site-packages directory # set it to False to disable the feature or True to force the feature ENABLE_USER_SITE = None # for distutils.commands.install USER_SITE = None USER_BASE = None _is_pypy = hasattr(sys, 'pypy_version_info') _is_jython = sys.platform[:4] == 'java' if _is_jython: ModuleType = type(os) def makepath(*paths): dir = os.path.join(*paths) if _is_jython and (dir == '__classpath__' or dir.startswith('__pyclasspath__')): return dir, dir dir = os.path.abspath(dir) return dir, os.path.normcase(dir) def abs__file__(): """Set all module' __file__ attribute to an absolute path""" for m in sys.modules.values(): if ((_is_jython and not isinstance(m, ModuleType)) or hasattr(m, '__loader__')): # only modules need the abspath in Jython. 
and don't mess # with a PEP 302-supplied __file__ continue f = getattr(m, '__file__', None) if f is None: continue m.__file__ = os.path.abspath(f) def removeduppaths(): """ Remove duplicate entries from sys.path along with making them absolute""" # This ensures that the initial path provided by the interpreter contains # only absolute pathnames, even if we're running from the build directory. L = [] known_paths = set() for dir in sys.path: # Filter out duplicate paths (on case-insensitive file systems also # if they only differ in case); turn relative paths into absolute # paths. dir, dircase = makepath(dir) if not dircase in known_paths: L.append(dir) known_paths.add(dircase) sys.path[:] = L return known_paths # XXX This should not be part of site.py, since it is needed even when # using the -S option for Python. See http://www.python.org/sf/586680 def addbuilddir(): """Append ./build/lib.<platform> in case we're running in the build dir (especially for Guido :-)""" from distutils.util import get_platform s = "build/lib.%s-%.3s" % (get_platform(), sys.version) if hasattr(sys, 'gettotalrefcount'): s += '-pydebug' s = os.path.join(os.path.dirname(sys.path[-1]), s) sys.path.append(s) def _init_pathinfo(): """Return a set containing all existing directory entries from sys.path""" d = set() for dir in sys.path: try: if os.path.isdir(dir): dir, dircase = makepath(dir) d.add(dircase) except TypeError: continue return d def addpackage(sitedir, name, known_paths): """Add a new path to known_paths by combining sitedir and 'name' or execute sitedir if it starts with 'import'""" if known_paths is None: _init_pathinfo() reset = 1 else: reset = 0 fullname = os.path.join(sitedir, name) try: f = open(fullname, "rU") except IOError: return try: for line in f: if line.startswith("#"): continue if line.startswith("import"): exec(line) continue line = line.rstrip() dir, dircase = makepath(sitedir, line) if not dircase in known_paths and os.path.exists(dir): sys.path.append(dir) 
known_paths.add(dircase) finally: f.close() if reset: known_paths = None return known_paths def addsitedir(sitedir, known_paths=None): """Add 'sitedir' argument to sys.path if missing and handle .pth files in 'sitedir'""" if known_paths is None: known_paths = _init_pathinfo() reset = 1 else: reset = 0 sitedir, sitedircase = makepath(sitedir) if not sitedircase in known_paths: sys.path.append(sitedir) # Add path component try: names = os.listdir(sitedir) except os.error: return names.sort() for name in names: if name.endswith(os.extsep + "pth"): addpackage(sitedir, name, known_paths) if reset: known_paths = None return known_paths def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix): """Add site-packages (and possibly site-python) to sys.path""" prefixes = [os.path.join(sys_prefix, "local"), sys_prefix] if exec_prefix != sys_prefix: prefixes.append(os.path.join(exec_prefix, "local")) for prefix in prefixes: if prefix: if sys.platform in ('os2emx', 'riscos') or _is_jython: sitedirs = [os.path.join(prefix, "Lib", "site-packages")] elif _is_pypy: sitedirs = [os.path.join(prefix, 'site-packages')] elif sys.platform == 'darwin' and prefix == sys_prefix: if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"), os.path.join(prefix, "Extras", "lib", "python")] else: # any other Python distros on OSX work this way sitedirs = [os.path.join(prefix, "lib", "python" + sys.version[:3], "site-packages")] elif os.sep == '/': sitedirs = [os.path.join(prefix, "lib", "python" + sys.version[:3], "site-packages"), os.path.join(prefix, "lib", "site-python"), os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")] lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages") if (os.path.exists(lib64_dir) and os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]): sitedirs.append(lib64_dir) try: # sys.getobjects 
only available in --with-pydebug build sys.getobjects sitedirs.insert(0, os.path.join(sitedirs[0], 'debug')) except AttributeError: pass # Debian-specific dist-packages directories: if sys.version[0] == '2': sitedirs.append(os.path.join(prefix, "lib", "python" + sys.version[:3], "dist-packages")) else: sitedirs.append(os.path.join(prefix, "lib", "python" + sys.version[0], "dist-packages")) sitedirs.append(os.path.join(prefix, "local/lib", "python" + sys.version[:3], "dist-packages")) sitedirs.append(os.path.join(prefix, "lib", "dist-python")) else: sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")] if sys.platform == 'darwin': # for framework builds *only* we add the standard Apple # locations. Currently only per-user, but /Library and # /Network/Library could be added too if 'Python.framework' in prefix: home = os.environ.get('HOME') if home: sitedirs.append( os.path.join(home, 'Library', 'Python', sys.version[:3], 'site-packages')) for sitedir in sitedirs: if os.path.isdir(sitedir): addsitedir(sitedir, known_paths) return None def check_enableusersite(): """Check if user site directory is safe for inclusion The function tests for the command line flag (including environment var), process uid/gid equal to effective uid/gid. None: Disabled for security reasons False: Disabled by user (command line option) True: Safe and enabled """ if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False): return False if hasattr(os, "getuid") and hasattr(os, "geteuid"): # check process uid == effective uid if os.geteuid() != os.getuid(): return None if hasattr(os, "getgid") and hasattr(os, "getegid"): # check process gid == effective gid if os.getegid() != os.getgid(): return None return True def addusersitepackages(known_paths): """Add a per user site-package to sys.path Each user has its own python directory with site-packages in the home directory. 
USER_BASE is the root directory for all Python versions USER_SITE is the user specific site-packages directory USER_SITE/.. can be used for data. """ global USER_BASE, USER_SITE, ENABLE_USER_SITE env_base = os.environ.get("PYTHONUSERBASE", None) def joinuser(*args): return os.path.expanduser(os.path.join(*args)) #if sys.platform in ('os2emx', 'riscos'): # # Don't know what to put here # USER_BASE = '' # USER_SITE = '' if os.name == "nt": base = os.environ.get("APPDATA") or "~" if env_base: USER_BASE = env_base else: USER_BASE = joinuser(base, "Python") USER_SITE = os.path.join(USER_BASE, "Python" + sys.version[0] + sys.version[2], "site-packages") else: if env_base: USER_BASE = env_base else: USER_BASE = joinuser("~", ".local") USER_SITE = os.path.join(USER_BASE, "lib", "python" + sys.version[:3], "site-packages") if ENABLE_USER_SITE and os.path.isdir(USER_SITE): addsitedir(USER_SITE, known_paths) if ENABLE_USER_SITE: for dist_libdir in ("lib", "local/lib"): user_site = os.path.join(USER_BASE, dist_libdir, "python" + sys.version[:3], "dist-packages") if os.path.isdir(user_site): addsitedir(user_site, known_paths) return known_paths def setBEGINLIBPATH(): """The OS/2 EMX port has optional extension modules that do double duty as DLLs (and must use the .DLL file extension) for other extensions. The library search path needs to be amended so these will be found during module import. Use BEGINLIBPATH so that these are at the start of the library search path. """ dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") libpath = os.environ['BEGINLIBPATH'].split(';') if libpath[-1]: libpath.append(dllpath) else: libpath[-1] = dllpath os.environ['BEGINLIBPATH'] = ';'.join(libpath) def setquit(): """Define new built-ins 'quit' and 'exit'. These are simply strings that display a hint on how to exit. """ if os.sep == ':': eof = 'Cmd-Q' elif os.sep == '\\': eof = 'Ctrl-Z plus Return' else: eof = 'Ctrl-D (i.e. 
EOF)' class Quitter(object): def __init__(self, name): self.name = name def __repr__(self): return 'Use %s() or %s to exit' % (self.name, eof) def __call__(self, code=None): # Shells like IDLE catch the SystemExit, but listen when their # stdin wrapper is closed. try: sys.stdin.close() except: pass raise SystemExit(code) builtins.quit = Quitter('quit') builtins.exit = Quitter('exit') class _Printer(object): """interactive prompt objects for printing the license text, a list of contributors and the copyright notice.""" MAXLINES = 23 def __init__(self, name, data, files=(), dirs=()): self.__name = name self.__data = data self.__files = files self.__dirs = dirs self.__lines = None def __setup(self): if self.__lines: return data = None for dir in self.__dirs: for filename in self.__files: filename = os.path.join(dir, filename) try: fp = file(filename, "rU") data = fp.read() fp.close() break except IOError: pass if data: break if not data: data = self.__data self.__lines = data.split('\n') self.__linecnt = len(self.__lines) def __repr__(self): self.__setup() if len(self.__lines) <= self.MAXLINES: return "\n".join(self.__lines) else: return "Type %s() to see the full %s text" % ((self.__name,)*2) def __call__(self): self.__setup() prompt = 'Hit Return for more, or q (and Return) to quit: ' lineno = 0 while 1: try: for i in range(lineno, lineno + self.MAXLINES): print(self.__lines[i]) except IndexError: break else: lineno += self.MAXLINES key = None while key is None: try: key = raw_input(prompt) except NameError: key = input(prompt) if key not in ('', 'q'): key = None if key == 'q': break def setcopyright(): """Set 'copyright' and 'credits' in __builtin__""" builtins.copyright = _Printer("copyright", sys.copyright) if _is_jython: builtins.credits = _Printer( "credits", "Jython is maintained by the Jython developers (www.jython.org).") elif _is_pypy: builtins.credits = _Printer( "credits", "PyPy is maintained by the PyPy developers: http://codespeak.net/pypy") else: 
builtins.credits = _Printer("credits", """\ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands for supporting Python development. See www.python.org for more information.""") here = os.path.dirname(os.__file__) builtins.license = _Printer( "license", "See http://www.python.org/%.3s/license.html" % sys.version, ["LICENSE.txt", "LICENSE"], [os.path.join(here, os.pardir), here, os.curdir]) class _Helper(object): """Define the built-in 'help'. This is a wrapper around pydoc.help (with a twist). """ def __repr__(self): return "Type help() for interactive help, " \ "or help(object) for help about object." def __call__(self, *args, **kwds): import pydoc return pydoc.help(*args, **kwds) def sethelper(): builtins.help = _Helper() def aliasmbcs(): """On Windows, some default encodings are not provided by Python, while they are always available as "mbcs" in each locale. Make them usable by aliasing to "mbcs" in such a case.""" if sys.platform == 'win32': import locale, codecs enc = locale.getdefaultlocale()[1] if enc.startswith('cp'): # "cp***" ? try: codecs.lookup(enc) except LookupError: import encodings encodings._cache[enc] = encodings._unknown encodings.aliases.aliases[enc] = 'mbcs' def setencoding(): """Set the string encoding used by the Unicode implementation. The default is 'ascii', but if you're willing to experiment, you can change this.""" encoding = "ascii" # Default value set by _PyUnicode_Init() if 0: # Enable to support locale aware default string encodings. import locale loc = locale.getdefaultlocale() if loc[1]: encoding = loc[1] if 0: # Enable to switch off string to Unicode coercion and implicit # Unicode to string conversion. encoding = "undefined" if encoding != "ascii": # On Non-Unicode builds this will raise an AttributeError... sys.setdefaultencoding(encoding) # Needs Python Unicode build ! 
def execsitecustomize(): """Run custom site specific code, if available.""" try: import sitecustomize except ImportError: pass def virtual_install_main_packages(): f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt')) sys.real_prefix = f.read().strip() f.close() pos = 2 hardcoded_relative_dirs = [] if sys.path[0] == '': pos += 1 if sys.platform == 'win32': paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')] elif _is_jython: paths = [os.path.join(sys.real_prefix, 'Lib')] elif _is_pypy: if sys.pypy_version_info >= (1, 5): cpyver = '%d.%d' % sys.version_info[:2] else: cpyver = '%d.%d.%d' % sys.version_info[:3] paths = [os.path.join(sys.real_prefix, 'lib_pypy'), os.path.join(sys.real_prefix, 'lib-python', 'modified-%s' % cpyver), os.path.join(sys.real_prefix, 'lib-python', cpyver)] hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below # # This is hardcoded in the Python executable, but relative to sys.prefix: for path in paths[:]: plat_path = os.path.join(path, 'plat-%s' % sys.platform) if os.path.exists(plat_path): paths.append(plat_path) else: paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])] hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3]) if os.path.exists(lib64_path): paths.append(lib64_path) # This is hardcoded in the Python executable, but relative to sys.prefix: plat_path = os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3], 'plat-%s' % sys.platform) if os.path.exists(plat_path): paths.append(plat_path) # This is hardcoded in the Python executable, but # relative to sys.prefix, so we have to fix up: for path in list(paths): tk_dir = os.path.join(path, 'lib-tk') if os.path.exists(tk_dir): paths.append(tk_dir) # These are hardcoded in the Apple's Python executable, # but relative to sys.prefix, so we have to fix them up: if sys.platform == 'darwin': 
hardcoded_paths = [os.path.join(relative_dir, module) for relative_dir in hardcoded_relative_dirs for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')] for path in hardcoded_paths: if os.path.exists(path): paths.append(path) sys.path.extend(paths) def force_global_eggs_after_local_site_packages(): """ Force easy_installed eggs in the global environment to get placed in sys.path after all packages inside the virtualenv. This maintains the "least surprise" result that packages in the virtualenv always mask global packages, never the other way around. """ egginsert = getattr(sys, '__egginsert', 0) for i, path in enumerate(sys.path): if i > egginsert and path.startswith(sys.prefix): egginsert = i sys.__egginsert = egginsert + 1 def virtual_addsitepackages(known_paths): force_global_eggs_after_local_site_packages() return addsitepackages(known_paths, sys_prefix=sys.real_prefix) def fixclasspath(): """Adjust the special classpath sys.path entries for Jython. These entries should follow the base virtualenv lib directories. 
""" paths = [] classpaths = [] for path in sys.path: if path == '__classpath__' or path.startswith('__pyclasspath__'): classpaths.append(path) else: paths.append(path) sys.path = paths sys.path.extend(classpaths) def execusercustomize(): """Run custom user specific code, if available.""" try: import usercustomize except ImportError: pass def main(): global ENABLE_USER_SITE virtual_install_main_packages() abs__file__() paths_in_sys = removeduppaths() if (os.name == "posix" and sys.path and os.path.basename(sys.path[-1]) == "Modules"): addbuilddir() if _is_jython: fixclasspath() GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt')) if not GLOBAL_SITE_PACKAGES: ENABLE_USER_SITE = False if ENABLE_USER_SITE is None: ENABLE_USER_SITE = check_enableusersite() paths_in_sys = addsitepackages(paths_in_sys) paths_in_sys = addusersitepackages(paths_in_sys) if GLOBAL_SITE_PACKAGES: paths_in_sys = virtual_addsitepackages(paths_in_sys) if sys.platform == 'os2emx': setBEGINLIBPATH() setquit() setcopyright() sethelper() aliasmbcs() setencoding() execsitecustomize() if ENABLE_USER_SITE: execusercustomize() # Remove sys.setdefaultencoding() so that users cannot change the # encoding after initialization. The test for presence is needed when # this module is run as a script, because this code is executed twice. if hasattr(sys, "setdefaultencoding"): del sys.setdefaultencoding main() def _script(): help = """\ %s [--user-base] [--user-site] Without arguments print some useful information With arguments print the value of USER_BASE and/or USER_SITE separated by '%s'. 
Exit codes with --user-base or --user-site: 0 - user site directory is enabled 1 - user site directory is disabled by user 2 - uses site directory is disabled by super user or for security reasons >2 - unknown error """ args = sys.argv[1:] if not args: print("sys.path = [") for dir in sys.path: print(" %r," % (dir,)) print("]") def exists(path): if os.path.isdir(path): return "exists" else: return "doesn't exist" print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE))) print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE))) print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE) sys.exit(0) buffer = [] if '--user-base' in args: buffer.append(USER_BASE) if '--user-site' in args: buffer.append(USER_SITE) if buffer: print(os.pathsep.join(buffer)) if ENABLE_USER_SITE: sys.exit(0) elif ENABLE_USER_SITE is False: sys.exit(1) elif ENABLE_USER_SITE is None: sys.exit(2) else: sys.exit(3) else: import textwrap print(textwrap.dedent(help % (sys.argv[0], os.pathsep))) sys.exit(10) if __name__ == '__main__': _script()
Python
#!/usr/bin/python
# Copyright 2011 Google, Inc. All Rights Reserved.

# Simple script to walk source trees looking for third-party licenses and
# dump the resulting HTML report to stdout.

import os, re, mimetypes, sys

# Source directories to scan, read from the command line.
SOURCE = sys.argv[1:]

# Regex to find /* */ style comment blocks.
COMMENT_BLOCK = re.compile(r"(/\*.+?\*/)", re.MULTILINE | re.DOTALL)

# Regexes used to decide whether a comment block is a license notice.
COMMENT_LICENSE = re.compile(r"(license)", re.IGNORECASE)
COMMENT_COPYRIGHT = re.compile(r"(copyright)", re.IGNORECASE)

# Mime types that never carry comment-style license blocks.
EXCLUDE_TYPES = [
    "application/xml",
    "image/png",
]

# Known licenses, keyed by the license text with all non-word characters
# stripped and lowercased, so trivially-reformatted copies collapse into one.
KNOWN_LICENSES = {}


class License:
    """One distinct license text and the files it applies to."""

    def __init__(self, license_text):
        self.license_text = license_text
        self.filenames = []

    def add_file(self, filename):
        """Record *filename* as covered by this license (deduplicated)."""
        if filename not in self.filenames:
            self.filenames.append(filename)


LICENSE_KEY = re.compile(r"[^\w]")


def find_license(license_text):
    """Return the canonical License object for *license_text*.

    Texts that differ only in whitespace/punctuation/case map to the same
    License instance.
    """
    # TODO(alice): a lot of these licenses are almost identical Apache
    # licenses, differing only in origin/modifications.  Consider combining
    # similar licenses.
    license_key = LICENSE_KEY.sub("", license_text).lower()
    if license_key not in KNOWN_LICENSES:
        KNOWN_LICENSES[license_key] = License(license_text)
    return KNOWN_LICENSES[license_key]


def discover_license(exact_path, filename):
    """Record any license text found for *filename* at *exact_path*.

    A file named "<prefix>LICENSE" is taken as a license covering <prefix>;
    other files are scanned for comment blocks that mention both "license"
    and "copyright".  Always returns None.
    """
    # When the name ends with LICENSE, assume it applies to the file the
    # name is prefixed with (e.g. "foo.c.LICENSE" covers "foo.c").
    if filename.endswith("LICENSE"):
        with open(exact_path) as fh:
            license_text = fh.read()
        target_filename = filename[:-len("LICENSE")]
        if target_filename.endswith("."):
            target_filename = target_filename[:-1]
        find_license(license_text).add_file(target_filename)
        return None

    # Skip file types known not to carry comment licenses.
    # BUG FIX: mimetypes.guess_type() returns a (type, encoding) tuple; the
    # original compared the whole tuple against EXCLUDE_TYPES, so the
    # exclusion list never matched.  Compare the mime type string itself.
    mimetype, _encoding = mimetypes.guess_type(filename)
    if mimetype in EXCLUDE_TYPES:
        return None

    with open(exact_path) as fh:
        raw_file = fh.read()

    # Keep only comment blocks mentioning both "license" and "copyright".
    for match in COMMENT_BLOCK.finditer(raw_file):
        comment = match.group(1)
        if COMMENT_LICENSE.search(comment) is None:
            continue
        if COMMENT_COPYRIGHT.search(comment) is None:
            continue
        find_license(comment).add_file(filename)


def main():
    """Walk the SOURCE trees and print the HTML license report to stdout."""
    for source in SOURCE:
        for root, dirs, files in os.walk(source):
            for name in files:
                discover_license(os.path.join(root, name), name)

    print("<html><head><style> body { font-family: sans-serif; } pre { background-color: #eeeeee; padding: 1em; white-space: pre-wrap; } </style></head><body>")
    for lic in KNOWN_LICENSES.values():
        print("<h3>Notices for files:</h3><ul>")
        for filename in sorted(lic.filenames):
            print("<li>%s</li>" % (filename))
        print("</ul>")
        print("<pre>%s</pre>" % lic.license_text)
    print("</body></html>")


if __name__ == "__main__":
    main()
Python
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Base class for fixers (optional, but recommended).""" # Python imports import logging import itertools # Local imports from .patcomp import PatternCompiler from . import pygram from .fixer_util import does_tree_import from . import node_sm class BaseFix(object): """Optional base class for fixers. The subclass name must be FixFooBar where FooBar is the result of removing underscores and capitalizing the words of the fix name. For example, the class name for a fixer named 'has_key' should be FixHasKey. """ PATTERN = None # Most subclasses should override with a string literal pattern = None # Compiled pattern, set by compile_pattern() options = None # Options object passed to initializer filename = None # The filename (set by set_filename) logger = None # A logger (set by set_filename) numbers = itertools.count(1) # For new_name() used_names = set() # A set of all used NAMEs order = "post" # Does the fixer prefer pre- or post-order traversal explicit = False # Is this ignored by refactor.py -f all? run_order = 5 # Fixers will be sorted by run order before execution # Lower numbers will be run first. _accept_type = None # [Advanced and not public] This tells RefactoringTool # which node type to accept when there's not a pattern. sm_pattern = None # Pattern compiled into a state machine, set with # compile_sm_pattern() # Shortcut for access to Python grammar symbols syms = pygram.python_symbols def __init__(self, options, log): """Initializer. Subclass may override. Args: options: an dict containing the options passed to RefactoringTool that could be used to customize the fixer through the command line. log: a list to append warnings and other messages to. """ self.options = options self.log = log self.sm_pattern = None self.compile_pattern() def compile_pattern(self): """Compiles self.PATTERN into self.pattern. 
Subclass may override if it doesn't want to use self.{pattern,PATTERN} in .match(). """ if self.PATTERN is not None: self.pattern = PatternCompiler().compile_pattern(self.PATTERN) def compile_sm_pattern(self): """Compiles self.PATTERN into self.sm_pattern""" if (self.pattern is not None) and (self.sm_pattern is None): sm_pattern = node_sm.Pattern_Matcher() self.sm_pattern = self.pattern.build_sm_node(sm_pattern) def set_filename(self, filename): """Set the filename, and a logger derived from it. The main refactoring tool should call this. """ self.filename = filename self.logger = logging.getLogger(filename) def match(self, node): """Returns match for a given parse tree node. Should return a true or false object (not necessarily a bool). It may return a non-empty dict of matching sub-nodes as returned by a matching pattern. Subclass may override. """ results = {"node": node} return self.pattern.match(node, results) and results def transform(self, node, results): """Returns the transformation for a given parse tree node. Args: node: the root of the parse tree that matched the fixer. results: a dict mapping symbolic names to part of the match. Returns: None, or a node that is a modified copy of the argument node. The node argument may also be modified in-place to effect the same change. Subclass *must* override. """ raise NotImplementedError() def new_name(self, template=u"xxx_todo_changeme"): """Return a string suitable for use as an identifier The new name is guaranteed not to conflict with other identifiers. """ name = template while name in self.used_names: name = template + unicode(self.numbers.next()) self.used_names.add(name) return name def log_message(self, message): if self.first_log: self.first_log = False self.log.append("### In file %s ###" % self.filename) self.log.append(message) def cannot_convert(self, node, reason=None): """Warn the user that a given chunk of code is not valid Python 3, but that it cannot be converted automatically. 
First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. """ lineno = node.get_lineno() for_output = node.clone() for_output.prefix = u"" msg = "Line %d: could not convert: %s" self.log_message(msg % (lineno, for_output)) if reason: self.log_message(reason) def warning(self, node, reason): """Used for warning the user about possible uncertainty in the translation. First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. """ lineno = node.get_lineno() self.log_message("Line %d: %s" % (lineno, reason)) def start_tree(self, tree, filename): """Some fixers need to maintain tree-wide state. This method is called once, at the start of tree fix-up. tree - the root node of the tree to be processed. filename - the name of the file the tree came from. """ self.used_names = tree.used_names self.set_filename(filename) self.numbers = itertools.count(1) self.first_log = True def finish_tree(self, tree, filename): """Some fixers need to maintain tree-wide state. This method is called once, at the conclusion of tree fix-up. tree - the root node of the tree to be processed. filename - the name of the file the tree came from. """ pass class ConditionalFix(BaseFix): """ Base class for fixers which not execute if an import is found. """ # This is the name of the import which, if found, will cause the test to be skipped skip_on = None def start_tree(self, *args): super(ConditionalFix, self).start_tree(*args) self._should_skip = None def should_skip(self, node): if self._should_skip is not None: return self._should_skip pkg = self.skip_on.split(".") name = pkg[-1] pkg = ".".join(pkg[:-1]) self._should_skip = does_tree_import(pkg, name, node) return self._should_skip
Python
"""Utility functions, node construction macros, etc.""" # Author: Collin Winter # Local imports from .pgen2 import token from .pytree import Leaf, Node from .pygram import python_symbols as syms from . import patcomp ########################################################### ### Common node-construction "macros" ########################################################### def KeywordArg(keyword, value): return Node(syms.argument, [keyword, Leaf(token.EQUAL, u'='), value]) def LParen(): return Leaf(token.LPAR, u"(") def RParen(): return Leaf(token.RPAR, u")") def Assign(target, source): """Build an assignment statement""" if not isinstance(target, list): target = [target] if not isinstance(source, list): source.prefix = u" " source = [source] return Node(syms.atom, target + [Leaf(token.EQUAL, u"=", prefix=u" ")] + source) def Name(name, prefix=None): """Return a NAME leaf""" return Leaf(token.NAME, name, prefix=prefix) def Attr(obj, attr): """A node tuple for obj.attr""" return [obj, Node(syms.trailer, [Dot(), attr])] def Comma(): """A comma leaf""" return Leaf(token.COMMA, u",") def Dot(): """A period (.) 
leaf""" return Leaf(token.DOT, u".") def ArgList(args, lparen=LParen(), rparen=RParen()): """A parenthesised argument list, used by Call()""" node = Node(syms.trailer, [lparen.clone(), rparen.clone()]) if args: node.insert_child(1, Node(syms.arglist, args)) return node def Call(func_name, args=None, prefix=None): """A function call""" node = Node(syms.power, [func_name, ArgList(args)]) if prefix is not None: node.prefix = prefix return node def Newline(): """A newline literal""" return Leaf(token.NEWLINE, u"\n") def BlankLine(): """A blank line""" return Leaf(token.NEWLINE, u"") def Number(n, prefix=None): return Leaf(token.NUMBER, n, prefix=prefix) def Subscript(index_node): """A numeric or string subscript""" return Node(syms.trailer, [Leaf(token.LBRACE, u'['), index_node, Leaf(token.RBRACE, u']')]) def String(string, prefix=None): """A string leaf""" return Leaf(token.STRING, string, prefix=prefix) def ListComp(xp, fp, it, test=None): """A list comprehension of the form [xp for fp in it if test]. If test is None, the "if test" part is omitted. """ xp.prefix = u"" fp.prefix = u" " it.prefix = u" " for_leaf = Leaf(token.NAME, u"for") for_leaf.prefix = u" " in_leaf = Leaf(token.NAME, u"in") in_leaf.prefix = u" " inner_args = [for_leaf, fp, in_leaf, it] if test: test.prefix = u" " if_leaf = Leaf(token.NAME, u"if") if_leaf.prefix = u" " inner_args.append(Node(syms.comp_if, [if_leaf, test])) inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)]) return Node(syms.atom, [Leaf(token.LBRACE, u"["), inner, Leaf(token.RBRACE, u"]")]) def FromImport(package_name, name_leafs): """ Return an import statement in the form: from package import name_leafs""" # XXX: May not handle dotted imports properly (eg, package_name='foo.bar') #assert package_name == '.' or '.' not in package_name, "FromImport has "\ # "not been tested with dotted package names -- use at your own "\ # "peril!" 
for leaf in name_leafs: # Pull the leaves out of their old tree leaf.remove() children = [Leaf(token.NAME, u'from'), Leaf(token.NAME, package_name, prefix=u" "), Leaf(token.NAME, u'import', prefix=u" "), Node(syms.import_as_names, name_leafs)] imp = Node(syms.import_from, children) return imp ########################################################### ### Determine whether a node represents a given literal ########################################################### def is_tuple(node): """Does the node represent a tuple literal?""" if isinstance(node, Node) and node.children == [LParen(), RParen()]: return True return (isinstance(node, Node) and len(node.children) == 3 and isinstance(node.children[0], Leaf) and isinstance(node.children[1], Node) and isinstance(node.children[2], Leaf) and node.children[0].value == u"(" and node.children[2].value == u")") def is_list(node): """Does the node represent a list literal?""" return (isinstance(node, Node) and len(node.children) > 1 and isinstance(node.children[0], Leaf) and isinstance(node.children[-1], Leaf) and node.children[0].value == u"[" and node.children[-1].value == u"]") ########################################################### ### Misc ########################################################### def parenthesize(node): return Node(syms.atom, [LParen(), node, RParen()]) consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum", "min", "max"]) def attr_chain(obj, attr): """Follow an attribute chain. If you have a chain of objects where a.foo -> b, b.foo-> c, etc, use this to iterate over all objects in the chain. Iteration is terminated by getattr(x, attr) is None. Args: obj: the starting object attr: the name of the chaining attribute Yields: Each successive object in the chain. 
""" next = getattr(obj, attr) while next: yield next next = getattr(next, attr) p0 = """for_stmt< 'for' any 'in' node=any ':' any* > | comp_for< 'for' any 'in' node=any any* > """ p1 = """ power< ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | 'any' | 'all' | (any* trailer< '.' 'join' >) ) trailer< '(' node=any ')' > any* > """ p2 = """ power< 'sorted' trailer< '(' arglist<node=any any*> ')' > any* > """ pats_built = False def in_special_context(node): """ Returns true if node is in an environment where all that is required of it is being itterable (ie, it doesn't matter if it returns a list or an itterator). See test_map_nochange in test_fixers.py for some examples and tests. """ global p0, p1, p2, pats_built if not pats_built: p1 = patcomp.compile_pattern(p1) p0 = patcomp.compile_pattern(p0) p2 = patcomp.compile_pattern(p2) pats_built = True patterns = [p0, p1, p2] for pattern, parent in zip(patterns, attr_chain(node, "parent")): results = {} if pattern.match(parent, results) and results["node"] is node: return True return False def is_probably_builtin(node): """ Check that something isn't an attribute or function name etc. """ prev = node.prev_sibling if prev is not None and prev.type == token.DOT: # Attribute lookup. return False parent = node.parent if parent.type in (syms.funcdef, syms.classdef): return False if parent.type == syms.expr_stmt and parent.children[0] is node: # Assignment. return False if parent.type == syms.parameters or \ (parent.type == syms.typedargslist and ( (prev is not None and prev.type == token.COMMA) or parent.children[0] is node )): # The name of an argument. 
return False return True ########################################################### ### The following functions are to find bindings in a suite ########################################################### def make_suite(node): if node.type == syms.suite: return node node = node.clone() parent, node.parent = node.parent, None suite = Node(syms.suite, [node]) suite.parent = parent return suite def find_root(node): """Find the top level namespace.""" # Scamper up to the top level namespace while node.type != syms.file_input: assert node.parent, "Tree is insane! root found before "\ "file_input node was found." node = node.parent return node def does_tree_import(package, name, node): """ Returns true if name is imported from package at the top level of the tree which node belongs to. To cover the case of an import like 'import foo', use None for the package and 'foo' for the name. """ binding = find_binding(name, find_root(node), package) return bool(binding) def is_import(node): """Returns true if the node is an import statement.""" return node.type in (syms.import_name, syms.import_from) def touch_import(package, name, node): """ Works like `does_tree_import` but adds an import statement if it was not imported. """ def is_import_stmt(node): return node.type == syms.simple_stmt and node.children and \ is_import(node.children[0]) root = find_root(node) if does_tree_import(package, name, root): return # figure out where to insert the new import. First try to find # the first import and then skip to the last one. insert_pos = offset = 0 for idx, node in enumerate(root.children): if not is_import_stmt(node): continue for offset, node2 in enumerate(root.children[idx:]): if not is_import_stmt(node2): break insert_pos = idx + offset break # if there are no imports where we can insert, find the docstring. 
# if that also fails, we stick to the beginning of the file if insert_pos == 0: for idx, node in enumerate(root.children): if node.type == syms.simple_stmt and node.children and \ node.children[0].type == token.STRING: insert_pos = idx + 1 break if package is None: import_ = Node(syms.import_name, [ Leaf(token.NAME, u'import'), Leaf(token.NAME, name, prefix=u' ') ]) else: import_ = FromImport(package, [Leaf(token.NAME, name, prefix=u' ')]) children = [import_, Newline()] root.insert_child(insert_pos, Node(syms.simple_stmt, children)) _def_syms = set([syms.classdef, syms.funcdef]) def find_binding(name, node, package=None): """ Returns the node which binds variable name, otherwise None. If optional argument package is supplied, only imports will be returned. See test cases for examples.""" for child in node.children: ret = None if child.type == syms.for_stmt: if _find(name, child.children[1]): return child n = find_binding(name, make_suite(child.children[-1]), package) if n: ret = n elif child.type in (syms.if_stmt, syms.while_stmt): n = find_binding(name, make_suite(child.children[-1]), package) if n: ret = n elif child.type == syms.try_stmt: n = find_binding(name, make_suite(child.children[2]), package) if n: ret = n else: for i, kid in enumerate(child.children[3:]): if kid.type == token.COLON and kid.value == ":": # i+3 is the colon, i+4 is the suite n = find_binding(name, make_suite(child.children[i+4]), package) if n: ret = n elif child.type in _def_syms and child.children[1].value == name: ret = child elif _is_import_binding(child, name, package): ret = child elif child.type == syms.simple_stmt: ret = find_binding(name, child, package) elif child.type == syms.expr_stmt: if _find(name, child.children[0]): ret = child if ret: if not package: return ret if is_import(ret): return ret return None _block_syms = set([syms.funcdef, syms.classdef, syms.trailer]) def _find(name, node): nodes = [node] while nodes: node = nodes.pop() if node.type > 256 and node.type not 
in _block_syms: nodes.extend(node.children) elif node.type == token.NAME and node.value == name: return node return None def _is_import_binding(node, name, package=None): """ Will reuturn node if node will import name, or node will import * from package. None is returned otherwise. See test cases for examples. """ if node.type == syms.import_name and not package: imp = node.children[1] if imp.type == syms.dotted_as_names: for child in imp.children: if child.type == syms.dotted_as_name: if child.children[2].value == name: return node elif child.type == token.NAME and child.value == name: return node elif imp.type == syms.dotted_as_name: last = imp.children[-1] if last.type == token.NAME and last.value == name: return node elif imp.type == token.NAME and imp.value == name: return node elif node.type == syms.import_from: # unicode(...) is used to make life easier here, because # from a.b import parses to ['import', ['a', '.', 'b'], ...] if package and unicode(node.children[1]).strip() != package: return None n = node.children[3] if package and _find(u'as', n): # See test_from_import_as for explanation return None elif n.type == syms.import_as_names and _find(name, n): return node elif n.type == syms.import_as_name: child = n.children[2] if child.type == token.NAME and child.value == name: return node elif n.type == token.NAME and n.value == name: return node elif package and n.type == token.STAR: return node return None
Python
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes buffer(...) into memoryview(...)."""

# Local imports
from .. import fixer_base
from ..fixer_util import Name


class FixBuffer(fixer_base.BaseFix):
    """Rewrite calls to the builtin 'buffer' as 'memoryview'."""

    #TODO Matt: Change this back after testing
    #explicit = True # The user must ask for this fixer

    PATTERN = """
              power< name='buffer' trailer< '(' [any] ')' > any* >
              """

    def transform(self, node, results):
        # Swap the matched 'buffer' name leaf for 'memoryview', keeping the
        # leaf's prefix (leading whitespace/comments) intact.
        buffer_leaf = results["name"]
        replacement = Name(u"memoryview", prefix=buffer_leaf.prefix)
        buffer_leaf.replace(replacement)
Python
"""Fixer for it.next() -> next(it), per PEP 3114.""" # Author: Collin Winter # Things that currently aren't covered: # - listcomp "next" names aren't warned # - "with" statement targets aren't checked # Local imports from ..pgen2 import token from ..pygram import python_symbols as syms from .. import fixer_base from ..fixer_util import Name, Call, find_binding bind_warning = "Calls to builtin next() possibly shadowed by global binding" class FixNext(fixer_base.BaseFix): PATTERN = """ power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > | power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > | classdef< 'class' any+ ':' suite< any* funcdef< 'def' name='next' parameters< '(' NAME ')' > any+ > any* > > | global=global_stmt< 'global' any* 'next' any* > """ order = "pre" # Pre-order tree traversal def start_tree(self, tree, filename): super(FixNext, self).start_tree(tree, filename) n = find_binding(u'next', tree) if n: self.warning(n, bind_warning) self.shadowed_next = True else: self.shadowed_next = False def transform(self, node, results): assert results base = results.get("base") attr = results.get("attr") name = results.get("name") if base: if self.shadowed_next: attr.replace(Name(u"__next__", prefix=attr.prefix)) else: base = [n.clone() for n in base] base[0].prefix = u"" node.replace(Call(Name(u"next", prefix=node.prefix), base)) elif name: n = Name(u"__next__", prefix=name.prefix) name.replace(n) elif attr: # We don't do this transformation if we're assigning to "x.next". # Unfortunately, it doesn't seem possible to do this in PATTERN, # so it's being done here. if is_assign_target(node): head = results["head"] if "".join([str(n) for n in head]).strip() == u'__builtin__': self.warning(node, bind_warning) return attr.replace(Name(u"__next__")) elif "global" in results: self.warning(node, bind_warning) self.shadowed_next = True ### The following functions help test if node is part of an assignment ### target. 
def is_assign_target(node):
    """Return True if *node* appears on the left-hand side of an assignment."""
    assign = find_assign(node)
    if assign is None:
        return False
    # Only children that precede the '=' token are assignment targets; once
    # the '=' is seen, everything after it is the assigned value.
    for candidate in assign.children:
        if candidate.type == token.EQUAL:
            return False
        if is_subtree(candidate, node):
            return True
    return False


def find_assign(node):
    """Walk upward to the enclosing expr_stmt, stopping at statement level."""
    while node is not None and node.type != syms.simple_stmt:
        if node.type == syms.expr_stmt:
            return node
        node = node.parent
    return None


def is_subtree(root, node):
    """Return True if *node* is *root* itself or occurs anywhere under it."""
    if root == node:
        return True
    for child in root.children:
        if is_subtree(child, node):
            return True
    return False
Python
# Dummy file to make this directory a package.
Python
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Pattern compiler. The grammer is taken from PatternGrammar.txt. The compiler compiles a pattern to a pytree.*Pattern instance. """ __author__ = "Guido van Rossum <guido@python.org>" # Python imports import os # Fairly local imports from .pgen2 import driver, literals, token, tokenize, parse, grammar # Really local imports from . import pytree from . import pygram # The pattern grammar file _PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "PatternGrammar.txt") class PatternSyntaxError(Exception): pass def tokenize_wrapper(input): """Tokenizes a string suppressing significant whitespace.""" skip = set((token.NEWLINE, token.INDENT, token.DEDENT)) tokens = tokenize.generate_tokens(driver.generate_lines(input).next) for quintuple in tokens: type, value, start, end, line_text = quintuple if type not in skip: yield quintuple class PatternCompiler(object): def __init__(self, grammar_file=_PATTERN_GRAMMAR_FILE): """Initializer. Takes an optional alternative filename for the pattern grammar. 
""" self.grammar = driver.load_grammar(grammar_file) self.syms = pygram.Symbols(self.grammar) self.pygrammar = pygram.python_grammar self.pysyms = pygram.python_symbols self.driver = driver.Driver(self.grammar, convert=pattern_convert) def compile_pattern(self, input, debug=False): """Compiles a pattern string to a nested pytree.*Pattern object.""" tokens = tokenize_wrapper(input) try: root = self.driver.parse_tokens(tokens, debug=debug) except parse.ParseError, e: raise PatternSyntaxError(str(e)) return self.compile_node(root) def compile_sm_pattern(self, input, debug=False): """Compiles a nested pytree.*Pattern into a node_sm matcher.""" #tokens = tokenize_wrapper(input) #try: # root = self.driver.parse_tokens(tokens, debug=debug) #except parse.ParseError, e: # raise PatternSyntaxError(str(e)) #return (self.compile_sm_node(root), root) pass def compile_sm_node(self, node): return None def compile_node(self, node): """Compiles a node, recursively. This is one big switch on the node type. 
""" # XXX Optimize certain Wildcard-containing-Wildcard patterns # that can be merged if node.type == self.syms.Matcher: node = node.children[0] # Avoid unneeded recursion if node.type == self.syms.Alternatives: # Skip the odd children since they are just '|' tokens alts = [self.compile_node(ch) for ch in node.children[::2]] if len(alts) == 1: return alts[0] p = pytree.WildcardPattern([[a] for a in alts], min=1, max=1) return p.optimize() if node.type == self.syms.Alternative: units = [self.compile_node(ch) for ch in node.children] if len(units) == 1: return units[0] p = pytree.WildcardPattern([units], min=1, max=1) return p.optimize() if node.type == self.syms.NegatedUnit: pattern = self.compile_basic(node.children[1:]) p = pytree.NegatedPattern(pattern) return p.optimize() assert node.type == self.syms.Unit name = None nodes = node.children if len(nodes) >= 3 and nodes[1].type == token.EQUAL: name = nodes[0].value nodes = nodes[2:] repeat = None if len(nodes) >= 2 and nodes[-1].type == self.syms.Repeater: repeat = nodes[-1] nodes = nodes[:-1] # Now we've reduced it to: STRING | NAME [Details] | (...) | [...] pattern = self.compile_basic(nodes, repeat) if repeat is not None: assert repeat.type == self.syms.Repeater children = repeat.children child = children[0] if child.type == token.STAR: min = 0 max = pytree.HUGE elif child.type == token.PLUS: min = 1 max = pytree.HUGE elif child.type == token.LBRACE: assert children[-1].type == token.RBRACE assert len(children) in (3, 5) min = max = self.get_int(children[1]) if len(children) == 5: max = self.get_int(children[3]) else: assert False if min != 1 or max != 1: pattern = pattern.optimize() pattern = pytree.WildcardPattern([[pattern]], min=min, max=max) if name is not None: pattern.name = name return pattern.optimize() def compile_basic(self, nodes, repeat=None): # Compile STRING | NAME [Details] | (...) | [...] 
assert len(nodes) >= 1 node = nodes[0] if node.type == token.STRING: value = unicode(literals.evalString(node.value)) return pytree.LeafPattern(_type_of_literal(value), value) elif node.type == token.NAME: value = node.value if value.isupper(): if value not in TOKEN_MAP: raise PatternSyntaxError("Invalid token: %r" % value) if nodes[1:]: raise PatternSyntaxError("Can't have details for token") return pytree.LeafPattern(TOKEN_MAP[value]) else: if value == "any": type = None elif not value.startswith("_"): type = getattr(self.pysyms, value, None) if type is None: raise PatternSyntaxError("Invalid symbol: %r" % value) if nodes[1:]: # Details present content = [self.compile_node(nodes[1].children[1])] else: content = None return pytree.NodePattern(type, content) elif node.value == "(": return self.compile_node(nodes[1]) elif node.value == "[": assert repeat is None subpattern = self.compile_node(nodes[1]) return pytree.WildcardPattern([[subpattern]], min=0, max=1) assert False, node def get_int(self, node): assert node.type == token.NUMBER return int(node.value) # Map named tokens to the type value for a LeafPattern TOKEN_MAP = {"NAME": token.NAME, "STRING": token.STRING, "NUMBER": token.NUMBER, "TOKEN": None} def _type_of_literal(value): if value[0].isalpha(): return token.NAME elif value in grammar.opmap: return grammar.opmap[value] else: return None def pattern_convert(grammar, raw_node_info): """Converts raw node information to a Node or Leaf instance.""" type, value, context, children = raw_node_info if children or type in grammar.number2symbol: return pytree.Node(type, children, context=context) else: return pytree.Leaf(type, value, context=context) def compile_pattern(pattern): return PatternCompiler().compile_pattern(pattern)
Python
# Copyright 2010 G. M. Bond. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""
State machine based node matcher.

Implements a small NFA: State objects hold outgoing transitions and
Pattern_Matcher tracks the set of currently-active states while input
nodes are fed in one at a time.
"""

# Transition kinds.
EPSILON = 0 #Empty transition
TYPE = 1    #Transition based on type
STRING = 2  #Transition based on string matching
ANY = 3     #Transition matches any node
CLOSE = 4   #Transition matches end of specific node

#TODO Matt: Handle registration of starting a possible match

class Pattern_Matcher(object):
    """
    Holds the overall state and handles transitions.

    Currently inefficient, based on function calls. Could be compiled
    into a more efficient structure once generation is complete.
    """

    def __init__(self, priority=1):
        """Set up the initial states"""
        self.states = set()         # states active before the current input
        self.new_states = set()     # states active after the current input
        self.accept_states = set()  # states registered via State.accept()
        self.start_state = None     # entry point of the compiled pattern
        self.priority = priority

    def register_accept(self, state):
        """Add state to the set of accepting states"""
        self.accept_states.add(state)

    def next_node(self, node):
        """
        Determine the next set of states based on current states and
        input node.
        """
        self.new_states.clear()
        # Follow every non-epsilon transition out of every active state.
        # (The original built a generator here and then called len()/add()
        # on it, which raises at runtime; collect into a real set.)
        moved = set()
        for state in self.states:
            moved.update(state.move(node))
        if not moved:
            # No possible states returned. This means the input is not
            # valid for this pattern and we should return to the start.
            # We could also find a way to skip accept testing if this
            # branch executes, as it might speed things up.
            # NOTE(review): the original referenced the undefined
            # attribute `self.pattern_start`; `self.start_state` is the
            # attribute actually initialised -- confirm intent.
            moved.add(self.start_state)
        # Expand through epsilon transitions.  The original mutated the
        # set while iterating it and added whole sets as (unhashable)
        # elements; flatten the closures instead.
        closed = set(moved)
        for state in moved:
            closed.update(state.e_closure)
        self.new_states.update(closed)

    def check_for_accept(self):
        """
        Checks if we have entered any accepting states. Returns accepting
        states as a list sorted by priority, or None if nothing matched.

        This might be improved by returning only the highest priority
        matching state instead of sorting the whole list.
        """
        s = None
        # The original called the undefined free name `intersection(...)`;
        # use the set intersection operator instead.
        accepts = self.new_states & self.accept_states
        if accepts:
            # NOTE(review): this sorts on a per-state `priority` attribute
            # that State itself never sets -- presumably accepting states
            # are expected to carry one; confirm against the compiler.
            s = sorted(accepts, key=lambda state: state.priority)
        return s


class State(object):
    """An individual state and associated transitions"""

    def __init__(self, matcher):
        """
        Setup the state.

        Matcher should be a reference to the containing Pattern_Matcher.
        """
        # Set of (kind, target_state, condition) tuples.
        self.transitions = set()
        self.matcher = matcher

    def accept(self, action):
        """Mark this state as accepting and save the action function."""
        # The original did `self.accept = TRUE`: TRUE is an undefined
        # name, and the assignment would have shadowed this method on the
        # instance.  Record the flag under a separate attribute instead.
        self.accepting = True
        self.matcher.register_accept(self)
        self.action = action

    def add_transition(self, type, node, condition=None):
        """Register a transition out of this node"""
        #This is all just sanity checking and could be dropped for speed
        if type == EPSILON:
            assert condition is None
        elif type == TYPE:
            assert condition # is a valid node type? Where do we get that?
        elif type == STRING:
            #convert to unicode just in case (Python 2 codebase)
            condition = unicode(condition)
        elif type == ANY:
            assert condition is None
        elif type == CLOSE:
            #We're looking for an id here, but we don't know it at compile
            #time. So we should pass it another node. When this is tested,
            #we'll look at the id of the node that matched in the node we
            #were passed
            assert condition # isint?
        self.transitions.add( (type, node, condition) )

    @property
    def e_closure(self):
        """Get the epsilon closure of the node (cached after first use)."""
        try:
            return self._e_closure
        except AttributeError:
            # The original caught NameError (never raised for a missing
            # attribute) and called update()/add() on a generator.
            closure = set([self])
            for kind, target, condition in self.transitions:
                if kind == EPSILON:
                    # NOTE(review): recurses before the cache is set, so
                    # an epsilon cycle would loop forever -- confirm the
                    # compiler never builds one.
                    closure.update(target.e_closure)
            self._e_closure = closure
            return self._e_closure

    def move(self, in_node):
        """
        Get the node(s) active after following all possible transitions
        based on in_node, except those reached through epsilon transitions
        """
        nodes = set()
        for t in self.transitions:
            if t[0] == EPSILON:
                continue
            if t[1] in nodes:
                #it might save a costly comparison, especially in regard to
                #string comparison? Maybe?
                continue
            else:
                # The original tested the undefined name `node` in the
                # CLOSE and STRING arms; the parameter is `in_node`.
                if t[0] == ANY or \
                   (t[0] == TYPE and t[2] == in_node.type) or \
                   (t[0] == CLOSE and t[2] == id(in_node)) or \
                   (t[0] == STRING and t[2] == in_node.value):
                    nodes.add(t[1])
        return nodes
Python
""" Main program for 2to3. """ from __future__ import with_statement import sys import os import difflib import logging import shutil import optparse from . import refactor def diff_texts(a, b, filename): """Return a unified diff of two strings.""" a = a.splitlines() b = b.splitlines() return difflib.unified_diff(a, b, filename, filename, "(original)", "(refactored)", lineterm="") class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): """ Prints output to stdout. """ def __init__(self, fixers, options, explicit, nobackups, show_diffs): self.nobackups = nobackups self.show_diffs = show_diffs super(StdoutRefactoringTool, self).__init__(fixers, options, explicit) def log_error(self, msg, *args, **kwargs): self.errors.append((msg, args, kwargs)) self.logger.error(msg, *args, **kwargs) def write_file(self, new_text, filename, old_text, encoding): if not self.nobackups: # Make backup backup = filename + ".bak" if os.path.lexists(backup): try: os.remove(backup) except os.error, err: self.log_message("Can't remove backup %s", backup) try: os.rename(filename, backup) except os.error, err: self.log_message("Can't rename %s to %s", filename, backup) # Actually write the new file write = super(StdoutRefactoringTool, self).write_file write(new_text, filename, old_text, encoding) if not self.nobackups: shutil.copymode(backup, filename) def print_output(self, old, new, filename, equal): if equal: self.log_message("No changes to %s", filename) else: self.log_message("Refactored %s", filename) if self.show_diffs: diff_lines = diff_texts(old, new, filename) try: if self.output_lock is not None: with self.output_lock: for line in diff_lines: print line sys.stdout.flush() else: for line in diff_lines: print line except UnicodeEncodeError: warn("couldn't encode %s's diff for your terminal" % (filename,)) return def warn(msg): print >> sys.stderr, "WARNING: %s" % (msg,) def main(fixer_pkg, args=None): """Main program. 
Args: fixer_pkg: the name of a package where the fixers are located. args: optional; a list of command line arguments. If omitted, sys.argv[1:] is used. Returns a suggested exit status (0, 1, 2). """ # Set up option parser parser = optparse.OptionParser(usage="2to3 [options] file|dir ...") parser.add_option("-d", "--doctests_only", action="store_true", help="Fix up doctests only") parser.add_option("-f", "--fix", action="append", default=[], help="Each FIX specifies a transformation; default: all") parser.add_option("-j", "--processes", action="store", default=1, type="int", help="Run 2to3 concurrently") parser.add_option("-x", "--nofix", action="append", default=[], help="Prevent a fixer from being run.") parser.add_option("-l", "--list-fixes", action="store_true", help="List available transformations") parser.add_option("-p", "--print-function", action="store_true", help="Modify the grammar so that print() is a function") parser.add_option("-v", "--verbose", action="store_true", help="More verbose logging") parser.add_option("--no-diffs", action="store_true", help="Don't show diffs of the refactoring") parser.add_option("-w", "--write", action="store_true", help="Write back modified files") parser.add_option("-n", "--nobackups", action="store_true", default=False, help="Don't write backups for modified files.") # Parse command line arguments refactor_stdin = False flags = {} options, args = parser.parse_args(args) if not options.write and options.no_diffs: warn("not writing files and not printing diffs; that's not very useful") if not options.write and options.nobackups: parser.error("Can't use -n without -w") if options.list_fixes: print "Available transformations for the -f/--fix option:" for fixname in refactor.get_all_fix_names(fixer_pkg): print fixname if not args: return 0 if not args: print >> sys.stderr, "At least one file or directory argument required." print >> sys.stderr, "Use --help to show usage." 
return 2 if "-" in args: refactor_stdin = True if options.write: print >> sys.stderr, "Can't write to stdin." return 2 if options.print_function: flags["print_function"] = True # Set up logging handler level = logging.DEBUG if options.verbose else logging.INFO logging.basicConfig(format='%(name)s: %(message)s', level=level) # Initialize the refactoring tool avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg)) unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix) explicit = set() if options.fix: all_present = False for fix in options.fix: if fix == "all": all_present = True else: explicit.add(fixer_pkg + ".fix_" + fix) requested = avail_fixes.union(explicit) if all_present else explicit else: requested = avail_fixes.union(explicit) fixer_names = requested.difference(unwanted_fixes) rt = StdoutRefactoringTool(sorted(fixer_names), flags, sorted(explicit), options.nobackups, not options.no_diffs) # Refactor all files and directories passed as arguments if not rt.errors: if refactor_stdin: rt.refactor_stdin() else: try: rt.refactor(args, options.write, options.doctests_only, options.processes) except refactor.MultiprocessingUnsupported: assert options.processes > 1 print >> sys.stderr, "Sorry, -j isn't " \ "supported on this platform." return 1 rt.summarize() # Return error status (0 if rt.errors is zero) return int(bool(rt.errors))
Python
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""
Python parse tree definitions.

This is a very concrete parse tree; we need to keep every token and even the
comments and whitespace between tokens.

There's also a pattern matching implementation here.
"""

__author__ = "Guido van Rossum <guido@python.org>"

import sys
import warnings
from StringIO import StringIO

from . import node_sm

HUGE = 0x7FFFFFFF  # maximum repeat count, default max

# Lazily-built cache mapping symbol numbers to their names, for reprs.
_type_reprs = {}

def type_repr(type_num):
    global _type_reprs
    if not _type_reprs:
        from .pygram import python_symbols
        # printing tokens is possible but not as useful
        # from .pgen2 import token // token.__dict__.items():
        for name, val in python_symbols.__dict__.items():
            if type(val) == int:
                _type_reprs[val] = name
    # Unknown numbers fall back to the number itself.
    return _type_reprs.setdefault(type_num, type_num)


class Base(object):

    """
    Abstract base class for Node and Leaf.

    This provides some default functionality and boilerplate using the
    template pattern.

    A node may be a subnode of at most one parent.
    """

    # Default values for instance variables
    type = None     # int: token number (< 256) or symbol number (>= 256)
    parent = None   # Parent node pointer, or None
    children = ()   # Tuple of subnodes
    was_changed = False

    def __new__(cls, *args, **kwds):
        """Constructor that prevents Base from being instantiated."""
        assert cls is not Base, "Cannot instantiate Base"
        return object.__new__(cls)

    def __eq__(self, other):
        """
        Compare two nodes for equality.

        This calls the method _eq().
        """
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self._eq(other)

    # Nodes are mutable, so they are deliberately unhashable.
    __hash__ = None # For Py3 compatibility.

    def __ne__(self, other):
        """
        Compare two nodes for inequality.

        This calls the method _eq().
        """
        if self.__class__ is not other.__class__:
            return NotImplemented
        return not self._eq(other)

    def _eq(self, other):
        """
        Compare two nodes for equality.

        This is called by __eq__ and __ne__. It is only called if the two
        nodes have the same type.

        This must be implemented by the concrete subclass. Nodes should be
        considered equal if they have the same structure, ignoring the prefix
        string and other context information.
        """
        raise NotImplementedError

    def clone(self):
        """
        Return a cloned (deep) copy of self.

        This must be implemented by the concrete subclass.
        """
        raise NotImplementedError

    def post_order(self):
        """
        Return a post-order iterator for the tree.

        This must be implemented by the concrete subclass.
        """
        raise NotImplementedError

    def pre_order(self):
        """
        Return a pre-order iterator for the tree.

        This must be implemented by the concrete subclass.
        """
        raise NotImplementedError

    def set_prefix(self, prefix):
        """
        Set the prefix for the node (see Leaf class).

        DEPRECATED; use the prefix property directly.
        """
        warnings.warn("set_prefix() is deprecated; use the prefix property",
                      DeprecationWarning, stacklevel=2)
        self.prefix = prefix

    def get_prefix(self):
        """
        Return the prefix for the node (see Leaf class).

        DEPRECATED; use the prefix property directly.
        """
        warnings.warn("get_prefix() is deprecated; use the prefix property",
                      DeprecationWarning, stacklevel=2)
        return self.prefix

    def replace(self, new):
        """Replace this node with a new one in the parent."""
        assert self.parent is not None, str(self)
        assert new is not None
        if not isinstance(new, list):
            new = [new]
        l_children = []
        found = False
        # Rebuild the parent's child list, splicing `new` in place of self.
        for ch in self.parent.children:
            if ch is self:
                assert not found, (self.parent.children, self, new)
                if new is not None:
                    l_children.extend(new)
                found = True
            else:
                l_children.append(ch)
        assert found, (self.children, self, new)
        self.parent.changed()
        self.parent.children = l_children
        for x in new:
            x.parent = self.parent
        self.parent = None

    def get_lineno(self):
        """Return the line number which generated the invocant node."""
        # Descend to the first leaf; returns None for a childless Node.
        node = self
        while not isinstance(node, Leaf):
            if not node.children:
                return
            node = node.children[0]
        return node.lineno

    def changed(self):
        # Propagate the dirty flag up to the root.
        if self.parent:
            self.parent.changed()
        self.was_changed = True

    def remove(self):
        """
        Remove the node from the tree. Returns the position of the node in its
        parent's children before it was removed.
        """
        if self.parent:
            for i, node in enumerate(self.parent.children):
                if node is self:
                    self.parent.changed()
                    del self.parent.children[i]
                    self.parent = None
                    return i

    @property
    def next_sibling(self):
        """
        The node immediately following the invocant in their parent's children
        list. If the invocant does not have a next sibling, it is None
        """
        if self.parent is None:
            return None

        # Can't use index(); we need to test by identity
        for i, child in enumerate(self.parent.children):
            if child is self:
                try:
                    return self.parent.children[i+1]
                except IndexError:
                    return None

    @property
    def prev_sibling(self):
        """
        The node immediately preceding the invocant in their parent's children
        list. If the invocant does not have a previous sibling, it is None.
        """
        if self.parent is None:
            return None

        # Can't use index(); we need to test by identity
        for i, child in enumerate(self.parent.children):
            if child is self:
                if i == 0:
                    return None
                return self.parent.children[i-1]

    def get_suffix(self):
        """
        Return the string immediately following the invocant node. This is
        effectively equivalent to node.next_sibling.prefix
        """
        next_sib = self.next_sibling
        if next_sib is None:
            return u""
        return next_sib.prefix

    if sys.version_info < (3, 0):
        def __str__(self):
            # Python 2 str() delegates to the subclass __unicode__.
            return unicode(self).encode("ascii")

    #TODO: Matt Remove this?
    #def type_repr(self):
    #    return type_repr(self.type)


class Node(Base):

    """Concrete implementation for interior nodes."""

    def __init__(self, type, children,
                 context=None,
                 prefix=None):
        """
        Initializer.

        Takes a type constant (a symbol number >= 256), a sequence of
        child nodes, and an optional context keyword argument.

        As a side effect, the parent pointers of the children are updated.
        """
        assert type >= 256, type
        self.type = type
        self.children = list(children)
        for ch in self.children:
            assert ch.parent is None, repr(ch)
            ch.parent = self
        if prefix is not None:
            self.prefix = prefix

    def __repr__(self):
        """Return a canonical string representation."""
        return "%s(%s, %r)" % (self.__class__.__name__,
                               type_repr(self.type),
                               self.children)

    def __unicode__(self):
        """
        Return a pretty string representation.

        This reproduces the input source exactly.
""" return u"".join(map(unicode, self.children)) if sys.version_info > (3, 0): __str__ = __unicode__ def _eq(self, other): """Compare two nodes for equality.""" return (self.type, self.children) == (other.type, other.children) def clone(self): """Return a cloned (deep) copy of self.""" return Node(self.type, [ch.clone() for ch in self.children]) def post_order(self): """Return a post-order iterator for the tree.""" for child in self.children: for node in child.post_order(): yield node yield self def pre_order(self): """Return a pre-order iterator for the tree.""" yield self for child in self.children: for node in child.post_order(): yield node def _prefix_getter(self): """ The whitespace and comments preceding this node in the input. """ if not self.children: return "" return self.children[0].prefix def _prefix_setter(self, prefix): if self.children: self.children[0].prefix = prefix prefix = property(_prefix_getter, _prefix_setter) def set_child(self, i, child): """ Equivalent to 'node.children[i] = child'. This method also sets the child's parent attribute appropriately. """ child.parent = self self.children[i].parent = None self.children[i] = child self.changed() def insert_child(self, i, child): """ Equivalent to 'node.children.insert(i, child)'. This method also sets the child's parent attribute appropriately. """ child.parent = self self.children.insert(i, child) self.changed() def append_child(self, child): """ Equivalent to 'node.children.append(child)'. This method also sets the child's parent attribute appropriately. """ child.parent = self self.children.append(child) self.changed() class Leaf(Base): """Concrete implementation for leaf nodes.""" # Default values for instance variables _prefix = "" # Whitespace and comments preceding this token in the input lineno = 0 # Line where this token starts in the input column = 0 # Column where this token tarts in the input def __init__(self, type, value, context=None, prefix=None): """ Initializer. 
Takes a type constant (a token number < 256), a string value, and an optional context keyword argument. """ assert 0 <= type < 256, type if context is not None: self._prefix, (self.lineno, self.column) = context self.type = type self.value = value if prefix is not None: self._prefix = prefix def __repr__(self): """Return a canonical string representation.""" return "%s(%r, %r)" % (self.__class__.__name__, self.type, self.value) def __unicode__(self): """ Return a pretty string representation. This reproduces the input source exactly. """ return self.prefix + unicode(self.value) if sys.version_info > (3, 0): __str__ = __unicode__ def _eq(self, other): """Compare two nodes for equality.""" return (self.type, self.value) == (other.type, other.value) def clone(self): """Return a cloned (deep) copy of self.""" return Leaf(self.type, self.value, (self.prefix, (self.lineno, self.column))) def post_order(self): """Return a post-order iterator for the tree.""" yield self def pre_order(self): """Return a pre-order iterator for the tree.""" yield self def _prefix_getter(self): """ The whitespace and comments preceding this token in the input. """ return self._prefix def _prefix_setter(self, prefix): self.changed() self._prefix = prefix prefix = property(_prefix_getter, _prefix_setter) def convert(gr, raw_node): """ Convert raw node information to a Node or Leaf instance. This is passed to the parser driver which calls it whenever a reduction of a grammar rule produces a new complete node, so that the tree is build strictly bottom-up. """ type, value, context, children = raw_node if children or type in gr.number2symbol: # If there's exactly one child, return that child instead of # creating a new node. if len(children) == 1: return children[0] return Node(type, children, context=context) else: return Leaf(type, value, context=context) class BasePattern(object): """ A pattern is a tree matching pattern. 
    It looks for a specific node type (token or symbol), and optionally for a
    specific content.

    This is an abstract base class.  There are three concrete subclasses:

    - LeafPattern matches a single leaf node;
    - NodePattern matches a single node (usually non-leaf);
    - WildcardPattern matches a sequence of nodes of variable length.
    """

    # Defaults for instance variables
    type = None     # Node type (token if < 256, symbol if >= 256)
    content = None  # Optional content matching pattern
    name = None     # Optional name used to store match in results dict

    def __new__(cls, *args, **kwds):
        """Constructor that prevents BasePattern from being instantiated."""
        assert cls is not BasePattern, "Cannot instantiate BasePattern"
        return object.__new__(cls)

    def __repr__(self):
        # Drop trailing None arguments for a compact repr.
        args = [type_repr(self.type), self.content, self.name]
        while args and args[-1] is None:
            del args[-1]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args)))

    def optimize(self):
        """
        A subclass can define this as a hook for optimizations.

        Returns either self or another node with the same effect.
        """
        return self

    def match(self, node, results=None):
        """
        Does this pattern exactly match a node?

        Returns True if it matches, False if not.

        If results is not None, it must be a dict which will be updated with
        the nodes matching named subpatterns.

        Default implementation for non-wildcard patterns.
        """
        if self.type is not None and node.type != self.type:
            return False
        if self.content is not None:
            r = None
            if results is not None:
                r = {}
            if not self._submatch(node, r):
                return False
            # Only merge submatch captures into results on success.
            if r:
                results.update(r)
        if results is not None and self.name:
            results[self.name] = node
        return True

    def match_seq(self, nodes, results=None):
        """
        Does this pattern exactly match a sequence of nodes?

        Default implementation for non-wildcard patterns.
        """
        if len(nodes) != 1:
            return False
        return self.match(nodes[0], results)

    def generate_matches(self, nodes):
        """
        Generator yielding all matches for this pattern.

        Default implementation for non-wildcard patterns.
        """
        # A non-wildcard pattern consumes exactly one node, hence count 1.
        r = {}
        if nodes and self.match(nodes[0], r):
            yield 1, r

    #TODO: Matt Remove this?
    #def type_repr(self):
    #    return type_repr(self.type)

    def node_repr(self):
        """Return node information suitable for graphing"""
        return "Classtype: Base Node"

    def graph_node(self, graph, graph_children=True):
        """Add this node to graph, and optionally its children. It may be
        perfectly acceptable in some cases to not add this node, so concrete
        subclasses must override this if they should appear on the graph."""

    def build_sm_node(self, pattern):
        """
        Returns the pattern of this node and its children if any as
        node_sm.State objects configured to match this node's pattern,
        including children.

        This is a partial pattern, and will not run. To build a full pattern,
        call build_sm_pattern instead.
        """
        raise NotImplementedError

    # Functionality Moved to fixer_base
    # def build_sm_pattern(self):
    #    """
    #    Returns the pattern of this node and its children rendered as
    #    node_sm objects. This method should be called on the root of a pattern
    #    and will generate a full pattern matcher.
    #    """
    #    raise NotImplementedError


class LeafPattern(BasePattern):

    def __init__(self, type=None, content=None, name=None):
        """
        Initializer.  Takes optional type, content, and name.

        The type, if given must be a token type (< 256).  If not given,
        this matches any *leaf* node; the content may still be required.

        The content, if given, must be a string.

        If a name is given, the matching node is stored in the results
        dict under that key.
        """
        if type is not None:
            assert 0 <= type < 256, type
        if content is not None:
            assert isinstance(content, basestring), repr(content)
        self.type = type
        self.content = content
        self.name = name

    def match(self, node, results=None):
        """Override match() to insist on a leaf node."""
        if not isinstance(node, Leaf):
            return False
        return BasePattern.match(self, node, results)

    def _submatch(self, node, results=None):
        """
        Match the pattern's content to the node's children.

        This assumes the node type matches and self.content is not None.

        Returns True if it matches, False if not.

        If results is not None, it must be a dict which will be updated with
        the nodes matching named subpatterns.

        When returning False, the results dict may still be updated.
        """
        return self.content == node.value

    def node_repr(self):
        """Return node information suitable for graphing"""
        str_name_list = ["Classtype: Leaf"]
        if self.type is None:
            str_name_list.append("Match: any leaf{1}")
        else:
            str_name_list.append("Match: %s{1}" % type_repr(self.type))
        if self.content is not None:
            str_name_list.append("Match String: %s" % self.content)
        if self.name is not None:
            str_name_list.append("Store as: %s" % self.name)
        # Literal backslash-n so the graphing tool renders line breaks.
        return '\\n'.join(str_name_list)

    def graph_node(self, graph, graph_children=True):
        """Add this node to graph, and optionally its children."""
        #Graph self
        this = graph.add_node(self.node_repr())
        #Leaf Nodes have no children
        return this

    def build_sm_node(self, pattern):
        """
        Registers the pattern of this node and its children if any as
        node_sm.State objects in pattern, configured to match this node's
        pattern including children.

        This is a partial pattern, and will not run. To build a full pattern,
        call build_sm_pattern instead.
        """
        # NOTE(review): work in progress -- the transition wiring below is
        # entirely commented out, so this returns an unconnected end state,
        # and the trailing `raise NotImplementedError` is unreachable.
        start_node = node_sm.State(pattern)
        end_node = node_sm.State(pattern)
        #if self.type is None:
            #We're matching any one node. Transition on type, then eat any
            #other nodes we encounter as children of this node until we hit
            #the end of the current node.
        #    parent.add_transition(node_sm.ANY, start_node)
        #    start_node.add_transition(node_sm.CLOSE, end_node, start_node)
        #    start_node.add_transition(node_sm.ANY, start_node)
        #    return end_node
        #else:
        #    parent.add_transition(node_sm.TYPE, start_node, self.type)
        #    if self.content is not None:
                #We're matching a type node and children. Transition on type to
                #child nodes, finishing with a required transition for the end
                #of the parent node (Parent->Child*->End of Parent)
        #        last_child = None
                #If the following assert fails, we've found a Node with multiple
                #children. We don't know how to handle this.
        #        assert len(self.content) == 1
        #        for n in self.content
        #            last_child = n.build_sm_node(start_node, pattern)
        #        last_child.add_transition(node_sm.CLOSE, end_node, start_node)
        #    else:
        #        start_node.add_transition(node_sm.ANY, start_node)
        #        start_node.add_transition(node_sm.CLOSE, end_node, start_node)
        return end_node
        raise NotImplementedError

    # Functionality Moved to fixer_base
    # def build_sm_pattern(self):
    #    """
    #    Returns the pattern of this node and its children rendered as
    #    node_sm objects. This method should be called on the root of a pattern
    #    and will generate a full pattern matcher.
    #    """
    #    raise NotImplementedError


class NodePattern(BasePattern):

    # Set to True in __init__ when any child pattern is a WildcardPattern.
    wildcards = False

    def __init__(self, type=None, content=None, name=None):
        """
        Initializer.  Takes optional type, content, and name.

        The type, if given, must be a symbol type (>= 256).  If the
        type is None this matches *any* single node (leaf or not),
        except if content is not None, in which it only matches
        non-leaf nodes that also match the content pattern.

        The content, if not None, must be a sequence of Patterns that
        must match the node's children exactly.  If the content is
        given, the type must not be None.

        If a name is given, the matching node is stored in the results
        dict under that key.
""" if type is not None: assert type >= 256, type if content is not None: assert not isinstance(content, basestring), repr(content) content = list(content) for i, item in enumerate(content): assert isinstance(item, BasePattern), (i, item) if isinstance(item, WildcardPattern): self.wildcards = True self.type = type self.content = content self.name = name def _submatch(self, node, results=None): """ Match the pattern's content to the node's children. This assumes the node type matches and self.content is not None. Returns True if it matches, False if not. If results is not None, it must be a dict which will be updated with the nodes matching named subpatterns. When returning False, the results dict may still be updated. """ if self.wildcards: for c, r in generate_matches(self.content, node.children): if c == len(node.children): if results is not None: results.update(r) return True return False if len(self.content) != len(node.children): return False for subpattern, child in zip(self.content, node.children): if not subpattern.match(child, results): return False return True def node_repr(self): """Return node information suitable for graphing""" str_name_list = ["Classtype: Node"] if self.type is None: str_name_list.append("Match: any{1}") else: str_name_list.append("Match: %s{1}" % type_repr(self.type)) if self.name is not None: str_name_list.append("Store as: %s" % self.name) return '\\n'.join(str_name_list) def graph_node(self, graph, graph_children=True): """Add this node to graph, and optionally its children.""" #Graph self this = graph.add_node(self.node_repr()) #Graph children if graph_children and (self.content is not None): for n in self.content: child = n.graph_node(graph) graph.add_edge(this, child) return this def build_sm_node(self, parent, pattern): """ Returns the pattern of this node and its children if any as node_sm.State objects configured to match this node's pattern, including children. This is a partial pattern, and will not run. 
To build a full pattern, call build_sm_pattern instead. """ start_node = node_sm.State(pattern) end_node = node_sm.State(pattern) if self.type is None: #We're matching any one node. Transition on type, then eat any #other nodes we encounter as children of this node until we hit #the end of the current node. parent.add_transition(node_sm.ANY, start_node) start_node.add_transition(node_sm.CLOSE, end_node, start_node) start_node.add_transition(node_sm.ANY, start_node) return end_node else: parent.add_transition(node_sm.TYPE, start_node, self.type) if self.content is not None: #We're matching a type node and children. Transition on type to #child nodes, finishing with a required transition for the end #of the parent node (Parent->Child*->End of Parent) last_child = None #If the following assert fails, we've found a Node with multiple #children. We don't know how to handle this. assert len(self.content) == 1 for n in self.content: last_child = n.build_sm_node(start_node, pattern) last_child.add_transition(node_sm.CLOSE, end_node, start_node) else: start_node.add_transition(node_sm.ANY, start_node) start_node.add_transition(node_sm.CLOSE, end_node, start_node) return end_node # Functionality Moved to fixer_base # def build_sm_pattern(self): # """ # Returns the pattern of this node and its children rendered as # node_sm objects. This method should be called on the root of a pattern # and will generate a full pattern matcher. # """ # #set up overall pattern # pattern = node_sm.Pattern_Matcher() # self._build_sm_node(pattern) # self. # return pattern class WildcardPattern(BasePattern): """ A wildcard pattern can match zero or more nodes. This has all the flexibility needed to implement patterns like: .* .+ .? .{m,n} (a b c | d e | f) (...)* (...)+ (...)? (...){m,n} except it always uses non-greedy matching. """ def __init__(self, content=None, min=0, max=HUGE, name=None): """ Initializer. 
Args: content: optional sequence of subsequences of patterns; if absent, matches one node; if present, each subsequence is an alternative [*] min: optinal minumum number of times to match, default 0 max: optional maximum number of times tro match, default HUGE name: optional name assigned to this match [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is equivalent to (a b c | d e | f g h); if content is None, this is equivalent to '.' in regular expression terms. The min and max parameters work as follows: min=0, max=maxint: .* min=1, max=maxint: .+ min=0, max=1: .? min=1, max=1: . If content is not None, replace the dot with the parenthesized list of alternatives, e.g. (a b c | d e | f g h)* """ assert 0 <= min <= max <= HUGE, (min, max) if content is not None: content = tuple(map(tuple, content)) # Protect against alterations # Check sanity of alternatives assert len(content), repr(content) # Can't have zero alternatives for alt in content: assert len(alt), repr(alt) # Can have empty alternatives self.content = content self.min = min self.max = max self.name = name def optimize(self): """Optimize certain stacked wildcard patterns.""" subpattern = None if (self.content is not None and len(self.content) == 1 and len(self.content[0]) == 1): subpattern = self.content[0][0] if self.min == 1 and self.max == 1: if self.content is None: return NodePattern(name=self.name) if subpattern is not None and self.name == subpattern.name: return subpattern.optimize() if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and subpattern.min <= 1 and self.name == subpattern.name): return WildcardPattern(subpattern.content, self.min*subpattern.min, self.max*subpattern.max, subpattern.name) return self def match(self, node, results=None): """Does this pattern exactly match a node?""" return self.match_seq([node], results) def match_seq(self, nodes, results=None): """Does this pattern exactly match a sequence of nodes?""" for c, r in self.generate_matches(nodes): if 
c == len(nodes): if results is not None: results.update(r) if self.name: results[self.name] = list(nodes) return True return False def generate_matches(self, nodes): """ Generator yielding matches for a sequence of nodes. Args: nodes: sequence of nodes Yields: (count, results) tuples where: count: the match comprises nodes[:count]; results: dict containing named submatches. """ if self.content is None: # Shortcut for special case (see __init__.__doc__) for count in xrange(self.min, 1 + min(len(nodes), self.max)): r = {} if self.name: r[self.name] = nodes[:count] yield count, r elif self.name == "bare_name": yield self._bare_name_matches(nodes) else: # The reason for this is that hitting the recursion limit usually # results in some ugly messages about how RuntimeErrors are being # ignored. save_stderr = sys.stderr sys.stderr = StringIO() try: for count, r in self._recursive_matches(nodes, 0): if self.name: r[self.name] = nodes[:count] yield count, r except RuntimeError: # We fall back to the iterative pattern matching scheme if the recursive # scheme hits the recursion limit. 
for count, r in self._iterative_matches(nodes): if self.name: r[self.name] = nodes[:count] yield count, r finally: sys.stderr = save_stderr def _iterative_matches(self, nodes): """Helper to iteratively yield the matches.""" nodelen = len(nodes) if 0 >= self.min: yield 0, {} results = [] # generate matches that use just one alt from self.content for alt in self.content: for c, r in generate_matches(alt, nodes): yield c, r results.append((c, r)) # for each match, iterate down the nodes while results: new_results = [] for c0, r0 in results: # stop if the entire set of nodes has been matched if c0 < nodelen and c0 <= self.max: for alt in self.content: for c1, r1 in generate_matches(alt, nodes[c0:]): if c1 > 0: r = {} r.update(r0) r.update(r1) yield c0 + c1, r new_results.append((c0 + c1, r)) results = new_results def _bare_name_matches(self, nodes): """Special optimized matcher for bare_name.""" count = 0 r = {} done = False max = len(nodes) while not done and count < max: done = True for leaf in self.content: if leaf[0].match(nodes[count], r): count += 1 done = False break r[self.name] = nodes[:count] return count, r def _recursive_matches(self, nodes, count): """Helper to recursively yield the matches.""" assert self.content is not None if count >= self.min: yield 0, {} if count < self.max: for alt in self.content: for c0, r0 in generate_matches(alt, nodes): for c1, r1 in self._recursive_matches(nodes[c0:], count+1): r = {} r.update(r0) r.update(r1) yield c0 + c1, r def node_repr(self): """Return node information suitable for graphing""" #TODO: This needs number matching info added str_name_list = ["Classtype: Wildcard"] if self.content is None: str_name_list.append("Match: any") else: str_name_list.append("Match: subsequence") str_name_list.append if self.name is not None: str_name_list.append("Store as: %s" % self.name) return '\\n'.join(str_name_list) def graph_node(self, graph, graph_children=True): """Add this node to graph, and optionally its children.""" 
#Graph self this = graph.add_node(self.node_repr()) #Graph children if graph_children and (self.content is not None): for sequence in self.content: parent = graph.add_node("Sequence") graph.add_edge(this, parent) for node in sequence: child = node.graph_node(graph) graph.add_edge(parent, child) return this class NegatedPattern(BasePattern): def __init__(self, content=None): """ Initializer. The argument is either a pattern or None. If it is None, this only matches an empty sequence (effectively '$' in regex lingo). If it is not None, this matches whenever the argument pattern doesn't have any matches. """ if content is not None: assert isinstance(content, BasePattern), repr(content) self.content = content def match(self, node): # We never match a node in its entirety return False def match_seq(self, nodes): # We only match an empty sequence of nodes in its entirety return len(nodes) == 0 def generate_matches(self, nodes): if self.content is None: # Return a match if there is an empty sequence if len(nodes) == 0: yield 0, {} else: # Return a match if the argument pattern has no matches for c, r in self.content.generate_matches(nodes): return yield 0, {} def node_repr(self): """Return node information suitable for graphing""" str_name_list = ["Classtype: Negated Node"] if self.content is None: str_name_list.append("Match: $") else: str_name_list.append("Match: Not Subpattern") return '\\n'.join(str_name_list) def graph_node(self, graph, graph_children=True): """Add this node to graph, and optionally its children.""" #Graph self this = graph.add_node(self.node_repr()) #Graph children if graph_children and (self.content is not None): child = self.content.graph_node(graph) graph.add_edge(this, child) return this def generate_matches(patterns, nodes): """ Generator yielding matches for a sequence of patterns and nodes. 
Args: patterns: a sequence of patterns nodes: a sequence of nodes Yields: (count, results) tuples where: count: the entire sequence of patterns matches nodes[:count]; results: dict containing named submatches. """ if not patterns: yield 0, {} else: p, rest = patterns[0], patterns[1:] for c0, r0 in p.generate_matches(nodes): if not rest: yield c0, r0 else: for c1, r1 in generate_matches(rest, nodes[c0:]): r = {} r.update(r0) r.update(r1) yield c0 + c1, r
Python
#!/usr/bin/env python # Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Main program for testing the infrastructure.""" __author__ = "Guido van Rossum <guido@python.org>" # Support imports (need to be imported first) from . import support # Python imports import os import sys import logging # Local imports from .. import pytree import pgen2 from pgen2 import driver logging.basicConfig() def main(): gr = driver.load_grammar("Grammar.txt") dr = driver.Driver(gr, convert=pytree.convert) fn = "example.py" tree = dr.parse_file(fn, debug=True) if not diff(fn, tree): print "No diffs." if not sys.argv[1:]: return # Pass a dummy argument to run the complete test suite below problems = [] # Process every imported module for name in sys.modules: mod = sys.modules[name] if mod is None or not hasattr(mod, "__file__"): continue fn = mod.__file__ if fn.endswith(".pyc"): fn = fn[:-1] if not fn.endswith(".py"): continue print >>sys.stderr, "Parsing", fn tree = dr.parse_file(fn, debug=True) if diff(fn, tree): problems.append(fn) # Process every single module on sys.path (but not in packages) for dir in sys.path: try: names = os.listdir(dir) except os.error: continue print >>sys.stderr, "Scanning", dir, "..." for name in names: if not name.endswith(".py"): continue print >>sys.stderr, "Parsing", name fn = os.path.join(dir, name) try: tree = dr.parse_file(fn, debug=True) except pgen2.parse.ParseError, err: print "ParseError:", err else: if diff(fn, tree): problems.append(fn) # Show summary of problem files if not problems: print "No problems. Congratulations!" else: print "Problems in following files:" for fn in problems: print "***", fn def diff(fn, tree): f = open("@", "w") try: f.write(str(tree)) finally: f.close() try: return os.system("diff -u %s @" % fn) finally: os.remove("@") if __name__ == "__main__": main()
Python
# coding: utf-8 print "BOM BOOM!"
Python
# This file is used to verify that 2to3 falls back to a slower, iterative pattern matching # scheme in the event that the faster recursive system fails due to infinite recursion. from ctypes import * STRING = c_char_p OSUnknownByteOrder = 0 UIT_PROMPT = 1 P_PGID = 2 P_PID = 1 UIT_ERROR = 5 UIT_INFO = 4 UIT_NONE = 0 P_ALL = 0 UIT_VERIFY = 2 OSBigEndian = 2 UIT_BOOLEAN = 3 OSLittleEndian = 1 __darwin_nl_item = c_int __darwin_wctrans_t = c_int __darwin_wctype_t = c_ulong __int8_t = c_byte __uint8_t = c_ubyte __int16_t = c_short __uint16_t = c_ushort __int32_t = c_int __uint32_t = c_uint __int64_t = c_longlong __uint64_t = c_ulonglong __darwin_intptr_t = c_long __darwin_natural_t = c_uint __darwin_ct_rune_t = c_int class __mbstate_t(Union): pass __mbstate_t._pack_ = 4 __mbstate_t._fields_ = [ ('__mbstate8', c_char * 128), ('_mbstateL', c_longlong), ] assert sizeof(__mbstate_t) == 128, sizeof(__mbstate_t) assert alignment(__mbstate_t) == 4, alignment(__mbstate_t) __darwin_mbstate_t = __mbstate_t __darwin_ptrdiff_t = c_int __darwin_size_t = c_ulong __darwin_va_list = STRING __darwin_wchar_t = c_int __darwin_rune_t = __darwin_wchar_t __darwin_wint_t = c_int __darwin_clock_t = c_ulong __darwin_socklen_t = __uint32_t __darwin_ssize_t = c_long __darwin_time_t = c_long sig_atomic_t = c_int class sigcontext(Structure): pass sigcontext._fields_ = [ ('sc_onstack', c_int), ('sc_mask', c_int), ('sc_eax', c_uint), ('sc_ebx', c_uint), ('sc_ecx', c_uint), ('sc_edx', c_uint), ('sc_edi', c_uint), ('sc_esi', c_uint), ('sc_ebp', c_uint), ('sc_esp', c_uint), ('sc_ss', c_uint), ('sc_eflags', c_uint), ('sc_eip', c_uint), ('sc_cs', c_uint), ('sc_ds', c_uint), ('sc_es', c_uint), ('sc_fs', c_uint), ('sc_gs', c_uint), ] assert sizeof(sigcontext) == 72, sizeof(sigcontext) assert alignment(sigcontext) == 4, alignment(sigcontext) u_int8_t = c_ubyte u_int16_t = c_ushort u_int32_t = c_uint u_int64_t = c_ulonglong int32_t = c_int register_t = int32_t user_addr_t = u_int64_t user_size_t = u_int64_t 
int64_t = c_longlong user_ssize_t = int64_t user_long_t = int64_t user_ulong_t = u_int64_t user_time_t = int64_t syscall_arg_t = u_int64_t # values for unnamed enumeration class aes_key_st(Structure): pass aes_key_st._fields_ = [ ('rd_key', c_ulong * 60), ('rounds', c_int), ] assert sizeof(aes_key_st) == 244, sizeof(aes_key_st) assert alignment(aes_key_st) == 4, alignment(aes_key_st) AES_KEY = aes_key_st class asn1_ctx_st(Structure): pass asn1_ctx_st._fields_ = [ ('p', POINTER(c_ubyte)), ('eos', c_int), ('error', c_int), ('inf', c_int), ('tag', c_int), ('xclass', c_int), ('slen', c_long), ('max', POINTER(c_ubyte)), ('q', POINTER(c_ubyte)), ('pp', POINTER(POINTER(c_ubyte))), ('line', c_int), ] assert sizeof(asn1_ctx_st) == 44, sizeof(asn1_ctx_st) assert alignment(asn1_ctx_st) == 4, alignment(asn1_ctx_st) ASN1_CTX = asn1_ctx_st class asn1_object_st(Structure): pass asn1_object_st._fields_ = [ ('sn', STRING), ('ln', STRING), ('nid', c_int), ('length', c_int), ('data', POINTER(c_ubyte)), ('flags', c_int), ] assert sizeof(asn1_object_st) == 24, sizeof(asn1_object_st) assert alignment(asn1_object_st) == 4, alignment(asn1_object_st) ASN1_OBJECT = asn1_object_st class asn1_string_st(Structure): pass asn1_string_st._fields_ = [ ('length', c_int), ('type', c_int), ('data', POINTER(c_ubyte)), ('flags', c_long), ] assert sizeof(asn1_string_st) == 16, sizeof(asn1_string_st) assert alignment(asn1_string_st) == 4, alignment(asn1_string_st) ASN1_STRING = asn1_string_st class ASN1_ENCODING_st(Structure): pass ASN1_ENCODING_st._fields_ = [ ('enc', POINTER(c_ubyte)), ('len', c_long), ('modified', c_int), ] assert sizeof(ASN1_ENCODING_st) == 12, sizeof(ASN1_ENCODING_st) assert alignment(ASN1_ENCODING_st) == 4, alignment(ASN1_ENCODING_st) ASN1_ENCODING = ASN1_ENCODING_st class asn1_string_table_st(Structure): pass asn1_string_table_st._fields_ = [ ('nid', c_int), ('minsize', c_long), ('maxsize', c_long), ('mask', c_ulong), ('flags', c_ulong), ] assert sizeof(asn1_string_table_st) == 
20, sizeof(asn1_string_table_st) assert alignment(asn1_string_table_st) == 4, alignment(asn1_string_table_st) ASN1_STRING_TABLE = asn1_string_table_st class ASN1_TEMPLATE_st(Structure): pass ASN1_TEMPLATE_st._fields_ = [ ] ASN1_TEMPLATE = ASN1_TEMPLATE_st class ASN1_ITEM_st(Structure): pass ASN1_ITEM = ASN1_ITEM_st ASN1_ITEM_st._fields_ = [ ] class ASN1_TLC_st(Structure): pass ASN1_TLC = ASN1_TLC_st ASN1_TLC_st._fields_ = [ ] class ASN1_VALUE_st(Structure): pass ASN1_VALUE_st._fields_ = [ ] ASN1_VALUE = ASN1_VALUE_st ASN1_ITEM_EXP = ASN1_ITEM class asn1_type_st(Structure): pass class N12asn1_type_st4DOLLAR_11E(Union): pass ASN1_BOOLEAN = c_int ASN1_INTEGER = asn1_string_st ASN1_ENUMERATED = asn1_string_st ASN1_BIT_STRING = asn1_string_st ASN1_OCTET_STRING = asn1_string_st ASN1_PRINTABLESTRING = asn1_string_st ASN1_T61STRING = asn1_string_st ASN1_IA5STRING = asn1_string_st ASN1_GENERALSTRING = asn1_string_st ASN1_BMPSTRING = asn1_string_st ASN1_UNIVERSALSTRING = asn1_string_st ASN1_UTCTIME = asn1_string_st ASN1_GENERALIZEDTIME = asn1_string_st ASN1_VISIBLESTRING = asn1_string_st ASN1_UTF8STRING = asn1_string_st N12asn1_type_st4DOLLAR_11E._fields_ = [ ('ptr', STRING), ('boolean', ASN1_BOOLEAN), ('asn1_string', POINTER(ASN1_STRING)), ('object', POINTER(ASN1_OBJECT)), ('integer', POINTER(ASN1_INTEGER)), ('enumerated', POINTER(ASN1_ENUMERATED)), ('bit_string', POINTER(ASN1_BIT_STRING)), ('octet_string', POINTER(ASN1_OCTET_STRING)), ('printablestring', POINTER(ASN1_PRINTABLESTRING)), ('t61string', POINTER(ASN1_T61STRING)), ('ia5string', POINTER(ASN1_IA5STRING)), ('generalstring', POINTER(ASN1_GENERALSTRING)), ('bmpstring', POINTER(ASN1_BMPSTRING)), ('universalstring', POINTER(ASN1_UNIVERSALSTRING)), ('utctime', POINTER(ASN1_UTCTIME)), ('generalizedtime', POINTER(ASN1_GENERALIZEDTIME)), ('visiblestring', POINTER(ASN1_VISIBLESTRING)), ('utf8string', POINTER(ASN1_UTF8STRING)), ('set', POINTER(ASN1_STRING)), ('sequence', POINTER(ASN1_STRING)), ] assert 
sizeof(N12asn1_type_st4DOLLAR_11E) == 4, sizeof(N12asn1_type_st4DOLLAR_11E) assert alignment(N12asn1_type_st4DOLLAR_11E) == 4, alignment(N12asn1_type_st4DOLLAR_11E) asn1_type_st._fields_ = [ ('type', c_int), ('value', N12asn1_type_st4DOLLAR_11E), ] assert sizeof(asn1_type_st) == 8, sizeof(asn1_type_st) assert alignment(asn1_type_st) == 4, alignment(asn1_type_st) ASN1_TYPE = asn1_type_st class asn1_method_st(Structure): pass asn1_method_st._fields_ = [ ('i2d', CFUNCTYPE(c_int)), ('d2i', CFUNCTYPE(STRING)), ('create', CFUNCTYPE(STRING)), ('destroy', CFUNCTYPE(None)), ] assert sizeof(asn1_method_st) == 16, sizeof(asn1_method_st) assert alignment(asn1_method_st) == 4, alignment(asn1_method_st) ASN1_METHOD = asn1_method_st class asn1_header_st(Structure): pass asn1_header_st._fields_ = [ ('header', POINTER(ASN1_OCTET_STRING)), ('data', STRING), ('meth', POINTER(ASN1_METHOD)), ] assert sizeof(asn1_header_st) == 12, sizeof(asn1_header_st) assert alignment(asn1_header_st) == 4, alignment(asn1_header_st) ASN1_HEADER = asn1_header_st class BIT_STRING_BITNAME_st(Structure): pass BIT_STRING_BITNAME_st._fields_ = [ ('bitnum', c_int), ('lname', STRING), ('sname', STRING), ] assert sizeof(BIT_STRING_BITNAME_st) == 12, sizeof(BIT_STRING_BITNAME_st) assert alignment(BIT_STRING_BITNAME_st) == 4, alignment(BIT_STRING_BITNAME_st) BIT_STRING_BITNAME = BIT_STRING_BITNAME_st class bio_st(Structure): pass BIO = bio_st bio_info_cb = CFUNCTYPE(None, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long) class bio_method_st(Structure): pass bio_method_st._fields_ = [ ('type', c_int), ('name', STRING), ('bwrite', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), ('bread', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), ('bputs', CFUNCTYPE(c_int, POINTER(BIO), STRING)), ('bgets', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), ('ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, c_long, c_void_p)), ('create', CFUNCTYPE(c_int, POINTER(BIO))), ('destroy', CFUNCTYPE(c_int, POINTER(BIO))), 
('callback_ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, POINTER(bio_info_cb))), ] assert sizeof(bio_method_st) == 40, sizeof(bio_method_st) assert alignment(bio_method_st) == 4, alignment(bio_method_st) BIO_METHOD = bio_method_st class crypto_ex_data_st(Structure): pass class stack_st(Structure): pass STACK = stack_st crypto_ex_data_st._fields_ = [ ('sk', POINTER(STACK)), ('dummy', c_int), ] assert sizeof(crypto_ex_data_st) == 8, sizeof(crypto_ex_data_st) assert alignment(crypto_ex_data_st) == 4, alignment(crypto_ex_data_st) CRYPTO_EX_DATA = crypto_ex_data_st bio_st._fields_ = [ ('method', POINTER(BIO_METHOD)), ('callback', CFUNCTYPE(c_long, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)), ('cb_arg', STRING), ('init', c_int), ('shutdown', c_int), ('flags', c_int), ('retry_reason', c_int), ('num', c_int), ('ptr', c_void_p), ('next_bio', POINTER(bio_st)), ('prev_bio', POINTER(bio_st)), ('references', c_int), ('num_read', c_ulong), ('num_write', c_ulong), ('ex_data', CRYPTO_EX_DATA), ] assert sizeof(bio_st) == 64, sizeof(bio_st) assert alignment(bio_st) == 4, alignment(bio_st) class bio_f_buffer_ctx_struct(Structure): pass bio_f_buffer_ctx_struct._fields_ = [ ('ibuf_size', c_int), ('obuf_size', c_int), ('ibuf', STRING), ('ibuf_len', c_int), ('ibuf_off', c_int), ('obuf', STRING), ('obuf_len', c_int), ('obuf_off', c_int), ] assert sizeof(bio_f_buffer_ctx_struct) == 32, sizeof(bio_f_buffer_ctx_struct) assert alignment(bio_f_buffer_ctx_struct) == 4, alignment(bio_f_buffer_ctx_struct) BIO_F_BUFFER_CTX = bio_f_buffer_ctx_struct class hostent(Structure): pass hostent._fields_ = [ ] class bf_key_st(Structure): pass bf_key_st._fields_ = [ ('P', c_uint * 18), ('S', c_uint * 1024), ] assert sizeof(bf_key_st) == 4168, sizeof(bf_key_st) assert alignment(bf_key_st) == 4, alignment(bf_key_st) BF_KEY = bf_key_st class bignum_st(Structure): pass bignum_st._fields_ = [ ('d', POINTER(c_ulong)), ('top', c_int), ('dmax', c_int), ('neg', c_int), ('flags', c_int), ] assert 
sizeof(bignum_st) == 20, sizeof(bignum_st) assert alignment(bignum_st) == 4, alignment(bignum_st) BIGNUM = bignum_st class bignum_ctx(Structure): pass bignum_ctx._fields_ = [ ] BN_CTX = bignum_ctx class bn_blinding_st(Structure): pass bn_blinding_st._fields_ = [ ('init', c_int), ('A', POINTER(BIGNUM)), ('Ai', POINTER(BIGNUM)), ('mod', POINTER(BIGNUM)), ('thread_id', c_ulong), ] assert sizeof(bn_blinding_st) == 20, sizeof(bn_blinding_st) assert alignment(bn_blinding_st) == 4, alignment(bn_blinding_st) BN_BLINDING = bn_blinding_st class bn_mont_ctx_st(Structure): pass bn_mont_ctx_st._fields_ = [ ('ri', c_int), ('RR', BIGNUM), ('N', BIGNUM), ('Ni', BIGNUM), ('n0', c_ulong), ('flags', c_int), ] assert sizeof(bn_mont_ctx_st) == 72, sizeof(bn_mont_ctx_st) assert alignment(bn_mont_ctx_st) == 4, alignment(bn_mont_ctx_st) BN_MONT_CTX = bn_mont_ctx_st class bn_recp_ctx_st(Structure): pass bn_recp_ctx_st._fields_ = [ ('N', BIGNUM), ('Nr', BIGNUM), ('num_bits', c_int), ('shift', c_int), ('flags', c_int), ] assert sizeof(bn_recp_ctx_st) == 52, sizeof(bn_recp_ctx_st) assert alignment(bn_recp_ctx_st) == 4, alignment(bn_recp_ctx_st) BN_RECP_CTX = bn_recp_ctx_st class buf_mem_st(Structure): pass buf_mem_st._fields_ = [ ('length', c_int), ('data', STRING), ('max', c_int), ] assert sizeof(buf_mem_st) == 12, sizeof(buf_mem_st) assert alignment(buf_mem_st) == 4, alignment(buf_mem_st) BUF_MEM = buf_mem_st class cast_key_st(Structure): pass cast_key_st._fields_ = [ ('data', c_ulong * 32), ('short_key', c_int), ] assert sizeof(cast_key_st) == 132, sizeof(cast_key_st) assert alignment(cast_key_st) == 4, alignment(cast_key_st) CAST_KEY = cast_key_st class comp_method_st(Structure): pass comp_method_st._fields_ = [ ('type', c_int), ('name', STRING), ('init', CFUNCTYPE(c_int)), ('finish', CFUNCTYPE(None)), ('compress', CFUNCTYPE(c_int)), ('expand', CFUNCTYPE(c_int)), ('ctrl', CFUNCTYPE(c_long)), ('callback_ctrl', CFUNCTYPE(c_long)), ] assert sizeof(comp_method_st) == 32, 
sizeof(comp_method_st) assert alignment(comp_method_st) == 4, alignment(comp_method_st) COMP_METHOD = comp_method_st class comp_ctx_st(Structure): pass comp_ctx_st._fields_ = [ ('meth', POINTER(COMP_METHOD)), ('compress_in', c_ulong), ('compress_out', c_ulong), ('expand_in', c_ulong), ('expand_out', c_ulong), ('ex_data', CRYPTO_EX_DATA), ] assert sizeof(comp_ctx_st) == 28, sizeof(comp_ctx_st) assert alignment(comp_ctx_st) == 4, alignment(comp_ctx_st) COMP_CTX = comp_ctx_st class CRYPTO_dynlock_value(Structure): pass CRYPTO_dynlock_value._fields_ = [ ] class CRYPTO_dynlock(Structure): pass CRYPTO_dynlock._fields_ = [ ('references', c_int), ('data', POINTER(CRYPTO_dynlock_value)), ] assert sizeof(CRYPTO_dynlock) == 8, sizeof(CRYPTO_dynlock) assert alignment(CRYPTO_dynlock) == 4, alignment(CRYPTO_dynlock) BIO_dummy = bio_st CRYPTO_EX_new = CFUNCTYPE(c_int, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p) CRYPTO_EX_free = CFUNCTYPE(None, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p) CRYPTO_EX_dup = CFUNCTYPE(c_int, POINTER(CRYPTO_EX_DATA), POINTER(CRYPTO_EX_DATA), c_void_p, c_int, c_long, c_void_p) class crypto_ex_data_func_st(Structure): pass crypto_ex_data_func_st._fields_ = [ ('argl', c_long), ('argp', c_void_p), ('new_func', POINTER(CRYPTO_EX_new)), ('free_func', POINTER(CRYPTO_EX_free)), ('dup_func', POINTER(CRYPTO_EX_dup)), ] assert sizeof(crypto_ex_data_func_st) == 20, sizeof(crypto_ex_data_func_st) assert alignment(crypto_ex_data_func_st) == 4, alignment(crypto_ex_data_func_st) CRYPTO_EX_DATA_FUNCS = crypto_ex_data_func_st class st_CRYPTO_EX_DATA_IMPL(Structure): pass CRYPTO_EX_DATA_IMPL = st_CRYPTO_EX_DATA_IMPL st_CRYPTO_EX_DATA_IMPL._fields_ = [ ] CRYPTO_MEM_LEAK_CB = CFUNCTYPE(c_void_p, c_ulong, STRING, c_int, c_int, c_void_p) DES_cblock = c_ubyte * 8 const_DES_cblock = c_ubyte * 8 class DES_ks(Structure): pass class N6DES_ks3DOLLAR_9E(Union): pass N6DES_ks3DOLLAR_9E._fields_ = [ ('cblock', DES_cblock), 
('deslong', c_ulong * 2), ] assert sizeof(N6DES_ks3DOLLAR_9E) == 8, sizeof(N6DES_ks3DOLLAR_9E) assert alignment(N6DES_ks3DOLLAR_9E) == 4, alignment(N6DES_ks3DOLLAR_9E) DES_ks._fields_ = [ ('ks', N6DES_ks3DOLLAR_9E * 16), ] assert sizeof(DES_ks) == 128, sizeof(DES_ks) assert alignment(DES_ks) == 4, alignment(DES_ks) DES_key_schedule = DES_ks _ossl_old_des_cblock = c_ubyte * 8 class _ossl_old_des_ks_struct(Structure): pass class N23_ossl_old_des_ks_struct4DOLLAR_10E(Union): pass N23_ossl_old_des_ks_struct4DOLLAR_10E._fields_ = [ ('_', _ossl_old_des_cblock), ('pad', c_ulong * 2), ] assert sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 8, sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E) assert alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 4, alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E) _ossl_old_des_ks_struct._fields_ = [ ('ks', N23_ossl_old_des_ks_struct4DOLLAR_10E), ] assert sizeof(_ossl_old_des_ks_struct) == 8, sizeof(_ossl_old_des_ks_struct) assert alignment(_ossl_old_des_ks_struct) == 4, alignment(_ossl_old_des_ks_struct) _ossl_old_des_key_schedule = _ossl_old_des_ks_struct * 16 class dh_st(Structure): pass DH = dh_st class dh_method(Structure): pass dh_method._fields_ = [ ('name', STRING), ('generate_key', CFUNCTYPE(c_int, POINTER(DH))), ('compute_key', CFUNCTYPE(c_int, POINTER(c_ubyte), POINTER(BIGNUM), POINTER(DH))), ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DH), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), ('init', CFUNCTYPE(c_int, POINTER(DH))), ('finish', CFUNCTYPE(c_int, POINTER(DH))), ('flags', c_int), ('app_data', STRING), ] assert sizeof(dh_method) == 32, sizeof(dh_method) assert alignment(dh_method) == 4, alignment(dh_method) DH_METHOD = dh_method class engine_st(Structure): pass ENGINE = engine_st dh_st._fields_ = [ ('pad', c_int), ('version', c_int), ('p', POINTER(BIGNUM)), ('g', POINTER(BIGNUM)), ('length', c_long), ('pub_key', POINTER(BIGNUM)), ('priv_key', POINTER(BIGNUM)), 
('flags', c_int), ('method_mont_p', STRING), ('q', POINTER(BIGNUM)), ('j', POINTER(BIGNUM)), ('seed', POINTER(c_ubyte)), ('seedlen', c_int), ('counter', POINTER(BIGNUM)), ('references', c_int), ('ex_data', CRYPTO_EX_DATA), ('meth', POINTER(DH_METHOD)), ('engine', POINTER(ENGINE)), ] assert sizeof(dh_st) == 76, sizeof(dh_st) assert alignment(dh_st) == 4, alignment(dh_st) class dsa_st(Structure): pass DSA = dsa_st class DSA_SIG_st(Structure): pass DSA_SIG_st._fields_ = [ ('r', POINTER(BIGNUM)), ('s', POINTER(BIGNUM)), ] assert sizeof(DSA_SIG_st) == 8, sizeof(DSA_SIG_st) assert alignment(DSA_SIG_st) == 4, alignment(DSA_SIG_st) DSA_SIG = DSA_SIG_st class dsa_method(Structure): pass dsa_method._fields_ = [ ('name', STRING), ('dsa_do_sign', CFUNCTYPE(POINTER(DSA_SIG), POINTER(c_ubyte), c_int, POINTER(DSA))), ('dsa_sign_setup', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BN_CTX), POINTER(POINTER(BIGNUM)), POINTER(POINTER(BIGNUM)))), ('dsa_do_verify', CFUNCTYPE(c_int, POINTER(c_ubyte), c_int, POINTER(DSA_SIG), POINTER(DSA))), ('dsa_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), ('init', CFUNCTYPE(c_int, POINTER(DSA))), ('finish', CFUNCTYPE(c_int, POINTER(DSA))), ('flags', c_int), ('app_data', STRING), ] assert sizeof(dsa_method) == 40, sizeof(dsa_method) assert alignment(dsa_method) == 4, alignment(dsa_method) DSA_METHOD = dsa_method dsa_st._fields_ = [ ('pad', c_int), ('version', c_long), ('write_params', c_int), ('p', POINTER(BIGNUM)), ('q', POINTER(BIGNUM)), ('g', POINTER(BIGNUM)), ('pub_key', POINTER(BIGNUM)), ('priv_key', POINTER(BIGNUM)), ('kinv', POINTER(BIGNUM)), ('r', POINTER(BIGNUM)), ('flags', c_int), ('method_mont_p', STRING), ('references', c_int), ('ex_data', CRYPTO_EX_DATA), 
('meth', POINTER(DSA_METHOD)), ('engine', POINTER(ENGINE)), ] assert sizeof(dsa_st) == 68, sizeof(dsa_st) assert alignment(dsa_st) == 4, alignment(dsa_st) class evp_pkey_st(Structure): pass class N11evp_pkey_st4DOLLAR_12E(Union): pass class rsa_st(Structure): pass N11evp_pkey_st4DOLLAR_12E._fields_ = [ ('ptr', STRING), ('rsa', POINTER(rsa_st)), ('dsa', POINTER(dsa_st)), ('dh', POINTER(dh_st)), ] assert sizeof(N11evp_pkey_st4DOLLAR_12E) == 4, sizeof(N11evp_pkey_st4DOLLAR_12E) assert alignment(N11evp_pkey_st4DOLLAR_12E) == 4, alignment(N11evp_pkey_st4DOLLAR_12E) evp_pkey_st._fields_ = [ ('type', c_int), ('save_type', c_int), ('references', c_int), ('pkey', N11evp_pkey_st4DOLLAR_12E), ('save_parameters', c_int), ('attributes', POINTER(STACK)), ] assert sizeof(evp_pkey_st) == 24, sizeof(evp_pkey_st) assert alignment(evp_pkey_st) == 4, alignment(evp_pkey_st) class env_md_st(Structure): pass class env_md_ctx_st(Structure): pass EVP_MD_CTX = env_md_ctx_st env_md_st._fields_ = [ ('type', c_int), ('pkey_type', c_int), ('md_size', c_int), ('flags', c_ulong), ('init', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))), ('update', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), c_void_p, c_ulong)), ('final', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(c_ubyte))), ('copy', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(EVP_MD_CTX))), ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))), ('sign', CFUNCTYPE(c_int)), ('verify', CFUNCTYPE(c_int)), ('required_pkey_type', c_int * 5), ('block_size', c_int), ('ctx_size', c_int), ] assert sizeof(env_md_st) == 72, sizeof(env_md_st) assert alignment(env_md_st) == 4, alignment(env_md_st) EVP_MD = env_md_st env_md_ctx_st._fields_ = [ ('digest', POINTER(EVP_MD)), ('engine', POINTER(ENGINE)), ('flags', c_ulong), ('md_data', c_void_p), ] assert sizeof(env_md_ctx_st) == 16, sizeof(env_md_ctx_st) assert alignment(env_md_ctx_st) == 4, alignment(env_md_ctx_st) class evp_cipher_st(Structure): pass class evp_cipher_ctx_st(Structure): pass EVP_CIPHER_CTX = 
evp_cipher_ctx_st evp_cipher_st._fields_ = [ ('nid', c_int), ('block_size', c_int), ('key_len', c_int), ('iv_len', c_int), ('flags', c_ulong), ('init', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_int)), ('do_cipher', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_uint)), ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX))), ('ctx_size', c_int), ('set_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))), ('get_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))), ('ctrl', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), c_int, c_int, c_void_p)), ('app_data', c_void_p), ] assert sizeof(evp_cipher_st) == 52, sizeof(evp_cipher_st) assert alignment(evp_cipher_st) == 4, alignment(evp_cipher_st) class evp_cipher_info_st(Structure): pass EVP_CIPHER = evp_cipher_st evp_cipher_info_st._fields_ = [ ('cipher', POINTER(EVP_CIPHER)), ('iv', c_ubyte * 16), ] assert sizeof(evp_cipher_info_st) == 20, sizeof(evp_cipher_info_st) assert alignment(evp_cipher_info_st) == 4, alignment(evp_cipher_info_st) EVP_CIPHER_INFO = evp_cipher_info_st evp_cipher_ctx_st._fields_ = [ ('cipher', POINTER(EVP_CIPHER)), ('engine', POINTER(ENGINE)), ('encrypt', c_int), ('buf_len', c_int), ('oiv', c_ubyte * 16), ('iv', c_ubyte * 16), ('buf', c_ubyte * 32), ('num', c_int), ('app_data', c_void_p), ('key_len', c_int), ('flags', c_ulong), ('cipher_data', c_void_p), ('final_used', c_int), ('block_mask', c_int), ('final', c_ubyte * 32), ] assert sizeof(evp_cipher_ctx_st) == 140, sizeof(evp_cipher_ctx_st) assert alignment(evp_cipher_ctx_st) == 4, alignment(evp_cipher_ctx_st) class evp_Encode_Ctx_st(Structure): pass evp_Encode_Ctx_st._fields_ = [ ('num', c_int), ('length', c_int), ('enc_data', c_ubyte * 80), ('line_num', c_int), ('expect_nl', c_int), ] assert sizeof(evp_Encode_Ctx_st) == 96, sizeof(evp_Encode_Ctx_st) assert alignment(evp_Encode_Ctx_st) == 4, alignment(evp_Encode_Ctx_st) 
# --- EVP encode/PBE, LHASH, message digests, OBJ, PEM, PKCS#7, RC*, RSA, SHA ---
# Machine-generated ctypes declarations mirroring OpenSSL C structs.
# NOTE(review): the sizeof()/alignment() asserts pin the exact layout these
# bindings were generated against — pointers are 4 bytes (a 32-bit ABI;
# `size_t = __darwin_size_t` later in this file suggests macOS). Confirm
# before reusing on any other platform/build of OpenSSL.
EVP_ENCODE_CTX = evp_Encode_Ctx_st
# Callback type used for password-based-encryption key generation.
EVP_PBE_KEYGEN = CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), STRING, c_int, POINTER(ASN1_TYPE), POINTER(EVP_CIPHER), POINTER(EVP_MD), c_int)

# OpenSSL's generic hash table (lhash): node, callback types, and table.
class lhash_node_st(Structure): pass
lhash_node_st._fields_ = [
    ('data', c_void_p),
    ('next', POINTER(lhash_node_st)),
    ('hash', c_ulong),
]
assert sizeof(lhash_node_st) == 12, sizeof(lhash_node_st)
assert alignment(lhash_node_st) == 4, alignment(lhash_node_st)
LHASH_NODE = lhash_node_st
LHASH_COMP_FN_TYPE = CFUNCTYPE(c_int, c_void_p, c_void_p)
LHASH_HASH_FN_TYPE = CFUNCTYPE(c_ulong, c_void_p)
LHASH_DOALL_FN_TYPE = CFUNCTYPE(None, c_void_p)
LHASH_DOALL_ARG_FN_TYPE = CFUNCTYPE(None, c_void_p, c_void_p)
class lhash_st(Structure): pass
lhash_st._fields_ = [
    ('b', POINTER(POINTER(LHASH_NODE))),
    ('comp', LHASH_COMP_FN_TYPE),
    ('hash', LHASH_HASH_FN_TYPE),
    ('num_nodes', c_uint),
    ('num_alloc_nodes', c_uint),
    ('p', c_uint),
    ('pmax', c_uint),
    ('up_load', c_ulong),
    ('down_load', c_ulong),
    ('num_items', c_ulong),
    ('num_expands', c_ulong),
    ('num_expand_reallocs', c_ulong),
    ('num_contracts', c_ulong),
    ('num_contract_reallocs', c_ulong),
    ('num_hash_calls', c_ulong),
    ('num_comp_calls', c_ulong),
    ('num_insert', c_ulong),
    ('num_replace', c_ulong),
    ('num_delete', c_ulong),
    ('num_no_delete', c_ulong),
    ('num_retrieve', c_ulong),
    ('num_retrieve_miss', c_ulong),
    ('num_hash_comps', c_ulong),
    ('error', c_int),
]
assert sizeof(lhash_st) == 96, sizeof(lhash_st)
assert alignment(lhash_st) == 4, alignment(lhash_st)
LHASH = lhash_st

# Message-digest context structs (MD2, MD4, MD5, MDC2).
class MD2state_st(Structure): pass
MD2state_st._fields_ = [
    ('num', c_int),
    ('data', c_ubyte * 16),
    ('cksm', c_uint * 16),
    ('state', c_uint * 16),
]
assert sizeof(MD2state_st) == 148, sizeof(MD2state_st)
assert alignment(MD2state_st) == 4, alignment(MD2state_st)
MD2_CTX = MD2state_st
class MD4state_st(Structure): pass
MD4state_st._fields_ = [
    ('A', c_uint), ('B', c_uint), ('C', c_uint), ('D', c_uint),
    ('Nl', c_uint), ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(MD4state_st) == 92, sizeof(MD4state_st)
assert alignment(MD4state_st) == 4, alignment(MD4state_st)
MD4_CTX = MD4state_st
class MD5state_st(Structure): pass
MD5state_st._fields_ = [
    ('A', c_uint), ('B', c_uint), ('C', c_uint), ('D', c_uint),
    ('Nl', c_uint), ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(MD5state_st) == 92, sizeof(MD5state_st)
assert alignment(MD5state_st) == 4, alignment(MD5state_st)
MD5_CTX = MD5state_st
class mdc2_ctx_st(Structure): pass
mdc2_ctx_st._fields_ = [
    ('num', c_int),
    ('data', c_ubyte * 8),
    ('h', DES_cblock),
    ('hh', DES_cblock),
    ('pad_type', c_int),
]
assert sizeof(mdc2_ctx_st) == 32, sizeof(mdc2_ctx_st)
assert alignment(mdc2_ctx_st) == 4, alignment(mdc2_ctx_st)
MDC2_CTX = mdc2_ctx_st

# Object-name registry entry (OBJ_NAME).
class obj_name_st(Structure): pass
obj_name_st._fields_ = [
    ('type', c_int),
    ('alias', c_int),
    ('name', STRING),
    ('data', STRING),
]
assert sizeof(obj_name_st) == 16, sizeof(obj_name_st)
assert alignment(obj_name_st) == 4, alignment(obj_name_st)
OBJ_NAME = obj_name_st

# Type aliases plus opaque forward declarations of the X.509 family;
# several of these structs get their _fields_ assigned later in the file.
ASN1_TIME = asn1_string_st
ASN1_NULL = c_int
EVP_PKEY = evp_pkey_st
class x509_st(Structure): pass
X509 = x509_st
class X509_algor_st(Structure): pass
X509_ALGOR = X509_algor_st
class X509_crl_st(Structure): pass
X509_CRL = X509_crl_st
class X509_name_st(Structure): pass
X509_NAME = X509_name_st
class x509_store_st(Structure): pass
X509_STORE = x509_store_st
class x509_store_ctx_st(Structure): pass
X509_STORE_CTX = x509_store_ctx_st
# ENGINE is deliberately left with no declared members (opaque here).
engine_st._fields_ = [
]

# PEM encode/seal context and PEM bookkeeping structs.
class PEM_Encode_Seal_st(Structure): pass
PEM_Encode_Seal_st._fields_ = [
    ('encode', EVP_ENCODE_CTX),
    ('md', EVP_MD_CTX),
    ('cipher', EVP_CIPHER_CTX),
]
assert sizeof(PEM_Encode_Seal_st) == 252, sizeof(PEM_Encode_Seal_st)
assert alignment(PEM_Encode_Seal_st) == 4, alignment(PEM_Encode_Seal_st)
PEM_ENCODE_SEAL_CTX = PEM_Encode_Seal_st
class pem_recip_st(Structure): pass
pem_recip_st._fields_ = [
    ('name', STRING),
    ('dn', POINTER(X509_NAME)),
    ('cipher', c_int),
    ('key_enc', c_int),
]
assert sizeof(pem_recip_st) == 16, sizeof(pem_recip_st)
assert alignment(pem_recip_st) == 4, alignment(pem_recip_st)
PEM_USER = pem_recip_st
class pem_ctx_st(Structure): pass
# The N10…DOLLAR… names are identifiers the code generator invented for
# anonymous structs nested inside pem_ctx_st (proc_type / DEK_info below).
class N10pem_ctx_st4DOLLAR_16E(Structure): pass
N10pem_ctx_st4DOLLAR_16E._fields_ = [
    ('version', c_int),
    ('mode', c_int),
]
assert sizeof(N10pem_ctx_st4DOLLAR_16E) == 8, sizeof(N10pem_ctx_st4DOLLAR_16E)
assert alignment(N10pem_ctx_st4DOLLAR_16E) == 4, alignment(N10pem_ctx_st4DOLLAR_16E)
class N10pem_ctx_st4DOLLAR_17E(Structure): pass
N10pem_ctx_st4DOLLAR_17E._fields_ = [
    ('cipher', c_int),
]
assert sizeof(N10pem_ctx_st4DOLLAR_17E) == 4, sizeof(N10pem_ctx_st4DOLLAR_17E)
assert alignment(N10pem_ctx_st4DOLLAR_17E) == 4, alignment(N10pem_ctx_st4DOLLAR_17E)
pem_ctx_st._fields_ = [
    ('type', c_int),
    ('proc_type', N10pem_ctx_st4DOLLAR_16E),
    ('domain', STRING),
    ('DEK_info', N10pem_ctx_st4DOLLAR_17E),
    ('originator', POINTER(PEM_USER)),
    ('num_recipient', c_int),
    ('recipient', POINTER(POINTER(PEM_USER))),
    ('x509_chain', POINTER(STACK)),
    ('md', POINTER(EVP_MD)),
    ('md_enc', c_int),
    ('md_len', c_int),
    ('md_data', STRING),
    ('dec', POINTER(EVP_CIPHER)),
    ('key_len', c_int),
    ('key', POINTER(c_ubyte)),
    ('data_enc', c_int),
    ('data_len', c_int),
    ('data', POINTER(c_ubyte)),
]
assert sizeof(pem_ctx_st) == 76, sizeof(pem_ctx_st)
assert alignment(pem_ctx_st) == 4, alignment(pem_ctx_st)
PEM_CTX = pem_ctx_st
# PEM passphrase callback type.
pem_password_cb = CFUNCTYPE(c_int, STRING, c_int, c_int, c_void_p)

# PKCS#7 content types and their union-based container (pkcs7_st).
class pkcs7_issuer_and_serial_st(Structure): pass
pkcs7_issuer_and_serial_st._fields_ = [
    ('issuer', POINTER(X509_NAME)),
    ('serial', POINTER(ASN1_INTEGER)),
]
assert sizeof(pkcs7_issuer_and_serial_st) == 8, sizeof(pkcs7_issuer_and_serial_st)
assert alignment(pkcs7_issuer_and_serial_st) == 4, alignment(pkcs7_issuer_and_serial_st)
PKCS7_ISSUER_AND_SERIAL = pkcs7_issuer_and_serial_st
class pkcs7_signer_info_st(Structure): pass
pkcs7_signer_info_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)),
    ('digest_alg', POINTER(X509_ALGOR)),
    ('auth_attr', POINTER(STACK)),
    ('digest_enc_alg', POINTER(X509_ALGOR)),
    ('enc_digest', POINTER(ASN1_OCTET_STRING)),
    ('unauth_attr', POINTER(STACK)),
    ('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(pkcs7_signer_info_st) == 32, sizeof(pkcs7_signer_info_st)
assert alignment(pkcs7_signer_info_st) == 4, alignment(pkcs7_signer_info_st)
PKCS7_SIGNER_INFO = pkcs7_signer_info_st
class pkcs7_recip_info_st(Structure): pass
pkcs7_recip_info_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)),
    ('key_enc_algor', POINTER(X509_ALGOR)),
    ('enc_key', POINTER(ASN1_OCTET_STRING)),
    ('cert', POINTER(X509)),
]
assert sizeof(pkcs7_recip_info_st) == 20, sizeof(pkcs7_recip_info_st)
assert alignment(pkcs7_recip_info_st) == 4, alignment(pkcs7_recip_info_st)
PKCS7_RECIP_INFO = pkcs7_recip_info_st
# pkcs7_st is forward-declared here because pkcs7_signed_st points at it.
class pkcs7_signed_st(Structure): pass
class pkcs7_st(Structure): pass
pkcs7_signed_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('md_algs', POINTER(STACK)),
    ('cert', POINTER(STACK)),
    ('crl', POINTER(STACK)),
    ('signer_info', POINTER(STACK)),
    ('contents', POINTER(pkcs7_st)),
]
assert sizeof(pkcs7_signed_st) == 24, sizeof(pkcs7_signed_st)
assert alignment(pkcs7_signed_st) == 4, alignment(pkcs7_signed_st)
PKCS7_SIGNED = pkcs7_signed_st
class pkcs7_enc_content_st(Structure): pass
pkcs7_enc_content_st._fields_ = [
    ('content_type', POINTER(ASN1_OBJECT)),
    ('algorithm', POINTER(X509_ALGOR)),
    ('enc_data', POINTER(ASN1_OCTET_STRING)),
    ('cipher', POINTER(EVP_CIPHER)),
]
assert sizeof(pkcs7_enc_content_st) == 16, sizeof(pkcs7_enc_content_st)
assert alignment(pkcs7_enc_content_st) == 4, alignment(pkcs7_enc_content_st)
PKCS7_ENC_CONTENT = pkcs7_enc_content_st
class pkcs7_enveloped_st(Structure): pass
pkcs7_enveloped_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('recipientinfo', POINTER(STACK)),
    ('enc_data', POINTER(PKCS7_ENC_CONTENT)),
]
assert sizeof(pkcs7_enveloped_st) == 12, sizeof(pkcs7_enveloped_st)
assert alignment(pkcs7_enveloped_st) == 4, alignment(pkcs7_enveloped_st)
PKCS7_ENVELOPE = pkcs7_enveloped_st
class pkcs7_signedandenveloped_st(Structure): pass
pkcs7_signedandenveloped_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('md_algs', POINTER(STACK)),
    ('cert', POINTER(STACK)),
    ('crl', POINTER(STACK)),
    ('signer_info', POINTER(STACK)),
    ('enc_data', POINTER(PKCS7_ENC_CONTENT)),
    ('recipientinfo', POINTER(STACK)),
]
assert sizeof(pkcs7_signedandenveloped_st) == 28, sizeof(pkcs7_signedandenveloped_st)
assert alignment(pkcs7_signedandenveloped_st) == 4, alignment(pkcs7_signedandenveloped_st)
PKCS7_SIGN_ENVELOPE = pkcs7_signedandenveloped_st
class pkcs7_digest_st(Structure): pass
pkcs7_digest_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('md', POINTER(X509_ALGOR)),
    ('contents', POINTER(pkcs7_st)),
    ('digest', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(pkcs7_digest_st) == 16, sizeof(pkcs7_digest_st)
assert alignment(pkcs7_digest_st) == 4, alignment(pkcs7_digest_st)
PKCS7_DIGEST = pkcs7_digest_st
class pkcs7_encrypted_st(Structure): pass
pkcs7_encrypted_st._fields_ = [
    ('version', POINTER(ASN1_INTEGER)),
    ('enc_data', POINTER(PKCS7_ENC_CONTENT)),
]
assert sizeof(pkcs7_encrypted_st) == 8, sizeof(pkcs7_encrypted_st)
assert alignment(pkcs7_encrypted_st) == 4, alignment(pkcs7_encrypted_st)
PKCS7_ENCRYPT = pkcs7_encrypted_st
# Generated name for the anonymous union that is pkcs7_st's 'd' member.
class N8pkcs7_st4DOLLAR_15E(Union): pass
N8pkcs7_st4DOLLAR_15E._fields_ = [
    ('ptr', STRING),
    ('data', POINTER(ASN1_OCTET_STRING)),
    ('sign', POINTER(PKCS7_SIGNED)),
    ('enveloped', POINTER(PKCS7_ENVELOPE)),
    ('signed_and_enveloped', POINTER(PKCS7_SIGN_ENVELOPE)),
    ('digest', POINTER(PKCS7_DIGEST)),
    ('encrypted', POINTER(PKCS7_ENCRYPT)),
    ('other', POINTER(ASN1_TYPE)),
]
assert sizeof(N8pkcs7_st4DOLLAR_15E) == 4, sizeof(N8pkcs7_st4DOLLAR_15E)
assert alignment(N8pkcs7_st4DOLLAR_15E) == 4, alignment(N8pkcs7_st4DOLLAR_15E)
pkcs7_st._fields_ = [
    ('asn1', POINTER(c_ubyte)),
    ('length', c_long),
    ('state', c_int),
    ('detached', c_int),
    ('type', POINTER(ASN1_OBJECT)),
    ('d', N8pkcs7_st4DOLLAR_15E),
]
assert sizeof(pkcs7_st) == 24, sizeof(pkcs7_st)
assert alignment(pkcs7_st) == 4, alignment(pkcs7_st)
PKCS7 = pkcs7_st

# RC2/RC4/RC5 key schedules and RIPEMD-160 digest context.
class rc2_key_st(Structure): pass
rc2_key_st._fields_ = [
    ('data', c_uint * 64),
]
assert sizeof(rc2_key_st) == 256, sizeof(rc2_key_st)
assert alignment(rc2_key_st) == 4, alignment(rc2_key_st)
RC2_KEY = rc2_key_st
class rc4_key_st(Structure): pass
rc4_key_st._fields_ = [
    ('x', c_ubyte),
    ('y', c_ubyte),
    ('data', c_ubyte * 256),
]
assert sizeof(rc4_key_st) == 258, sizeof(rc4_key_st)
assert alignment(rc4_key_st) == 1, alignment(rc4_key_st)
RC4_KEY = rc4_key_st
class rc5_key_st(Structure): pass
rc5_key_st._fields_ = [
    ('rounds', c_int),
    ('data', c_ulong * 34),
]
assert sizeof(rc5_key_st) == 140, sizeof(rc5_key_st)
assert alignment(rc5_key_st) == 4, alignment(rc5_key_st)
RC5_32_KEY = rc5_key_st
class RIPEMD160state_st(Structure): pass
RIPEMD160state_st._fields_ = [
    ('A', c_uint), ('B', c_uint), ('C', c_uint), ('D', c_uint), ('E', c_uint),
    ('Nl', c_uint), ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(RIPEMD160state_st) == 96, sizeof(RIPEMD160state_st)
assert alignment(RIPEMD160state_st) == 4, alignment(RIPEMD160state_st)
RIPEMD160_CTX = RIPEMD160state_st

# RSA method table (function-pointer vtable) and the RSA key struct.
RSA = rsa_st
class rsa_meth_st(Structure): pass
rsa_meth_st._fields_ = [
    ('name', STRING),
    ('rsa_pub_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_pub_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_priv_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_priv_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
    ('rsa_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(RSA))),
    ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
    ('init', CFUNCTYPE(c_int, POINTER(RSA))),
    ('finish', CFUNCTYPE(c_int, POINTER(RSA))),
    ('flags', c_int),
    ('app_data', STRING),
    ('rsa_sign', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), POINTER(c_uint), POINTER(RSA))),
    ('rsa_verify', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), c_uint, POINTER(RSA))),
]
assert sizeof(rsa_meth_st) == 52, sizeof(rsa_meth_st)
assert alignment(rsa_meth_st) == 4, alignment(rsa_meth_st)
RSA_METHOD = rsa_meth_st
rsa_st._fields_ = [
    ('pad', c_int),
    ('version', c_long),
    ('meth', POINTER(RSA_METHOD)),
    ('engine', POINTER(ENGINE)),
    ('n', POINTER(BIGNUM)),
    ('e', POINTER(BIGNUM)),
    ('d', POINTER(BIGNUM)),
    ('p', POINTER(BIGNUM)),
    ('q', POINTER(BIGNUM)),
    ('dmp1', POINTER(BIGNUM)),
    ('dmq1', POINTER(BIGNUM)),
    ('iqmp', POINTER(BIGNUM)),
    ('ex_data', CRYPTO_EX_DATA),
    ('references', c_int),
    ('flags', c_int),
    ('_method_mod_n', POINTER(BN_MONT_CTX)),
    ('_method_mod_p', POINTER(BN_MONT_CTX)),
    ('_method_mod_q', POINTER(BN_MONT_CTX)),
    ('bignum_data', STRING),
    ('blinding', POINTER(BN_BLINDING)),
]
assert sizeof(rsa_st) == 84, sizeof(rsa_st)
assert alignment(rsa_st) == 4, alignment(rsa_st)
openssl_fptr = CFUNCTYPE(None)

# SHA-1 digest context.
class SHAstate_st(Structure): pass
SHAstate_st._fields_ = [
    ('h0', c_uint), ('h1', c_uint), ('h2', c_uint), ('h3', c_uint), ('h4', c_uint),
    ('Nl', c_uint), ('Nh', c_uint),
    ('data', c_uint * 16),
    ('num', c_int),
]
assert sizeof(SHAstate_st) == 96, sizeof(SHAstate_st)
assert alignment(SHAstate_st) == 4, alignment(SHAstate_st)
SHA_CTX = SHAstate_st

# SSL core: forward-declared connection struct and cipher descriptor.
class ssl_st(Structure): pass
ssl_crock_st = POINTER(ssl_st)
class ssl_cipher_st(Structure): pass
ssl_cipher_st._fields_ = [
    ('valid', c_int),
    ('name', STRING),
    ('id', c_ulong),
    ('algorithms', c_ulong),
    ('algo_strength', c_ulong),
    ('algorithm2', c_ulong),
    ('strength_bits', c_int),
    ('alg_bits', c_int),
    ('mask', c_ulong),
    ('mask_strength', c_ulong),
]
assert sizeof(ssl_cipher_st) == 40, sizeof(ssl_cipher_st)
assert alignment(ssl_cipher_st) == 4, alignment(ssl_cipher_st)
# --- SSL method/session/context/connection structs, SSL2/SSL3 state,
# --- STACK, UI, and the first batch of X.509 sub-structs.
# Machine-generated ctypes declarations; sizeof/alignment asserts pin the
# 4-byte-pointer (32-bit) layout this binding was generated for.
SSL_CIPHER = ssl_cipher_st
SSL = ssl_st
class ssl_ctx_st(Structure): pass
SSL_CTX = ssl_ctx_st
class ssl_method_st(Structure): pass
class ssl3_enc_method(Structure): pass
# Protocol-method vtable: one function pointer per SSL operation.
ssl_method_st._fields_ = [
    ('version', c_int),
    ('ssl_new', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_clear', CFUNCTYPE(None, POINTER(SSL))),
    ('ssl_free', CFUNCTYPE(None, POINTER(SSL))),
    ('ssl_accept', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_connect', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_read', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
    ('ssl_peek', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
    ('ssl_write', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
    ('ssl_shutdown', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_renegotiate', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_renegotiate_check', CFUNCTYPE(c_int, POINTER(SSL))),
    ('ssl_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, c_long, c_void_p)),
    ('ssl_ctx_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, c_long, c_void_p)),
    ('get_cipher_by_char', CFUNCTYPE(POINTER(SSL_CIPHER), POINTER(c_ubyte))),
    ('put_cipher_by_char', CFUNCTYPE(c_int, POINTER(SSL_CIPHER), POINTER(c_ubyte))),
    ('ssl_pending', CFUNCTYPE(c_int, POINTER(SSL))),
    ('num_ciphers', CFUNCTYPE(c_int)),
    ('get_cipher', CFUNCTYPE(POINTER(SSL_CIPHER), c_uint)),
    ('get_ssl_method', CFUNCTYPE(POINTER(ssl_method_st), c_int)),
    ('get_timeout', CFUNCTYPE(c_long)),
    ('ssl3_enc', POINTER(ssl3_enc_method)),
    ('ssl_version', CFUNCTYPE(c_int)),
    ('ssl_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, CFUNCTYPE(None))),
    ('ssl_ctx_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, CFUNCTYPE(None))),
]
assert sizeof(ssl_method_st) == 100, sizeof(ssl_method_st)
assert alignment(ssl_method_st) == 4, alignment(ssl_method_st)
# ssl3_enc_method is left opaque (no members declared).
ssl3_enc_method._fields_ = [
]
SSL_METHOD = ssl_method_st

# SSL session cache entry (doubly linked via prev/next).
class ssl_session_st(Structure): pass
class sess_cert_st(Structure): pass
ssl_session_st._fields_ = [
    ('ssl_version', c_int),
    ('key_arg_length', c_uint),
    ('key_arg', c_ubyte * 8),
    ('master_key_length', c_int),
    ('master_key', c_ubyte * 48),
    ('session_id_length', c_uint),
    ('session_id', c_ubyte * 32),
    ('sid_ctx_length', c_uint),
    ('sid_ctx', c_ubyte * 32),
    ('not_resumable', c_int),
    ('sess_cert', POINTER(sess_cert_st)),
    ('peer', POINTER(X509)),
    ('verify_result', c_long),
    ('references', c_int),
    ('timeout', c_long),
    ('time', c_long),
    ('compress_meth', c_int),
    ('cipher', POINTER(SSL_CIPHER)),
    ('cipher_id', c_ulong),
    ('ciphers', POINTER(STACK)),
    ('ex_data', CRYPTO_EX_DATA),
    ('prev', POINTER(ssl_session_st)),
    ('next', POINTER(ssl_session_st)),
]
assert sizeof(ssl_session_st) == 200, sizeof(ssl_session_st)
assert alignment(ssl_session_st) == 4, alignment(ssl_session_st)
# sess_cert_st is left opaque (no members declared).
sess_cert_st._fields_ = [
]
SSL_SESSION = ssl_session_st
# Session-ID generation callback type.
GEN_SESSION_CB = CFUNCTYPE(c_int, POINTER(SSL), POINTER(c_ubyte), POINTER(c_uint))
class ssl_comp_st(Structure): pass
ssl_comp_st._fields_ = [
    ('id', c_int),
    ('name', STRING),
    ('method', POINTER(COMP_METHOD)),
]
assert sizeof(ssl_comp_st) == 12, sizeof(ssl_comp_st)
assert alignment(ssl_comp_st) == 4, alignment(ssl_comp_st)
SSL_COMP = ssl_comp_st

# Generated name for the anonymous 'stats' struct inside ssl_ctx_st.
class N10ssl_ctx_st4DOLLAR_18E(Structure): pass
N10ssl_ctx_st4DOLLAR_18E._fields_ = [
    ('sess_connect', c_int),
    ('sess_connect_renegotiate', c_int),
    ('sess_connect_good', c_int),
    ('sess_accept', c_int),
    ('sess_accept_renegotiate', c_int),
    ('sess_accept_good', c_int),
    ('sess_miss', c_int),
    ('sess_timeout', c_int),
    ('sess_cache_full', c_int),
    ('sess_hit', c_int),
    ('sess_cb_hit', c_int),
]
assert sizeof(N10ssl_ctx_st4DOLLAR_18E) == 44, sizeof(N10ssl_ctx_st4DOLLAR_18E)
assert alignment(N10ssl_ctx_st4DOLLAR_18E) == 4, alignment(N10ssl_ctx_st4DOLLAR_18E)
class cert_st(Structure): pass
ssl_ctx_st._fields_ = [
    ('method', POINTER(SSL_METHOD)),
    ('cipher_list', POINTER(STACK)),
    ('cipher_list_by_id', POINTER(STACK)),
    ('cert_store', POINTER(x509_store_st)),
    ('sessions', POINTER(lhash_st)),
    ('session_cache_size', c_ulong),
    ('session_cache_head', POINTER(ssl_session_st)),
    ('session_cache_tail', POINTER(ssl_session_st)),
    ('session_cache_mode', c_int),
    ('session_timeout', c_long),
    ('new_session_cb', CFUNCTYPE(c_int, POINTER(ssl_st), POINTER(SSL_SESSION))),
    ('remove_session_cb', CFUNCTYPE(None, POINTER(ssl_ctx_st), POINTER(SSL_SESSION))),
    ('get_session_cb', CFUNCTYPE(POINTER(SSL_SESSION), POINTER(ssl_st), POINTER(c_ubyte), c_int, POINTER(c_int))),
    ('stats', N10ssl_ctx_st4DOLLAR_18E),
    ('references', c_int),
    ('app_verify_callback', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), c_void_p)),
    ('app_verify_arg', c_void_p),
    ('default_passwd_callback', POINTER(pem_password_cb)),
    ('default_passwd_callback_userdata', c_void_p),
    ('client_cert_cb', CFUNCTYPE(c_int, POINTER(SSL), POINTER(POINTER(X509)), POINTER(POINTER(EVP_PKEY)))),
    ('ex_data', CRYPTO_EX_DATA),
    ('rsa_md5', POINTER(EVP_MD)),
    ('md5', POINTER(EVP_MD)),
    ('sha1', POINTER(EVP_MD)),
    ('extra_certs', POINTER(STACK)),
    ('comp_methods', POINTER(STACK)),
    ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)),
    ('client_CA', POINTER(STACK)),
    ('options', c_ulong),
    ('mode', c_ulong),
    ('max_cert_list', c_long),
    ('cert', POINTER(cert_st)),
    ('read_ahead', c_int),
    ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)),
    ('msg_callback_arg', c_void_p),
    ('verify_mode', c_int),
    ('verify_depth', c_int),
    ('sid_ctx_length', c_uint),
    ('sid_ctx', c_ubyte * 32),
    ('default_verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
    ('generate_session_id', GEN_SESSION_CB),
    ('purpose', c_int),
    ('trust', c_int),
    ('quiet_shutdown', c_int),
]
assert sizeof(ssl_ctx_st) == 248, sizeof(ssl_ctx_st)
assert alignment(ssl_ctx_st) == 4, alignment(ssl_ctx_st)
# cert_st is left opaque (no members declared).
cert_st._fields_ = [
]

# Per-connection SSL struct; s2/s3 point at the protocol-version state.
class ssl2_state_st(Structure): pass
class ssl3_state_st(Structure): pass
ssl_st._fields_ = [
    ('version', c_int),
    ('type', c_int),
    ('method', POINTER(SSL_METHOD)),
    ('rbio', POINTER(BIO)),
    ('wbio', POINTER(BIO)),
    ('bbio', POINTER(BIO)),
    ('rwstate', c_int),
    ('in_handshake', c_int),
    ('handshake_func', CFUNCTYPE(c_int)),
    ('server', c_int),
    ('new_session', c_int),
    ('quiet_shutdown', c_int),
    ('shutdown', c_int),
    ('state', c_int),
    ('rstate', c_int),
    ('init_buf', POINTER(BUF_MEM)),
    ('init_msg', c_void_p),
    ('init_num', c_int),
    ('init_off', c_int),
    ('packet', POINTER(c_ubyte)),
    ('packet_length', c_uint),
    ('s2', POINTER(ssl2_state_st)),
    ('s3', POINTER(ssl3_state_st)),
    ('read_ahead', c_int),
    ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)),
    ('msg_callback_arg', c_void_p),
    ('hit', c_int),
    ('purpose', c_int),
    ('trust', c_int),
    ('cipher_list', POINTER(STACK)),
    ('cipher_list_by_id', POINTER(STACK)),
    ('enc_read_ctx', POINTER(EVP_CIPHER_CTX)),
    ('read_hash', POINTER(EVP_MD)),
    ('expand', POINTER(COMP_CTX)),
    ('enc_write_ctx', POINTER(EVP_CIPHER_CTX)),
    ('write_hash', POINTER(EVP_MD)),
    ('compress', POINTER(COMP_CTX)),
    ('cert', POINTER(cert_st)),
    ('sid_ctx_length', c_uint),
    ('sid_ctx', c_ubyte * 32),
    ('session', POINTER(SSL_SESSION)),
    ('generate_session_id', GEN_SESSION_CB),
    ('verify_mode', c_int),
    ('verify_depth', c_int),
    ('verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
    ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)),
    ('error', c_int),
    ('error_code', c_int),
    ('ctx', POINTER(SSL_CTX)),
    ('debug', c_int),
    ('verify_result', c_long),
    ('ex_data', CRYPTO_EX_DATA),
    ('client_CA', POINTER(STACK)),
    ('references', c_int),
    ('options', c_ulong),
    ('mode', c_ulong),
    ('max_cert_list', c_long),
    ('first_packet', c_int),
    ('client_version', c_int),
]
assert sizeof(ssl_st) == 268, sizeof(ssl_st)
assert alignment(ssl_st) == 4, alignment(ssl_st)

# Generated name for the anonymous 'tmp' struct inside ssl2_state_st.
class N13ssl2_state_st4DOLLAR_19E(Structure): pass
N13ssl2_state_st4DOLLAR_19E._fields_ = [
    ('conn_id_length', c_uint),
    ('cert_type', c_uint),
    ('cert_length', c_uint),
    ('csl', c_uint),
    ('clear', c_uint),
    ('enc', c_uint),
    ('ccl', c_ubyte * 32),
    ('cipher_spec_length', c_uint),
    ('session_id_length', c_uint),
    ('clen', c_uint),
    ('rlen', c_uint),
]
assert sizeof(N13ssl2_state_st4DOLLAR_19E) == 72, sizeof(N13ssl2_state_st4DOLLAR_19E)
assert alignment(N13ssl2_state_st4DOLLAR_19E) == 4, alignment(N13ssl2_state_st4DOLLAR_19E)
ssl2_state_st._fields_ = [
    ('three_byte_header', c_int),
    ('clear_text', c_int),
    ('escape', c_int),
    ('ssl2_rollback', c_int),
    ('wnum', c_uint),
    ('wpend_tot', c_int),
    ('wpend_buf', POINTER(c_ubyte)),
    ('wpend_off', c_int),
    ('wpend_len', c_int),
    ('wpend_ret', c_int),
    ('rbuf_left', c_int),
    ('rbuf_offs', c_int),
    ('rbuf', POINTER(c_ubyte)),
    ('wbuf', POINTER(c_ubyte)),
    ('write_ptr', POINTER(c_ubyte)),
    ('padding', c_uint),
    ('rlength', c_uint),
    ('ract_data_length', c_int),
    ('wlength', c_uint),
    ('wact_data_length', c_int),
    ('ract_data', POINTER(c_ubyte)),
    ('wact_data', POINTER(c_ubyte)),
    ('mac_data', POINTER(c_ubyte)),
    ('read_key', POINTER(c_ubyte)),
    ('write_key', POINTER(c_ubyte)),
    ('challenge_length', c_uint),
    ('challenge', c_ubyte * 32),
    ('conn_id_length', c_uint),
    ('conn_id', c_ubyte * 16),
    ('key_material_length', c_uint),
    ('key_material', c_ubyte * 48),
    ('read_sequence', c_ulong),
    ('write_sequence', c_ulong),
    ('tmp', N13ssl2_state_st4DOLLAR_19E),
]
assert sizeof(ssl2_state_st) == 288, sizeof(ssl2_state_st)
assert alignment(ssl2_state_st) == 4, alignment(ssl2_state_st)
SSL2_STATE = ssl2_state_st

# SSL3 record and buffer.
class ssl3_record_st(Structure): pass
ssl3_record_st._fields_ = [
    ('type', c_int),
    ('length', c_uint),
    ('off', c_uint),
    ('data', POINTER(c_ubyte)),
    ('input', POINTER(c_ubyte)),
    ('comp', POINTER(c_ubyte)),
]
assert sizeof(ssl3_record_st) == 24, sizeof(ssl3_record_st)
assert alignment(ssl3_record_st) == 4, alignment(ssl3_record_st)
SSL3_RECORD = ssl3_record_st
class ssl3_buffer_st(Structure): pass
# Platform size_t alias (Darwin).
size_t = __darwin_size_t
ssl3_buffer_st._fields_ = [
    ('buf', POINTER(c_ubyte)),
    ('len', size_t),
    ('offset', c_int),
    ('left', c_int),
]
assert sizeof(ssl3_buffer_st) == 16, sizeof(ssl3_buffer_st)
assert alignment(ssl3_buffer_st) == 4, alignment(ssl3_buffer_st)
SSL3_BUFFER = ssl3_buffer_st

# Generated name for the anonymous 'tmp' struct inside ssl3_state_st.
class N13ssl3_state_st4DOLLAR_20E(Structure): pass
N13ssl3_state_st4DOLLAR_20E._fields_ = [
    ('cert_verify_md', c_ubyte * 72),
    ('finish_md', c_ubyte * 72),
    ('finish_md_len', c_int),
    ('peer_finish_md', c_ubyte * 72),
    ('peer_finish_md_len', c_int),
    ('message_size', c_ulong),
    ('message_type', c_int),
    ('new_cipher', POINTER(SSL_CIPHER)),
    ('dh', POINTER(DH)),
    ('next_state', c_int),
    ('reuse_message', c_int),
    ('cert_req', c_int),
    ('ctype_num', c_int),
    ('ctype', c_char * 7),
    ('ca_names', POINTER(STACK)),
    ('use_rsa_tmp', c_int),
    ('key_block_length', c_int),
    ('key_block', POINTER(c_ubyte)),
    ('new_sym_enc', POINTER(EVP_CIPHER)),
    ('new_hash', POINTER(EVP_MD)),
    ('new_compression', POINTER(SSL_COMP)),
    ('cert_request', c_int),
]
assert sizeof(N13ssl3_state_st4DOLLAR_20E) == 296, sizeof(N13ssl3_state_st4DOLLAR_20E)
assert alignment(N13ssl3_state_st4DOLLAR_20E) == 4, alignment(N13ssl3_state_st4DOLLAR_20E)
ssl3_state_st._fields_ = [
    ('flags', c_long),
    ('delay_buf_pop_ret', c_int),
    ('read_sequence', c_ubyte * 8),
    ('read_mac_secret', c_ubyte * 36),
    ('write_sequence', c_ubyte * 8),
    ('write_mac_secret', c_ubyte * 36),
    ('server_random', c_ubyte * 32),
    ('client_random', c_ubyte * 32),
    ('need_empty_fragments', c_int),
    ('empty_fragment_done', c_int),
    ('rbuf', SSL3_BUFFER),
    ('wbuf', SSL3_BUFFER),
    ('rrec', SSL3_RECORD),
    ('wrec', SSL3_RECORD),
    ('alert_fragment', c_ubyte * 2),
    ('alert_fragment_len', c_uint),
    ('handshake_fragment', c_ubyte * 4),
    ('handshake_fragment_len', c_uint),
    ('wnum', c_uint),
    ('wpend_tot', c_int),
    ('wpend_type', c_int),
    ('wpend_ret', c_int),
    ('wpend_buf', POINTER(c_ubyte)),
    ('finish_dgst1', EVP_MD_CTX),
    ('finish_dgst2', EVP_MD_CTX),
    ('change_cipher_spec', c_int),
    ('warn_alert', c_int),
    ('fatal_alert', c_int),
    ('alert_dispatch', c_int),
    ('send_alert', c_ubyte * 2),
    ('renegotiate', c_int),
    ('total_renegotiations', c_int),
    ('num_renegotiations', c_int),
    ('in_read_app_data', c_int),
    ('tmp', N13ssl3_state_st4DOLLAR_20E),
]
assert sizeof(ssl3_state_st) == 648, sizeof(ssl3_state_st)
assert alignment(ssl3_state_st) == 4, alignment(ssl3_state_st)
SSL3_STATE = ssl3_state_st

# OpenSSL's generic STACK container.
stack_st._fields_ = [
    ('num', c_int),
    ('data', POINTER(STRING)),
    ('sorted', c_int),
    ('num_alloc', c_int),
    ('comp', CFUNCTYPE(c_int, POINTER(STRING), POINTER(STRING))),
]
assert sizeof(stack_st) == 20, sizeof(stack_st)
assert alignment(stack_st) == 4, alignment(stack_st)

# UI types, all left opaque (no members declared).
class ui_st(Structure): pass
ui_st._fields_ = [
]
UI = ui_st
class ui_method_st(Structure): pass
ui_method_st._fields_ = [
]
UI_METHOD = ui_method_st
class ui_string_st(Structure): pass
ui_string_st._fields_ = [
]
UI_STRING = ui_string_st

# values for enumeration 'UI_string_types'
UI_string_types = c_int # enum

# X.509 helper structs: object table, algorithm id, validity, pubkey, sig.
class X509_objects_st(Structure): pass
X509_objects_st._fields_ = [
    ('nid', c_int),
    ('a2i', CFUNCTYPE(c_int)),
    ('i2a', CFUNCTYPE(c_int)),
]
assert sizeof(X509_objects_st) == 12, sizeof(X509_objects_st)
assert alignment(X509_objects_st) == 4, alignment(X509_objects_st)
X509_OBJECTS = X509_objects_st
X509_algor_st._fields_ = [
    ('algorithm', POINTER(ASN1_OBJECT)),
    ('parameter', POINTER(ASN1_TYPE)),
]
assert sizeof(X509_algor_st) == 8, sizeof(X509_algor_st)
assert alignment(X509_algor_st) == 4, alignment(X509_algor_st)
class X509_val_st(Structure): pass
X509_val_st._fields_ = [
    ('notBefore', POINTER(ASN1_TIME)),
    ('notAfter', POINTER(ASN1_TIME)),
]
assert sizeof(X509_val_st) == 8, sizeof(X509_val_st)
assert alignment(X509_val_st) == 4, alignment(X509_val_st)
X509_VAL = X509_val_st
class X509_pubkey_st(Structure): pass
X509_pubkey_st._fields_ = [
    ('algor', POINTER(X509_ALGOR)),
    ('public_key', POINTER(ASN1_BIT_STRING)),
    ('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(X509_pubkey_st) == 12, sizeof(X509_pubkey_st)
assert alignment(X509_pubkey_st) == 4, alignment(X509_pubkey_st)
X509_PUBKEY = X509_pubkey_st
class X509_sig_st(Structure): pass
X509_sig_st._fields_ = [
    ('algor', POINTER(X509_ALGOR)),
    ('digest', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(X509_sig_st) == 8, sizeof(X509_sig_st)
assert alignment(X509_sig_st) == 4, alignment(X509_sig_st)
X509_SIG = X509_sig_st
class X509_name_entry_st(Structure): pass
X509_name_entry_st._fields_ = [ ('object', POINTER(ASN1_OBJECT)), ('value', POINTER(ASN1_STRING)), ('set', c_int), ('size', c_int), ] assert sizeof(X509_name_entry_st) == 16, sizeof(X509_name_entry_st) assert alignment(X509_name_entry_st) == 4, alignment(X509_name_entry_st) X509_NAME_ENTRY = X509_name_entry_st X509_name_st._fields_ = [ ('entries', POINTER(STACK)), ('modified', c_int), ('bytes', POINTER(BUF_MEM)), ('hash', c_ulong), ] assert sizeof(X509_name_st) == 16, sizeof(X509_name_st) assert alignment(X509_name_st) == 4, alignment(X509_name_st) class X509_extension_st(Structure): pass X509_extension_st._fields_ = [ ('object', POINTER(ASN1_OBJECT)), ('critical', ASN1_BOOLEAN), ('value', POINTER(ASN1_OCTET_STRING)), ] assert sizeof(X509_extension_st) == 12, sizeof(X509_extension_st) assert alignment(X509_extension_st) == 4, alignment(X509_extension_st) X509_EXTENSION = X509_extension_st class x509_attributes_st(Structure): pass class N18x509_attributes_st4DOLLAR_13E(Union): pass N18x509_attributes_st4DOLLAR_13E._fields_ = [ ('ptr', STRING), ('set', POINTER(STACK)), ('single', POINTER(ASN1_TYPE)), ] assert sizeof(N18x509_attributes_st4DOLLAR_13E) == 4, sizeof(N18x509_attributes_st4DOLLAR_13E) assert alignment(N18x509_attributes_st4DOLLAR_13E) == 4, alignment(N18x509_attributes_st4DOLLAR_13E) x509_attributes_st._fields_ = [ ('object', POINTER(ASN1_OBJECT)), ('single', c_int), ('value', N18x509_attributes_st4DOLLAR_13E), ] assert sizeof(x509_attributes_st) == 12, sizeof(x509_attributes_st) assert alignment(x509_attributes_st) == 4, alignment(x509_attributes_st) X509_ATTRIBUTE = x509_attributes_st class X509_req_info_st(Structure): pass X509_req_info_st._fields_ = [ ('enc', ASN1_ENCODING), ('version', POINTER(ASN1_INTEGER)), ('subject', POINTER(X509_NAME)), ('pubkey', POINTER(X509_PUBKEY)), ('attributes', POINTER(STACK)), ] assert sizeof(X509_req_info_st) == 28, sizeof(X509_req_info_st) assert alignment(X509_req_info_st) == 4, alignment(X509_req_info_st) 
X509_REQ_INFO = X509_req_info_st class X509_req_st(Structure): pass X509_req_st._fields_ = [ ('req_info', POINTER(X509_REQ_INFO)), ('sig_alg', POINTER(X509_ALGOR)), ('signature', POINTER(ASN1_BIT_STRING)), ('references', c_int), ] assert sizeof(X509_req_st) == 16, sizeof(X509_req_st) assert alignment(X509_req_st) == 4, alignment(X509_req_st) X509_REQ = X509_req_st class x509_cinf_st(Structure): pass x509_cinf_st._fields_ = [ ('version', POINTER(ASN1_INTEGER)), ('serialNumber', POINTER(ASN1_INTEGER)), ('signature', POINTER(X509_ALGOR)), ('issuer', POINTER(X509_NAME)), ('validity', POINTER(X509_VAL)), ('subject', POINTER(X509_NAME)), ('key', POINTER(X509_PUBKEY)), ('issuerUID', POINTER(ASN1_BIT_STRING)), ('subjectUID', POINTER(ASN1_BIT_STRING)), ('extensions', POINTER(STACK)), ] assert sizeof(x509_cinf_st) == 40, sizeof(x509_cinf_st) assert alignment(x509_cinf_st) == 4, alignment(x509_cinf_st) X509_CINF = x509_cinf_st class x509_cert_aux_st(Structure): pass x509_cert_aux_st._fields_ = [ ('trust', POINTER(STACK)), ('reject', POINTER(STACK)), ('alias', POINTER(ASN1_UTF8STRING)), ('keyid', POINTER(ASN1_OCTET_STRING)), ('other', POINTER(STACK)), ] assert sizeof(x509_cert_aux_st) == 20, sizeof(x509_cert_aux_st) assert alignment(x509_cert_aux_st) == 4, alignment(x509_cert_aux_st) X509_CERT_AUX = x509_cert_aux_st class AUTHORITY_KEYID_st(Structure): pass x509_st._fields_ = [ ('cert_info', POINTER(X509_CINF)), ('sig_alg', POINTER(X509_ALGOR)), ('signature', POINTER(ASN1_BIT_STRING)), ('valid', c_int), ('references', c_int), ('name', STRING), ('ex_data', CRYPTO_EX_DATA), ('ex_pathlen', c_long), ('ex_flags', c_ulong), ('ex_kusage', c_ulong), ('ex_xkusage', c_ulong), ('ex_nscert', c_ulong), ('skid', POINTER(ASN1_OCTET_STRING)), ('akid', POINTER(AUTHORITY_KEYID_st)), ('sha1_hash', c_ubyte * 20), ('aux', POINTER(X509_CERT_AUX)), ] assert sizeof(x509_st) == 84, sizeof(x509_st) assert alignment(x509_st) == 4, alignment(x509_st) AUTHORITY_KEYID_st._fields_ = [ ] class 
x509_trust_st(Structure): pass x509_trust_st._fields_ = [ ('trust', c_int), ('flags', c_int), ('check_trust', CFUNCTYPE(c_int, POINTER(x509_trust_st), POINTER(X509), c_int)), ('name', STRING), ('arg1', c_int), ('arg2', c_void_p), ] assert sizeof(x509_trust_st) == 24, sizeof(x509_trust_st) assert alignment(x509_trust_st) == 4, alignment(x509_trust_st) X509_TRUST = x509_trust_st class X509_revoked_st(Structure): pass X509_revoked_st._fields_ = [ ('serialNumber', POINTER(ASN1_INTEGER)), ('revocationDate', POINTER(ASN1_TIME)), ('extensions', POINTER(STACK)), ('sequence', c_int), ] assert sizeof(X509_revoked_st) == 16, sizeof(X509_revoked_st) assert alignment(X509_revoked_st) == 4, alignment(X509_revoked_st) X509_REVOKED = X509_revoked_st class X509_crl_info_st(Structure): pass X509_crl_info_st._fields_ = [ ('version', POINTER(ASN1_INTEGER)), ('sig_alg', POINTER(X509_ALGOR)), ('issuer', POINTER(X509_NAME)), ('lastUpdate', POINTER(ASN1_TIME)), ('nextUpdate', POINTER(ASN1_TIME)), ('revoked', POINTER(STACK)), ('extensions', POINTER(STACK)), ('enc', ASN1_ENCODING), ] assert sizeof(X509_crl_info_st) == 40, sizeof(X509_crl_info_st) assert alignment(X509_crl_info_st) == 4, alignment(X509_crl_info_st) X509_CRL_INFO = X509_crl_info_st X509_crl_st._fields_ = [ ('crl', POINTER(X509_CRL_INFO)), ('sig_alg', POINTER(X509_ALGOR)), ('signature', POINTER(ASN1_BIT_STRING)), ('references', c_int), ] assert sizeof(X509_crl_st) == 16, sizeof(X509_crl_st) assert alignment(X509_crl_st) == 4, alignment(X509_crl_st) class private_key_st(Structure): pass private_key_st._fields_ = [ ('version', c_int), ('enc_algor', POINTER(X509_ALGOR)), ('enc_pkey', POINTER(ASN1_OCTET_STRING)), ('dec_pkey', POINTER(EVP_PKEY)), ('key_length', c_int), ('key_data', STRING), ('key_free', c_int), ('cipher', EVP_CIPHER_INFO), ('references', c_int), ] assert sizeof(private_key_st) == 52, sizeof(private_key_st) assert alignment(private_key_st) == 4, alignment(private_key_st) X509_PKEY = private_key_st class 
X509_info_st(Structure): pass X509_info_st._fields_ = [ ('x509', POINTER(X509)), ('crl', POINTER(X509_CRL)), ('x_pkey', POINTER(X509_PKEY)), ('enc_cipher', EVP_CIPHER_INFO), ('enc_len', c_int), ('enc_data', STRING), ('references', c_int), ] assert sizeof(X509_info_st) == 44, sizeof(X509_info_st) assert alignment(X509_info_st) == 4, alignment(X509_info_st) X509_INFO = X509_info_st class Netscape_spkac_st(Structure): pass Netscape_spkac_st._fields_ = [ ('pubkey', POINTER(X509_PUBKEY)), ('challenge', POINTER(ASN1_IA5STRING)), ] assert sizeof(Netscape_spkac_st) == 8, sizeof(Netscape_spkac_st) assert alignment(Netscape_spkac_st) == 4, alignment(Netscape_spkac_st) NETSCAPE_SPKAC = Netscape_spkac_st class Netscape_spki_st(Structure): pass Netscape_spki_st._fields_ = [ ('spkac', POINTER(NETSCAPE_SPKAC)), ('sig_algor', POINTER(X509_ALGOR)), ('signature', POINTER(ASN1_BIT_STRING)), ] assert sizeof(Netscape_spki_st) == 12, sizeof(Netscape_spki_st) assert alignment(Netscape_spki_st) == 4, alignment(Netscape_spki_st) NETSCAPE_SPKI = Netscape_spki_st class Netscape_certificate_sequence(Structure): pass Netscape_certificate_sequence._fields_ = [ ('type', POINTER(ASN1_OBJECT)), ('certs', POINTER(STACK)), ] assert sizeof(Netscape_certificate_sequence) == 8, sizeof(Netscape_certificate_sequence) assert alignment(Netscape_certificate_sequence) == 4, alignment(Netscape_certificate_sequence) NETSCAPE_CERT_SEQUENCE = Netscape_certificate_sequence class PBEPARAM_st(Structure): pass PBEPARAM_st._fields_ = [ ('salt', POINTER(ASN1_OCTET_STRING)), ('iter', POINTER(ASN1_INTEGER)), ] assert sizeof(PBEPARAM_st) == 8, sizeof(PBEPARAM_st) assert alignment(PBEPARAM_st) == 4, alignment(PBEPARAM_st) PBEPARAM = PBEPARAM_st class PBE2PARAM_st(Structure): pass PBE2PARAM_st._fields_ = [ ('keyfunc', POINTER(X509_ALGOR)), ('encryption', POINTER(X509_ALGOR)), ] assert sizeof(PBE2PARAM_st) == 8, sizeof(PBE2PARAM_st) assert alignment(PBE2PARAM_st) == 4, alignment(PBE2PARAM_st) PBE2PARAM = PBE2PARAM_st class 
PBKDF2PARAM_st(Structure): pass PBKDF2PARAM_st._fields_ = [ ('salt', POINTER(ASN1_TYPE)), ('iter', POINTER(ASN1_INTEGER)), ('keylength', POINTER(ASN1_INTEGER)), ('prf', POINTER(X509_ALGOR)), ] assert sizeof(PBKDF2PARAM_st) == 16, sizeof(PBKDF2PARAM_st) assert alignment(PBKDF2PARAM_st) == 4, alignment(PBKDF2PARAM_st) PBKDF2PARAM = PBKDF2PARAM_st class pkcs8_priv_key_info_st(Structure): pass pkcs8_priv_key_info_st._fields_ = [ ('broken', c_int), ('version', POINTER(ASN1_INTEGER)), ('pkeyalg', POINTER(X509_ALGOR)), ('pkey', POINTER(ASN1_TYPE)), ('attributes', POINTER(STACK)), ] assert sizeof(pkcs8_priv_key_info_st) == 20, sizeof(pkcs8_priv_key_info_st) assert alignment(pkcs8_priv_key_info_st) == 4, alignment(pkcs8_priv_key_info_st) PKCS8_PRIV_KEY_INFO = pkcs8_priv_key_info_st class x509_hash_dir_st(Structure): pass x509_hash_dir_st._fields_ = [ ('num_dirs', c_int), ('dirs', POINTER(STRING)), ('dirs_type', POINTER(c_int)), ('num_dirs_alloced', c_int), ] assert sizeof(x509_hash_dir_st) == 16, sizeof(x509_hash_dir_st) assert alignment(x509_hash_dir_st) == 4, alignment(x509_hash_dir_st) X509_HASH_DIR_CTX = x509_hash_dir_st class x509_file_st(Structure): pass x509_file_st._fields_ = [ ('num_paths', c_int), ('num_alloced', c_int), ('paths', POINTER(STRING)), ('path_type', POINTER(c_int)), ] assert sizeof(x509_file_st) == 16, sizeof(x509_file_st) assert alignment(x509_file_st) == 4, alignment(x509_file_st) X509_CERT_FILE_CTX = x509_file_st class x509_object_st(Structure): pass class N14x509_object_st4DOLLAR_14E(Union): pass N14x509_object_st4DOLLAR_14E._fields_ = [ ('ptr', STRING), ('x509', POINTER(X509)), ('crl', POINTER(X509_CRL)), ('pkey', POINTER(EVP_PKEY)), ] assert sizeof(N14x509_object_st4DOLLAR_14E) == 4, sizeof(N14x509_object_st4DOLLAR_14E) assert alignment(N14x509_object_st4DOLLAR_14E) == 4, alignment(N14x509_object_st4DOLLAR_14E) x509_object_st._fields_ = [ ('type', c_int), ('data', N14x509_object_st4DOLLAR_14E), ] assert sizeof(x509_object_st) == 8, 
sizeof(x509_object_st) assert alignment(x509_object_st) == 4, alignment(x509_object_st) X509_OBJECT = x509_object_st class x509_lookup_st(Structure): pass X509_LOOKUP = x509_lookup_st class x509_lookup_method_st(Structure): pass x509_lookup_method_st._fields_ = [ ('name', STRING), ('new_item', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), ('free', CFUNCTYPE(None, POINTER(X509_LOOKUP))), ('init', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), ('shutdown', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), ('ctrl', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_long, POINTER(STRING))), ('get_by_subject', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(X509_OBJECT))), ('get_by_issuer_serial', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(ASN1_INTEGER), POINTER(X509_OBJECT))), ('get_by_fingerprint', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(c_ubyte), c_int, POINTER(X509_OBJECT))), ('get_by_alias', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_int, POINTER(X509_OBJECT))), ] assert sizeof(x509_lookup_method_st) == 40, sizeof(x509_lookup_method_st) assert alignment(x509_lookup_method_st) == 4, alignment(x509_lookup_method_st) X509_LOOKUP_METHOD = x509_lookup_method_st x509_store_st._fields_ = [ ('cache', c_int), ('objs', POINTER(STACK)), ('get_cert_methods', POINTER(STACK)), ('flags', c_ulong), ('purpose', c_int), ('trust', c_int), ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))), ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))), ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))), ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))), ('cert_crl', CFUNCTYPE(c_int, 
POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))), ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('ex_data', CRYPTO_EX_DATA), ('references', c_int), ('depth', c_int), ] assert sizeof(x509_store_st) == 76, sizeof(x509_store_st) assert alignment(x509_store_st) == 4, alignment(x509_store_st) x509_lookup_st._fields_ = [ ('init', c_int), ('skip', c_int), ('method', POINTER(X509_LOOKUP_METHOD)), ('method_data', STRING), ('store_ctx', POINTER(X509_STORE)), ] assert sizeof(x509_lookup_st) == 20, sizeof(x509_lookup_st) assert alignment(x509_lookup_st) == 4, alignment(x509_lookup_st) time_t = __darwin_time_t x509_store_ctx_st._fields_ = [ ('ctx', POINTER(X509_STORE)), ('current_method', c_int), ('cert', POINTER(X509)), ('untrusted', POINTER(STACK)), ('purpose', c_int), ('trust', c_int), ('check_time', time_t), ('flags', c_ulong), ('other_ctx', c_void_p), ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))), ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))), ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))), ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))), ('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))), ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), ('depth', c_int), ('valid', c_int), ('last_untrusted', c_int), ('chain', POINTER(STACK)), ('error_depth', c_int), ('error', c_int), ('current_cert', POINTER(X509)), ('current_issuer', POINTER(X509)), ('current_crl', POINTER(X509_CRL)), ('ex_data', CRYPTO_EX_DATA), ] assert sizeof(x509_store_ctx_st) == 116, sizeof(x509_store_ctx_st) assert alignment(x509_store_ctx_st) == 4, alignment(x509_store_ctx_st) va_list = 
__darwin_va_list __darwin_off_t = __int64_t fpos_t = __darwin_off_t class __sbuf(Structure): pass __sbuf._fields_ = [ ('_base', POINTER(c_ubyte)), ('_size', c_int), ] assert sizeof(__sbuf) == 8, sizeof(__sbuf) assert alignment(__sbuf) == 4, alignment(__sbuf) class __sFILEX(Structure): pass __sFILEX._fields_ = [ ] class __sFILE(Structure): pass __sFILE._pack_ = 4 __sFILE._fields_ = [ ('_p', POINTER(c_ubyte)), ('_r', c_int), ('_w', c_int), ('_flags', c_short), ('_file', c_short), ('_bf', __sbuf), ('_lbfsize', c_int), ('_cookie', c_void_p), ('_close', CFUNCTYPE(c_int, c_void_p)), ('_read', CFUNCTYPE(c_int, c_void_p, STRING, c_int)), ('_seek', CFUNCTYPE(fpos_t, c_void_p, c_longlong, c_int)), ('_write', CFUNCTYPE(c_int, c_void_p, STRING, c_int)), ('_ub', __sbuf), ('_extra', POINTER(__sFILEX)), ('_ur', c_int), ('_ubuf', c_ubyte * 3), ('_nbuf', c_ubyte * 1), ('_lb', __sbuf), ('_blksize', c_int), ('_offset', fpos_t), ] assert sizeof(__sFILE) == 88, sizeof(__sFILE) assert alignment(__sFILE) == 4, alignment(__sFILE) FILE = __sFILE ct_rune_t = __darwin_ct_rune_t rune_t = __darwin_rune_t class div_t(Structure): pass div_t._fields_ = [ ('quot', c_int), ('rem', c_int), ] assert sizeof(div_t) == 8, sizeof(div_t) assert alignment(div_t) == 4, alignment(div_t) class ldiv_t(Structure): pass ldiv_t._fields_ = [ ('quot', c_long), ('rem', c_long), ] assert sizeof(ldiv_t) == 8, sizeof(ldiv_t) assert alignment(ldiv_t) == 4, alignment(ldiv_t) class lldiv_t(Structure): pass lldiv_t._pack_ = 4 lldiv_t._fields_ = [ ('quot', c_longlong), ('rem', c_longlong), ] assert sizeof(lldiv_t) == 16, sizeof(lldiv_t) assert alignment(lldiv_t) == 4, alignment(lldiv_t) __darwin_dev_t = __int32_t dev_t = __darwin_dev_t __darwin_mode_t = __uint16_t mode_t = __darwin_mode_t class mcontext(Structure): pass mcontext._fields_ = [ ] class mcontext64(Structure): pass mcontext64._fields_ = [ ] class __darwin_pthread_handler_rec(Structure): pass __darwin_pthread_handler_rec._fields_ = [ ('__routine', CFUNCTYPE(None, 
c_void_p)), ('__arg', c_void_p), ('__next', POINTER(__darwin_pthread_handler_rec)), ] assert sizeof(__darwin_pthread_handler_rec) == 12, sizeof(__darwin_pthread_handler_rec) assert alignment(__darwin_pthread_handler_rec) == 4, alignment(__darwin_pthread_handler_rec) class _opaque_pthread_attr_t(Structure): pass _opaque_pthread_attr_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 36), ] assert sizeof(_opaque_pthread_attr_t) == 40, sizeof(_opaque_pthread_attr_t) assert alignment(_opaque_pthread_attr_t) == 4, alignment(_opaque_pthread_attr_t) class _opaque_pthread_cond_t(Structure): pass _opaque_pthread_cond_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 24), ] assert sizeof(_opaque_pthread_cond_t) == 28, sizeof(_opaque_pthread_cond_t) assert alignment(_opaque_pthread_cond_t) == 4, alignment(_opaque_pthread_cond_t) class _opaque_pthread_condattr_t(Structure): pass _opaque_pthread_condattr_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 4), ] assert sizeof(_opaque_pthread_condattr_t) == 8, sizeof(_opaque_pthread_condattr_t) assert alignment(_opaque_pthread_condattr_t) == 4, alignment(_opaque_pthread_condattr_t) class _opaque_pthread_mutex_t(Structure): pass _opaque_pthread_mutex_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 40), ] assert sizeof(_opaque_pthread_mutex_t) == 44, sizeof(_opaque_pthread_mutex_t) assert alignment(_opaque_pthread_mutex_t) == 4, alignment(_opaque_pthread_mutex_t) class _opaque_pthread_mutexattr_t(Structure): pass _opaque_pthread_mutexattr_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 8), ] assert sizeof(_opaque_pthread_mutexattr_t) == 12, sizeof(_opaque_pthread_mutexattr_t) assert alignment(_opaque_pthread_mutexattr_t) == 4, alignment(_opaque_pthread_mutexattr_t) class _opaque_pthread_once_t(Structure): pass _opaque_pthread_once_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 4), ] assert sizeof(_opaque_pthread_once_t) == 8, sizeof(_opaque_pthread_once_t) assert 
alignment(_opaque_pthread_once_t) == 4, alignment(_opaque_pthread_once_t) class _opaque_pthread_rwlock_t(Structure): pass _opaque_pthread_rwlock_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 124), ] assert sizeof(_opaque_pthread_rwlock_t) == 128, sizeof(_opaque_pthread_rwlock_t) assert alignment(_opaque_pthread_rwlock_t) == 4, alignment(_opaque_pthread_rwlock_t) class _opaque_pthread_rwlockattr_t(Structure): pass _opaque_pthread_rwlockattr_t._fields_ = [ ('__sig', c_long), ('__opaque', c_char * 12), ] assert sizeof(_opaque_pthread_rwlockattr_t) == 16, sizeof(_opaque_pthread_rwlockattr_t) assert alignment(_opaque_pthread_rwlockattr_t) == 4, alignment(_opaque_pthread_rwlockattr_t) class _opaque_pthread_t(Structure): pass _opaque_pthread_t._fields_ = [ ('__sig', c_long), ('__cleanup_stack', POINTER(__darwin_pthread_handler_rec)), ('__opaque', c_char * 596), ] assert sizeof(_opaque_pthread_t) == 604, sizeof(_opaque_pthread_t) assert alignment(_opaque_pthread_t) == 4, alignment(_opaque_pthread_t) __darwin_blkcnt_t = __int64_t __darwin_blksize_t = __int32_t __darwin_fsblkcnt_t = c_uint __darwin_fsfilcnt_t = c_uint __darwin_gid_t = __uint32_t __darwin_id_t = __uint32_t __darwin_ino_t = __uint32_t __darwin_mach_port_name_t = __darwin_natural_t __darwin_mach_port_t = __darwin_mach_port_name_t __darwin_mcontext_t = POINTER(mcontext) __darwin_mcontext64_t = POINTER(mcontext64) __darwin_pid_t = __int32_t __darwin_pthread_attr_t = _opaque_pthread_attr_t __darwin_pthread_cond_t = _opaque_pthread_cond_t __darwin_pthread_condattr_t = _opaque_pthread_condattr_t __darwin_pthread_key_t = c_ulong __darwin_pthread_mutex_t = _opaque_pthread_mutex_t __darwin_pthread_mutexattr_t = _opaque_pthread_mutexattr_t __darwin_pthread_once_t = _opaque_pthread_once_t __darwin_pthread_rwlock_t = _opaque_pthread_rwlock_t __darwin_pthread_rwlockattr_t = _opaque_pthread_rwlockattr_t __darwin_pthread_t = POINTER(_opaque_pthread_t) __darwin_sigset_t = __uint32_t __darwin_suseconds_t = __int32_t 
__darwin_uid_t = __uint32_t __darwin_useconds_t = __uint32_t __darwin_uuid_t = c_ubyte * 16 class sigaltstack(Structure): pass sigaltstack._fields_ = [ ('ss_sp', c_void_p), ('ss_size', __darwin_size_t), ('ss_flags', c_int), ] assert sizeof(sigaltstack) == 12, sizeof(sigaltstack) assert alignment(sigaltstack) == 4, alignment(sigaltstack) __darwin_stack_t = sigaltstack class ucontext(Structure): pass ucontext._fields_ = [ ('uc_onstack', c_int), ('uc_sigmask', __darwin_sigset_t), ('uc_stack', __darwin_stack_t), ('uc_link', POINTER(ucontext)), ('uc_mcsize', __darwin_size_t), ('uc_mcontext', __darwin_mcontext_t), ] assert sizeof(ucontext) == 32, sizeof(ucontext) assert alignment(ucontext) == 4, alignment(ucontext) __darwin_ucontext_t = ucontext class ucontext64(Structure): pass ucontext64._fields_ = [ ('uc_onstack', c_int), ('uc_sigmask', __darwin_sigset_t), ('uc_stack', __darwin_stack_t), ('uc_link', POINTER(ucontext64)), ('uc_mcsize', __darwin_size_t), ('uc_mcontext64', __darwin_mcontext64_t), ] assert sizeof(ucontext64) == 32, sizeof(ucontext64) assert alignment(ucontext64) == 4, alignment(ucontext64) __darwin_ucontext64_t = ucontext64 class timeval(Structure): pass timeval._fields_ = [ ('tv_sec', __darwin_time_t), ('tv_usec', __darwin_suseconds_t), ] assert sizeof(timeval) == 8, sizeof(timeval) assert alignment(timeval) == 4, alignment(timeval) rlim_t = __int64_t class rusage(Structure): pass rusage._fields_ = [ ('ru_utime', timeval), ('ru_stime', timeval), ('ru_maxrss', c_long), ('ru_ixrss', c_long), ('ru_idrss', c_long), ('ru_isrss', c_long), ('ru_minflt', c_long), ('ru_majflt', c_long), ('ru_nswap', c_long), ('ru_inblock', c_long), ('ru_oublock', c_long), ('ru_msgsnd', c_long), ('ru_msgrcv', c_long), ('ru_nsignals', c_long), ('ru_nvcsw', c_long), ('ru_nivcsw', c_long), ] assert sizeof(rusage) == 72, sizeof(rusage) assert alignment(rusage) == 4, alignment(rusage) class rlimit(Structure): pass rlimit._pack_ = 4 rlimit._fields_ = [ ('rlim_cur', rlim_t), ('rlim_max', 
rlim_t), ] assert sizeof(rlimit) == 16, sizeof(rlimit) assert alignment(rlimit) == 4, alignment(rlimit) mcontext_t = __darwin_mcontext_t mcontext64_t = __darwin_mcontext64_t pthread_attr_t = __darwin_pthread_attr_t sigset_t = __darwin_sigset_t ucontext_t = __darwin_ucontext_t ucontext64_t = __darwin_ucontext64_t uid_t = __darwin_uid_t class sigval(Union): pass sigval._fields_ = [ ('sival_int', c_int), ('sival_ptr', c_void_p), ] assert sizeof(sigval) == 4, sizeof(sigval) assert alignment(sigval) == 4, alignment(sigval) class sigevent(Structure): pass sigevent._fields_ = [ ('sigev_notify', c_int), ('sigev_signo', c_int), ('sigev_value', sigval), ('sigev_notify_function', CFUNCTYPE(None, sigval)), ('sigev_notify_attributes', POINTER(pthread_attr_t)), ] assert sizeof(sigevent) == 20, sizeof(sigevent) assert alignment(sigevent) == 4, alignment(sigevent) class __siginfo(Structure): pass pid_t = __darwin_pid_t __siginfo._fields_ = [ ('si_signo', c_int), ('si_errno', c_int), ('si_code', c_int), ('si_pid', pid_t), ('si_uid', uid_t), ('si_status', c_int), ('si_addr', c_void_p), ('si_value', sigval), ('si_band', c_long), ('pad', c_ulong * 7), ] assert sizeof(__siginfo) == 64, sizeof(__siginfo) assert alignment(__siginfo) == 4, alignment(__siginfo) siginfo_t = __siginfo class __sigaction_u(Union): pass __sigaction_u._fields_ = [ ('__sa_handler', CFUNCTYPE(None, c_int)), ('__sa_sigaction', CFUNCTYPE(None, c_int, POINTER(__siginfo), c_void_p)), ] assert sizeof(__sigaction_u) == 4, sizeof(__sigaction_u) assert alignment(__sigaction_u) == 4, alignment(__sigaction_u) class __sigaction(Structure): pass __sigaction._fields_ = [ ('__sigaction_u', __sigaction_u), ('sa_tramp', CFUNCTYPE(None, c_void_p, c_int, c_int, POINTER(siginfo_t), c_void_p)), ('sa_mask', sigset_t), ('sa_flags', c_int), ] assert sizeof(__sigaction) == 16, sizeof(__sigaction) assert alignment(__sigaction) == 4, alignment(__sigaction) class sigaction(Structure): pass sigaction._fields_ = [ ('__sigaction_u', 
__sigaction_u), ('sa_mask', sigset_t), ('sa_flags', c_int), ] assert sizeof(sigaction) == 12, sizeof(sigaction) assert alignment(sigaction) == 4, alignment(sigaction) sig_t = CFUNCTYPE(None, c_int) stack_t = __darwin_stack_t class sigvec(Structure): pass sigvec._fields_ = [ ('sv_handler', CFUNCTYPE(None, c_int)), ('sv_mask', c_int), ('sv_flags', c_int), ] assert sizeof(sigvec) == 12, sizeof(sigvec) assert alignment(sigvec) == 4, alignment(sigvec) class sigstack(Structure): pass sigstack._fields_ = [ ('ss_sp', STRING), ('ss_onstack', c_int), ] assert sizeof(sigstack) == 8, sizeof(sigstack) assert alignment(sigstack) == 4, alignment(sigstack) u_char = c_ubyte u_short = c_ushort u_int = c_uint u_long = c_ulong ushort = c_ushort uint = c_uint u_quad_t = u_int64_t quad_t = int64_t qaddr_t = POINTER(quad_t) caddr_t = STRING daddr_t = int32_t fixpt_t = u_int32_t blkcnt_t = __darwin_blkcnt_t blksize_t = __darwin_blksize_t gid_t = __darwin_gid_t in_addr_t = __uint32_t in_port_t = __uint16_t ino_t = __darwin_ino_t key_t = __int32_t nlink_t = __uint16_t off_t = __darwin_off_t segsz_t = int32_t swblk_t = int32_t clock_t = __darwin_clock_t ssize_t = __darwin_ssize_t useconds_t = __darwin_useconds_t suseconds_t = __darwin_suseconds_t fd_mask = __int32_t class fd_set(Structure): pass fd_set._fields_ = [ ('fds_bits', __int32_t * 32), ] assert sizeof(fd_set) == 128, sizeof(fd_set) assert alignment(fd_set) == 4, alignment(fd_set) pthread_cond_t = __darwin_pthread_cond_t pthread_condattr_t = __darwin_pthread_condattr_t pthread_mutex_t = __darwin_pthread_mutex_t pthread_mutexattr_t = __darwin_pthread_mutexattr_t pthread_once_t = __darwin_pthread_once_t pthread_rwlock_t = __darwin_pthread_rwlock_t pthread_rwlockattr_t = __darwin_pthread_rwlockattr_t pthread_t = __darwin_pthread_t pthread_key_t = __darwin_pthread_key_t fsblkcnt_t = __darwin_fsblkcnt_t fsfilcnt_t = __darwin_fsfilcnt_t # values for enumeration 'idtype_t' idtype_t = c_int # enum id_t = __darwin_id_t class wait(Union): pass 
class N4wait3DOLLAR_3E(Structure): pass N4wait3DOLLAR_3E._fields_ = [ ('w_Termsig', c_uint, 7), ('w_Coredump', c_uint, 1), ('w_Retcode', c_uint, 8), ('w_Filler', c_uint, 16), ] assert sizeof(N4wait3DOLLAR_3E) == 4, sizeof(N4wait3DOLLAR_3E) assert alignment(N4wait3DOLLAR_3E) == 4, alignment(N4wait3DOLLAR_3E) class N4wait3DOLLAR_4E(Structure): pass N4wait3DOLLAR_4E._fields_ = [ ('w_Stopval', c_uint, 8), ('w_Stopsig', c_uint, 8), ('w_Filler', c_uint, 16), ] assert sizeof(N4wait3DOLLAR_4E) == 4, sizeof(N4wait3DOLLAR_4E) assert alignment(N4wait3DOLLAR_4E) == 4, alignment(N4wait3DOLLAR_4E) wait._fields_ = [ ('w_status', c_int), ('w_T', N4wait3DOLLAR_3E), ('w_S', N4wait3DOLLAR_4E), ] assert sizeof(wait) == 4, sizeof(wait) assert alignment(wait) == 4, alignment(wait) class timespec(Structure): pass timespec._fields_ = [ ('tv_sec', time_t), ('tv_nsec', c_long), ] assert sizeof(timespec) == 8, sizeof(timespec) assert alignment(timespec) == 4, alignment(timespec) class tm(Structure): pass tm._fields_ = [ ('tm_sec', c_int), ('tm_min', c_int), ('tm_hour', c_int), ('tm_mday', c_int), ('tm_mon', c_int), ('tm_year', c_int), ('tm_wday', c_int), ('tm_yday', c_int), ('tm_isdst', c_int), ('tm_gmtoff', c_long), ('tm_zone', STRING), ] assert sizeof(tm) == 44, sizeof(tm) assert alignment(tm) == 4, alignment(tm) __gnuc_va_list = STRING ptrdiff_t = c_int int8_t = c_byte int16_t = c_short uint8_t = c_ubyte uint16_t = c_ushort uint32_t = c_uint uint64_t = c_ulonglong int_least8_t = int8_t int_least16_t = int16_t int_least32_t = int32_t int_least64_t = int64_t uint_least8_t = uint8_t uint_least16_t = uint16_t uint_least32_t = uint32_t uint_least64_t = uint64_t int_fast8_t = int8_t int_fast16_t = int16_t int_fast32_t = int32_t int_fast64_t = int64_t uint_fast8_t = uint8_t uint_fast16_t = uint16_t uint_fast32_t = uint32_t uint_fast64_t = uint64_t intptr_t = c_long uintptr_t = c_ulong intmax_t = c_longlong uintmax_t = c_ulonglong __all__ = ['ENGINE', 'pkcs7_enc_content_st', '__int16_t', 
'X509_REVOKED', 'SSL_CTX', 'UIT_BOOLEAN', '__darwin_time_t', 'ucontext64_t', 'int_fast32_t', 'pem_ctx_st', 'uint8_t', 'fpos_t', 'X509', 'COMP_CTX', 'tm', 'N10pem_ctx_st4DOLLAR_17E', 'swblk_t', 'ASN1_TEMPLATE', '__darwin_pthread_t', 'fixpt_t', 'BIO_METHOD', 'ASN1_PRINTABLESTRING', 'EVP_ENCODE_CTX', 'dh_method', 'bio_f_buffer_ctx_struct', 'in_port_t', 'X509_SIG', '__darwin_ssize_t', '__darwin_sigset_t', 'wait', 'uint_fast16_t', 'N12asn1_type_st4DOLLAR_11E', 'uint_least8_t', 'pthread_rwlock_t', 'ASN1_IA5STRING', 'fsfilcnt_t', 'ucontext', '__uint64_t', 'timespec', 'x509_cinf_st', 'COMP_METHOD', 'MD5_CTX', 'buf_mem_st', 'ASN1_ENCODING_st', 'PBEPARAM', 'X509_NAME_ENTRY', '__darwin_va_list', 'ucontext_t', 'lhash_st', 'N4wait3DOLLAR_4E', '__darwin_uuid_t', '_ossl_old_des_ks_struct', 'id_t', 'ASN1_BIT_STRING', 'va_list', '__darwin_wchar_t', 'pthread_key_t', 'pkcs7_signer_info_st', 'ASN1_METHOD', 'DSA_SIG', 'DSA', 'UIT_NONE', 'pthread_t', '__darwin_useconds_t', 'uint_fast8_t', 'UI_STRING', 'DES_cblock', '__darwin_mcontext64_t', 'rlim_t', 'PEM_Encode_Seal_st', 'SHAstate_st', 'u_quad_t', 'openssl_fptr', '_opaque_pthread_rwlockattr_t', 'N18x509_attributes_st4DOLLAR_13E', '__darwin_pthread_rwlock_t', 'daddr_t', 'ui_string_st', 'x509_file_st', 'X509_req_info_st', 'int_least64_t', 'evp_Encode_Ctx_st', 'X509_OBJECTS', 'CRYPTO_EX_DATA', '__int8_t', 'AUTHORITY_KEYID_st', '_opaque_pthread_attr_t', 'sigstack', 'EVP_CIPHER_CTX', 'X509_extension_st', 'pid_t', 'RSA_METHOD', 'PEM_USER', 'pem_recip_st', 'env_md_ctx_st', 'rc5_key_st', 'ui_st', 'X509_PUBKEY', 'u_int8_t', 'ASN1_ITEM_st', 'pkcs7_recip_info_st', 'ssl2_state_st', 'off_t', 'N10ssl_ctx_st4DOLLAR_18E', 'crypto_ex_data_st', 'ui_method_st', '__darwin_pthread_rwlockattr_t', 'CRYPTO_EX_dup', '__darwin_ino_t', '__sFILE', 'OSUnknownByteOrder', 'BN_MONT_CTX', 'ASN1_NULL', 'time_t', 'CRYPTO_EX_new', 'asn1_type_st', 'CRYPTO_EX_DATA_FUNCS', 'user_time_t', 'BIGNUM', 'pthread_rwlockattr_t', 'ASN1_VALUE_st', 'DH_METHOD', '__darwin_off_t', 
'_opaque_pthread_t', 'bn_blinding_st', 'RSA', 'ssize_t', 'mcontext64_t', 'user_long_t', 'fsblkcnt_t', 'cert_st', '__darwin_pthread_condattr_t', 'X509_PKEY', '__darwin_id_t', '__darwin_nl_item', 'SSL2_STATE', 'FILE', 'pthread_mutexattr_t', 'size_t', '_ossl_old_des_key_schedule', 'pkcs7_issuer_and_serial_st', 'sigval', 'CRYPTO_MEM_LEAK_CB', 'X509_NAME', 'blkcnt_t', 'uint_least16_t', '__darwin_dev_t', 'evp_cipher_info_st', 'BN_BLINDING', 'ssl3_state_st', 'uint_least64_t', 'user_addr_t', 'DES_key_schedule', 'RIPEMD160_CTX', 'u_char', 'X509_algor_st', 'uid_t', 'sess_cert_st', 'u_int64_t', 'u_int16_t', 'sigset_t', '__darwin_ptrdiff_t', 'ASN1_CTX', 'STACK', '__int32_t', 'UI_METHOD', 'NETSCAPE_SPKI', 'UIT_PROMPT', 'st_CRYPTO_EX_DATA_IMPL', 'cast_key_st', 'X509_HASH_DIR_CTX', 'sigevent', 'user_ssize_t', 'clock_t', 'aes_key_st', '__darwin_socklen_t', '__darwin_intptr_t', 'int_fast64_t', 'asn1_string_table_st', 'uint_fast32_t', 'ASN1_VISIBLESTRING', 'DSA_SIG_st', 'obj_name_st', 'X509_LOOKUP_METHOD', 'u_int32_t', 'EVP_CIPHER_INFO', '__gnuc_va_list', 'AES_KEY', 'PKCS7_ISSUER_AND_SERIAL', 'BN_CTX', '__darwin_blkcnt_t', 'key_t', 'SHA_CTX', 'pkcs7_signed_st', 'SSL', 'N10pem_ctx_st4DOLLAR_16E', 'pthread_attr_t', 'EVP_MD', 'uint', 'ASN1_BOOLEAN', 'ino_t', '__darwin_clock_t', 'ASN1_OCTET_STRING', 'asn1_ctx_st', 'BIO_F_BUFFER_CTX', 'bn_mont_ctx_st', 'X509_REQ_INFO', 'PEM_CTX', 'sigvec', '__darwin_pthread_mutexattr_t', 'x509_attributes_st', 'stack_t', '__darwin_mode_t', '__mbstate_t', 'asn1_object_st', 'ASN1_ENCODING', '__uint8_t', 'LHASH_NODE', 'PKCS7_SIGNER_INFO', 'asn1_method_st', 'stack_st', 'bio_info_cb', 'div_t', 'UIT_VERIFY', 'PBEPARAM_st', 'N4wait3DOLLAR_3E', 'quad_t', '__siginfo', '__darwin_mbstate_t', 'rsa_st', 'ASN1_UNIVERSALSTRING', 'uint64_t', 'ssl_comp_st', 'X509_OBJECT', 'pthread_cond_t', 'DH', '__darwin_wctype_t', 'PKCS7_ENVELOPE', 'ASN1_TLC_st', 'sig_atomic_t', 'BIO', 'nlink_t', 'BUF_MEM', 'SSL3_RECORD', 'bio_method_st', 'timeval', 'UI_string_types', 'BIO_dummy', 
'ssl_ctx_st', 'NETSCAPE_CERT_SEQUENCE', 'BIT_STRING_BITNAME_st', '__darwin_pthread_attr_t', 'int8_t', '__darwin_wint_t', 'OBJ_NAME', 'PKCS8_PRIV_KEY_INFO', 'PBE2PARAM_st', 'LHASH_DOALL_FN_TYPE', 'x509_st', 'X509_VAL', 'dev_t', 'ASN1_TEMPLATE_st', 'MD5state_st', '__uint16_t', 'LHASH_DOALL_ARG_FN_TYPE', 'mdc2_ctx_st', 'SSL3_STATE', 'ssl3_buffer_st', 'ASN1_ITEM_EXP', '_opaque_pthread_condattr_t', 'mode_t', 'ASN1_VALUE', 'qaddr_t', '__darwin_gid_t', 'EVP_PKEY', 'CRYPTO_EX_free', '_ossl_old_des_cblock', 'X509_INFO', 'asn1_string_st', 'intptr_t', 'UIT_INFO', 'int_fast8_t', 'sigaltstack', 'env_md_st', 'LHASH', '__darwin_ucontext_t', 'PKCS7_SIGN_ENVELOPE', '__darwin_mcontext_t', 'ct_rune_t', 'MD2_CTX', 'pthread_once_t', 'SSL3_BUFFER', 'fd_mask', 'ASN1_TYPE', 'PKCS7_SIGNED', 'ssl3_record_st', 'BF_KEY', 'MD4state_st', 'MD4_CTX', 'int16_t', 'SSL_CIPHER', 'rune_t', 'X509_TRUST', 'siginfo_t', 'X509_STORE', '__sbuf', 'X509_STORE_CTX', '__darwin_blksize_t', 'ldiv_t', 'ASN1_TIME', 'SSL_METHOD', 'X509_LOOKUP', 'Netscape_spki_st', 'P_PID', 'sigaction', 'sig_t', 'hostent', 'x509_cert_aux_st', '_opaque_pthread_cond_t', 'segsz_t', 'ushort', '__darwin_ct_rune_t', 'fd_set', 'BN_RECP_CTX', 'x509_lookup_st', 'uint16_t', 'pkcs7_st', 'asn1_header_st', '__darwin_pthread_key_t', 'x509_trust_st', '__darwin_pthread_handler_rec', 'int32_t', 'X509_CRL_INFO', 'N11evp_pkey_st4DOLLAR_12E', 'MDC2_CTX', 'N23_ossl_old_des_ks_struct4DOLLAR_10E', 'ASN1_HEADER', 'X509_crl_info_st', 'LHASH_HASH_FN_TYPE', '_opaque_pthread_mutexattr_t', 'ssl_st', 'N8pkcs7_st4DOLLAR_15E', 'evp_pkey_st', 'pkcs7_signedandenveloped_st', '__darwin_mach_port_t', 'EVP_PBE_KEYGEN', '_opaque_pthread_mutex_t', 'ASN1_UTCTIME', 'mcontext', 'crypto_ex_data_func_st', 'u_long', 'PBKDF2PARAM_st', 'rc4_key_st', 'DSA_METHOD', 'EVP_CIPHER', 'BIT_STRING_BITNAME', 'PKCS7_RECIP_INFO', 'ssl3_enc_method', 'X509_CERT_AUX', 'uintmax_t', 'int_fast16_t', 'RC5_32_KEY', 'ucontext64', 'ASN1_INTEGER', 'u_short', 'N14x509_object_st4DOLLAR_14E', 'mcontext64', 
'X509_sig_st', 'ASN1_GENERALSTRING', 'PKCS7', '__sFILEX', 'X509_name_entry_st', 'ssl_session_st', 'caddr_t', 'bignum_st', 'X509_CINF', '__darwin_pthread_cond_t', 'ASN1_TLC', 'PKCS7_ENCRYPT', 'NETSCAPE_SPKAC', 'Netscape_spkac_st', 'idtype_t', 'UIT_ERROR', 'uint_fast64_t', 'in_addr_t', 'pthread_mutex_t', '__int64_t', 'ASN1_BMPSTRING', 'uint32_t', 'PEM_ENCODE_SEAL_CTX', 'suseconds_t', 'ASN1_OBJECT', 'X509_val_st', 'private_key_st', 'CRYPTO_dynlock', 'X509_objects_st', 'CRYPTO_EX_DATA_IMPL', 'pthread_condattr_t', 'PKCS7_DIGEST', 'uint_least32_t', 'ASN1_STRING', '__uint32_t', 'P_PGID', 'rsa_meth_st', 'X509_crl_st', 'RC2_KEY', '__darwin_fsfilcnt_t', 'X509_revoked_st', 'PBE2PARAM', 'blksize_t', 'Netscape_certificate_sequence', 'ssl_cipher_st', 'bignum_ctx', 'register_t', 'ASN1_UTF8STRING', 'pkcs7_encrypted_st', 'RC4_KEY', '__darwin_ucontext64_t', 'N13ssl2_state_st4DOLLAR_19E', 'bn_recp_ctx_st', 'CAST_KEY', 'X509_ATTRIBUTE', '__darwin_suseconds_t', '__sigaction', 'user_ulong_t', 'syscall_arg_t', 'evp_cipher_ctx_st', 'X509_ALGOR', 'mcontext_t', 'const_DES_cblock', '__darwin_fsblkcnt_t', 'dsa_st', 'int_least8_t', 'MD2state_st', 'X509_EXTENSION', 'GEN_SESSION_CB', 'int_least16_t', '__darwin_wctrans_t', 'PBKDF2PARAM', 'x509_lookup_method_st', 'pem_password_cb', 'X509_info_st', 'x509_store_st', '__darwin_natural_t', 'X509_pubkey_st', 'pkcs7_digest_st', '__darwin_size_t', 'ASN1_STRING_TABLE', 'OSLittleEndian', 'RIPEMD160state_st', 'pkcs7_enveloped_st', 'UI', 'ptrdiff_t', 'X509_REQ', 'CRYPTO_dynlock_value', 'X509_req_st', 'x509_store_ctx_st', 'N13ssl3_state_st4DOLLAR_20E', 'lhash_node_st', '__darwin_pthread_mutex_t', 'LHASH_COMP_FN_TYPE', '__darwin_rune_t', 'rlimit', '__darwin_pthread_once_t', 'OSBigEndian', 'uintptr_t', '__darwin_uid_t', 'u_int', 'ASN1_T61STRING', 'gid_t', 'ssl_method_st', 'ASN1_ITEM', 'ASN1_ENUMERATED', '_opaque_pthread_rwlock_t', 'pkcs8_priv_key_info_st', 'intmax_t', 'sigcontext', 'X509_CRL', 'rc2_key_st', 'engine_st', 'x509_object_st', 
'_opaque_pthread_once_t', 'DES_ks', 'SSL_COMP', 'dsa_method', 'int64_t', 'bio_st', 'bf_key_st', 'ASN1_GENERALIZEDTIME', 'PKCS7_ENC_CONTENT', '__darwin_pid_t', 'lldiv_t', 'comp_method_st', 'EVP_MD_CTX', 'evp_cipher_st', 'X509_name_st', 'x509_hash_dir_st', '__darwin_mach_port_name_t', 'useconds_t', 'user_size_t', 'SSL_SESSION', 'rusage', 'ssl_crock_st', 'int_least32_t', '__sigaction_u', 'dh_st', 'P_ALL', '__darwin_stack_t', 'N6DES_ks3DOLLAR_9E', 'comp_ctx_st', 'X509_CERT_FILE_CTX']
Python
from lib2to3.fixer_base import BaseFix
from lib2to3.fixer_util import Name


class FixParrot(BaseFix):
    """Rename every function defined with the name 'parrot' to 'cheese'."""

    # Matches a funcdef node whose NAME leaf is exactly 'parrot'.
    PATTERN = """funcdef < 'def' name='parrot' any* >"""

    def transform(self, node, results):
        # Replace the captured NAME leaf, carrying over its prefix
        # (the whitespace/comments preceding it) so surrounding
        # formatting is preserved.
        target = results["name"]
        target.replace(Name("cheese", prefix=target.prefix))
Python
from lib2to3.fixer_base import BaseFix


class FixLast(BaseFix):
    """Fixer with a large run_order that declines every node."""

    # Higher run_order values schedule this fixer later than the default.
    run_order = 10

    def match(self, node):
        """Never match; always report False for any node."""
        return False
Python
from lib2to3.fixer_base import BaseFix


class FixPreorder(BaseFix):
    """Fixer declaring pre-order traversal that declines every node."""

    # "pre" requests pre-order visiting instead of the default order.
    order = "pre"

    def match(self, node):
        """Never match; always report False for any node."""
        return False
Python
from lib2to3.fixer_base import BaseFix


class FixFirst(BaseFix):
    """Fixer with a small run_order that declines every node."""

    # Lower run_order values schedule this fixer earlier than the default.
    run_order = 1

    def match(self, node):
        """Never match; always report False for any node."""
        return False
Python
from lib2to3.fixer_base import BaseFix


class FixExplicit(BaseFix):
    """Explicit-only fixer that declines every node."""

    # Explicit fixers run only when requested by name.
    explicit = True

    # NOTE(review): unlike the other fixture fixers, this match() takes no
    # node argument; the signature is preserved as-is.
    def match(self):
        """Never match; always report False."""
        return False
Python
def parrot():
    """Placeholder function: performs no work and returns None."""
    return None
Python
from lib2to3.fixer_base import BaseFix


class FixBadOrder(BaseFix):
    """Fixer carrying an invalid traversal-order value."""

    # "crazy" is not a recognized order; the value is the point of this
    # fixture and is kept verbatim.
    order = "crazy"
Python
# This module is intentionally empty, so attempting to fetch the fixer class raises an AttributeError
Python
# Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. # NOTE: When you run this test as a script from the command line, you # get warnings about certain hex/oct constants. Since those are # issued by the parser, you can't suppress them by adding a # filterwarnings() call to this module. Therefore, to shut up the # regression test, the filterwarnings() call has been added to # regrtest.py. from test.test_support import run_unittest, check_syntax_error import unittest import sys # testing import * from sys import * class TokenTests(unittest.TestCase): def testBackslash(self): # Backslash means line continuation: x = 1 \ + 1 self.assertEquals(x, 2, 'backslash for line continuation') # Backslash does not means continuation in comments :\ x = 0 self.assertEquals(x, 0, 'backslash ending comment') def testPlainIntegers(self): self.assertEquals(0xff, 255) self.assertEquals(0377, 255) self.assertEquals(2147483647, 017777777777) # "0x" is not a valid literal self.assertRaises(SyntaxError, eval, "0x") from sys import maxint if maxint == 2147483647: self.assertEquals(-2147483647-1, -020000000000) # XXX -2147483648 self.assert_(037777777777 > 0) self.assert_(0xffffffff > 0) for s in '2147483648', '040000000000', '0x100000000': try: x = eval(s) except OverflowError: self.fail("OverflowError on huge integer literal %r" % s) elif maxint == 9223372036854775807: self.assertEquals(-9223372036854775807-1, -01000000000000000000000) self.assert_(01777777777777777777777 > 0) self.assert_(0xffffffffffffffff > 0) for s in '9223372036854775808', '02000000000000000000000', \ '0x10000000000000000': try: x = eval(s) except OverflowError: self.fail("OverflowError on huge integer literal %r" % s) else: self.fail('Weird maxint value %r' % maxint) def testLongIntegers(self): x = 0L x = 0l x = 0xffffffffffffffffL x = 0xffffffffffffffffl x = 077777777777777777L x = 077777777777777777l x = 123456789012345678901234567890L x = 123456789012345678901234567890l def 
testFloats(self): x = 3.14 x = 314. x = 0.314 # XXX x = 000.314 x = .314 x = 3e14 x = 3E14 x = 3e-14 x = 3e+14 x = 3.e14 x = .3e14 x = 3.1e4 def testStringLiterals(self): x = ''; y = ""; self.assert_(len(x) == 0 and x == y) x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) x = "doesn't \"shrink\" does it" y = 'doesn\'t "shrink" does it' self.assert_(len(x) == 24 and x == y) x = "does \"shrink\" doesn't it" y = 'does "shrink" doesn\'t it' self.assert_(len(x) == 24 and x == y) x = """ The "quick" brown fox jumps over the 'lazy' dog. """ y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' self.assertEquals(x, y) y = ''' The "quick" brown fox jumps over the 'lazy' dog. ''' self.assertEquals(x, y) y = "\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the 'lazy' dog.\n\ " self.assertEquals(x, y) y = '\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the \'lazy\' dog.\n\ ' self.assertEquals(x, y) class GrammarTests(unittest.TestCase): # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE # XXX can't test in a script -- this rule is only used when interactive # file_input: (NEWLINE | stmt)* ENDMARKER # Being tested as this very moment this very module # expr_input: testlist NEWLINE # XXX Hard to test -- used only in calls to input() def testEvalInput(self): # testlist ENDMARKER x = eval('1, 0 or 1') def testFuncdef(self): ### 'def' NAME parameters ':' suite ### parameters: '(' [varargslist] ')' ### varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' ('**'|'*' '*') NAME] ### | ('**'|'*' '*') NAME) ### | fpdef ['=' test] (',' fpdef ['=' test])* [','] ### fpdef: NAME | '(' fplist ')' ### fplist: fpdef (',' fpdef)* [','] ### arglist: (argument ',')* (argument | *' test [',' '**' test] | '**' test) ### argument: [test '='] test # Really [keyword '='] test def f1(): pass f1() f1(*()) f1(*(), **{}) def f2(one_argument): pass def f3(two, arguments): pass def f4(two, 
(compound, (argument, list))): pass def f5((compound, first), two): pass self.assertEquals(f2.func_code.co_varnames, ('one_argument',)) self.assertEquals(f3.func_code.co_varnames, ('two', 'arguments')) if sys.platform.startswith('java'): self.assertEquals(f4.func_code.co_varnames, ('two', '(compound, (argument, list))', 'compound', 'argument', 'list',)) self.assertEquals(f5.func_code.co_varnames, ('(compound, first)', 'two', 'compound', 'first')) else: self.assertEquals(f4.func_code.co_varnames, ('two', '.1', 'compound', 'argument', 'list')) self.assertEquals(f5.func_code.co_varnames, ('.0', 'two', 'compound', 'first')) def a1(one_arg,): pass def a2(two, args,): pass def v0(*rest): pass def v1(a, *rest): pass def v2(a, b, *rest): pass def v3(a, (b, c), *rest): return a, b, c, rest f1() f2(1) f2(1,) f3(1, 2) f3(1, 2,) f4(1, (2, (3, 4))) v0() v0(1) v0(1,) v0(1,2) v0(1,2,3,4,5,6,7,8,9,0) v1(1) v1(1,) v1(1,2) v1(1,2,3) v1(1,2,3,4,5,6,7,8,9,0) v2(1,2) v2(1,2,3) v2(1,2,3,4) v2(1,2,3,4,5,6,7,8,9,0) v3(1,(2,3)) v3(1,(2,3),4) v3(1,(2,3),4,5,6,7,8,9,0) # ceval unpacks the formal arguments into the first argcount names; # thus, the names nested inside tuples must appear after these names. 
if sys.platform.startswith('java'): self.assertEquals(v3.func_code.co_varnames, ('a', '(b, c)', 'rest', 'b', 'c')) else: self.assertEquals(v3.func_code.co_varnames, ('a', '.1', 'rest', 'b', 'c')) self.assertEquals(v3(1, (2, 3), 4), (1, 2, 3, (4,))) def d01(a=1): pass d01() d01(1) d01(*(1,)) d01(**{'a':2}) def d11(a, b=1): pass d11(1) d11(1, 2) d11(1, **{'b':2}) def d21(a, b, c=1): pass d21(1, 2) d21(1, 2, 3) d21(*(1, 2, 3)) d21(1, *(2, 3)) d21(1, 2, *(3,)) d21(1, 2, **{'c':3}) def d02(a=1, b=2): pass d02() d02(1) d02(1, 2) d02(*(1, 2)) d02(1, *(2,)) d02(1, **{'b':2}) d02(**{'a': 1, 'b': 2}) def d12(a, b=1, c=2): pass d12(1) d12(1, 2) d12(1, 2, 3) def d22(a, b, c=1, d=2): pass d22(1, 2) d22(1, 2, 3) d22(1, 2, 3, 4) def d01v(a=1, *rest): pass d01v() d01v(1) d01v(1, 2) d01v(*(1, 2, 3, 4)) d01v(*(1,)) d01v(**{'a':2}) def d11v(a, b=1, *rest): pass d11v(1) d11v(1, 2) d11v(1, 2, 3) def d21v(a, b, c=1, *rest): pass d21v(1, 2) d21v(1, 2, 3) d21v(1, 2, 3, 4) d21v(*(1, 2, 3, 4)) d21v(1, 2, **{'c': 3}) def d02v(a=1, b=2, *rest): pass d02v() d02v(1) d02v(1, 2) d02v(1, 2, 3) d02v(1, *(2, 3, 4)) d02v(**{'a': 1, 'b': 2}) def d12v(a, b=1, c=2, *rest): pass d12v(1) d12v(1, 2) d12v(1, 2, 3) d12v(1, 2, 3, 4) d12v(*(1, 2, 3, 4)) d12v(1, 2, *(3, 4, 5)) d12v(1, *(2,), **{'c': 3}) def d22v(a, b, c=1, d=2, *rest): pass d22v(1, 2) d22v(1, 2, 3) d22v(1, 2, 3, 4) d22v(1, 2, 3, 4, 5) d22v(*(1, 2, 3, 4)) d22v(1, 2, *(3, 4, 5)) d22v(1, *(2, 3), **{'d': 4}) def d31v((x)): pass d31v(1) def d32v((x,)): pass d32v((1,)) # keyword arguments after *arglist def f(*args, **kwargs): return args, kwargs self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4), {'x':2, 'y':5})) self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)") self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)") # Check ast errors in *args and *kwargs check_syntax_error(self, "f(*g(1=2))") check_syntax_error(self, "f(**g(1=2))") def testLambdef(self): ### lambdef: 'lambda' [varargslist] ':' test l1 = lambda : 0 
self.assertEquals(l1(), 0) l2 = lambda : a[d] # XXX just testing the expression l3 = lambda : [2 < x for x in [-1, 3, 0L]] self.assertEquals(l3(), [0, 1, 0]) l4 = lambda x = lambda y = lambda z=1 : z : y() : x() self.assertEquals(l4(), 1) l5 = lambda x, y, z=2: x + y + z self.assertEquals(l5(1, 2), 5) self.assertEquals(l5(1, 2, 3), 6) check_syntax_error(self, "lambda x: x = 2") check_syntax_error(self, "lambda (None,): None") ### stmt: simple_stmt | compound_stmt # Tested below def testSimpleStmt(self): ### simple_stmt: small_stmt (';' small_stmt)* [';'] x = 1; pass; del x def foo(): # verify statments that end with semi-colons x = 1; pass; del x; foo() ### small_stmt: expr_stmt | print_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt | exec_stmt # Tested below def testExprStmt(self): # (exprlist '=')* exprlist 1 1, 2, 3 x = 1 x = 1, 2, 3 x = y = z = 1, 2, 3 x, y, z = 1, 2, 3 abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4) check_syntax_error(self, "x + 1 = 1") check_syntax_error(self, "a + 1 = b + 2") def testPrintStmt(self): # 'print' (test ',')* [test] import StringIO # Can't test printing to real stdout without comparing output # which is not available in unittest. 
save_stdout = sys.stdout sys.stdout = StringIO.StringIO() print 1, 2, 3 print 1, 2, 3, print print 0 or 1, 0 or 1, print 0 or 1 # 'print' '>>' test ',' print >> sys.stdout, 1, 2, 3 print >> sys.stdout, 1, 2, 3, print >> sys.stdout print >> sys.stdout, 0 or 1, 0 or 1, print >> sys.stdout, 0 or 1 # test printing to an instance class Gulp: def write(self, msg): pass gulp = Gulp() print >> gulp, 1, 2, 3 print >> gulp, 1, 2, 3, print >> gulp print >> gulp, 0 or 1, 0 or 1, print >> gulp, 0 or 1 # test print >> None def driver(): oldstdout = sys.stdout sys.stdout = Gulp() try: tellme(Gulp()) tellme() finally: sys.stdout = oldstdout # we should see this once def tellme(file=sys.stdout): print >> file, 'hello world' driver() # we should not see this at all def tellme(file=None): print >> file, 'goodbye universe' driver() self.assertEqual(sys.stdout.getvalue(), '''\ 1 2 3 1 2 3 1 1 1 1 2 3 1 2 3 1 1 1 hello world ''') sys.stdout = save_stdout # syntax errors check_syntax_error(self, 'print ,') check_syntax_error(self, 'print >> x,') def testDelStmt(self): # 'del' exprlist abc = [1,2,3] x, y, z = abc xyz = x, y, z del abc del x, y, (z, xyz) def testPassStmt(self): # 'pass' pass # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt # Tested below def testBreakStmt(self): # 'break' while 1: break def testContinueStmt(self): # 'continue' i = 1 while i: i = 0; continue msg = "" while not msg: msg = "ok" try: continue msg = "continue failed to continue inside try" except: msg = "continue inside try called except block" if msg != "ok": self.fail(msg) msg = "" while not msg: msg = "finally block not called" try: continue finally: msg = "ok" if msg != "ok": self.fail(msg) def test_break_continue_loop(self): # This test warrants an explanation. It is a test specifically for SF bugs # #463359 and #462937. 
The bug is that a 'break' statement executed or # exception raised inside a try/except inside a loop, *after* a continue # statement has been executed in that loop, will cause the wrong number of # arguments to be popped off the stack and the instruction pointer reset to # a very small number (usually 0.) Because of this, the following test # *must* written as a function, and the tracking vars *must* be function # arguments with default values. Otherwise, the test will loop and loop. def test_inner(extra_burning_oil = 1, count=0): big_hippo = 2 while big_hippo: count += 1 try: if extra_burning_oil and big_hippo == 1: extra_burning_oil -= 1 break big_hippo -= 1 continue except: raise if count > 2 or big_hippo <> 1: self.fail("continue then break in try/except in loop broken!") test_inner() def testReturn(self): # 'return' [testlist] def g1(): return def g2(): return 1 g1() x = g2() check_syntax_error(self, "class foo:return 1") def testYield(self): check_syntax_error(self, "class foo:yield 1") def testRaise(self): # 'raise' test [',' test] try: raise RuntimeError, 'just testing' except RuntimeError: pass try: raise KeyboardInterrupt except KeyboardInterrupt: pass def testImport(self): # 'import' dotted_as_names import sys import time, sys # 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names) from time import time from time import (time) # not testable inside a function, but already done at top of the module # from sys import * from sys import path, argv from sys import (path, argv) from sys import (path, argv,) def testGlobal(self): # 'global' NAME (',' NAME)* global a global a, b global one, two, three, four, five, six, seven, eight, nine, ten def testExec(self): # 'exec' expr ['in' expr [',' expr]] z = None del z exec 'z=1+1\n' if z != 2: self.fail('exec \'z=1+1\'\\n') del z exec 'z=1+1' if z != 2: self.fail('exec \'z=1+1\'') z = None del z import types if hasattr(types, "UnicodeType"): exec r"""if 1: exec u'z=1+1\n' if z != 2: 
self.fail('exec u\'z=1+1\'\\n') del z exec u'z=1+1' if z != 2: self.fail('exec u\'z=1+1\'')""" g = {} exec 'z = 1' in g if g.has_key('__builtins__'): del g['__builtins__'] if g != {'z': 1}: self.fail('exec \'z = 1\' in g') g = {} l = {} import warnings warnings.filterwarnings("ignore", "global statement", module="<string>") exec 'global a; a = 1; b = 2' in g, l if g.has_key('__builtins__'): del g['__builtins__'] if l.has_key('__builtins__'): del l['__builtins__'] if (g, l) != ({'a':1}, {'b':2}): self.fail('exec ... in g (%s), l (%s)' %(g,l)) def testAssert(self): # assert_stmt: 'assert' test [',' test] assert 1 assert 1, 1 assert lambda x:x assert 1, lambda x:x+1 try: assert 0, "msg" except AssertionError, e: self.assertEquals(e.args[0], "msg") else: if __debug__: self.fail("AssertionError not raised by assert 0") ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef # Tested below def testIf(self): # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] if 1: pass if 1: pass else: pass if 0: pass elif 0: pass if 0: pass elif 0: pass elif 0: pass elif 0: pass else: pass def testWhile(self): # 'while' test ':' suite ['else' ':' suite] while 0: pass while 0: pass else: pass # Issue1920: "while 0" is optimized away, # ensure that the "else" clause is still present. 
x = 0 while 0: x = 1 else: x = 2 self.assertEquals(x, 2) def testFor(self): # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite] for i in 1, 2, 3: pass for i, j, k in (): pass else: pass class Squares: def __init__(self, max): self.max = max self.sofar = [] def __len__(self): return len(self.sofar) def __getitem__(self, i): if not 0 <= i < self.max: raise IndexError n = len(self.sofar) while n <= i: self.sofar.append(n*n) n = n+1 return self.sofar[i] n = 0 for x in Squares(10): n = n+x if n != 285: self.fail('for over growing sequence') result = [] for x, in [(1,), (2,), (3,)]: result.append(x) self.assertEqual(result, [1, 2, 3]) def testTry(self): ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] ### | 'try' ':' suite 'finally' ':' suite ### except_clause: 'except' [expr [('as' | ',') expr]] try: 1/0 except ZeroDivisionError: pass else: pass try: 1/0 except EOFError: pass except TypeError as msg: pass except RuntimeError, msg: pass except: pass else: pass try: 1/0 except (EOFError, TypeError, ZeroDivisionError): pass try: 1/0 except (EOFError, TypeError, ZeroDivisionError), msg: pass try: pass finally: pass def testSuite(self): # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT if 1: pass if 1: pass if 1: # # # pass pass # pass # def testTest(self): ### and_test ('or' and_test)* ### and_test: not_test ('and' not_test)* ### not_test: 'not' not_test | comparison if not 1: pass if 1 and 1: pass if 1 or 1: pass if not not not 1: pass if not 1 and 1 and 1: pass if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass def testComparison(self): ### comparison: expr (comp_op expr)* ### comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' if 1: pass x = (1 == 1) if 1 == 1: pass if 1 != 1: pass if 1 <> 1: pass if 1 < 1: pass if 1 > 1: pass if 1 <= 1: pass if 1 >= 1: pass if 1 is 1: pass if 1 is not 1: pass if 1 in (): pass if 1 not in (): pass if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass 
def testBinaryMaskOps(self): x = 1 & 1 x = 1 ^ 1 x = 1 | 1 def testShiftOps(self): x = 1 << 1 x = 1 >> 1 x = 1 << 1 >> 1 def testAdditiveOps(self): x = 1 x = 1 + 1 x = 1 - 1 - 1 x = 1 - 1 + 1 - 1 + 1 def testMultiplicativeOps(self): x = 1 * 1 x = 1 / 1 x = 1 % 1 x = 1 / 1 * 1 % 1 def testUnaryOps(self): x = +1 x = -1 x = ~1 x = ~1 ^ 1 & 1 | 1 & 1 ^ -1 x = -1*1/1 + 1*1 - ---1*1 def testSelectors(self): ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME ### subscript: expr | [expr] ':' [expr] import sys, time c = sys.path[0] x = time.time() x = sys.modules['time'].time() a = '01234' c = a[0] c = a[-1] s = a[0:5] s = a[:5] s = a[0:] s = a[:] s = a[-5:] s = a[:-1] s = a[-4:-3] # A rough test of SF bug 1333982. http://python.org/sf/1333982 # The testing here is fairly incomplete. # Test cases should include: commas with 1 and 2 colons d = {} d[1] = 1 d[1,] = 2 d[1,2] = 3 d[1,2,3] = 4 L = list(d) L.sort() self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]') def testAtoms(self): ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING ### dictmaker: test ':' test (',' test ':' test)* [','] x = (1) x = (1 or 2 or 3) x = (1 or 2 or 3, 2, 3) x = [] x = [1] x = [1 or 2 or 3] x = [1 or 2 or 3, 2, 3] x = [] x = {} x = {'one': 1} x = {'one': 1,} x = {'one' or 'two': 1 or 2} x = {'one': 1, 'two': 2} x = {'one': 1, 'two': 2,} x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6} x = `x` x = `1 or 2 or 3` self.assertEqual(`1,2`, '(1, 2)') x = x x = 'x' x = 123 ### exprlist: expr (',' expr)* [','] ### testlist: test (',' test)* [','] # These have been exercised enough above def testClassdef(self): # 'class' NAME ['(' [testlist] ')'] ':' suite class B: pass class B2(): pass class C1(B): pass class C2(B): pass class D(C1, C2, B): pass class C: def meth1(self): pass def meth2(self, arg): pass def meth3(self, a1, a2): pass # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE # decorators: 
decorator+ # decorated: decorators (classdef | funcdef) def class_decorator(x): x.decorated = True return x @class_decorator class G: pass self.assertEqual(G.decorated, True) def testListcomps(self): # list comprehension tests nums = [1, 2, 3, 4, 5] strs = ["Apple", "Banana", "Coconut"] spcs = [" Apple", " Banana ", "Coco nut "] self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut']) self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15]) self.assertEqual([x for x in nums if x > 2], [3, 4, 5]) self.assertEqual([(i, s) for i in nums for s in strs], [(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'), (2, 'Apple'), (2, 'Banana'), (2, 'Coconut'), (3, 'Apple'), (3, 'Banana'), (3, 'Coconut'), (4, 'Apple'), (4, 'Banana'), (4, 'Coconut'), (5, 'Apple'), (5, 'Banana'), (5, 'Coconut')]) self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]], [(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'), (3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'), (5, 'Banana'), (5, 'Coconut')]) self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)], [[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]]) def test_in_func(l): return [None < x < 3 for x in l if x > 2] self.assertEqual(test_in_func(nums), [False, False, False]) def test_nested_front(): self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]], [[1, 2], [3, 4], [5, 6]]) test_nested_front() check_syntax_error(self, "[i, s for i in nums for s in strs]") check_syntax_error(self, "[x if y]") suppliers = [ (1, "Boeing"), (2, "Ford"), (3, "Macdonalds") ] parts = [ (10, "Airliner"), (20, "Engine"), (30, "Cheeseburger") ] suppart = [ (1, 10), (1, 20), (2, 20), (3, 30) ] x = [ (sname, pname) for (sno, sname) in suppliers for (pno, pname) in parts for (sp_sno, sp_pno) in suppart if sno == sp_sno and pno == sp_pno ] self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'), ('Macdonalds', 'Cheeseburger')]) def 
testGenexps(self): # generator expression tests g = ([x for x in range(10)] for x in range(1)) self.assertEqual(g.next(), [x for x in range(10)]) try: g.next() self.fail('should produce StopIteration exception') except StopIteration: pass a = 1 try: g = (a for d in a) g.next() self.fail('should produce TypeError') except TypeError: pass self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd']) self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy']) a = [x for x in range(10)] b = (x for x in (y for y in a)) self.assertEqual(sum(b), sum([x for x in range(10)])) self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)])) self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2])) self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)])) self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0) check_syntax_error(self, "foo(x for x in range(10), 100)") check_syntax_error(self, "foo(100, x for x in range(10))") def testComprehensionSpecials(self): # test for outmost iterable precomputation x = 10; g = (i for i in range(x)); x = 5 self.assertEqual(len(list(g)), 10) # This should hold, since we're only precomputing outmost iterable. x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x)) x = 5; t = True; self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g)) # Grammar allows multiple adjacent 'if's in listcomps and genexps, # even though it's silly. Make sure it works (ifelse broke this.) 
self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7]) self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7]) # verify unpacking single element tuples in listcomp/genexp. self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6]) self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9]) def test_with_statement(self): class manager(object): def __enter__(self): return (1, 2) def __exit__(self, *args): pass with manager(): pass with manager() as x: pass with manager() as (x, y): pass with manager(), manager(): pass with manager() as x, manager() as y: pass with manager() as x, manager(): pass def testIfElseExpr(self): # Test ifelse expressions in various cases def _checkeval(msg, ret): "helper to check that evaluation of expressions is done correctly" print x return ret self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True]) self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True]) self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5) self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5) self.assertEqual((5 and 6 if 0 else 1), 1) self.assertEqual(((5 and 6) if 0 else 1), 1) self.assertEqual((5 and (6 if 1 else 1)), 6) self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3) self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1) self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5) self.assertEqual((not 5 if 1 else 1), False) self.assertEqual((not 5 if 0 else 1), 1) self.assertEqual((6 + 1 if 1 else 2), 7) self.assertEqual((6 - 1 if 1 else 2), 5) self.assertEqual((6 * 2 if 1 else 4), 12) self.assertEqual((6 / 2 if 1 else 3), 3) self.assertEqual((6 < 4 if 0 else 2), 2) def test_main(): run_unittest(TokenTests, GrammarTests) if __name__ == '__main__': test_main()
Python
#!/usr/bin/env python # -*- coding: utf-8 -*- print u'ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ' def f(x): print '%s\t-> α(%2i):%s β(%s)'
Python
# Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. # NOTE: When you run this test as a script from the command line, you # get warnings about certain hex/oct constants. Since those are # issued by the parser, you can't suppress them by adding a # filterwarnings() call to this module. Therefore, to shut up the # regression test, the filterwarnings() call has been added to # regrtest.py. from test.support import run_unittest, check_syntax_error import unittest import sys # testing import * from sys import * class TokenTests(unittest.TestCase): def testBackslash(self): # Backslash means line continuation: x = 1 \ + 1 self.assertEquals(x, 2, 'backslash for line continuation') # Backslash does not means continuation in comments :\ x = 0 self.assertEquals(x, 0, 'backslash ending comment') def testPlainIntegers(self): self.assertEquals(type(000), type(0)) self.assertEquals(0xff, 255) self.assertEquals(0o377, 255) self.assertEquals(2147483647, 0o17777777777) self.assertEquals(0b1001, 9) # "0x" is not a valid literal self.assertRaises(SyntaxError, eval, "0x") from sys import maxsize if maxsize == 2147483647: self.assertEquals(-2147483647-1, -0o20000000000) # XXX -2147483648 self.assert_(0o37777777777 > 0) self.assert_(0xffffffff > 0) self.assert_(0b1111111111111111111111111111111 > 0) for s in ('2147483648', '0o40000000000', '0x100000000', '0b10000000000000000000000000000000'): try: x = eval(s) except OverflowError: self.fail("OverflowError on huge integer literal %r" % s) elif maxsize == 9223372036854775807: self.assertEquals(-9223372036854775807-1, -0o1000000000000000000000) self.assert_(0o1777777777777777777777 > 0) self.assert_(0xffffffffffffffff > 0) self.assert_(0b11111111111111111111111111111111111111111111111111111111111111 > 0) for s in '9223372036854775808', '0o2000000000000000000000', \ '0x10000000000000000', \ '0b100000000000000000000000000000000000000000000000000000000000000': try: x = eval(s) except OverflowError: 
self.fail("OverflowError on huge integer literal %r" % s) else: self.fail('Weird maxsize value %r' % maxsize) def testLongIntegers(self): x = 0 x = 0xffffffffffffffff x = 0Xffffffffffffffff x = 0o77777777777777777 x = 0O77777777777777777 x = 123456789012345678901234567890 x = 0b100000000000000000000000000000000000000000000000000000000000000000000 x = 0B111111111111111111111111111111111111111111111111111111111111111111111 def testFloats(self): x = 3.14 x = 314. x = 0.314 # XXX x = 000.314 x = .314 x = 3e14 x = 3E14 x = 3e-14 x = 3e+14 x = 3.e14 x = .3e14 x = 3.1e4 def testStringLiterals(self): x = ''; y = ""; self.assert_(len(x) == 0 and x == y) x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) x = "doesn't \"shrink\" does it" y = 'doesn\'t "shrink" does it' self.assert_(len(x) == 24 and x == y) x = "does \"shrink\" doesn't it" y = 'does "shrink" doesn\'t it' self.assert_(len(x) == 24 and x == y) x = """ The "quick" brown fox jumps over the 'lazy' dog. """ y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' self.assertEquals(x, y) y = ''' The "quick" brown fox jumps over the 'lazy' dog. ''' self.assertEquals(x, y) y = "\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the 'lazy' dog.\n\ " self.assertEquals(x, y) y = '\n\ The \"quick\"\n\ brown fox\n\ jumps over\n\ the \'lazy\' dog.\n\ ' self.assertEquals(x, y) def testEllipsis(self): x = ... self.assert_(x is Ellipsis) self.assertRaises(SyntaxError, eval, ".. 
.") class GrammarTests(unittest.TestCase): # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE # XXX can't test in a script -- this rule is only used when interactive # file_input: (NEWLINE | stmt)* ENDMARKER # Being tested as this very moment this very module # expr_input: testlist NEWLINE # XXX Hard to test -- used only in calls to input() def testEvalInput(self): # testlist ENDMARKER x = eval('1, 0 or 1') def testFuncdef(self): ### [decorators] 'def' NAME parameters ['->' test] ':' suite ### decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE ### decorators: decorator+ ### parameters: '(' [typedargslist] ')' ### typedargslist: ((tfpdef ['=' test] ',')* ### ('*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef) ### | tfpdef ['=' test] (',' tfpdef ['=' test])* [',']) ### tfpdef: NAME [':' test] ### varargslist: ((vfpdef ['=' test] ',')* ### ('*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef) ### | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) ### vfpdef: NAME def f1(): pass f1() f1(*()) f1(*(), **{}) def f2(one_argument): pass def f3(two, arguments): pass self.assertEquals(f2.__code__.co_varnames, ('one_argument',)) self.assertEquals(f3.__code__.co_varnames, ('two', 'arguments')) def a1(one_arg,): pass def a2(two, args,): pass def v0(*rest): pass def v1(a, *rest): pass def v2(a, b, *rest): pass f1() f2(1) f2(1,) f3(1, 2) f3(1, 2,) v0() v0(1) v0(1,) v0(1,2) v0(1,2,3,4,5,6,7,8,9,0) v1(1) v1(1,) v1(1,2) v1(1,2,3) v1(1,2,3,4,5,6,7,8,9,0) v2(1,2) v2(1,2,3) v2(1,2,3,4) v2(1,2,3,4,5,6,7,8,9,0) def d01(a=1): pass d01() d01(1) d01(*(1,)) d01(**{'a':2}) def d11(a, b=1): pass d11(1) d11(1, 2) d11(1, **{'b':2}) def d21(a, b, c=1): pass d21(1, 2) d21(1, 2, 3) d21(*(1, 2, 3)) d21(1, *(2, 3)) d21(1, 2, *(3,)) d21(1, 2, **{'c':3}) def d02(a=1, b=2): pass d02() d02(1) d02(1, 2) d02(*(1, 2)) d02(1, *(2,)) d02(1, **{'b':2}) d02(**{'a': 1, 'b': 2}) def d12(a, b=1, c=2): pass d12(1) d12(1, 2) d12(1, 2, 3) def d22(a, b, c=1, d=2): 
pass d22(1, 2) d22(1, 2, 3) d22(1, 2, 3, 4) def d01v(a=1, *rest): pass d01v() d01v(1) d01v(1, 2) d01v(*(1, 2, 3, 4)) d01v(*(1,)) d01v(**{'a':2}) def d11v(a, b=1, *rest): pass d11v(1) d11v(1, 2) d11v(1, 2, 3) def d21v(a, b, c=1, *rest): pass d21v(1, 2) d21v(1, 2, 3) d21v(1, 2, 3, 4) d21v(*(1, 2, 3, 4)) d21v(1, 2, **{'c': 3}) def d02v(a=1, b=2, *rest): pass d02v() d02v(1) d02v(1, 2) d02v(1, 2, 3) d02v(1, *(2, 3, 4)) d02v(**{'a': 1, 'b': 2}) def d12v(a, b=1, c=2, *rest): pass d12v(1) d12v(1, 2) d12v(1, 2, 3) d12v(1, 2, 3, 4) d12v(*(1, 2, 3, 4)) d12v(1, 2, *(3, 4, 5)) d12v(1, *(2,), **{'c': 3}) def d22v(a, b, c=1, d=2, *rest): pass d22v(1, 2) d22v(1, 2, 3) d22v(1, 2, 3, 4) d22v(1, 2, 3, 4, 5) d22v(*(1, 2, 3, 4)) d22v(1, 2, *(3, 4, 5)) d22v(1, *(2, 3), **{'d': 4}) # keyword argument type tests try: str('x', **{b'foo':1 }) except TypeError: pass else: self.fail('Bytes should not work as keyword argument names') # keyword only argument tests def pos0key1(*, key): return key pos0key1(key=100) def pos2key2(p1, p2, *, k1, k2=100): return p1,p2,k1,k2 pos2key2(1, 2, k1=100) pos2key2(1, 2, k1=100, k2=200) pos2key2(1, 2, k2=100, k1=200) def pos2key2dict(p1, p2, *, k1=100, k2, **kwarg): return p1,p2,k1,k2,kwarg pos2key2dict(1,2,k2=100,tokwarg1=100,tokwarg2=200) pos2key2dict(1,2,tokwarg1=100,tokwarg2=200, k2=100) # keyword arguments after *arglist def f(*args, **kwargs): return args, kwargs self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4), {'x':2, 'y':5})) self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)") self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)") # argument annotation tests def f(x) -> list: pass self.assertEquals(f.__annotations__, {'return': list}) def f(x:int): pass self.assertEquals(f.__annotations__, {'x': int}) def f(*x:str): pass self.assertEquals(f.__annotations__, {'x': str}) def f(**x:float): pass self.assertEquals(f.__annotations__, {'x': float}) def f(x, y:1+2): pass self.assertEquals(f.__annotations__, {'y': 3}) def f(a, b:1, c:2, d): 
pass self.assertEquals(f.__annotations__, {'b': 1, 'c': 2}) def f(a, b:1, c:2, d, e:3=4, f=5, *g:6): pass self.assertEquals(f.__annotations__, {'b': 1, 'c': 2, 'e': 3, 'g': 6}) def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass self.assertEquals(f.__annotations__, {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9, 'k': 11, 'return': 12}) # Check for SF Bug #1697248 - mixing decorators and a return annotation def null(x): return x @null def f(x) -> list: pass self.assertEquals(f.__annotations__, {'return': list}) # test MAKE_CLOSURE with a variety of oparg's closure = 1 def f(): return closure def f(x=1): return closure def f(*, k=1): return closure def f() -> int: return closure # Check ast errors in *args and *kwargs check_syntax_error(self, "f(*g(1=2))") check_syntax_error(self, "f(**g(1=2))") def testLambdef(self): ### lambdef: 'lambda' [varargslist] ':' test l1 = lambda : 0 self.assertEquals(l1(), 0) l2 = lambda : a[d] # XXX just testing the expression l3 = lambda : [2 < x for x in [-1, 3, 0]] self.assertEquals(l3(), [0, 1, 0]) l4 = lambda x = lambda y = lambda z=1 : z : y() : x() self.assertEquals(l4(), 1) l5 = lambda x, y, z=2: x + y + z self.assertEquals(l5(1, 2), 5) self.assertEquals(l5(1, 2, 3), 6) check_syntax_error(self, "lambda x: x = 2") check_syntax_error(self, "lambda (None,): None") l6 = lambda x, y, *, k=20: x+y+k self.assertEquals(l6(1,2), 1+2+20) self.assertEquals(l6(1,2,k=10), 1+2+10) ### stmt: simple_stmt | compound_stmt # Tested below def testSimpleStmt(self): ### simple_stmt: small_stmt (';' small_stmt)* [';'] x = 1; pass; del x def foo(): # verify statments that end with semi-colons x = 1; pass; del x; foo() ### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt # Tested below def testExprStmt(self): # (exprlist '=')* exprlist 1 1, 2, 3 x = 1 x = 1, 2, 3 x = y = z = 1, 2, 3 x, y, z = 1, 2, 3 abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4) check_syntax_error(self, "x + 1 = 
1") check_syntax_error(self, "a + 1 = b + 2") def testDelStmt(self): # 'del' exprlist abc = [1,2,3] x, y, z = abc xyz = x, y, z del abc del x, y, (z, xyz) def testPassStmt(self): # 'pass' pass # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt # Tested below def testBreakStmt(self): # 'break' while 1: break def testContinueStmt(self): # 'continue' i = 1 while i: i = 0; continue msg = "" while not msg: msg = "ok" try: continue msg = "continue failed to continue inside try" except: msg = "continue inside try called except block" if msg != "ok": self.fail(msg) msg = "" while not msg: msg = "finally block not called" try: continue finally: msg = "ok" if msg != "ok": self.fail(msg) def test_break_continue_loop(self): # This test warrants an explanation. It is a test specifically for SF bugs # #463359 and #462937. The bug is that a 'break' statement executed or # exception raised inside a try/except inside a loop, *after* a continue # statement has been executed in that loop, will cause the wrong number of # arguments to be popped off the stack and the instruction pointer reset to # a very small number (usually 0.) Because of this, the following test # *must* written as a function, and the tracking vars *must* be function # arguments with default values. Otherwise, the test will loop and loop. 
def test_inner(extra_burning_oil = 1, count=0): big_hippo = 2 while big_hippo: count += 1 try: if extra_burning_oil and big_hippo == 1: extra_burning_oil -= 1 break big_hippo -= 1 continue except: raise if count > 2 or big_hippo != 1: self.fail("continue then break in try/except in loop broken!") test_inner() def testReturn(self): # 'return' [testlist] def g1(): return def g2(): return 1 g1() x = g2() check_syntax_error(self, "class foo:return 1") def testYield(self): check_syntax_error(self, "class foo:yield 1") def testRaise(self): # 'raise' test [',' test] try: raise RuntimeError('just testing') except RuntimeError: pass try: raise KeyboardInterrupt except KeyboardInterrupt: pass def testImport(self): # 'import' dotted_as_names import sys import time, sys # 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names) from time import time from time import (time) # not testable inside a function, but already done at top of the module # from sys import * from sys import path, argv from sys import (path, argv) from sys import (path, argv,) def testGlobal(self): # 'global' NAME (',' NAME)* global a global a, b global one, two, three, four, five, six, seven, eight, nine, ten def testNonlocal(self): # 'nonlocal' NAME (',' NAME)* x = 0 y = 0 def f(): nonlocal x nonlocal x, y def testAssert(self): # assert_stmt: 'assert' test [',' test] assert 1 assert 1, 1 assert lambda x:x assert 1, lambda x:x+1 try: assert 0, "msg" except AssertionError as e: self.assertEquals(e.args[0], "msg") else: if __debug__: self.fail("AssertionError not raised by assert 0") ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef # Tested below def testIf(self): # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] if 1: pass if 1: pass else: pass if 0: pass elif 0: pass if 0: pass elif 0: pass elif 0: pass elif 0: pass else: pass def testWhile(self): # 'while' test ':' suite ['else' ':' suite] while 0: pass while 0: pass else: pass # 
Issue1920: "while 0" is optimized away, # ensure that the "else" clause is still present. x = 0 while 0: x = 1 else: x = 2 self.assertEquals(x, 2) def testFor(self): # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite] for i in 1, 2, 3: pass for i, j, k in (): pass else: pass class Squares: def __init__(self, max): self.max = max self.sofar = [] def __len__(self): return len(self.sofar) def __getitem__(self, i): if not 0 <= i < self.max: raise IndexError n = len(self.sofar) while n <= i: self.sofar.append(n*n) n = n+1 return self.sofar[i] n = 0 for x in Squares(10): n = n+x if n != 285: self.fail('for over growing sequence') result = [] for x, in [(1,), (2,), (3,)]: result.append(x) self.assertEqual(result, [1, 2, 3]) def testTry(self): ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] ### | 'try' ':' suite 'finally' ':' suite ### except_clause: 'except' [expr ['as' expr]] try: 1/0 except ZeroDivisionError: pass else: pass try: 1/0 except EOFError: pass except TypeError as msg: pass except RuntimeError as msg: pass except: pass else: pass try: 1/0 except (EOFError, TypeError, ZeroDivisionError): pass try: 1/0 except (EOFError, TypeError, ZeroDivisionError) as msg: pass try: pass finally: pass def testSuite(self): # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT if 1: pass if 1: pass if 1: # # # pass pass # pass # def testTest(self): ### and_test ('or' and_test)* ### and_test: not_test ('and' not_test)* ### not_test: 'not' not_test | comparison if not 1: pass if 1 and 1: pass if 1 or 1: pass if not not not 1: pass if not 1 and 1 and 1: pass if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass def testComparison(self): ### comparison: expr (comp_op expr)* ### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not' if 1: pass x = (1 == 1) if 1 == 1: pass if 1 != 1: pass if 1 < 1: pass if 1 > 1: pass if 1 <= 1: pass if 1 >= 1: pass if 1 is 1: pass if 1 is not 1: pass if 1 in (): pass if 1 not in (): pass if 1 < 1 
> 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass def testBinaryMaskOps(self): x = 1 & 1 x = 1 ^ 1 x = 1 | 1 def testShiftOps(self): x = 1 << 1 x = 1 >> 1 x = 1 << 1 >> 1 def testAdditiveOps(self): x = 1 x = 1 + 1 x = 1 - 1 - 1 x = 1 - 1 + 1 - 1 + 1 def testMultiplicativeOps(self): x = 1 * 1 x = 1 / 1 x = 1 % 1 x = 1 / 1 * 1 % 1 def testUnaryOps(self): x = +1 x = -1 x = ~1 x = ~1 ^ 1 & 1 | 1 & 1 ^ -1 x = -1*1/1 + 1*1 - ---1*1 def testSelectors(self): ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME ### subscript: expr | [expr] ':' [expr] import sys, time c = sys.path[0] x = time.time() x = sys.modules['time'].time() a = '01234' c = a[0] c = a[-1] s = a[0:5] s = a[:5] s = a[0:] s = a[:] s = a[-5:] s = a[:-1] s = a[-4:-3] # A rough test of SF bug 1333982. http://python.org/sf/1333982 # The testing here is fairly incomplete. # Test cases should include: commas with 1 and 2 colons d = {} d[1] = 1 d[1,] = 2 d[1,2] = 3 d[1,2,3] = 4 L = list(d) L.sort(key=lambda x: x if isinstance(x, tuple) else ()) self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]') def testAtoms(self): ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictsetmaker] '}' | NAME | NUMBER | STRING ### dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [',']) x = (1) x = (1 or 2 or 3) x = (1 or 2 or 3, 2, 3) x = [] x = [1] x = [1 or 2 or 3] x = [1 or 2 or 3, 2, 3] x = [] x = {} x = {'one': 1} x = {'one': 1,} x = {'one' or 'two': 1 or 2} x = {'one': 1, 'two': 2} x = {'one': 1, 'two': 2,} x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6} x = {'one'} x = {'one', 1,} x = {'one', 'two', 'three'} x = {2, 3, 4,} x = x x = 'x' x = 123 ### exprlist: expr (',' expr)* [','] ### testlist: test (',' test)* [','] # These have been exercised enough above def testClassdef(self): # 'class' NAME ['(' [testlist] ')'] ':' suite class B: pass class B2(): pass class C1(B): pass class C2(B): pass class D(C1, C2, B): pass class C: def meth1(self): pass 
def meth2(self, arg): pass def meth3(self, a1, a2): pass # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE # decorators: decorator+ # decorated: decorators (classdef | funcdef) def class_decorator(x): return x @class_decorator class G: pass def testDictcomps(self): # dictorsetmaker: ( (test ':' test (comp_for | # (',' test ':' test)* [','])) | # (test (comp_for | (',' test)* [','])) ) nums = [1, 2, 3] self.assertEqual({i:i+1 for i in nums}, {1: 2, 2: 3, 3: 4}) def testListcomps(self): # list comprehension tests nums = [1, 2, 3, 4, 5] strs = ["Apple", "Banana", "Coconut"] spcs = [" Apple", " Banana ", "Coco nut "] self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut']) self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15]) self.assertEqual([x for x in nums if x > 2], [3, 4, 5]) self.assertEqual([(i, s) for i in nums for s in strs], [(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'), (2, 'Apple'), (2, 'Banana'), (2, 'Coconut'), (3, 'Apple'), (3, 'Banana'), (3, 'Coconut'), (4, 'Apple'), (4, 'Banana'), (4, 'Coconut'), (5, 'Apple'), (5, 'Banana'), (5, 'Coconut')]) self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]], [(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'), (3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'), (5, 'Banana'), (5, 'Coconut')]) self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)], [[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]]) def test_in_func(l): return [0 < x < 3 for x in l if x > 2] self.assertEqual(test_in_func(nums), [False, False, False]) def test_nested_front(): self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]], [[1, 2], [3, 4], [5, 6]]) test_nested_front() check_syntax_error(self, "[i, s for i in nums for s in strs]") check_syntax_error(self, "[x if y]") suppliers = [ (1, "Boeing"), (2, "Ford"), (3, "Macdonalds") ] parts = [ (10, "Airliner"), (20, "Engine"), (30, "Cheeseburger") ] suppart = [ (1, 10), (1, 20), (2, 
20), (3, 30) ] x = [ (sname, pname) for (sno, sname) in suppliers for (pno, pname) in parts for (sp_sno, sp_pno) in suppart if sno == sp_sno and pno == sp_pno ] self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'), ('Macdonalds', 'Cheeseburger')]) def testGenexps(self): # generator expression tests g = ([x for x in range(10)] for x in range(1)) self.assertEqual(next(g), [x for x in range(10)]) try: next(g) self.fail('should produce StopIteration exception') except StopIteration: pass a = 1 try: g = (a for d in a) next(g) self.fail('should produce TypeError') except TypeError: pass self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd']) self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy']) a = [x for x in range(10)] b = (x for x in (y for y in a)) self.assertEqual(sum(b), sum([x for x in range(10)])) self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)])) self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2])) self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)])) self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)])) self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0) check_syntax_error(self, "foo(x for x in range(10), 100)") check_syntax_error(self, "foo(100, x for x in range(10))") def testComprehensionSpecials(self): # test for outmost iterable precomputation x = 10; g = (i for i in range(x)); x = 5 self.assertEqual(len(list(g)), 10) # This should hold, since we're only precomputing outmost iterable. 
x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x)) x = 5; t = True; self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g)) # Grammar allows multiple adjacent 'if's in listcomps and genexps, # even though it's silly. Make sure it works (ifelse broke this.) self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7]) self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7]) # verify unpacking single element tuples in listcomp/genexp. self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6]) self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9]) def test_with_statement(self): class manager(object): def __enter__(self): return (1, 2) def __exit__(self, *args): pass with manager(): pass with manager() as x: pass with manager() as (x, y): pass with manager(), manager(): pass with manager() as x, manager() as y: pass with manager() as x, manager(): pass def testIfElseExpr(self): # Test ifelse expressions in various cases def _checkeval(msg, ret): "helper to check that evaluation of expressions is done correctly" print(x) return ret # the next line is not allowed anymore #self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True]) self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True]) self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5) self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5) self.assertEqual((5 and 6 if 0 else 1), 1) self.assertEqual(((5 and 6) if 0 else 1), 1) self.assertEqual((5 and (6 if 1 else 1)), 6) self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3) self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1) self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5) self.assertEqual((not 5 if 1 else 1), False) self.assertEqual((not 5 if 0 else 1), 
1) self.assertEqual((6 + 1 if 1 else 2), 7) self.assertEqual((6 - 1 if 1 else 2), 5) self.assertEqual((6 * 2 if 1 else 4), 12) self.assertEqual((6 / 2 if 1 else 3), 3) self.assertEqual((6 < 4 if 0 else 2), 2) def test_main(): run_unittest(TokenTests, GrammarTests) if __name__ == '__main__': test_main()
Python
print "hi" print "Like bad Windows newlines?"
Python
"""Support code for test_*.py files""" # Author: Collin Winter # Python imports import unittest import sys import os import os.path import re from textwrap import dedent # Local imports from lib2to3 import pytree, refactor from lib2to3.pgen2 import driver test_dir = os.path.dirname(__file__) proj_dir = os.path.normpath(os.path.join(test_dir, "..")) grammar_path = os.path.join(test_dir, "..", "Grammar.txt") grammar = driver.load_grammar(grammar_path) driver = driver.Driver(grammar, convert=pytree.convert) def parse_string(string): return driver.parse_string(reformat(string), debug=True) def run_all_tests(test_mod=None, tests=None): if tests is None: tests = unittest.TestLoader().loadTestsFromModule(test_mod) unittest.TextTestRunner(verbosity=2).run(tests) def reformat(string): return dedent(string) + u"\n\n" def get_refactorer(fixer_pkg="lib2to3", fixers=None, options=None): """ A convenience function for creating a RefactoringTool for tests. fixers is a list of fixers for the RefactoringTool to use. By default "lib2to3.fixes.*" is used. options is an optional dictionary of options to be passed to the RefactoringTool. """ if fixers is not None: fixers = [fixer_pkg + ".fixes.fix_" + fix for fix in fixers] else: fixers = refactor.get_fixers_from_package(fixer_pkg + ".fixes") options = options or {} return refactor.RefactoringTool(fixers, options, explicit=True) def all_project_files(): for dirpath, dirnames, filenames in os.walk(proj_dir): for filename in filenames: if filename.endswith(".py"): yield os.path.join(dirpath, filename) TestCase = unittest.TestCase
Python
"""Make tests/ into a package. This allows us to "import tests" and have tests.all_tests be a TestSuite representing all test cases from all test_*.py files in tests/.""" # Author: Collin Winter import os import os.path import unittest import types from . import support all_tests = unittest.TestSuite() tests_dir = os.path.join(os.path.dirname(__file__), '..', 'tests') tests = [t[0:-3] for t in os.listdir(tests_dir) if t.startswith('test_') and t.endswith('.py')] loader = unittest.TestLoader() for t in tests: __import__("",globals(),locals(),[t],level=1) mod = globals()[t] all_tests.addTests(loader.loadTestsFromModule(mod))
Python
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

# Pgen imports
from . import grammar, token, tokenize

# NOTE(review): this module is Python 2 code (dict.iteritems(), list-returning
# keys(), generator .next(), ``print`` statements in the dump helpers).

class PgenGrammar(grammar.Grammar):
    # Concrete Grammar subclass populated by ParserGenerator.make_grammar().
    pass

class ParserGenerator(object):
    """Builds parse tables (a PgenGrammar) from a Grammar.txt-style file."""

    def __init__(self, filename, stream=None):
        # If no stream is supplied, open (and later close) the named file.
        close_stream = None
        if stream is None:
            stream = open(filename)
            close_stream = stream.close
        self.filename = filename
        self.stream = stream
        self.generator = tokenize.generate_tokens(stream.readline)
        self.gettoken() # Initialize lookahead
        self.dfas, self.startsymbol = self.parse()
        if close_stream is not None:
            close_stream()
        self.first = {} # map from symbol name to set of tokens
        self.addfirstsets()

    def make_grammar(self):
        """Convert the parsed DFAs into a PgenGrammar instance."""
        c = PgenGrammar()
        names = self.dfas.keys()
        names.sort()
        names.remove(self.startsymbol)
        names.insert(0, self.startsymbol)  # start symbol is numbered first (256)
        for name in names:
            i = 256 + len(c.symbol2number)
            c.symbol2number[name] = i
            c.number2symbol[i] = name
        for name in names:
            dfa = self.dfas[name]
            states = []
            for state in dfa:
                arcs = []
                for label, next in state.arcs.iteritems():
                    arcs.append((self.make_label(c, label), dfa.index(next)))
                if state.isfinal:
                    arcs.append((0, dfa.index(state)))  # 0 == accepting arc
                states.append(arcs)
            c.states.append(states)
            c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name))
        c.start = c.symbol2number[self.startsymbol]
        return c

    def make_first(self, c, name):
        """Translate the FIRST set of *name* from labels to ilabel numbers."""
        rawfirst = self.first[name]
        first = {}
        for label in rawfirst:
            ilabel = self.make_label(c, label)
            ##assert ilabel not in first # XXX failed on <> ... !=
            first[ilabel] = 1
        return first

    def make_label(self, c, label):
        """Return (creating it if necessary) the ilabel number for *label*."""
        # XXX Maybe this should be a method on a subclass of converter?
        ilabel = len(c.labels)
        if label[0].isalpha():
            # Either a symbol name or a named token
            if label in c.symbol2number:
                # A symbol name (a non-terminal)
                if label in c.symbol2label:
                    return c.symbol2label[label]
                else:
                    c.labels.append((c.symbol2number[label], None))
                    c.symbol2label[label] = ilabel
                    return ilabel
            else:
                # A named token (NAME, NUMBER, STRING)
                itoken = getattr(token, label, None)
                assert isinstance(itoken, int), label
                assert itoken in token.tok_name, label
                if itoken in c.tokens:
                    return c.tokens[itoken]
                else:
                    c.labels.append((itoken, None))
                    c.tokens[itoken] = ilabel
                    return ilabel
        else:
            # Either a keyword or an operator
            assert label[0] in ('"', "'"), label
            # label is a quoted literal taken from the grammar file itself.
            value = eval(label)
            if value[0].isalpha():
                # A keyword
                if value in c.keywords:
                    return c.keywords[value]
                else:
                    c.labels.append((token.NAME, value))
                    c.keywords[value] = ilabel
                    return ilabel
            else:
                # An operator (any non-numeric token)
                itoken = grammar.opmap[value] # Fails if unknown token
                if itoken in c.tokens:
                    return c.tokens[itoken]
                else:
                    c.labels.append((itoken, None))
                    c.tokens[itoken] = ilabel
                    return ilabel

    def addfirstsets(self):
        """Compute the FIRST set of every rule (recursively via calcfirst)."""
        names = self.dfas.keys()
        names.sort()
        for name in names:
            if name not in self.first:
                self.calcfirst(name)
            #print name, self.first[name].keys()

    def calcfirst(self, name):
        """Fill self.first[name]; raises ValueError on left recursion
        or on an ambiguous FIRST-set overlap between alternatives."""
        dfa = self.dfas[name]
        self.first[name] = None # dummy to detect left recursion
        state = dfa[0]
        totalset = {}
        overlapcheck = {}
        for label, next in state.arcs.iteritems():
            if label in self.dfas:
                if label in self.first:
                    fset = self.first[label]
                    if fset is None:
                        raise ValueError("recursion for rule %r" % name)
                else:
                    self.calcfirst(label)
                    fset = self.first[label]
                totalset.update(fset)
                overlapcheck[label] = fset
            else:
                totalset[label] = 1
                overlapcheck[label] = {label: 1}
        # Two alternatives sharing a FIRST token would be ambiguous to parse.
        inverse = {}
        for label, itsfirst in overlapcheck.iteritems():
            for symbol in itsfirst:
                if symbol in inverse:
                    raise ValueError("rule %s is ambiguous; %s is in the"
                                     " first sets of %s as well as %s" %
                                     (name, symbol, label, inverse[symbol]))
                inverse[symbol] = label
        self.first[name] = totalset

    def parse(self):
        """Parse the whole grammar file; return (dfas_by_name, startsymbol)."""
        dfas = {}
        startsymbol = None
        # MSTART: (NEWLINE | RULE)* ENDMARKER
        while self.type != token.ENDMARKER:
            while self.type == token.NEWLINE:
                self.gettoken()
            # RULE: NAME ':' RHS NEWLINE
            name = self.expect(token.NAME)
            self.expect(token.OP, ":")
            a, z = self.parse_rhs()
            self.expect(token.NEWLINE)
            #self.dump_nfa(name, a, z)
            dfa = self.make_dfa(a, z)
            #self.dump_dfa(name, dfa)
            oldlen = len(dfa)
            self.simplify_dfa(dfa)
            newlen = len(dfa)
            dfas[name] = dfa
            #print name, oldlen, newlen
            if startsymbol is None:
                startsymbol = name
        return dfas, startsymbol

    def make_dfa(self, start, finish):
        """Subset-construct a DFA from the NFA between *start* and *finish*."""
        # To turn an NFA into a DFA, we define the states of the DFA
        # to correspond to *sets* of states of the NFA.  Then do some
        # state reduction.  Let's represent sets as dicts with 1 for
        # values.
        assert isinstance(start, NFAState)
        assert isinstance(finish, NFAState)
        def closure(state):
            # Epsilon-closure of a single NFA state, as a dict-set.
            base = {}
            addclosure(state, base)
            return base
        def addclosure(state, base):
            assert isinstance(state, NFAState)
            if state in base:
                return
            base[state] = 1
            for label, next in state.arcs:
                if label is None:
                    addclosure(next, base)
        states = [DFAState(closure(start), finish)]
        for state in states: # NB states grows while we're iterating
            arcs = {}
            for nfastate in state.nfaset:
                for label, next in nfastate.arcs:
                    if label is not None:
                        addclosure(next, arcs.setdefault(label, {}))
            for label, nfaset in arcs.iteritems():
                for st in states:
                    if st.nfaset == nfaset:
                        break
                else:
                    st = DFAState(nfaset, finish)
                    states.append(st)
                state.addarc(st, label)
        return states # List of DFAState instances; first one is start

    def dump_nfa(self, name, start, finish):
        """Debug helper: print the NFA (Python 2 ``print`` statements)."""
        print "Dump of NFA for", name
        todo = [start]
        for i, state in enumerate(todo):
            print "  State", i, state is finish and "(final)" or ""
            for label, next in state.arcs:
                if next in todo:
                    j = todo.index(next)
                else:
                    j = len(todo)
                    todo.append(next)
                if label is None:
                    print "    -> %d" % j
                else:
                    print "    %s -> %d" % (label, j)

    def dump_dfa(self, name, dfa):
        """Debug helper: print the DFA (Python 2 ``print`` statements)."""
        print "Dump of DFA for", name
        for i, state in enumerate(dfa):
            print "  State", i, state.isfinal and "(final)" or ""
            for label, next in state.arcs.iteritems():
                print "    %s -> %d" % (label, dfa.index(next))

    def simplify_dfa(self, dfa):
        """Merge equivalent DFA states in place until a fixed point."""
        # This is not theoretically optimal, but works well enough.
        # Algorithm: repeatedly look for two states that have the same
        # set of arcs (same labels pointing to the same nodes) and
        # unify them, until things stop changing.

        # dfa is a list of DFAState instances
        changes = True
        while changes:
            changes = False
            for i, state_i in enumerate(dfa):
                for j in range(i+1, len(dfa)):
                    state_j = dfa[j]
                    if state_i == state_j:
                        #print "  unify", i, j
                        del dfa[j]
                        for state in dfa:
                            state.unifystate(state_j, state_i)
                        changes = True
                        break

    def parse_rhs(self):
        """Parse RHS: ALT ('|' ALT)*; return (start, end) NFA states."""
        # RHS: ALT ('|' ALT)*
        a, z = self.parse_alt()
        if self.value != "|":
            return a, z
        else:
            # Fresh start/end states with epsilon arcs fan out to each ALT.
            aa = NFAState()
            zz = NFAState()
            aa.addarc(a)
            z.addarc(zz)
            while self.value == "|":
                self.gettoken()
                a, z = self.parse_alt()
                aa.addarc(a)
                z.addarc(zz)
            return aa, zz

    def parse_alt(self):
        """Parse ALT: ITEM+; chain the items' NFAs in sequence."""
        # ALT: ITEM+
        a, b = self.parse_item()
        while (self.value in ("(", "[") or
               self.type in (token.NAME, token.STRING)):
            c, d = self.parse_item()
            b.addarc(c)
            b = d
        return a, b

    def parse_item(self):
        """Parse ITEM: '[' RHS ']' | ATOM ['+' | '*']."""
        # ITEM: '[' RHS ']' | ATOM ['+' | '*']
        if self.value == "[":
            self.gettoken()
            a, z = self.parse_rhs()
            self.expect(token.OP, "]")
            a.addarc(z)  # epsilon arc makes the bracketed part optional
            return a, z
        else:
            a, z = self.parse_atom()
            value = self.value
            if value not in ("+", "*"):
                return a, z
            self.gettoken()
            z.addarc(a)  # loop back for repetition
            if value == "+":
                return a, z
            else:
                return a, a

    def parse_atom(self):
        """Parse ATOM: '(' RHS ')' | NAME | STRING."""
        # ATOM: '(' RHS ')' | NAME | STRING
        if self.value == "(":
            self.gettoken()
            a, z = self.parse_rhs()
            self.expect(token.OP, ")")
            return a, z
        elif self.type in (token.NAME, token.STRING):
            a = NFAState()
            z = NFAState()
            a.addarc(z, self.value)
            self.gettoken()
            return a, z
        else:
            self.raise_error("expected (...) or NAME or STRING, got %s/%s",
                             self.type, self.value)

    def expect(self, type, value=None):
        """Consume and return the current token, checking its type (and
        optionally its value); raise SyntaxError on mismatch."""
        if self.type != type or (value is not None and self.value != value):
            self.raise_error("expected %s/%s, got %s/%s",
                             type, value, self.type, self.value)
        value = self.value
        self.gettoken()
        return value

    def gettoken(self):
        """Advance the lookahead to the next significant token
        (COMMENT and NL tokens are skipped)."""
        tup = self.generator.next()
        while tup[0] in (tokenize.COMMENT, tokenize.NL):
            tup = self.generator.next()
        self.type, self.value, self.begin, self.end, self.line = tup
        #print token.tok_name[self.type], repr(self.value)

    def raise_error(self, msg, *args):
        """Raise SyntaxError at the current position, %-formatting *args*."""
        if args:
            try:
                msg = msg % args
            except:
                msg = " ".join([msg] + map(str, args))
        raise SyntaxError(msg, (self.filename, self.end[0],
                                self.end[1], self.line))

class NFAState(object):
    """NFA state: a list of outgoing (label, NFAState) arcs.
    A label of None denotes an epsilon (empty) transition."""

    def __init__(self):
        self.arcs = [] # list of (label, NFAState) pairs

    def addarc(self, next, label=None):
        assert label is None or isinstance(label, str)
        assert isinstance(next, NFAState)
        self.arcs.append((label, next))

class DFAState(object):
    """DFA state: wraps a set (dict) of NFA states; arcs are keyed by label."""

    def __init__(self, nfaset, final):
        assert isinstance(nfaset, dict)
        assert isinstance(iter(nfaset).next(), NFAState)
        assert isinstance(final, NFAState)
        self.nfaset = nfaset
        # Final iff the NFA's finish state is inside this subset.
        self.isfinal = final in nfaset
        self.arcs = {} # map from label to DFAState

    def addarc(self, next, label):
        assert isinstance(label, str)
        assert label not in self.arcs
        assert isinstance(next, DFAState)
        self.arcs[label] = next

    def unifystate(self, old, new):
        # Redirect arcs that pointed at *old* to point at *new*.
        for label, next in self.arcs.iteritems():
            if next is old:
                self.arcs[label] = new

    def __eq__(self, other):
        # Equality test -- ignore the nfaset instance variable
        assert isinstance(other, DFAState)
        if self.isfinal != other.isfinal:
            return False
        # Can't just return self.arcs == other.arcs, because that
        # would invoke this method recursively, with cycles...
        if len(self.arcs) != len(other.arcs):
            return False
        for label, next in self.arcs.iteritems():
            if next is not other.arcs.get(label):
                return False
        return True

    __hash__ = None # For Py3 compatibility.
def generate_grammar(filename="Grammar.txt"):
    """Parse the grammar file *filename* and return its PgenGrammar."""
    generator = ParserGenerator(filename)
    return generator.make_grammar()
Python
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Convert graminit.[ch] spit out by pgen to Python code.

Pgen is the Python parser generator.  It is useful to quickly create a
parser from a grammar file in Python's grammar notation.  But I don't
want my parsers to be written in C (yet), so I'm translating the
parsing tables to Python data structures and writing a Python parse
engine.

Note that the token numbers are constants determined by the standard
Python tokenizer.  The standard token module defines these numbers and
their names (the names are not used much).  The token numbers are
hardcoded into the Python tokenizer and into pgen.  A Python
implementation of the Python tokenizer is also available, in the
standard tokenize module.

On the other hand, symbol numbers (representing the grammar's
non-terminals) are assigned by pgen based on the actual grammar
input.

Note: this module is pretty much obsolete; the pgen module generates
equivalent grammar tables directly from the Grammar.txt input file
without having to invoke the Python pgen C program.

"""

# Python imports
import re

# Local imports
from pgen2 import grammar, token

# NOTE(review): Python 2-only module (``except IOError, err`` syntax,
# ``print`` statements, file-iterator ``.next()`` calls).

class Converter(grammar.Grammar):
    """Grammar subclass that reads classic pgen output files.

    The run() method reads the tables as produced by the pgen parser
    generator, typically contained in two C files, graminit.h and
    graminit.c.  The other methods are for internal use only.

    See the base class for more documentation.

    """

    def run(self, graminit_h, graminit_c):
        """Load the grammar tables from the text files written by pgen."""
        self.parse_graminit_h(graminit_h)
        self.parse_graminit_c(graminit_c)
        self.finish_off()

    def parse_graminit_h(self, filename):
        """Parse the .h file writen by pgen.  (Internal)

        This file is a sequence of #define statements defining the
        nonterminals of the grammar as numbers.  We build two tables
        mapping the numbers to names and back.

        Returns False (after printing) if the file cannot be opened.
        """
        try:
            f = open(filename)
        except IOError, err:
            print "Can't open %s: %s" % (filename, err)
            return False
        self.symbol2number = {}
        self.number2symbol = {}
        lineno = 0
        for line in f:
            lineno += 1
            mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
            # NOTE(review): a blank line (mo is None AND strip() falsy) falls
            # through to the else branch and would crash on mo.groups() --
            # presumably graminit.h never contains blank lines; confirm.
            if not mo and line.strip():
                print "%s(%s): can't parse %s" % (filename, lineno,
                                                  line.strip())
            else:
                symbol, number = mo.groups()
                number = int(number)
                assert symbol not in self.symbol2number
                assert number not in self.number2symbol
                self.symbol2number[symbol] = number
                self.number2symbol[number] = symbol
        return True

    def parse_graminit_c(self, filename):
        """Parse the .c file writen by pgen.  (Internal)

        The file looks as follows.  The first two lines are always this:

        #include "pgenheaders.h"
        #include "grammar.h"

        After that come four blocks:

        1) one or more state definitions
        2) a table defining dfas
        3) a table defining labels
        4) a struct defining the grammar

        A state definition has the following form:
        - one or more arc arrays, each of the form:
          static arc arcs_<n>_<m>[<k>] = {
                  {<i>, <j>},
                  ...
          };
        - followed by a state array, of the form:
          static state states_<s>[<t>] = {
                  {<k>, arcs_<n>_<m>},
                  ...
          };

        Returns False (after printing) if the file cannot be opened;
        otherwise parses to the end, asserting on any deviation.
        """
        try:
            f = open(filename)
        except IOError, err:
            print "Can't open %s: %s" % (filename, err)
            return False
        # The code below essentially uses f's iterator-ness!
        lineno = 0

        # Expect the two #include lines
        lineno, line = lineno+1, f.next()
        assert line == '#include "pgenheaders.h"\n', (lineno, line)
        lineno, line = lineno+1, f.next()
        assert line == '#include "grammar.h"\n', (lineno, line)

        # Parse the state definitions
        lineno, line = lineno+1, f.next()
        allarcs = {}
        states = []
        while line.startswith("static arc "):
            while line.startswith("static arc "):
                mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$",
                              line)
                assert mo, (lineno, line)
                n, m, k = map(int, mo.groups())
                arcs = []
                for _ in range(k):
                    lineno, line = lineno+1, f.next()
                    mo = re.match(r"\s+{(\d+), (\d+)},$", line)
                    assert mo, (lineno, line)
                    i, j = map(int, mo.groups())
                    arcs.append((i, j))
                lineno, line = lineno+1, f.next()
                assert line == "};\n", (lineno, line)
                allarcs[(n, m)] = arcs
                lineno, line = lineno+1, f.next()
            mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
            assert mo, (lineno, line)
            s, t = map(int, mo.groups())
            assert s == len(states), (lineno, line)
            state = []
            for _ in range(t):
                lineno, line = lineno+1, f.next()
                mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
                assert mo, (lineno, line)
                k, n, m = map(int, mo.groups())
                arcs = allarcs[n, m]
                assert k == len(arcs), (lineno, line)
                state.append(arcs)
            states.append(state)
            lineno, line = lineno+1, f.next()
            assert line == "};\n", (lineno, line)
            lineno, line = lineno+1, f.next()
        self.states = states

        # Parse the dfas
        dfas = {}
        mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        for i in range(ndfas):
            lineno, line = lineno+1, f.next()
            mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$',
                          line)
            assert mo, (lineno, line)
            symbol = mo.group(2)
            number, x, y, z = map(int, mo.group(1, 3, 4, 5))
            assert self.symbol2number[symbol] == number, (lineno, line)
            assert self.number2symbol[number] == symbol, (lineno, line)
            assert x == 0, (lineno, line)
            state = states[z]
            assert y == len(state), (lineno, line)
            lineno, line = lineno+1, f.next()
            mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
            assert mo, (lineno, line)
            first = {}
            # Decode the FIRST-set bitmask emitted as octal-escaped bytes.
            rawbitset = eval(mo.group(1))
            # NOTE(review): the inner loop reuses variable ``i`` from the
            # outer dfa loop; harmless here (outer i is only a counter).
            for i, c in enumerate(rawbitset):
                byte = ord(c)
                for j in range(8):
                    if byte & (1<<j):
                        first[i*8 + j] = 1
            dfas[number] = (state, first)
        lineno, line = lineno+1, f.next()
        assert line == "};\n", (lineno, line)
        self.dfas = dfas

        # Parse the labels
        labels = []
        lineno, line = lineno+1, f.next()
        mo = re.match(r"static label labels\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        for i in range(nlabels):
            lineno, line = lineno+1, f.next()
            mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
            assert mo, (lineno, line)
            x, y = mo.groups()
            x = int(x)
            if y == "0":
                y = None
            else:
                y = eval(y)
            labels.append((x, y))
        lineno, line = lineno+1, f.next()
        assert line == "};\n", (lineno, line)
        self.labels = labels

        # Parse the grammar struct
        lineno, line = lineno+1, f.next()
        assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
        lineno, line = lineno+1, f.next()
        mo = re.match(r"\s+(\d+),$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        assert ndfas == len(self.dfas)
        lineno, line = lineno+1, f.next()
        assert line == "\tdfas,\n", (lineno, line)
        lineno, line = lineno+1, f.next()
        mo = re.match(r"\s+{(\d+), labels},$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        assert nlabels == len(self.labels), (lineno, line)
        lineno, line = lineno+1, f.next()
        mo = re.match(r"\s+(\d+)$", line)
        assert mo, (lineno, line)
        start = int(mo.group(1))
        assert start in self.number2symbol, (lineno, line)
        self.start = start
        lineno, line = lineno+1, f.next()
        assert line == "};\n", (lineno, line)
        # Anything after the closing brace is unexpected.
        try:
            lineno, line = lineno+1, f.next()
        except StopIteration:
            pass
        else:
            assert 0, (lineno, line)

    def finish_off(self):
        """Create additional useful structures.  (Internal)."""
        self.keywords = {} # map from keyword strings to arc labels
        self.tokens = {} # map from numeric token values to arc labels
        for ilabel, (type, value) in enumerate(self.labels):
            if type == token.NAME and value is not None:
                self.keywords[value] = ilabel
            elif value is None:
                self.tokens[type] = ilabel
Python
#! /usr/bin/env python

"""Token constants (from "token.h")."""

# Taken from Python (r53757) and modified to include some tokens
#   originally monkeypatched in by pgen2.tokenize

#--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
BACKQUOTE = 25
LBRACE = 26
RBRACE = 27
EQEQUAL = 28
NOTEQUAL = 29
LESSEQUAL = 30
GREATEREQUAL = 31
TILDE = 32
CIRCUMFLEX = 33
LEFTSHIFT = 34
RIGHTSHIFT = 35
DOUBLESTAR = 36
PLUSEQUAL = 37
MINEQUAL = 38
STAREQUAL = 39
SLASHEQUAL = 40
PERCENTEQUAL = 41
AMPEREQUAL = 42
VBAREQUAL = 43
CIRCUMFLEXEQUAL = 44
LEFTSHIFTEQUAL = 45
RIGHTSHIFTEQUAL = 46
DOUBLESTAREQUAL = 47
DOUBLESLASH = 48
DOUBLESLASHEQUAL = 49
AT = 50
OP = 51
COMMENT = 52
NL = 53
RARROW = 54
ERRORTOKEN = 55
N_TOKENS = 56
NT_OFFSET = 256
#--end constants--

# Reverse mapping: numeric token value -> constant name,
# e.g. tok_name[0] == "ENDMARKER".
tok_name = {}
# Snapshot globals() with list(): binding the loop variables _name/_value
# would otherwise mutate the module dict while it is being iterated over
# (a RuntimeError on Python 3; Python 2 only tolerated it because
# dict.items() returned a list copy).
for _name, _value in list(globals().items()):
    if isinstance(_value, int):  # replaces the dated ``type(x) is type(0)``
        tok_name[_value] = _name


def ISTERMINAL(x):
    """Return True if *x* is the number of a terminal token."""
    return x < NT_OFFSET

def ISNONTERMINAL(x):
    """Return True if *x* is the number of a grammar non-terminal symbol."""
    return x >= NT_OFFSET

def ISEOF(x):
    """Return True if *x* is the end-of-input marker."""
    return x == ENDMARKER
Python
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Safely evaluate Python string literals without using eval().""" import re simple_escapes = {"a": "\a", "b": "\b", "f": "\f", "n": "\n", "r": "\r", "t": "\t", "v": "\v", "'": "'", '"': '"', "\\": "\\"} def escape(m): all, tail = m.group(0, 1) assert all.startswith("\\") esc = simple_escapes.get(tail) if esc is not None: return esc if tail.startswith("x"): hexes = tail[1:] if len(hexes) < 2: raise ValueError("invalid hex string escape ('\\%s')" % tail) try: i = int(hexes, 16) except ValueError: raise ValueError("invalid hex string escape ('\\%s')" % tail) else: try: i = int(tail, 8) except ValueError: raise ValueError("invalid octal string escape ('\\%s')" % tail) return chr(i) def evalString(s): assert s.startswith("'") or s.startswith('"'), repr(s[:1]) q = s[0] if s[:3] == q*3: q = q*3 assert s.endswith(q), repr(s[-len(q):]) assert len(s) >= 2*len(q) s = s[len(q):-len(q)] return re.sub(r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3})", escape, s) def test(): for i in range(256): c = chr(i) s = repr(c) e = evalString(s) if e != c: print i, c, s, e if __name__ == "__main__": test()
Python
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

# Modifications:
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Parser driver.

This provides a high-level interface to parse a file into a syntax tree.

"""

__author__ = "Guido van Rossum <guido@python.org>"

__all__ = ["Driver", "load_grammar"]

# Python imports
import codecs
import os
import logging
import sys

# Pgen imports
from . import grammar, parse, token, tokenize, pgen


class Driver(object):
    """Drives the parser: feeds it tokens and collects the syntax tree.

    grammar -- a grammar.Grammar instance describing the language.
    convert -- optional node-conversion callable passed to parse.Parser.
    logger  -- logging.Logger used for debug output (root logger if None).
    """

    def __init__(self, grammar, convert=None, logger=None):
        self.grammar = grammar
        if logger is None:
            logger = logging.getLogger()
        self.logger = logger
        self.convert = convert

    def parse_tokens(self, tokens, debug=False):
        """Parse a series of tokens and return the syntax tree."""
        # XXX Move the prefix computation into a wrapper around tokenize.
        p = parse.Parser(self.grammar, self.convert)
        p.setup()
        lineno = 1
        column = 0
        type = value = start = end = line_text = None
        # 'prefix' accumulates whitespace/comments preceding the next
        # significant token; it is attached to that token so the tree
        # can reproduce the source exactly.
        prefix = u""
        for quintuple in tokens:
            type, value, start, end, line_text = quintuple
            if start != (lineno, column):
                # Gap between our bookkeeping position and the token
                # start: the skipped text (newlines and/or intra-line
                # whitespace) becomes part of the prefix.
                assert (lineno, column) <= start, ((lineno, column), start)
                s_lineno, s_column = start
                if lineno < s_lineno:
                    prefix += "\n" * (s_lineno - lineno)
                    lineno = s_lineno
                    column = 0
                if column < s_column:
                    prefix += line_text[column:s_column]
                    column = s_column
            if type in (tokenize.COMMENT, tokenize.NL):
                # Comments and non-logical newlines are never fed to the
                # parser; they ride along in the prefix.
                prefix += value
                lineno, column = end
                if value.endswith("\n"):
                    lineno += 1
                    column = 0
                continue
            if type == token.OP:
                # tokenize reports all operators as OP; map to the
                # specific token code the grammar expects.
                type = grammar.opmap[value]
            if debug:
                self.logger.debug("%s %r (prefix=%r)",
                                  token.tok_name[type], value, prefix)
            if p.addtoken(type, value, (prefix, start)):
                # addtoken() returns True once the start symbol is
                # complete (normally at ENDMARKER).
                if debug:
                    self.logger.debug("Stop.")
                break
            prefix = ""
            lineno, column = end
            if value.endswith("\n"):
                lineno += 1
                column = 0
        else:
            # We never broke out -- EOF is too soon (how can this happen???)
            raise parse.ParseError("incomplete input",
                                   type, value, (prefix, start))
        return p.rootnode

    def parse_stream_raw(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        tokens = tokenize.generate_tokens(stream.readline)
        return self.parse_tokens(tokens, debug)

    def parse_stream(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        return self.parse_stream_raw(stream, debug)

    def parse_file(self, filename, encoding=None, debug=False):
        """Parse a file and return the syntax tree."""
        stream = codecs.open(filename, "r", encoding)
        try:
            return self.parse_stream(stream, debug)
        finally:
            stream.close()

    def parse_string(self, text, debug=False):
        """Parse a string and return the syntax tree."""
        # .next is the Python 2 iterator protocol method, used here as a
        # readline-like callable (this module is py2-only).
        tokens = tokenize.generate_tokens(generate_lines(text).next)
        return self.parse_tokens(tokens, debug)


def generate_lines(text):
    """Generator that behaves like readline without using StringIO."""
    for line in text.splitlines(True):
        yield line
    # After the text is exhausted, keep returning "" (EOF), exactly as
    # file.readline() does.
    while True:
        yield ""


def load_grammar(gt="Grammar.txt", gp=None,
                 save=True, force=False, logger=None):
    """Load the grammar (maybe from a pickle).

    gt    -- path to the grammar text file.
    gp    -- path to the pickle cache; derived from gt and the running
             Python version when None.
    save  -- write the freshly generated tables back to gp.
    force -- regenerate even if the pickle is newer than the text file.
    """
    if logger is None:
        logger = logging.getLogger()
    if gp is None:
        head, tail = os.path.splitext(gt)
        if tail == ".txt":
            tail = ""
        # Embed the interpreter version so caches from different
        # Pythons don't collide.
        gp = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
    if force or not _newer(gp, gt):
        logger.info("Generating grammar tables from %s", gt)
        g = pgen.generate_grammar(gt)
        if save:
            logger.info("Writing grammar tables to %s", gp)
            try:
                g.dump(gp)
            # NOTE(review): Python 2-only except syntax; failure to cache
            # is deliberately non-fatal (best effort).
            except IOError, e:
                logger.info("Writing failed:"+str(e))
    else:
        g = grammar.Grammar()
        g.load(gp)
    return g


def _newer(a, b):
    """Inquire whether file a was written since file b."""
    if not os.path.exists(a):
        return False
    if not os.path.exists(b):
        return True
    return os.path.getmtime(a) >= os.path.getmtime(b)
Python
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """The pgen2 package."""
Python
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation.
# All rights reserved.

"""Tokenization help for Python programs.

generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens.  It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF).  It generates
5-tuples with these members:

    the token type (see token.py)
    the token (a string)
    the starting (row, column) indices of the token (a 2-tuple of ints)
    the ending (row, column) indices of the token (a 2-tuple of ints)
    the original line (string)

It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators

Older entry points
    tokenize_loop(readline, tokeneater)
    tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""

__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = \
    'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'

import string, re
from codecs import BOM_UTF8, lookup
from lib2to3.pgen2.token import *

from . import token
# Re-export all public token constants plus this module's own entry points.
__all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
           "generate_tokens", "untokenize"]
del token

try:
    bytes
except NameError:
    # Support bytes type in Python <= 2.5, so 2to3 turns itself into
    # valid Python 3 code.
    bytes = str

# Helpers for composing the regular expressions below: alternation,
# zero-or-more, zero-or-one.
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'

Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'

Binnumber = r'0[bB][01]*'
Hexnumber = r'0[xX][\da-fA-F]*[lL]?'
Octnumber = r'0[oO]?[0-7]*[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[ubUB]?[rR]?'''", '[ubUB]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
                 r"//=?", r"->",
                 r"[+\-*/%&|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken

# First (or only) line of ' or " string.
ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)

# Compile the scanner patterns once at import time.
tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
# For each string-opening prefix+quote, the pattern that matches the
# rest of the string; None entries mark prefixes that are not by
# themselves string openers.
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "b'''": single3prog, 'b"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "br'''": single3prog, 'br"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "B'''": single3prog, 'B"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            "bR'''": single3prog, 'bR"""': double3prog,
            "Br'''": single3prog, 'Br"""': double3prog,
            "BR'''": single3prog, 'BR"""': double3prog,
            'r': None, 'R': None,
            'u': None, 'U': None,
            'b': None, 'B': None}

# All valid triple-quote openers (every prefix/case combination).
triple_quoted = {}
for t in ("'''", '"""',
          "r'''", 'r"""', "R'''", 'R"""',
          "u'''", 'u"""', "U'''", 'U"""',
          "b'''", 'b"""', "B'''", 'B"""',
          "ur'''", 'ur"""', "Ur'''", 'Ur"""',
          "uR'''", 'uR"""', "UR'''", 'UR"""',
          "br'''", 'br"""', "Br'''", 'Br"""',
          "bR'''", 'bR"""', "BR'''", 'BR"""',):
    triple_quoted[t] = t
# All valid single-quote openers.
single_quoted = {}
for t in ("'", '"',
          "r'", 'r"', "R'", 'R"',
          "u'", 'u"', "U'", 'U"',
          "b'", 'b"', "B'", 'B"',
          "ur'", 'ur"', "Ur'", 'Ur"',
          "uR'", 'uR"', "UR'", 'UR"',
          "br'", 'br"', "Br'", 'Br"',
          "bR'", 'bR"', "BR'", 'BR"', ):
    single_quoted[t] = t

tabsize = 8

class TokenError(Exception): pass

class StopTokenizing(Exception): pass

# NOTE(review): tuple parameters and the print statement below are
# Python 2-only syntax; this module predates the py3 port.
def printtoken(type, token, (srow, scol), (erow, ecol), line): # for testing
    print "%d,%d-%d,%d:\t%s\t%s" % \
        (srow, scol, erow, ecol, tok_name[type], repr(token))

def tokenize(readline, tokeneater=printtoken):
    """
    The tokenize() function accepts two parameters: one representing the
    input stream, and one providing an output mechanism for tokenize().

    The first parameter, readline, must be a callable object which provides
    the same interface as the readline() method of built-in file objects.
    Each call to the function should return one line of input as a string.

    The second parameter, tokeneater, must also be a callable object. It is
    called once for each token, with five arguments, corresponding to the
    tuples generated by generate_tokens().
    """
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        pass

# backwards compatible interface
def tokenize_loop(readline, tokeneater):
    for token_info in generate_tokens(readline):
        tokeneater(*token_info)

class Untokenizer:
    """Rebuilds source text from a token stream (see untokenize())."""

    def __init__(self):
        self.tokens = []
        # Position of the end of the last token emitted; used to
        # re-insert the whitespace between tokens.
        self.prev_row = 1
        self.prev_col = 0

    def add_whitespace(self, start):
        row, col = start
        assert row <= self.prev_row
        col_offset = col - self.prev_col
        if col_offset:
            self.tokens.append(" " * col_offset)

    def untokenize(self, iterable):
        for t in iterable:
            if len(t) == 2:
                # Position-less (type, string) pairs: fall back to the
                # degraded compat() mode for the rest of the stream.
                self.compat(t, iterable)
                break
            tok_type, token, start, end, line = t
            self.add_whitespace(start)
            self.tokens.append(token)
            self.prev_row, self.prev_col = end
            if tok_type in (NEWLINE, NL):
                self.prev_row += 1
                self.prev_col = 0
        return "".join(self.tokens)

    def compat(self, token, iterable):
        # Reconstruction without position info: separate NAME/NUMBER
        # tokens with a space and replay INDENT/DEDENT manually.
        startline = False
        indents = []
        toks_append = self.tokens.append
        toknum, tokval = token
        if toknum in (NAME, NUMBER):
            tokval += ' '
        if toknum in (NEWLINE, NL):
            startline = True
        for tok in iterable:
            toknum, tokval = tok[:2]

            if toknum in (NAME, NUMBER):
                tokval += ' '

            if toknum == INDENT:
                indents.append(tokval)
                continue
            elif toknum == DEDENT:
                indents.pop()
                continue
            elif toknum in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                toks_append(indents[-1])
                startline = False
            toks_append(tokval)

cookie_re = re.compile("coding[:=]\s*([-\w.]+)")

def _get_normal_name(orig_enc):
    """Imitates get_normal_name in tokenizer.c."""
    # Only care about the first 12 characters.
    enc = orig_enc[:12].lower().replace("_", "-")
    if enc == "utf-8" or enc.startswith("utf-8-"):
        return "utf-8"
    if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
       enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
        return "iso-8859-1"
    return orig_enc

def detect_encoding(readline):
    """
    The detect_encoding() function is used to detect the encoding that should
    be used to decode a Python source file.  It requires one argument,
    readline, in the same way as the tokenize() generator.

    It will call readline a maximum of twice, and return the encoding used
    (as a string) and a list of any lines (left as bytes) it has read
    in.

    It detects the encoding from the presence of a utf-8 bom or an encoding
    cookie as specified in pep-0263.  If both a bom and a cookie are present,
    but disagree, a SyntaxError will be raised.  If the encoding cookie is an
    invalid charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
    'utf-8-sig' is returned.

    If no encoding is specified, then the default of 'utf-8' will be returned.
    """
    bom_found = False
    encoding = None
    default = 'utf-8'
    def read_or_stop():
        try:
            return readline()
        except StopIteration:
            return bytes()

    def find_cookie(line):
        try:
            line_string = line.decode('ascii')
        except UnicodeDecodeError:
            # A cookie must be pure ASCII to be valid.
            return None

        matches = cookie_re.findall(line_string)
        if not matches:
            return None
        encoding = _get_normal_name(matches[0])
        try:
            codec = lookup(encoding)
        except LookupError:
            # This behaviour mimics the Python interpreter
            raise SyntaxError("unknown encoding: " + encoding)

        if bom_found:
            if codec.name != 'utf-8':
                # This behaviour mimics the Python interpreter
                raise SyntaxError('encoding problem: utf-8')
            encoding += '-sig'
        return encoding

    first = read_or_stop()
    if first.startswith(BOM_UTF8):
        bom_found = True
        first = first[3:]
        default = 'utf-8-sig'
    if not first:
        return default, []

    encoding = find_cookie(first)
    if encoding:
        return encoding, [first]

    second = read_or_stop()
    if not second:
        return default, [first]

    encoding = find_cookie(second)
    if encoding:
        return encoding, [first, second]

    return default, [first, second]

def untokenize(iterable):
    """Transform tokens back into Python source code.

    Each element returned by the iterable must be a token sequence
    with at least two elements, a token number and token value.  If
    only two tokens are passed, the resulting output is poor.

    Round-trip invariant for full input:
        Untokenized source will match input source exactly

    Round-trip invariant for limited input:
        # Output text will tokenize back to the input
        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).next
        t2 = [tok[:2] for tok in generate_tokens(readline)]
        assert t1 == t2
    """
    ut = Untokenizer()
    return ut.untokenize(iterable)

def generate_tokens(readline):
    """
    The generate_tokens() generator requires one argument, readline, which
    must be a callable object which provides the same interface as the
    readline() method of built-in file objects.  Each call to the function
    should return one line of input as a string.  Alternately, readline
    can be a callable function terminating with StopIteration:
        readline = open(myfile).next    # Example of alternate readline

    The generator produces 5-tuples with these members: the token type; the
    token string; a 2-tuple (srow, scol) of ints specifying the row and
    column where the token begins in the source; a 2-tuple (erow, ecol) of
    ints specifying the row and column where the token ends in the source;
    and the line on which the token was found.  The line passed is the
    logical line; continuation lines are included.
    """
    lnum = parenlev = continued = 0
    namechars, numchars = string.ascii_letters + '_', '0123456789'
    # contstr/contline carry the partially-read text of a string or its
    # logical line while it spans multiple physical lines.
    contstr, needcont = '', 0
    contline = None
    indents = [0]

    while 1:                                   # loop over lines in stream
        try:
            line = readline()
        except StopIteration:
            line = ''
        lnum = lnum + 1
        pos, max = 0, len(line)

        if contstr:                            # continued string
            if not line:
                raise TokenError, ("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                pos = end = endmatch.end(0)
                yield (STRING, contstr + line[:end],
                       strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                # Single-quoted string not continued with a backslash:
                # report the accumulated text as an error token.
                yield (ERRORTOKEN, contstr + line,
                           strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue

        elif parenlev == 0 and not continued:  # new statement
            if not line: break
            column = 0
            while pos < max:                   # measure leading whitespace
                if line[pos] == ' ': column = column + 1
                elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize
                elif line[pos] == '\f': column = 0
                else: break
                pos = pos + 1
            if pos == max: break

            if line[pos] in '#\r\n':           # skip comments or blank lines
                if line[pos] == '#':
                    comment_token = line[pos:].rstrip('\r\n')
                    nl_pos = pos + len(comment_token)
                    yield (COMMENT, comment_token,
                           (lnum, pos), (lnum, pos + len(comment_token)), line)
                    yield (NL, line[nl_pos:],
                           (lnum, nl_pos), (lnum, len(line)), line)
                else:
                    yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue

            if column > indents[-1]:           # count indents or dedents
                indents.append(column)
                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level",
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)

        else:                                  # continued statement
            if not line:
                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
            continued = 0

        while pos < max:
            pseudomatch = pseudoprog.match(line, pos)
            if pseudomatch:                                # scan for tokens
                start, end = pseudomatch.span(1)
                spos, epos, pos = (lnum, start), (lnum, end), end
                token, initial = line[start:end], line[start]

                if initial in numchars or \
                   (initial == '.' and token != '.'):      # ordinary number
                    yield (NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
                    newline = NEWLINE
                    if parenlev > 0:
                        # Inside brackets a newline is not a statement
                        # terminator.
                        newline = NL
                    yield (newline, token, spos, epos, line)
                elif initial == '#':
                    assert not token.endswith("\n")
                    yield (COMMENT, token, spos, epos, line)
                elif token in triple_quoted:
                    endprog = endprogs[token]
                    endmatch = endprog.match(line, pos)
                    if endmatch:                           # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
                        yield (STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start)           # multiple lines
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in single_quoted or \
                    token[:2] in single_quoted or \
                    token[:3] in single_quoted:
                    if token[-1] == '\n':                  # continued string
                        strstart = (lnum, start)
                        endprog = (endprogs[initial] or endprogs[token[1]] or
                                   endprogs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else:                                  # ordinary string
                        yield (STRING, token, spos, epos, line)
                elif initial in namechars:                 # ordinary name
                    yield (NAME, token, spos, epos, line)
                elif initial == '\\':                      # continued stmt
                    # This yield is new; needed for better idempotency:
                    yield (NL, token, spos, (lnum, pos), line)
                    continued = 1
                else:
                    if initial in '([{': parenlev = parenlev + 1
                    elif initial in ')]}': parenlev = parenlev - 1
                    yield (OP, token, spos, epos, line)
            else:
                yield (ERRORTOKEN, line[pos],
                           (lnum, pos), (lnum, pos+1), line)
                pos = pos + 1

    for indent in indents[1:]:                 # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')

if __name__ == '__main__':                     # testing
    import sys
    if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
    else: tokenize(sys.stdin.readline)
Python
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""This module defines the data structures used to represent a grammar.

These are a bit arcane because they are derived from the data
structures used by Python's 'pgen' parser generator.

There's also a table here mapping operators to their names in the
token module; the Python tokenize module reports all operators as the
fallback token code OP, but the parser needs the actual token code.

"""

# Python imports
import pickle

# Local imports
from . import token, tokenize


class Grammar(object):
    """Pgen parsing tables conversion class.

    Once initialized, this class supplies the grammar tables for the
    parsing engine implemented by parse.py.  The parsing engine
    accesses the instance variables directly.  The class here does not
    provide initialization of the tables; several subclasses exist to
    do this (see the conv and pgen modules).

    The load() method reads the tables from a pickle file, which is
    much faster than the other ways offered by subclasses.  The pickle
    file is written by calling dump() (after loading the grammar
    tables using a subclass).  The report() method prints a readable
    representation of the tables to stdout, for debugging.

    The instance variables are as follows:

    symbol2number -- a dict mapping symbol names to numbers.  Symbol
                     numbers are always 256 or higher, to distinguish
                     them from token numbers, which are between 0 and
                     255 (inclusive).

    number2symbol -- a dict mapping numbers to symbol names;
                     these two are each other's inverse.

    states        -- a list of DFAs, where each DFA is a list of
                     states, each state is a list of arcs, and each
                     arc is a (i, j) pair where i is a label and j is
                     a state number.  The DFA number is the index into
                     this list.  (This name is slightly confusing.)
                     Final states are represented by a special arc of
                     the form (0, j) where j is its own state number.

    dfas          -- a dict mapping symbol numbers to (DFA, first)
                     pairs, where DFA is an item from the states list
                     above, and first is a set of tokens that can
                     begin this grammar rule (represented by a dict
                     whose values are always 1).

    labels        -- a list of (x, y) pairs where x is either a token
                     number or a symbol number, and y is either None
                     or a string; the strings are keywords.  The label
                     number is the index in this list; label numbers
                     are used to mark state transitions (arcs) in the
                     DFAs.

    start         -- the number of the grammar's start symbol.

    keywords      -- a dict mapping keyword strings to arc labels.

    tokens        -- a dict mapping token numbers to arc labels.

    """

    def __init__(self):
        self.symbol2number = {}
        self.number2symbol = {}
        self.states = []
        self.dfas = {}
        # Label 0 is reserved as the EMPTY label.
        self.labels = [(0, "EMPTY")]
        self.keywords = {}
        self.tokens = {}
        self.symbol2label = {}
        # 256 is the lowest possible symbol number, hence the default
        # start symbol until a real one is assigned.
        self.start = 256

    def dump(self, filename):
        """Dump the grammar tables to a pickle file."""
        # Protocol 2 keeps the pickle compact and binary-safe.
        f = open(filename, "wb")
        pickle.dump(self.__dict__, f, 2)
        f.close()

    def load(self, filename):
        """Load the grammar tables from a pickle file."""
        f = open(filename, "rb")
        d = pickle.load(f)
        f.close()
        # Restore every table in one shot.
        self.__dict__.update(d)

    def copy(self):
        """
        Copy the grammar.
        """
        new = self.__class__()
        # Shallow-copy each table; the nested values are shared with
        # the original.
        for dict_attr in ("symbol2number", "number2symbol", "dfas", "keywords",
                          "tokens", "symbol2label"):
            setattr(new, dict_attr, getattr(self, dict_attr).copy())
        new.labels = self.labels[:]
        new.states = self.states[:]
        new.start = self.start
        return new

    def report(self):
        """Dump the grammar tables to standard output, for debugging."""
        # Python 2 print statements; this module predates the py3 port.
        from pprint import pprint
        print "s2n"
        pprint(self.symbol2number)
        print "n2s"
        pprint(self.number2symbol)
        print "states"
        pprint(self.states)
        print "dfas"
        pprint(self.dfas)
        print "labels"
        pprint(self.labels)
        print "start", self.start


# Map from operator to number (since tokenize doesn't do this)

opmap_raw = """
( LPAR
) RPAR
[ LSQB
] RSQB
: COLON
, COMMA
; SEMI
+ PLUS
- MINUS
* STAR
/ SLASH
| VBAR
& AMPER
< LESS
> GREATER
= EQUAL
. DOT
% PERCENT
` BACKQUOTE
{ LBRACE
} RBRACE
@ AT
== EQEQUAL
!= NOTEQUAL
<> NOTEQUAL
<= LESSEQUAL
>= GREATEREQUAL
~ TILDE
^ CIRCUMFLEX
<< LEFTSHIFT
>> RIGHTSHIFT
** DOUBLESTAR
+= PLUSEQUAL
-= MINEQUAL
*= STAREQUAL
/= SLASHEQUAL
%= PERCENTEQUAL
&= AMPEREQUAL
|= VBAREQUAL
^= CIRCUMFLEXEQUAL
<<= LEFTSHIFTEQUAL
>>= RIGHTSHIFTEQUAL
**= DOUBLESTAREQUAL
// DOUBLESLASH
//= DOUBLESLASHEQUAL
-> RARROW
"""

# Build the operator-string -> token-number map from the table above.
opmap = {}
for line in opmap_raw.splitlines():
    if line:
        op, name = line.split()
        opmap[op] = getattr(token, name)
Python
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Refactoring framework. Used as a main program, this can refactor any number of files and/or recursively descend down directories. Imported as a module, this provides infrastructure to write your own refactoring tool. """ from __future__ import with_statement __author__ = "Guido van Rossum <guido@python.org>" # Python imports import os import sys import logging import operator import collections import StringIO from itertools import chain # Local imports from .pgen2 import driver, tokenize, token from . import pytree, pygram def get_all_fix_names(fixer_pkg, remove_prefix=True): """Return a sorted list of all available fix names in the given package.""" pkg = __import__(fixer_pkg, [], [], ["*"]) fixer_dir = os.path.dirname(pkg.__file__) fix_names = [] for name in sorted(os.listdir(fixer_dir)): if name.startswith("fix_") and name.endswith(".py"): if remove_prefix: name = name[4:] fix_names.append(name[:-3]) return fix_names class _EveryNode(Exception): pass def _get_head_types(pat): """ Accepts a pytree Pattern Node and returns a set of the pattern types which will match first. """ if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)): # NodePatters must either have no type and no content # or a type and content -- so they don't get any farther # Always return leafs if pat.type is None: raise _EveryNode return set([pat.type]) if isinstance(pat, pytree.NegatedPattern): if pat.content: return _get_head_types(pat.content) raise _EveryNode # Negated Patterns don't have a type if isinstance(pat, pytree.WildcardPattern): # Recurse on each node in content r = set() for p in pat.content: for x in p: r.update(_get_head_types(x)) return r raise Exception("Oh no! I don't understand pattern %s" %(pat)) def _get_headnode_dict(fixer_list): """ Accepts a list of fixers and returns a dictionary of head node type --> fixer list. 
""" head_nodes = collections.defaultdict(list) every = [] for fixer in fixer_list: if fixer.pattern: try: heads = _get_head_types(fixer.pattern) except _EveryNode: every.append(fixer) else: for node_type in heads: head_nodes[node_type].append(fixer) else: if fixer._accept_type is not None: head_nodes[fixer._accept_type].append(fixer) else: every.append(fixer) for node_type in chain(pygram.python_grammar.symbol2number.itervalues(), pygram.python_grammar.tokens): head_nodes[node_type].extend(every) return dict(head_nodes) def get_fixers_from_package(pkg_name): """ Return the fully qualified names for fixers in the package pkg_name. """ return [pkg_name + "." + fix_name for fix_name in get_all_fix_names(pkg_name, False)] def _identity(obj): return obj if sys.version_info < (3, 0): import codecs _open_with_encoding = codecs.open # codecs.open doesn't translate newlines sadly. def _from_system_newlines(input): return input.replace(u"\r\n", u"\n") def _to_system_newlines(input): if os.linesep != "\n": return input.replace(u"\n", os.linesep) else: return input else: _open_with_encoding = open _from_system_newlines = _identity _to_system_newlines = _identity def _detect_future_features(source): have_docstring = False gen = tokenize.generate_tokens(StringIO.StringIO(source).readline) def advance(): tok = gen.next() return tok[0], tok[1] ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT)) features = set() try: while True: tp, value = advance() if tp in ignore: continue elif tp == token.STRING: if have_docstring: break have_docstring = True elif tp == token.NAME and value == u"from": tp, value = advance() if tp != token.NAME or value != u"__future__": break tp, value = advance() if tp != token.NAME or value != u"import": break tp, value = advance() if tp == token.OP and value == u"(": tp, value = advance() while tp == token.NAME: features.add(value) tp, value = advance() if tp != token.OP or value != u",": break tp, value = advance() else: break except 
StopIteration: pass return frozenset(features) class FixerError(Exception): """A fixer could not be loaded.""" class RefactoringTool(object): _default_options = {"print_function" : False} CLASS_PREFIX = "Fix" # The prefix for fixer classes FILE_PREFIX = "fix_" # The prefix for modules with a fixer within def __init__(self, fixer_names, options=None, explicit=None): """Initializer. Args: fixer_names: a list of fixers to import options: an dict with configuration. explicit: a list of fixers to run even if they are explicit. """ self.fixers = fixer_names self.explicit = explicit or [] self.options = self._default_options.copy() if options is not None: self.options.update(options) if self.options["print_function"]: self.grammar = pygram.python_grammar_no_print_statement else: self.grammar = pygram.python_grammar self.errors = [] self.logger = logging.getLogger("RefactoringTool") self.fixer_log = [] self.wrote = False self.driver = driver.Driver(self.grammar, convert=pytree.convert, logger=self.logger) self.pre_order, self.post_order = self.get_fixers() self.pre_order_heads = _get_headnode_dict(self.pre_order) self.post_order_heads = _get_headnode_dict(self.post_order) self.files = [] # List of files that were or should be modified def get_fixers(self): """Inspects the options to load the requested patterns and handlers. Returns: (pre_order, post_order), where pre_order is the list of fixers that want a pre-order AST traversal, and post_order is the list that want post-order traversal. 
""" pre_order_fixers = [] post_order_fixers = [] for fix_mod_path in self.fixers: mod = __import__(fix_mod_path, {}, {}, ["*"]) fix_name = fix_mod_path.rsplit(".", 1)[-1] if fix_name.startswith(self.FILE_PREFIX): fix_name = fix_name[len(self.FILE_PREFIX):] parts = fix_name.split("_") class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts]) try: fix_class = getattr(mod, class_name) except AttributeError: raise FixerError("Can't find %s.%s" % (fix_name, class_name)) fixer = fix_class(self.options, self.fixer_log) if fixer.explicit and self.explicit is not True and \ fix_mod_path not in self.explicit: self.log_message("Skipping implicit fixer: %s", fix_name) continue self.log_debug("Adding transformation: %s", fix_name) if fixer.order == "pre": pre_order_fixers.append(fixer) elif fixer.order == "post": post_order_fixers.append(fixer) else: raise FixerError("Illegal fixer order: %r" % fixer.order) key_func = operator.attrgetter("run_order") pre_order_fixers.sort(key=key_func) post_order_fixers.sort(key=key_func) return (pre_order_fixers, post_order_fixers) def log_error(self, msg, *args, **kwds): """Called when an error occurs.""" raise def log_message(self, msg, *args): """Hook to log a message.""" if args: msg = msg % args self.logger.info(msg) def log_debug(self, msg, *args): if args: msg = msg % args self.logger.debug(msg) def print_output(self, old_text, new_text, filename, equal): """Called with the old version, new version, and filename of a refactored file.""" pass def refactor(self, items, write=False, doctests_only=False): """Refactor a list of files and directories.""" for dir_or_file in items: if os.path.isdir(dir_or_file): self.refactor_dir(dir_or_file, write, doctests_only) else: self.refactor_file(dir_or_file, write, doctests_only) def refactor_dir(self, dir_name, write=False, doctests_only=False): """Descends down a directory and refactor every Python file found. Python files are assumed to have a .py extension. 
Files and subdirectories starting with '.' are skipped. """ for dirpath, dirnames, filenames in os.walk(dir_name): self.log_debug("Descending into %s", dirpath) dirnames.sort() filenames.sort() for name in filenames: if not name.startswith(".") and \ os.path.splitext(name)[1].endswith("py"): fullname = os.path.join(dirpath, name) self.refactor_file(fullname, write, doctests_only) # Modify dirnames in-place to remove subdirs with leading dots dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")] def _read_python_source(self, filename): """ Do our best to decode a Python source file correctly. """ try: f = open(filename, "rb") except IOError, err: self.log_error("Can't open %s: %s", filename, err) return None, None try: encoding = tokenize.detect_encoding(f.readline)[0] finally: f.close() with _open_with_encoding(filename, "r", encoding=encoding) as f: return _from_system_newlines(f.read()), encoding def refactor_file(self, filename, write=False, doctests_only=False): """Refactors a file.""" input, encoding = self._read_python_source(filename) if input is None: # Reading the file failed. return input += u"\n" # Silence certain parse errors if doctests_only: self.log_debug("Refactoring doctests in %s", filename) output = self.refactor_docstring(input, filename) if output != input: self.processed_file(output, filename, input, write, encoding) else: self.log_debug("No doctest changes in %s", filename) else: tree = self.refactor_string(input, filename) if tree and tree.was_changed: # The [:-1] is to take off the \n we added earlier self.processed_file(unicode(tree)[:-1], filename, write=write, encoding=encoding) else: self.log_debug("No changes in %s", filename) def refactor_string(self, data, name): """Refactor a given input string. Args: data: a string holding the code to be refactored. name: a human-readable name for use in error/log messages. Returns: An AST corresponding to the refactored input stream; None if there were errors during the parse. 
""" features = _detect_future_features(data) if "print_function" in features: self.driver.grammar = pygram.python_grammar_no_print_statement try: tree = self.driver.parse_string(data) except Exception, err: self.log_error("Can't parse %s: %s: %s", name, err.__class__.__name__, err) return finally: self.driver.grammar = self.grammar tree.future_features = features self.log_debug("Refactoring %s", name) self.refactor_tree(tree, name) return tree def refactor_stdin(self, doctests_only=False): input = sys.stdin.read() if doctests_only: self.log_debug("Refactoring doctests in stdin") output = self.refactor_docstring(input, "<stdin>") if output != input: self.processed_file(output, "<stdin>", input) else: self.log_debug("No doctest changes in stdin") else: tree = self.refactor_string(input, "<stdin>") if tree and tree.was_changed: self.processed_file(unicode(tree), "<stdin>", input) else: self.log_debug("No changes in stdin") def refactor_tree(self, tree, name): """Refactors a parse tree (modifying the tree in place). Args: tree: a pytree.Node instance representing the root of the tree to be refactored. name: a human-readable name for this tree. Returns: True if the tree was modified, False otherwise. """ for fixer in chain(self.pre_order, self.post_order): fixer.start_tree(tree, name) self.traverse_by(self.pre_order_heads, tree.pre_order()) self.traverse_by(self.post_order_heads, tree.post_order()) for fixer in chain(self.pre_order, self.post_order): fixer.finish_tree(tree, name) return tree.was_changed def traverse_by(self, fixers, traversal): """Traverse an AST, applying a set of fixers to each node. This is a helper method for refactor_tree(). Args: fixers: a list of fixer instances. traversal: a generator that yields AST nodes. 
Returns: None """ if not fixers: return for node in traversal: for fixer in fixers[node.type]: results = fixer.match(node) if results: new = fixer.transform(node, results) if new is not None: node.replace(new) node = new def processed_file(self, new_text, filename, old_text=None, write=False, encoding=None): """ Called when a file has been refactored, and there are changes. """ self.files.append(filename) if old_text is None: old_text = self._read_python_source(filename)[0] if old_text is None: return equal = old_text == new_text self.print_output(old_text, new_text, filename, equal) if equal: self.log_debug("No changes to %s", filename) return if write: self.write_file(new_text, filename, old_text, encoding) else: self.log_debug("Not writing changes to %s", filename) def write_file(self, new_text, filename, old_text, encoding=None): """Writes a string to a file. It first shows a unified diff between the old text and the new text, and then rewrites the file; the latter is only done if the write option is set. """ try: f = _open_with_encoding(filename, "w", encoding=encoding) except os.error, err: self.log_error("Can't create %s: %s", filename, err) return try: f.write(_to_system_newlines(new_text)) except os.error, err: self.log_error("Can't write %s: %s", filename, err) finally: f.close() self.log_debug("Wrote changes to %s", filename) self.wrote = True PS1 = ">>> " PS2 = "... " def refactor_docstring(self, input, filename): """Refactors a docstring, looking for doctests. This returns a modified version of the input string. It looks for doctests, which start with a ">>>" prompt, and may be continued with "..." prompts, as long as the "..." is indented the same as the ">>>". (Unfortunately we can't use the doctest module's parser, since, like most parsers, it is not geared towards preserving the original source.) 
""" result = [] block = None block_lineno = None indent = None lineno = 0 for line in input.splitlines(True): lineno += 1 if line.lstrip().startswith(self.PS1): if block is not None: result.extend(self.refactor_doctest(block, block_lineno, indent, filename)) block_lineno = lineno block = [line] i = line.find(self.PS1) indent = line[:i] elif (indent is not None and (line.startswith(indent + self.PS2) or line == indent + self.PS2.rstrip() + u"\n")): block.append(line) else: if block is not None: result.extend(self.refactor_doctest(block, block_lineno, indent, filename)) block = None indent = None result.append(line) if block is not None: result.extend(self.refactor_doctest(block, block_lineno, indent, filename)) return u"".join(result) def refactor_doctest(self, block, lineno, indent, filename): """Refactors one doctest. A doctest is given as a block of lines, the first of which starts with ">>>" (possibly indented), while the remaining lines start with "..." (identically indented). """ try: tree = self.parse_block(block, lineno, indent) except Exception, err: if self.log.isEnabledFor(logging.DEBUG): for line in block: self.log_debug("Source: %s", line.rstrip(u"\n")) self.log_error("Can't parse docstring in %s line %s: %s: %s", filename, lineno, err.__class__.__name__, err) return block if self.refactor_tree(tree, filename): new = unicode(tree).splitlines(True) # Undo the adjustment of the line numbers in wrap_toks() below. 
clipped, new = new[:lineno-1], new[lineno-1:] assert clipped == [u"\n"] * (lineno-1), clipped if not new[-1].endswith(u"\n"): new[-1] += u"\n" block = [indent + self.PS1 + new.pop(0)] if new: block += [indent + self.PS2 + line for line in new] return block def summarize(self): if self.wrote: were = "were" else: were = "need to be" if not self.files: self.log_message("No files %s modified.", were) else: self.log_message("Files that %s modified:", were) for file in self.files: self.log_message(file) if self.fixer_log: self.log_message("Warnings/messages while refactoring:") for message in self.fixer_log: self.log_message(message) if self.errors: if len(self.errors) == 1: self.log_message("There was 1 error:") else: self.log_message("There were %d errors:", len(self.errors)) for msg, args, kwds in self.errors: self.log_message(msg, *args, **kwds) def parse_block(self, block, lineno, indent): """Parses a block into a tree. This is necessary to get correct line number / offset information in the parser diagnostics and embedded into the parse tree. """ tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent)) tree.future_features = frozenset() return tree def wrap_toks(self, block, lineno, indent): """Wraps a tokenize stream to systematically modify start/end.""" tokens = tokenize.generate_tokens(self.gen_lines(block, indent).next) for type, value, (line0, col0), (line1, col1), line_text in tokens: line0 += lineno - 1 line1 += lineno - 1 # Don't bother updating the columns; this is too complicated # since line_text would also have to be updated and it would # still break for tokens spanning lines. Let the user guess # that the column numbers for doctests are relative to the # end of the prompt string (PS1 or PS2). yield type, value, (line0, col0), (line1, col1), line_text def gen_lines(self, block, indent): """Generates lines as expected by tokenize from a list of lines. This strips the first len(indent + self.PS1) characters off each line. 
""" prefix1 = indent + self.PS1 prefix2 = indent + self.PS2 prefix = prefix1 for line in block: if line.startswith(prefix): yield line[len(prefix):] elif line == prefix.rstrip() + u"\n": yield u"\n" else: raise AssertionError("line=%r, prefix=%r" % (line, prefix)) prefix = prefix2 while True: yield "" class MultiprocessingUnsupported(Exception): pass class MultiprocessRefactoringTool(RefactoringTool): def __init__(self, *args, **kwargs): super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs) self.queue = None self.output_lock = None def refactor(self, items, write=False, doctests_only=False, num_processes=1): if num_processes == 1: return super(MultiprocessRefactoringTool, self).refactor( items, write, doctests_only) try: import multiprocessing except ImportError: raise MultiprocessingUnsupported if self.queue is not None: raise RuntimeError("already doing multiple processes") self.queue = multiprocessing.JoinableQueue() self.output_lock = multiprocessing.Lock() processes = [multiprocessing.Process(target=self._child) for i in xrange(num_processes)] try: for p in processes: p.start() super(MultiprocessRefactoringTool, self).refactor(items, write, doctests_only) finally: self.queue.join() for i in xrange(num_processes): self.queue.put(None) for p in processes: if p.is_alive(): p.join() self.queue = None def _child(self): task = self.queue.get() while task is not None: args, kwargs = task try: super(MultiprocessRefactoringTool, self).refactor_file( *args, **kwargs) finally: self.queue.task_done() task = self.queue.get() def refactor_file(self, *args, **kwargs): if self.queue is not None: self.queue.put((args, kwargs)) else: return super(MultiprocessRefactoringTool, self).refactor_file( *args, **kwargs)
Python
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Export the Python grammar and symbols.""" # Python imports import os # Local imports from .pgen2 import token from .pgen2 import driver from . import pytree # The grammar file _GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt") class Symbols(object): def __init__(self, grammar): """Initializer. Creates an attribute for each grammar symbol (nonterminal), whose value is the symbol's type (an int >= 256). """ for name, symbol in grammar.symbol2number.iteritems(): setattr(self, name, symbol) python_grammar = driver.load_grammar(_GRAMMAR_FILE) python_symbols = Symbols(python_grammar) python_grammar_no_print_statement = python_grammar.copy() del python_grammar_no_print_statement.keywords["print"]
Python
#empty
Python
#!/usr/bin/env python import sys from lib2to3.main import main sys.exit(main("lib2to3.fixes"))
Python
#!/usr/bin/env python2.5 """ This is a benchmarking script to test the speed of 2to3's pattern matching system. It's equivalent to "refactor.py -f all" for every Python module in sys.modules, but without engaging the actual transformations. """ __author__ = "Collin Winter <collinw at gmail.com>" # Python imports import os.path import sys from time import time # Test imports from .support import adjust_path adjust_path() # Local imports from .. import refactor ### Mock code for refactor.py and the fixers ############################################################################### class Options: def __init__(self, **kwargs): for k, v in kwargs.items(): setattr(self, k, v) self.verbose = False def dummy_transform(*args, **kwargs): pass ### Collect list of modules to match against ############################################################################### files = [] for mod in sys.modules.values(): if mod is None or not hasattr(mod, '__file__'): continue f = mod.__file__ if f.endswith('.pyc'): f = f[:-1] if f.endswith('.py'): files.append(f) ### Set up refactor and run the benchmark ############################################################################### options = Options(fix=["all"], print_function=False, doctests_only=False) refactor = refactor.RefactoringTool(options) for fixer in refactor.fixers: # We don't want them to actually fix the tree, just match against it. fixer.transform = dummy_transform t = time() for f in files: print "Matching", f refactor.refactor_file(f) print "%d seconds to match %d files" % (time() - t, len(sys.modules))
Python
#!/usr/bin/env python """Script that makes determining PATTERN for a new fix much easier. Figuring out exactly what PATTERN I want for a given fixer class is getting tedious. This script will step through each possible subtree for a given string, allowing you to select which one you want. It will then try to figure out an appropriate pattern to match that tree. This pattern will require some editing (it will be overly restrictive) but should provide a solid base to work with and handle the tricky parts. Usage: python find_pattern.py "g.throw(E, V, T)" This will step through each subtree in the parse. To reject a candidate subtree, hit enter; to accept a candidate, hit "y" and enter. The pattern will be spit out to stdout. For example, the above will yield a succession of possible snippets, skipping all leaf-only trees. I accept 'g.throw(E, V, T)' This causes find_pattern to spit out power< 'g' trailer< '.' 'throw' > trailer< '(' arglist< 'E' ',' 'V' ',' 'T' > ')' > > Some minor tweaks later, I'm left with power< any trailer< '.' 'throw' > trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' > > which is exactly what I was after. Larger snippets can be placed in a file (as opposed to a command-line arg) and processed with the -f option. 
""" __author__ = "Collin Winter <collinw@gmail.com>" # Python imports import optparse import sys from StringIO import StringIO # Local imports from lib2to3 import pytree from lib2to3.pgen2 import driver from lib2to3.pygram import python_symbols, python_grammar driver = driver.Driver(python_grammar, convert=pytree.convert) def main(args): parser = optparse.OptionParser(usage="find_pattern.py [options] [string]") parser.add_option("-f", "--file", action="store", help="Read a code snippet from the specified file") # Parse command line arguments options, args = parser.parse_args(args) if options.file: tree = driver.parse_file(options.file) elif len(args) > 1: tree = driver.parse_stream(StringIO(args[1] + "\n")) else: print >>sys.stderr, "You must specify an input file or an input string" return 1 examine_tree(tree) return 0 def examine_tree(tree): for node in tree.post_order(): if isinstance(node, pytree.Leaf): continue print repr(str(node)) verdict = raw_input() if verdict.strip(): print find_pattern(node) return def find_pattern(node): if isinstance(node, pytree.Leaf): return repr(node.value) return find_symbol(node.type) + \ "< " + " ".join(find_pattern(n) for n in node.children) + " >" def find_symbol(sym): for n, v in python_symbols.__dict__.items(): if v == sym: return n if __name__ == "__main__": sys.exit(main(sys.argv))
Python
# Copyright 2010 G. M. Bond. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. __author__ = "G. Matthew Bond <gmattbond+python@gmail.com>" import sys import gvgen from . import graph class GvGraph(graph.Graph): """This is an implementation of the Graph class using GvGen.""" def __init__(self): """Set up the graph.""" self.graph = gvgen.GvGen() def add_node(self, node_name): """Return a new node with the caption node_name.""" return self.graph.newItem(node_name) def add_edge(self, parent, child): """Create and return a new edge between parent and child.""" return self.graph.newLink(parent, child) def output(self, fp=sys.stdout): """Output the graph in dot format to the file descriptor fd.""" return self.graph.dot(fp)
Python
#!/usr/bin/env python """Main test file for 2to3. Running "python test.py" will run all tests in tests/test_*.py. """ # Author: Collin Winter import unittest from lib2to3 import tests import lib2to3.tests.support from sys import exit, argv if "-h" in argv or "--help" in argv or len(argv) > 2: print "Usage: %s [-h] [test suite[.test class]]" %(argv[0]) print "default : run all tests in lib2to3/tests/test_*.py" print "test suite: run tests in lib2to3/tests/<test suite>" print "test class : run tests in <test suite>.<test class>" exit(1) if len(argv) == 2: mod = tests for m in argv[1].split("."): mod = getattr(mod, m, None) if not mod: print "Error importing %s" %(m) exit(1) if argv[1].find(".") == -1: # Just the module was specified, load all the tests suite = unittest.TestLoader().loadTestsFromModule(mod) else: # A class was specified, load that suite = unittest.makeSuite(mod) else: suite = tests.all_tests try: tests.support.run_all_tests(tests=suite) except KeyboardInterrupt: pass
Python
#!/usr/bin/python # comment indented by tab """Docstring. Here are some doctest exampes: >>> print 42 42 >>> d = {1: 1, 2: 2, 2: 2} >>> d.keys().sort() >>> print d {1: 1, 2: 2} >>> for i in d.keys(): ... print i, d[i] And a tricky one: >>> class X(Structure): ... _fields_ = [("x", c_int), ("y", c_int), ("array", c_char_p * 5)] ... >>> x = X() >>> print x._objects None >>> """ import sys def unicode_examples(): a = unicode(b) a = u"xxx" a = U"""xxx""" a = ur'xxx' a = UR'''xxx''' a = Ur"xxx" a = uR"""xxx""" b = u"..." u'...' def ne_examples(): if x <> y: pass if x<>y: pass if x<>y<>z: pass def has_key_examples(): # x = d.has_key("x") or d.has_key("y") # x = a.b.c.d.has_key("x") ** 3 # x = a.b.has_key(1 + 2).__repr__() # x = a.b.has_key(1 + 2).__repr__() ** -3 ** 4 # x = a.has_key(f or g) # x = a + b.has_key(c) # x = a.has_key(lambda: 12) # x = a.has_key(a for a in b) # if not a.has_key(b): pass # if not a.has_key(b).__repr__(): pass # if not a.has_key(b) ** 2: pass def foo(): pass # body indented by tab def test_ws_comma(): yield 1,2 ,3 f(1,2 ,3) `a ,b` def f(a,b ,c): pass { a:b,c:d , e : f } def apply_examples(): x = apply(f, g + h) y = apply(f, g, h) z = apply(fs[0], g or h, h or g) # Hello apply(f, (x, y) + t) apply(f, args,) apply(f, args, kwds,) # Test that complex functions are parenthesized x = apply(f+g, args) x = apply(f*g, args) x = apply(f**g, args) # But dotted names etc. not x = apply(f.g, args) x = apply(f[x], args) x = apply(f(), args) # Extreme case x = apply(a.b.c.d.e.f, args, kwds) # XXX Comments in weird places still get lost apply( # foo f, # bar args) def bad_apply_examples(): # These should *not* be touched apply() apply(f) apply(f,) apply(f, args, kwds, extras) apply(f, *args, **kwds) apply(f, *args) apply(func=f, args=args, kwds=kwds) apply(f, args=args, kwds=kwds) apply(f, args, kwds=kwds) def metaclass_examples(): class X: __metaclass__ = Meta class X(b1, b2): bar = 23 # Comment on me! 
__metaclass__ = Meta spam = 27.23 # Laughable class X: __metaclass__ = Meta; x = 23; y = 34 # Yes, I can handle this, too. def intern_examples(): # # These should be refactored: # x = intern(a) # y = intern("b" # test ) # z = intern(a+b+c.d,) # intern("y%s" % 5).replace("y", "") # # These not: # intern(a=1) # intern(f, g) # intern(*h) # intern(**i) def print_examples(): # plain vanilla print 1, 1+1, 1+1+1 # print 1, 2 # print 1 print # trailing commas print 1, 2, 3, # print 1, 2, # print 1, # print # >> stuff print >>sys.stderr, 1, 2, 3 # no trailing comma # print >>sys.stdder, 1, 2, # trailing comma # print >>sys.stderr, 1+1 # no trailing comma # print >> sys.stderr # spaces before sys.stderr def exec_examples(): # exec code # exec code in ns # exec code in ns1, ns2 # exec (a.b()) in ns # exec a.b() + c in ns # # These should not be touched: # exec(code) # exec (code) # exec(code, ns) # exec(code, ns1, ns2) def repr_examples(): x = `1 + 2` # y = `x` # z = `y`.__repr__() # x = `1, 2, 3` # x = `1 + `2`` # x = `1, 2 + `3, 4`` def except_examples(): try: pass except Exception, (f, e): pass except ImportError, e: print e.args # try: pass except (RuntimeError, ImportError), e: pass # try: pass except Exception, (a, b): pass # try: pass except Exception, d[5]: pass # try: pass except Exception, a.foo: pass # try: pass except Exception, a().foo: pass # # These should not be touched: # try: pass except: pass # try: pass except Exception: pass # try: pass except (Exception, SystemExit): pass def raise_examples(): raise Exception, 5 # raise Exception,5 # raise Exception, (5, 6, 7) # # These should not be touched # raise Exception # raise Exception(5, 6) # # These should produce a warning # TODO: convert "raise E, V, T" to # "e = E(V); e.__traceback__ = T; raise e;" # raise Exception, 5, 6 # raise Exception,5,6 # raise Exception, (5, 6, 7), 6 def long_examples(): x = long(x) y = isinstance(x, long) z = type(x) in (int, long) a = 12L b = 0x12l # unchanged: a = 12 b = 0x12 c = 
3.14 def dict_examples(): # # Plain method calls # print d.keys() print d.items() print d.values() # # Plain method calls in special contexts # print iter(e.keys()) for i in e.keys(): print i [i for i in e.keys()] (i for i in e.keys()) # # Iterator method calls # print f.iterkeys() print f.iteritems() print f.itervalues() # # Iterator method calls in special contexts # print list(g.iterkeys()) print sorted(g.iterkeys()) print iter(g.iterkeys()) for i in g.iterkeys(): print i [i for i in g.iterkeys()] (i for i in g.iterkeys()) # # Examples with a "tail"; these are never "special" # print h.iterkeys().next() print h.keys()[0] print list(h.iterkeys().next()) for x in h.keys()[0]: print x # # Examples with dict views # print d.viewkeys() print d.viewitems() print d.viewvalues() def dict_negative_examples(): # # These should all remain unchanged: # print list(h.keys()) print sorted(h.keys()) def xrange_examples(): for i in xrange(100): print i for i in xrange(0, 100): print i for i in xrange(0, 100, 10): print i def input_examples(): a = input() b = input(str(a)) def raw_input_examples(): a = raw_input() b = raw_input(a.rstrip()) def filter_examples(): filter(os.unlink, filenames) filter(None, "whatever") filter(lambda x: not x, range(4)) def map_examples(): map(None, foo.bar) map(None, foo.bar,) map(None, foo, bar) map(f, foo.bar) map(lambda x: x+1, range(10)) def basestring_examples(): if isinstance(x, basestring): pass def buffer_examples(): x = buffer(y) def sys_exc_examples(): print sys.exc_type, sys.exc_value, sys.exc_traceback def operator_examples(): import operator operator.isCallable(foo) operator.sequenceIncludes(foo, bar) from operator import isCallable, sequenceIncludes # These should produce warnings. isCallable(foo) sequenceIncludes(foo, bar) class X: def maximum(self): return max(self.data.values()) def total(self): return sum(self.data.values()) # This is the last line.
Python
#!/usr/bin/python def buffer_examples(): x = buffer(y)
Python
#!/usr/bin/env python # Copyright 2010 G. M. Bond. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. #Python imports import sys import re import graph #import lib2to3.pytree as pytree #For testing, let's just import one fixer by hand. Later this should take # a list of fixers on the command line #from lib2to3.fixes import fix_buffer from lib2to3.fixes import fix_next def graph_node(graph, node): ##For Nodes #print "Doing: %s" % node.__repr__ #print "Node ID: %d (%d children)" % (id(node), len(node.children)) #this = node #this = graph.newItem("%s\\nTYPE: %s\\n%s" % (node.__class__.__name__, # node.type_repr(), # re.escape(node.__unicode__()))) #i = 0 #for current_child in node.children: #i += 1 # child = graph_node(graph, current_child) # graph.newLink(this, child) #print "Processed %d children from %d" % (i, id(node)) #return this ##For NodePatterns recurseChildren = False nodename = "" #this switch might be better served by adding some sort of method to the #NodePattern classes that could be called polymorphically if isinstance(node, pytree.LeafPattern): nodename += "\\nclasstype: Leaf" if node.type is None: nodename += "\\nMatch type: any" else: nodename += "\\nMatch type: " + node.type_repr() if node.content is not None: nodename += "\\nMatch string: " + node.content elif isinstance(node, pytree.NodePattern): nodename += "\\nclasstype: Node" if node.type is None: nodename += "\\nMatch type: any single node" else: nodename += "\\nMatch type: " + node.type_repr() if node.content is not None: recurseChildren = True elif isinstance(node, pytree.WildcardPattern): nodename += "\\nclasstype: Wildcard" if node.content is None: if node.max == 0x7FFFFFFF: nodename += "Match %d-* of any" % node.min else: nodename += "Match %d-%d of any" % (node.min, node.max) else: if len(node.content) == 1: recurseChildren = True print node.content #Recuse each alternative elif isinstance(node, pytree.NegatedPattern): nodename += "\\nclasstype: Negated" if 
node.content is None: nodename += "\\nMatch $" else: nodename += "\\nMatch if children do NOT match" recurseChildren = True else: print "uh oh, not a node?" if node.name is not None: nodename += "\\nStore as: " + node.name #if it's a leaf, include content if not empty #if it's a NodePattern, recurse over children #this = graph.newItem("type: %s\\n this = graph.newItem(nodename) if recurseChildren: for n in node.content: child = graph_node(graph, n) graph.newLink(this, child) return this def main(argv=None): #Change this out once input can be taken from argv fix_log = [] #target_fixers = [fix_buffer.FixBuffer(None, fix_log)] target_fixers = [fix_next.FixNext(None, fix_log)] #for showing syntax tree #for fixer in target_fixers: # fixer.compile_sm_pattern() g = graph.GvGraph() fixer = target_fixers[0] #print fixer.sm_root.__repr__() #graph_node(graph, fixer.sm_root) #graph_node(graph, fixer.pattern) fixer.pattern.graph_node(g) g.output() #lib2to3.pytree.node #build_tree_graph(fixer.sm_root) #print target_fixers #print fix_log if __name__ == '__main__': sys.exit(main())
Python
# Copyright 2010 G. M. Bond. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. __author__ = "G. Matthew Bond <gmattbond+python@gmail.com>" import sys class Graph(object): """ Base class for adapting graphviz dot libraries for use in lib2to3. This class provides the interface that should be used for adapting a graphviz library for use with the graph_node method of compiled patterns in fixers. This class was added so there would be no direct dependence on any one graphviz implementation. """ def add_node(self, node_name): """Add a node calle node_name to the graph.""" raise NotImplementedError def add_edge(self, parent, child): """Add an edge between parent and child.""" raise NotImplementedError def output(self, fp=sys.stdout): """Output the graph in dot format to the file descriptor fd.""" raise NotImplementedError
Python
#!/usr/bin/python

# Copyright (C) 2007-2009 Mihai Preda.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Opcode table: "NAME ARITY" per line; the opcode id is the line position.
bytecodesStr = '''RESERVED 0
CONST 0
CALL -1
ADD 2
SUB 2
MUL 2
DIV 2
MOD 2
RND 0
UMIN 1
POWER 2
FACT 1
PERCENT 1
SQRT 1
CBRT 1
EXP 1
LN 1
SIN 1
COS 1
TAN 1
ASIN 1
ACOS 1
ATAN 1
SINH 1
COSH 1
TANH 1
ASINH 1
ACOSH 1
ATANH 1
ABS 1
FLOOR 1
CEIL 1
SIGN 1
MIN 2
MAX 2
GCD 2
COMB 2
PERM 2
LOAD0 0
LOAD1 0
LOAD2 0
LOAD3 0
LOAD4 0
REAL 1
IMAG 1'''

# Names of the built-in functions exposed by the VM (whitespace separated).
builtins = """rnd
sqrt cbrt
sin cos tan
asin acos atan
sinh cosh tanh
asinh acosh atanh
exp ln
abs floor ceil sign min max
gcd comb perm
mod
real imag"""

template = '''// This file is automatically generated by the build.py script. Do not edit!

/*
 * Copyright (C) 2008-2009 Mihai Preda.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.javia.arity;

class VM {
    static final byte
%(bytecodes)s;

    static final String[] opcodeName = {%(names)s};
    static final byte[] arity = {%(arity)s};
    static final byte[] builtins = {%(builtins)s};
}
'''

def genVM():
    """Generate src/org/javia/arity/VM.java from the opcode table above."""
    bytecodes = [(fields[0].lower(), int(fields[1]))
                 for fields in (line.split() for line in bytecodesStr.split('\n'))]
    # Fix: enumerate instead of zip(..., xrange(256)) -- the id builtin is
    # no longer shadowed and the table isn't silently capped at 256 entries.
    str1 = ',\n'.join(['%s = %d' % (name.upper(), opcode)
                       for opcode, (name, arity) in enumerate(bytecodes)])
    names = ', '.join(['"%s"' % name for (name, arity) in bytecodes])
    arity = ', '.join(['%d' % arity for (name, arity) in bytecodes])
    builtinsStr = ', '.join([s.upper() for s in builtins.split()])
    # Fix: close the output file deterministically even on write errors.
    fo = open('src/org/javia/arity/VM.java', 'w')
    try:
        fo.write(template % dict(bytecodes=str1, names=names,
                                 arity=arity, builtins=builtinsStr))
    finally:
        fo.close()

genVM()

import sys, os
from os import path
import shutil
import glob

def run(cmd):
    """Run an external command, failing hard on a non-zero exit status."""
    #print cmd
    parts = cmd.split()
    err = os.spawnvp(os.P_WAIT, parts[0], parts)
    # Fix: don't use assert for error checking -- it vanishes under -O.
    if err:
        raise RuntimeError('command failed (%d): %s' % (err, cmd))

def mkdir(*dirs):
    """Create each directory (and parents), ignoring already-exists errors."""
    # Fix: loop variable renamed so it no longer shadows the os.path import.
    for d in dirs:
        try:
            os.makedirs(d)
        except OSError:
            pass

VER = '2.1.6'

mkdir('tmp/class', 'rel')
run('rm -rf tmp/class/org')
name = 'rel/arity-' + VER
run("javac -sourcepath src -d tmp/class src/org/javia/arity/UnitTest.java")
run("jar cfe %(name)s.jar org.javia.arity.UnitTest -C tmp/class ." % dict(name=name))
#run("javadoc -notimestamp -nodeprecatedlist -notree -noindex -nohelp -noqualifier java.lang -d javadoc -sourcepath src org.javia.arity")
Python
############################################################################## # # Copyright (c) 2006 Zope Corporation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Bootstrap a buildout-based project Simply run this script in a directory containing a buildout.cfg. The script accepts buildout command-line options, so you can use the -c option to specify an alternate configuration file. """ import os, shutil, sys, tempfile, urllib2 from optparse import OptionParser tmpeggs = tempfile.mkdtemp() is_jython = sys.platform.startswith('java') # parsing arguments parser = OptionParser() parser.add_option("-v", "--version", dest="version", help="use a specific zc.buildout version") parser.add_option("-d", "--distribute", action="store_true", dest="distribute", default=False, help="Use Disribute rather than Setuptools.") parser.add_option("-c", None, action="store", dest="config_file", help=("Specify the path to the buildout configuration " "file to be used.")) options, args = parser.parse_args() # if -c was provided, we push it back into args for buildout' main function if options.config_file is not None: args += ['-c', options.config_file] if options.version is not None: VERSION = '==%s' % options.version else: VERSION = '' # We decided to always use distribute, make sure this is the default for us # USE_DISTRIBUTE = options.distribute USE_DISTRIBUTE = True args = args + ['bootstrap'] to_reload = False try: import pkg_resources if not hasattr(pkg_resources, '_distribute'): to_reload = True raise ImportError 
except ImportError: ez = {} if USE_DISTRIBUTE: exec urllib2.urlopen('http://python-distribute.org/distribute_setup.py' ).read() in ez ez['use_setuptools'](to_dir=tmpeggs, download_delay=0, no_fake=True) else: exec urllib2.urlopen('http://peak.telecommunity.com/dist/ez_setup.py' ).read() in ez ez['use_setuptools'](to_dir=tmpeggs, download_delay=0) if to_reload: reload(pkg_resources) else: import pkg_resources if sys.platform == 'win32': def quote(c): if ' ' in c: return '"%s"' % c # work around spawn lamosity on windows else: return c else: def quote (c): return c cmd = 'from setuptools.command.easy_install import main; main()' ws = pkg_resources.working_set if USE_DISTRIBUTE: requirement = 'distribute' else: requirement = 'setuptools' if is_jython: import subprocess assert subprocess.Popen([sys.executable] + ['-c', quote(cmd), '-mqNxd', quote(tmpeggs), 'zc.buildout' + VERSION], env=dict(os.environ, PYTHONPATH= ws.find(pkg_resources.Requirement.parse(requirement)).location ), ).wait() == 0 else: assert os.spawnle( os.P_WAIT, sys.executable, quote (sys.executable), '-c', quote (cmd), '-mqNxd', quote (tmpeggs), 'zc.buildout' + VERSION, dict(os.environ, PYTHONPATH= ws.find(pkg_resources.Requirement.parse(requirement)).location ), ) == 0 ws.add_entry(tmpeggs) ws.require('zc.buildout' + VERSION) import zc.buildout.buildout zc.buildout.buildout.main(args) shutil.rmtree(tmpeggs)
Python
#!/usr/bin/python2.6 # # Simple http server to emulate api.playfoursquare.com import logging import shutil import sys import urlparse import SimpleHTTPServer import BaseHTTPServer class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): """Handle playfoursquare.com requests, for testing.""" def do_GET(self): logging.warn('do_GET: %s, %s', self.command, self.path) url = urlparse.urlparse(self.path) logging.warn('do_GET: %s', url) query = urlparse.parse_qs(url.query) query_keys = [pair[0] for pair in query] response = self.handle_url(url) if response != None: self.send_200() shutil.copyfileobj(response, self.wfile) self.wfile.close() do_POST = do_GET def handle_url(self, url): path = None if url.path == '/v1/venue': path = '../captures/api/v1/venue.xml' elif url.path == '/v1/addvenue': path = '../captures/api/v1/venue.xml' elif url.path == '/v1/venues': path = '../captures/api/v1/venues.xml' elif url.path == '/v1/user': path = '../captures/api/v1/user.xml' elif url.path == '/v1/checkcity': path = '../captures/api/v1/checkcity.xml' elif url.path == '/v1/checkins': path = '../captures/api/v1/checkins.xml' elif url.path == '/v1/cities': path = '../captures/api/v1/cities.xml' elif url.path == '/v1/switchcity': path = '../captures/api/v1/switchcity.xml' elif url.path == '/v1/tips': path = '../captures/api/v1/tips.xml' elif url.path == '/v1/checkin': path = '../captures/api/v1/checkin.xml' elif url.path == '/history/12345.rss': path = '../captures/api/v1/feed.xml' if path is None: self.send_error(404) else: logging.warn('Using: %s' % path) return open(path) def send_200(self): self.send_response(200) self.send_header('Content-type', 'text/xml') self.end_headers() def main(): if len(sys.argv) > 1: port = int(sys.argv[1]) else: port = 8080 server_address = ('0.0.0.0', port) httpd = BaseHTTPServer.HTTPServer(server_address, RequestHandler) sa = httpd.socket.getsockname() print "Serving HTTP on", sa[0], "port", sa[1], "..." 
httpd.serve_forever() if __name__ == '__main__': main()
Python
#!/usr/bin/python import os import subprocess import sys BASEDIR = '../main/src/com/joelapenna/foursquare' TYPESDIR = '../captures/types/v1' captures = sys.argv[1:] if not captures: captures = os.listdir(TYPESDIR) for f in captures: basename = f.split('.')[0] javaname = ''.join([c.capitalize() for c in basename.split('_')]) fullpath = os.path.join(TYPESDIR, f) typepath = os.path.join(BASEDIR, 'types', javaname + '.java') parserpath = os.path.join(BASEDIR, 'parsers', javaname + 'Parser.java') cmd = 'python gen_class.py %s > %s' % (fullpath, typepath) print cmd subprocess.call(cmd, stdout=sys.stdout, shell=True) cmd = 'python gen_parser.py %s > %s' % (fullpath, parserpath) print cmd subprocess.call(cmd, stdout=sys.stdout, shell=True)
Python
#!/usr/bin/python

"""
Pull a oAuth protected page from foursquare.

Expects ~/.oget to contain (one on each line):

CONSUMER_KEY
CONSUMER_KEY_SECRET
USERNAME
PASSWORD

Don't forget to chmod 600 the file!
"""

# NOTE(review): Python 2 script; depends on a third-party `oauth` module.

import httplib
import os
import re
import sys
import urllib
import urllib2
import urlparse
import user

from xml.dom import pulldom
from xml.dom import minidom

import oauth

"""From: http://groups.google.com/group/foursquare-api/web/oauth

@consumer = OAuth::Consumer.new("consumer_token","consumer_secret", {
    :site => "http://foursquare.com",
    :scheme => :header,
    :http_method => :post,
    :request_token_path => "/oauth/request_token",
    :access_token_path => "/oauth/access_token",
    :authorize_path => "/oauth/authorize"
})
"""

SERVER = 'api.foursquare.com:80'
CONTENT_TYPE_HEADER = {'Content-Type' :'application/x-www-form-urlencoded'}
SIGNATURE_METHOD = oauth.OAuthSignatureMethod_HMAC_SHA1()

AUTHEXCHANGE_URL = 'http://api.foursquare.com/v1/authexchange'


def parse_auth_response(auth_response):
    """Extract (oauth_token, oauth_token_secret) from an authexchange body.

    Uses plain regexes rather than an XML parser; raises AttributeError if
    either tag is missing from the response.
    """
    return (
        re.search('<oauth_token>(.*)</oauth_token>', auth_response).groups()[0],
        re.search('<oauth_token_secret>(.*)</oauth_token_secret>',
                  auth_response).groups()[0]
    )


def create_signed_oauth_request(username, password, consumer):
    """Build and HMAC-SHA1-sign the authexchange POST request."""
    oauth_request = oauth.OAuthRequest.from_consumer_and_token(
        consumer, http_method='POST', http_url=AUTHEXCHANGE_URL,
        parameters=dict(fs_username=username, fs_password=password))

    oauth_request.sign_request(SIGNATURE_METHOD, consumer, None)
    return oauth_request


def main():
    # Fetch the URL given in argv[1], obtaining (and caching in ~/.oget)
    # an access token on first use.
    url = urlparse.urlparse(sys.argv[1])
    # Nevermind that the query can have repeated keys.
    parameters = dict(urlparse.parse_qsl(url.query))

    # ~/.oget holds 4 lines before the first run, 6 after the token is
    # cached (see the write() below).
    password_file = open(os.path.join(user.home, '.oget'))
    lines = [line.strip() for line in password_file.readlines()]
    if len(lines) == 4:
        cons_key, cons_key_secret, username, password = lines
        access_token = None
    else:
        cons_key, cons_key_secret, username, password, token, secret = lines
        access_token = oauth.OAuthToken(token, secret)

    consumer = oauth.OAuthConsumer(cons_key, cons_key_secret)

    if not access_token:
        # No cached token: perform the authexchange and persist the result.
        oauth_request = create_signed_oauth_request(username, password,
                                                    consumer)

        connection = httplib.HTTPConnection(SERVER)
        headers = {'Content-Type' :'application/x-www-form-urlencoded'}
        connection.request(oauth_request.http_method, AUTHEXCHANGE_URL,
                           body=oauth_request.to_postdata(), headers=headers)

        auth_response = connection.getresponse().read()
        token = parse_auth_response(auth_response)
        access_token = oauth.OAuthToken(*token)

        # Re-write ~/.oget with the token appended for next time.
        open(os.path.join(user.home, '.oget'), 'w').write('\n'.join((
            cons_key, cons_key_secret, username, password, token[0], token[1])))

    # Signed request for the page actually asked for.
    oauth_request = oauth.OAuthRequest.from_consumer_and_token(consumer,
        access_token, http_method='POST', http_url=url.geturl(),
        parameters=parameters)
    oauth_request.sign_request(SIGNATURE_METHOD, consumer, access_token)

    connection = httplib.HTTPConnection(SERVER)
    connection.request(oauth_request.http_method, oauth_request.to_url(),
                       body=oauth_request.to_postdata(),
                       headers=CONTENT_TYPE_HEADER)

    print connection.getresponse().read()
    #print minidom.parse(connection.getresponse()).toprettyxml(indent='  ')


if __name__ == '__main__':
    main()
Python
#!/usr/bin/python
# Emit a Java XmlPullParser subclass for one captured XML type.
# Python 2 script; prints the generated source to stdout.

import datetime
import sys
import textwrap

import common

from xml.dom import pulldom

# Java source template for the whole parser class.  %(stanzas)s is spliced
# in as an if/else-if chain; note the template supplies the trailing
# `} else { ... }` arm itself.
PARSER = """\
/**
 * Copyright 2009 Joe LaPenna
 */

package com.joelapenna.foursquare.parsers;

import com.joelapenna.foursquare.Foursquare;
import com.joelapenna.foursquare.error.FoursquareError;
import com.joelapenna.foursquare.error.FoursquareParseException;
import com.joelapenna.foursquare.types.%(type_name)s;

import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;

import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Auto-generated: %(timestamp)s
 *
 * @author Joe LaPenna (joe@joelapenna.com)
 * @param <T>
 */
public class %(type_name)sParser extends AbstractParser<%(type_name)s> {
    private static final Logger LOG = Logger.getLogger(%(type_name)sParser.class.getCanonicalName());
    private static final boolean DEBUG = Foursquare.PARSER_DEBUG;

    @Override
    public %(type_name)s parseInner(XmlPullParser parser) throws XmlPullParserException, IOException,
            FoursquareError, FoursquareParseException {
        parser.require(XmlPullParser.START_TAG, null, null);

        %(type_name)s %(top_node_name)s = new %(type_name)s();

        while (parser.nextTag() == XmlPullParser.START_TAG) {
            String name = parser.getName();
            %(stanzas)s

            } else {
                // Consume something we don't understand.
                if (DEBUG) LOG.log(Level.FINE, "Found tag that we don't recognize: " + name);
                skipSubTree(parser);
            }
        }
        return %(top_node_name)s;
    }
}"""

# One `} else if` arm per attribute; the variant chosen depends on the
# attribute's type (see GenerateClass below).
BOOLEAN_STANZA = """\
            } else if ("%(name)s".equals(name)) {
                %(top_node_name)s.set%(camel_name)s(Boolean.valueOf(parser.nextText()));
"""

GROUP_STANZA = """\
            } else if ("%(name)s".equals(name)) {
                %(top_node_name)s.set%(camel_name)s(new GroupParser(new %(sub_parser_camel_case)s()).parse(parser));
"""

COMPLEX_STANZA = """\
            } else if ("%(name)s".equals(name)) {
                %(top_node_name)s.set%(camel_name)s(new %(parser_name)s().parse(parser));
"""

STANZA = """\
            } else if ("%(name)s".equals(name)) {
                %(top_node_name)s.set%(camel_name)s(parser.nextText());
"""


def main():
    # argv[1] is the captured XML sample to generate a parser for.
    type_name, top_node_name, attributes = common.WalkNodesForAttributes(
        sys.argv[1])
    GenerateClass(type_name, top_node_name, attributes)


def GenerateClass(type_name, top_node_name, attributes):
    """generate it.

    type_name: the type of object the parser returns
    top_node_name: the name of the object the parser returns.
    per common.WalkNodsForAttributes
    """
    stanzas = []
    for name in sorted(attributes):
        typ, children = attributes[name]
        replacements = Replacements(top_node_name, name, typ, children)
        if typ == common.BOOLEAN:
            stanzas.append(BOOLEAN_STANZA % replacements)
        elif typ == common.GROUP:
            stanzas.append(GROUP_STANZA % replacements)
        elif typ in common.COMPLEX:
            stanzas.append(COMPLEX_STANZA % replacements)
        else:
            stanzas.append(STANZA % replacements)
    if stanzas:
        # pop off the extranious } else for the first conditional stanza.
        stanzas[0] = stanzas[0].replace('} else ', '', 1)

    # NOTE(review): `name` and `typ` here are the *last* values left over
    # from the loop above; only the top-level keys of this Replacements()
    # result are actually consumed by PARSER, so that appears deliberate.
    # With an empty `attributes` this raises NameError -- TODO confirm
    # captures always have at least one attribute.
    replacements = Replacements(top_node_name, name, typ, [None])
    replacements['stanzas'] = '\n'.join(stanzas).strip()
    print PARSER % replacements


def Replacements(top_node_name, name, typ, children):
    """Build the %-substitution dict shared by all the templates above."""
    # CameCaseClassName
    type_name = ''.join([word.capitalize() for word in top_node_name.split('_')])
    # CamelCaseClassName
    camel_name = ''.join([word.capitalize() for word in name.split('_')])
    # camelCaseLocalName
    # NOTE(review): lower().capitalize() yields e.g. "Firstname", not true
    # camelCase as the comment suggests; unused by the templates here.
    attribute_name = camel_name.lower().capitalize()
    # mFieldName
    field_name = 'm' + camel_name
    # children[0], when set, names the element type of a Group attribute.
    if children[0]:
        sub_parser_camel_case = children[0] + 'Parser'
    else:
        sub_parser_camel_case = (camel_name[:-1] + 'Parser')
    return {
        'type_name': type_name,
        'name': name,
        'top_node_name': top_node_name,
        'camel_name': camel_name,
        'parser_name': typ + 'Parser',
        'attribute_name': attribute_name,
        'field_name': field_name,
        'typ': typ,
        'timestamp': datetime.datetime.now(),
        'sub_parser_camel_case': sub_parser_camel_case,
        'sub_type': children[0]
    }


if __name__ == '__main__':
    main()
Python
#!/usr/bin/python
# Shared helpers and type tables for the foursquare code generators.

import logging

from xml.dom import minidom
from xml.dom import pulldom

# Type markers used by the generators; anything not in TYPES is treated
# as a plain String.
BOOLEAN = "boolean"
STRING = "String"
GROUP = "Group"

# Interfaces that all FoursquareTypes implement.
DEFAULT_INTERFACES = ['FoursquareType']

# Interfaces that specific FoursqureTypes implement.
INTERFACES = {
}

DEFAULT_CLASS_IMPORTS = [
]

CLASS_IMPORTS = {
#    'Checkin': DEFAULT_CLASS_IMPORTS + [
#        'import com.joelapenna.foursquare.filters.VenueFilterable'
#    ],
#    'Venue': DEFAULT_CLASS_IMPORTS + [
#        'import com.joelapenna.foursquare.filters.VenueFilterable'
#    ],
#    'Tip': DEFAULT_CLASS_IMPORTS + [
#        'import com.joelapenna.foursquare.filters.VenueFilterable'
#    ],
}

# Attribute types that map to generated classes (not primitives); their
# subtrees are skipped rather than walked.
COMPLEX = [
    'Group',
    'Badge',
    'Beenhere',
    'Checkin',
    'CheckinResponse',
    'City',
    'Credentials',
    'Data',
    'Mayor',
    'Rank',
    'Score',
    'Scoring',
    'Settings',
    'Stats',
    'Tags',
    'Tip',
    'User',
    'Venue',
]

TYPES = COMPLEX + ['boolean']


def WalkNodesForAttributes(path):
    """Parse the xml file getting all attributes.

    <venue>
      <attribute>value</attribute>
    </venue>

    Returns:
      type_name - The java-style name the top node will have. "Venue"
      top_node_name - unadultured name of the xml stanza, probably the type
          of java class we're creating. "venue"
      attributes - dict keyed by attribute tag name; each value is the
          tuple (type marker, [child attribute value]), e.g.
          {'attribute': ('String', [''])}
    """
    doc = pulldom.parse(path)

    type_name = None
    top_node_name = None
    attributes = {}

    # level > 0 means we are inside a COMPLEX attribute's subtree and are
    # consuming (ignoring) its events until the matching END_ELEMENT.
    level = 0
    for event, node in doc:
        # For skipping parts of a tree.
        if level > 0:
            if event == pulldom.END_ELEMENT:
                level-=1
                logging.warn('(%s) Skip end: %s' % (str(level), node))
                continue
            elif event == pulldom.START_ELEMENT:
                logging.warn('(%s) Skipping: %s' % (str(level), node))
                level+=1
                continue

        if event == pulldom.START_ELEMENT:
            logging.warn('Parsing: ' + node.tagName)
            # Get the type name to use.
            # The first element seen is the document root: it names the
            # class rather than contributing an attribute.
            if type_name is None:
                type_name = ''.join([word.capitalize()
                                     for word in node.tagName.split('_')])
                top_node_name = node.tagName
                logging.warn('Found Top Node Name: ' + top_node_name)
                continue

            typ = node.getAttribute('type')
            child = node.getAttribute('child')

            # We don't want to walk complex types.
            if typ in COMPLEX:
                logging.warn('Found Complex: ' + node.tagName)
                level = 1
            elif typ not in TYPES:
                logging.warn('Found String: ' + typ)
                typ = STRING
            else:
                logging.warn('Found Type: ' + typ)
            logging.warn('Adding: ' + str((node, typ)))
            # setdefault: the first occurrence of a repeated tag wins.
            attributes.setdefault(node.tagName, (typ, [child]))

    logging.warn('Attr: ' + str((type_name, top_node_name, attributes)))
    return type_name, top_node_name, attributes
Python
#!/usr/bin/env python
#
# Copyright (c) 2005 Niels Provos <provos@citi.umich.edu>
# All rights reserved.
#
# Generates marshaling code based on libevent.
#
# NOTE(review): Python 2-only (print statements, dict.has_key).  The view
# of this file is truncated: EntryArray.CodeUnmarshal is cut off at the
# end, so everything from that point on is left exactly as found.

import sys
import re

#
_NAME = "event_rpcgen.py"
_VERSION = "0.1"
_STRUCT_RE = '[a-z][a-z_0-9]*'

# Globals
line_count = 0

white = re.compile(r'^\s+')
cppcomment = re.compile(r'\/\/.*$')
headerdirect = []
cppdirect = []

# Holds everything that makes a struct
class Struct:
    """One RPC struct: its entries plus emitters for the generated C
    declaration and implementation of new/free/clear/assign/get/
    marshal/unmarshal/complete."""

    def __init__(self, name):
        self._name = name
        self._entries = []
        self._tags = {}
        print >>sys.stderr, '  Created struct: %s' % name

    def AddEntry(self, entry):
        # Tag numbers identify fields on the wire, so they must be unique
        # within the struct; duplicates are a fatal input error.
        if self._tags.has_key(entry.Tag()):
            print >>sys.stderr, ( 'Entry "%s" duplicates tag number '
                                  '%d from "%s" around line %d' ) % (
                entry.Name(), entry.Tag(),
                self._tags[entry.Tag()], line_count)
            sys.exit(1)
        self._entries.append(entry)
        self._tags[entry.Tag()] = entry.Name()
        print >>sys.stderr, '  Added entry: %s' % entry.Name()

    def Name(self):
        return self._name

    def EntryTagName(self, entry):
        """Creates the name inside an enumeration for distinguishing data
        types."""
        name = "%s_%s" % (self._name, entry.Name())
        return name.upper()

    def PrintIdented(self, file, ident, code):
        """Takes an array, add indentation to each entry and prints it."""
        for entry in code:
            print >>file, '%s%s' % (ident, entry)

    def PrintTags(self, file):
        """Prints the tag definitions for a structure."""
        print >>file, '/* Tag definition for %s */' % self._name
        print >>file, 'enum %s_ {' % self._name.lower()
        for entry in self._entries:
            print >>file, '  %s=%d,' % (self.EntryTagName(entry),
                                        entry.Tag())
        print >>file, '  %s_MAX_TAGS' % (self._name.upper())
        print >>file, '};\n'

    def PrintForwardDeclaration(self, file):
        print >>file, 'struct %s;' % self._name

    def PrintDeclaration(self, file):
        # Emits the accessor vtable struct, the data struct itself, and
        # the prototypes for all generated functions.
        print >>file, '/* Structure declaration for %s */' % self._name
        print >>file, 'struct %s_access_ {' % self._name
        for entry in self._entries:
            dcl = entry.AssignDeclaration('(*%s_assign)' % entry.Name())
            dcl.extend(
                entry.GetDeclaration('(*%s_get)' % entry.Name()))
            if entry.Array():
                dcl.extend(
                    entry.AddDeclaration('(*%s_add)' % entry.Name()))
            self.PrintIdented(file, '  ', dcl)
        print >>file, '};\n'

        print >>file, 'struct %s {' % self._name
        print >>file, '  struct %s_access_ *base;\n' % self._name
        for entry in self._entries:
            dcl = entry.Declaration()
            self.PrintIdented(file, '  ', dcl)
        print >>file, ''
        for entry in self._entries:
            print >>file, '  ev_uint8_t %s_set;' % entry.Name()
        print >>file, '};\n'

        print >>file, \
"""struct %(name)s *%(name)s_new(void);
void %(name)s_free(struct %(name)s *);
void %(name)s_clear(struct %(name)s *);
void %(name)s_marshal(struct evbuffer *, const struct %(name)s *);
int %(name)s_unmarshal(struct %(name)s *, struct evbuffer *);
int %(name)s_complete(struct %(name)s *);
void evtag_marshal_%(name)s(struct evbuffer *, ev_uint32_t,
    const struct %(name)s *);
int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t,
    struct %(name)s *);""" % { 'name' : self._name }

        # Write a setting function of every variable
        for entry in self._entries:
            self.PrintIdented(file, '', entry.AssignDeclaration(
                entry.AssignFuncName()))
            self.PrintIdented(file, '', entry.GetDeclaration(
                entry.GetFuncName()))
            if entry.Array():
                self.PrintIdented(file, '', entry.AddDeclaration(
                    entry.AddFuncName()))

        print >>file, '/* --- %s done --- */\n' % self._name

    def PrintCode(self, file):
        # Emits the full C implementation for this struct.
        print >>file, ('/*\n'
                       ' * Implementation of %s\n'
                       ' */\n') % self._name

        print >>file, \
              'static struct %(name)s_access_ __%(name)s_base = {' % \
              { 'name' : self._name }
        for entry in self._entries:
            self.PrintIdented(file, '  ', entry.CodeBase())
        print >>file, '};\n'

        # Creation
        print >>file, (
            'struct %(name)s *\n'
            '%(name)s_new(void)\n'
            '{\n'
            '  struct %(name)s *tmp;\n'
            '  if ((tmp = malloc(sizeof(struct %(name)s))) == NULL) {\n'
            '    event_warn("%%s: malloc", __func__);\n'
            '    return (NULL);\n'
            '  }\n'
            '  tmp->base = &__%(name)s_base;\n') % { 'name' : self._name }

        for entry in self._entries:
            self.PrintIdented(file, '  ', entry.CodeNew('tmp'))
            print >>file, '  tmp->%s_set = 0;\n' % entry.Name()

        print >>file, (
            '  return (tmp);\n'
            '}\n')

        # Adding
        for entry in self._entries:
            if entry.Array():
                self.PrintIdented(file, '', entry.CodeAdd())
            print >>file, ''

        # Assigning
        for entry in self._entries:
            self.PrintIdented(file, '', entry.CodeAssign())
            print >>file, ''

        # Getting
        for entry in self._entries:
            self.PrintIdented(file, '', entry.CodeGet())
            print >>file, ''

        # Clearing
        print >>file, ( 'void\n'
                        '%(name)s_clear(struct %(name)s *tmp)\n'
                        '{'
                        ) % { 'name' : self._name }
        for entry in self._entries:
            self.PrintIdented(file, '  ', entry.CodeClear('tmp'))
        print >>file, '}\n'

        # Freeing
        print >>file, ( 'void\n'
                        '%(name)s_free(struct %(name)s *tmp)\n'
                        '{'
                        ) % { 'name' : self._name }
        for entry in self._entries:
            self.PrintIdented(file, '  ', entry.CodeFree('tmp'))
        print >>file, ('  free(tmp);\n'
                       '}\n')

        # Marshaling
        print >>file, ('void\n'
                       '%(name)s_marshal(struct evbuffer *evbuf, '
                       'const struct %(name)s *tmp)'
                       '{') % { 'name' : self._name }
        for entry in self._entries:
            indent = '  '
            # Optional entries do not have to be set
            if entry.Optional():
                indent += '  '
                print >>file, '  if (tmp->%s_set) {' % entry.Name()
            self.PrintIdented(
                file, indent,
                entry.CodeMarshal('evbuf', self.EntryTagName(entry), 'tmp'))
            if entry.Optional():
                print >>file, '  }'

        print >>file, '}\n'

        # Unmarshaling
        print >>file, ('int\n'
                       '%(name)s_unmarshal(struct %(name)s *tmp, '
                       ' struct evbuffer *evbuf)\n'
                       '{\n'
                       '  ev_uint32_t tag;\n'
                       '  while (EVBUFFER_LENGTH(evbuf) > 0) {\n'
                       '    if (evtag_peek(evbuf, &tag) == -1)\n'
                       '      return (-1);\n'
                       '    switch (tag) {\n'
                       ) % { 'name' : self._name }
        for entry in self._entries:
            print >>file, '      case %s:\n' % self.EntryTagName(entry)
            if not entry.Array():
                # Scalar fields may appear only once on the wire.
                print >>file, (
                    '        if (tmp->%s_set)\n'
                    '          return (-1);'
                    ) % (entry.Name())

            self.PrintIdented(
                file, '        ',
                entry.CodeUnmarshal('evbuf',
                                    self.EntryTagName(entry), 'tmp'))

            print >>file, ( '        tmp->%s_set = 1;\n' % entry.Name() +
                            '        break;\n' )
        print >>file, ( '      default:\n'
                        '        return -1;\n'
                        '    }\n'
                        '  }\n' )
        # Check if it was decoded completely
        print >>file, ( '  if (%(name)s_complete(tmp) == -1)\n'
                        '    return (-1);'
                        ) % { 'name' : self._name }

        # Successfully decoded
        print >>file, ( '  return (0);\n'
                        '}\n')

        # Checking if a structure has all the required data
        print >>file, (
            'int\n'
            '%(name)s_complete(struct %(name)s *msg)\n'
            '{' ) % { 'name' : self._name }
        for entry in self._entries:
            self.PrintIdented(
                file, '  ',
                entry.CodeComplete('msg'))
        print >>file, (
            '  return (0);\n'
            '}\n' )

        # Complete message unmarshaling
        print >>file, (
            'int\n'
            'evtag_unmarshal_%(name)s(struct evbuffer *evbuf, '
            'ev_uint32_t need_tag, struct %(name)s *msg)\n'
            '{\n'
            '  ev_uint32_t tag;\n'
            '  int res = -1;\n'
            '\n'
            '  struct evbuffer *tmp = evbuffer_new();\n'
            '\n'
            '  if (evtag_unmarshal(evbuf, &tag, tmp) == -1'
            ' || tag != need_tag)\n'
            '    goto error;\n'
            '\n'
            '  if (%(name)s_unmarshal(msg, tmp) == -1)\n'
            '    goto error;\n'
            '\n'
            '  res = 0;\n'
            '\n'
            ' error:\n'
            '  evbuffer_free(tmp);\n'
            '  return (res);\n'
            '}\n' ) % { 'name' : self._name }

        # Complete message marshaling
        print >>file, (
            'void\n'
            'evtag_marshal_%(name)s(struct evbuffer *evbuf, ev_uint32_t tag, '
            'const struct %(name)s *msg)\n'
            '{\n'
            '  struct evbuffer *_buf = evbuffer_new();\n'
            '  assert(_buf != NULL);\n'
            '  evbuffer_drain(_buf, -1);\n'
            '  %(name)s_marshal(_buf, msg);\n'
            '  evtag_marshal(evbuf, tag, EVBUFFER_DATA(_buf), '
            'EVBUFFER_LENGTH(_buf));\n'
            '  evbuffer_free(_buf);\n'
            '}\n' ) % { 'name' : self._name }

class Entry:
    """Base class for one struct field.  Subclasses specialize the C type
    and the generated marshal/unmarshal/assign/get snippets."""

    def __init__(self, type, name, tag):
        self._type = type
        self._name = name
        self._tag = int(tag)
        self._ctype = type
        self._optional = 0
        self._can_be_array = 0
        self._array = 0
        self._line_count = -1
        self._struct = None
        self._refname = None

    def GetTranslation(self):
        # Substitution dict shared by the code templates below.
        return { "parent_name" : self._struct.Name(),
                 "name" : self._name,
                 "ctype" : self._ctype,
                 "refname" : self._refname
                 }

    def SetStruct(self, struct):
        self._struct = struct

    def LineCount(self):
        assert self._line_count != -1
        return self._line_count

    def SetLineCount(self, number):
        self._line_count = number

    def Array(self):
        return self._array

    def Optional(self):
        return self._optional

    def Tag(self):
        return self._tag

    def Name(self):
        return self._name

    def Type(self):
        return self._type

    def MakeArray(self, yes=1):
        self._array = yes

    def MakeOptional(self):
        self._optional = 1

    def GetFuncName(self):
        return '%s_%s_get' % (self._struct.Name(), self._name)

    def GetDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, %s *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def CodeGet(self):
        code = (
            'int',
            '%(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, '
            '%(ctype)s *value)',
            '{',
            '  if (msg->%(name)s_set != 1)',
            '    return (-1);',
            '  *value = msg->%(name)s_data;',
            '  return (0);',
            '}' )
        code = '\n'.join(code)
        code = code % self.GetTranslation()
        return code.split('\n')

    def AssignFuncName(self):
        return '%s_%s_assign' % (self._struct.Name(), self._name)

    def AddFuncName(self):
        return '%s_%s_add' % (self._struct.Name(), self._name)

    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, const %s);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def CodeAssign(self):
        code = [ 'int',
                 '%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,'
                 ' const %(ctype)s value)',
                 '{',
                 '  msg->%(name)s_set = 1;',
                 '  msg->%(name)s_data = value;',
                 '  return (0);',
                 '}' ]
        code = '\n'.join(code)
        code = code % self.GetTranslation()
        return code.split('\n')

    def CodeClear(self, structname):
        code = [ '%s->%s_set = 0;' % (structname, self.Name()) ]
        return code

    def CodeComplete(self, structname):
        # Optional fields never fail the completeness check.
        if self.Optional():
            return []
        code = [ 'if (!%s->%s_set)' % (structname, self.Name()),
                 '  return (-1);' ]
        return code

    def CodeFree(self, name):
        # Plain scalars own no heap memory.
        return []

    def CodeBase(self):
        code = [
            '%(parent_name)s_%(name)s_assign,',
            '%(parent_name)s_%(name)s_get,'
            ]
        if self.Array():
            code.append('%(parent_name)s_%(name)s_add,')

        code = '\n'.join(code)
        code = code % self.GetTranslation()
        return code.split('\n')

    def Verify(self):
        # Fatal-input-error checks, run after parsing the .rpc file.
        if self.Array() and not self._can_be_array:
            print >>sys.stderr, (
                'Entry "%s" cannot be created as an array '
                'around line %d' ) % (self._name, self.LineCount())
            sys.exit(1)
        if not self._struct:
            print >>sys.stderr, (
                'Entry "%s" does not know which struct it belongs to '
                'around line %d' ) % (self._name, self.LineCount())
            sys.exit(1)
        if self._optional and self._array:
            print >>sys.stderr, ( 'Entry "%s" has illegal combination of '
                                  'optional and array around line %d' ) % (
                self._name, self.LineCount() )
            sys.exit(1)

class EntryBytes(Entry):
    """Fixed-length byte array field (ev_uint8_t data[N])."""

    def __init__(self, type, name, tag, length):
        # Init base class
        Entry.__init__(self, type, name, tag)

        self._length = length
        self._ctype = 'ev_uint8_t'

    def GetDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, %s **);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, const %s *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def Declaration(self):
        dcl = ['ev_uint8_t %s_data[%s];' % (self._name, self._length)]

        return dcl

    def CodeGet(self):
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s **value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  if (msg->%s_set != 1)' % name,
                 '    return (-1);',
                 '  *value = msg->%s_data;' % name,
                 '  return (0);',
                 '}' ]
        return code

    def CodeAssign(self):
        name = self._name
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, const %s *value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  msg->%s_set = 1;' % name,
                 '  memcpy(msg->%s_data, value, %s);' % (
            name, self._length),
                 '  return (0);',
                 '}' ]
        return code

    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = [  'if (evtag_unmarshal_fixed(%s, %s, ' % (buf, tag_name) +
                  '%s->%s_data, ' % (var_name, self._name) +
                  'sizeof(%s->%s_data)) == -1) {' % (
            var_name, self._name),
                  '  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                  '  return (-1);',
                  '}'
                  ]
        return code

    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal(%s, %s, %s->%s_data, sizeof(%s->%s_data));' % (
            buf, tag_name, var_name, self._name, var_name, self._name )]
        return code

    def CodeClear(self, structname):
        code = [ '%s->%s_set = 0;' % (structname, self.Name()),
                 'memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
            structname, self._name, structname, self._name)]
        return code

    def CodeNew(self, name):
        code = ['memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
            name, self._name, name, self._name)]
        return code

    def Verify(self):
        if not self._length:
            print >>sys.stderr, 'Entry "%s" needs a length around line %d' % (
                self._name, self.LineCount() )
            sys.exit(1)

        Entry.Verify(self)

class EntryInt(Entry):
    """32-bit unsigned integer field (ev_uint32_t)."""

    def __init__(self, type, name, tag):
        # Init base class
        Entry.__init__(self, type, name, tag)

        self._ctype = 'ev_uint32_t'

    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = ['if (evtag_unmarshal_int(%s, %s, &%s->%s_data) == -1) {' % (
            buf, tag_name, var_name, self._name),
                '  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                '  return (-1);',
                '}' ]
        return code

    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal_int(%s, %s, %s->%s_data);' % (
            buf, tag_name, var_name, self._name)]
        return code

    def Declaration(self):
        dcl = ['ev_uint32_t %s_data;' % self._name]

        return dcl

    def CodeNew(self, name):
        code = ['%s->%s_data = 0;' % (name, self._name)]
        return code

class EntryString(Entry):
    """NUL-terminated heap string field (char *); owns its memory."""

    def __init__(self, type, name, tag):
        # Init base class
        Entry.__init__(self, type, name, tag)

        self._ctype = 'char *'

    def CodeAssign(self):
        name = self._name
        code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,
    const %(ctype)s value)
{
  if (msg->%(name)s_data != NULL)
    free(msg->%(name)s_data);
  if ((msg->%(name)s_data = strdup(value)) == NULL)
    return (-1);
  msg->%(name)s_set = 1;
  return (0);
}""" % self.GetTranslation()

        return code.split('\n')

    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = ['if (evtag_unmarshal_string(%s, %s, &%s->%s_data) == -1) {' % (
            buf, tag_name, var_name, self._name),
                '  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                '  return (-1);',
                '}'
                ]
        return code

    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal_string(%s, %s, %s->%s_data);' % (
            buf, tag_name, var_name, self._name)]
        return code

    def CodeClear(self, structname):
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 '  free (%s->%s_data);' % (structname, self.Name()),
                 '  %s->%s_data = NULL;' % (structname, self.Name()),
                 '  %s->%s_set = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code

    def CodeNew(self, name):
        code = ['%s->%s_data = NULL;' % (name, self._name)]
        return code

    def CodeFree(self, name):
        code = ['if (%s->%s_data != NULL)' % (name, self._name),
                '    free (%s->%s_data); ' % (name, self._name)]

        return code

    def Declaration(self):
        dcl = ['char *%s_data;' % self._name]

        return dcl

class EntryStruct(Entry):
    """Embedded struct field; assignment deep-copies via marshal +
    unmarshal through a temporary evbuffer."""

    def __init__(self, type, name, tag, refname):
        # Init base class
        Entry.__init__(self, type, name, tag)

        self._can_be_array = 1
        self._refname = refname
        self._ctype = 'struct %s*' % refname

    def CodeGet(self):
        # Lazily allocates the sub-struct on first get.
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s *value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  if (msg->%s_set != 1) {' % name,
                 '    msg->%s_data = %s_new();' % (name, self._refname),
                 '    if (msg->%s_data == NULL)' % name,
                 '      return (-1);',
                 '    msg->%s_set = 1;' % name,
                 '  }',
                 '  *value = msg->%s_data;' % name,
                 '  return (0);',
                 '}' ]
        return code

    def CodeAssign(self):
        name = self._name
        code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,
    const %(ctype)s value)
{
   struct evbuffer *tmp = NULL;
   if (msg->%(name)s_set) {
     %(refname)s_clear(msg->%(name)s_data);
     msg->%(name)s_set = 0;
   } else {
     msg->%(name)s_data = %(refname)s_new();
     if (msg->%(name)s_data == NULL) {
       event_warn("%%s: %(refname)s_new()", __func__);
       goto error;
     }
   }
   if ((tmp = evbuffer_new()) == NULL) {
     event_warn("%%s: evbuffer_new()", __func__);
     goto error;
   }
   %(refname)s_marshal(tmp, value);
   if (%(refname)s_unmarshal(msg->%(name)s_data, tmp) == -1) {
     event_warnx("%%s: %(refname)s_unmarshal", __func__);
     goto error;
   }
   msg->%(name)s_set = 1;
   evbuffer_free(tmp);
   return (0);
 error:
   if (tmp != NULL)
     evbuffer_free(tmp);
   if (msg->%(name)s_data != NULL) {
     %(refname)s_free(msg->%(name)s_data);
     msg->%(name)s_data = NULL;
   }
   return (-1);
}""" % self.GetTranslation()
        return code.split('\n')

    def CodeComplete(self, structname):
        # Recursively verify the sub-struct as well.
        if self.Optional():
            code = [ 'if (%s->%s_set && %s_complete(%s->%s_data) == -1)' % (
                structname, self.Name(),
                self._refname, structname, self.Name()),
                     '  return (-1);' ]
        else:
            code = [ 'if (%s_complete(%s->%s_data) == -1)' % (
                self._refname, structname, self.Name()),
                     '  return (-1);' ]

        return code

    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = ['%s->%s_data = %s_new();' % (
            var_name, self._name, self._refname),
                'if (%s->%s_data == NULL)' % (var_name, self._name),
                '  return (-1);',
                'if (evtag_unmarshal_%s(%s, %s, %s->%s_data) == -1) {' % (
            self._refname, buf, tag_name, var_name, self._name),
                '  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                '  return (-1);',
                '}'
                ]
        return code

    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal_%s(%s, %s, %s->%s_data);' % (
            self._refname, buf, tag_name, var_name, self._name)]
        return code

    def CodeClear(self, structname):
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 '  %s_free(%s->%s_data);' % (
            self._refname, structname, self.Name()),
                 '  %s->%s_data = NULL;' % (structname, self.Name()),
                 '  %s->%s_set = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code

    def CodeNew(self, name):
        code = ['%s->%s_data = NULL;' % (name, self._name)]
        return code

    def CodeFree(self, name):
        code = ['if (%s->%s_data != NULL)' % (name, self._name),
                '    %s_free(%s->%s_data); ' % (
            self._refname, name, self._name)]
        return code

    def Declaration(self):
        dcl = ['%s %s_data;' % (self._ctype, self._name)]

        return dcl

class EntryVarBytes(Entry):
    """Variable-length byte buffer field (data pointer + length)."""

    def __init__(self, type, name, tag):
        # Init base class
        Entry.__init__(self, type, name, tag)

        self._ctype = 'ev_uint8_t *'

    def GetDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, %s *, ev_uint32_t *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, const %s, ev_uint32_t);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def CodeAssign(self):
        name = self._name
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, '
                 'const %s value, ev_uint32_t len)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  if (msg->%s_data != NULL)' % name,
                 '    free (msg->%s_data);' % name,
                 '  msg->%s_data = malloc(len);' % name,
                 '  if (msg->%s_data == NULL)' % name,
                 '    return (-1);',
                 '  msg->%s_set = 1;' % name,
                 '  msg->%s_length = len;' % name,
                 '  memcpy(msg->%s_data, value, len);' % name,
                 '  return (0);',
                 '}' ]
        return code

    def CodeGet(self):
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s *value, ev_uint32_t *plen)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  if (msg->%s_set != 1)' % name,
                 '    return (-1);',
                 '  *value = msg->%s_data;' % name,
                 '  *plen = msg->%s_length;' % name,
                 '  return (0);',
                 '}' ]
        return code

    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = ['if (evtag_payload_length(%s, &%s->%s_length) == -1)' % (
            buf, var_name, self._name),
                '  return (-1);',
                # We do not want DoS opportunities
                'if (%s->%s_length > EVBUFFER_LENGTH(%s))' % (
            var_name, self._name, buf),
                '  return (-1);',
                'if ((%s->%s_data = malloc(%s->%s_length)) == NULL)' % (
            var_name, self._name, var_name, self._name),
                '  return (-1);',
                'if (evtag_unmarshal_fixed(%s, %s, %s->%s_data, '
                '%s->%s_length) == -1) {' % (
            buf, tag_name, var_name, self._name, var_name, self._name),
                '  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                '  return (-1);',
                '}'
                ]
        return code

    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal(%s, %s, %s->%s_data, %s->%s_length);' % (
            buf, tag_name, var_name, self._name, var_name, self._name)]
        return code

    def CodeClear(self, structname):
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 '  free (%s->%s_data);' % (structname, self.Name()),
                 '  %s->%s_data = NULL;' % (structname, self.Name()),
                 '  %s->%s_length = 0;' % (structname, self.Name()),
                 '  %s->%s_set = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code

    def CodeNew(self, name):
        code = ['%s->%s_data = NULL;' % (name, self._name),
                '%s->%s_length = 0;' % (name, self._name) ]
        return code

    def CodeFree(self, name):
        code = ['if (%s->%s_data != NULL)' % (name, self._name),
                '    free (%s->%s_data); ' % (name, self._name)]

        return code

    def Declaration(self):
        dcl = ['ev_uint8_t *%s_data;' % self._name,
               'ev_uint32_t %s_length;' % self._name]

        return dcl

class EntryArray(Entry):
    """Array-of-structs field; wraps the element entry and emits a
    growable (doubling realloc) array plus indexed accessors."""

    def __init__(self, entry):
        # Init base class
        Entry.__init__(self, entry._type, entry._name, entry._tag)

        self._entry = entry
        self._refname = entry._refname
        self._ctype = 'struct %s *' % self._refname

    def GetDeclaration(self, funcname):
        """Allows direct access to elements of the array."""
        translate = self.GetTranslation()
        translate["funcname"] = funcname
        code = [
            'int %(funcname)s(struct %(parent_name)s *, int, %(ctype)s *);' %
            translate ]
        return code

    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, int, const %s);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def AddDeclaration(self, funcname):
        code = [ '%s %s(struct %s *);' % (
            self._ctype, funcname, self._struct.Name() ) ]
        return code

    def CodeGet(self):
        code = """int
%(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, int offset,
    %(ctype)s *value)
{
  if (!msg->%(name)s_set || offset < 0 || offset >= msg->%(name)s_length)
    return (-1);
  *value = msg->%(name)s_data[offset];
  return (0);
}""" % self.GetTranslation()

        return code.split('\n')

    def CodeAssign(self):
        code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg, int off,
    const %(ctype)s value)
{
  struct evbuffer *tmp = NULL;
  if (!msg->%(name)s_set || off < 0 || off >= msg->%(name)s_length)
    return (-1);
  %(refname)s_clear(msg->%(name)s_data[off]);
  if ((tmp = evbuffer_new()) == NULL) {
    event_warn("%%s: evbuffer_new()", __func__);
    goto error;
  }
  %(refname)s_marshal(tmp, value);
  if (%(refname)s_unmarshal(msg->%(name)s_data[off], tmp) == -1) {
    event_warnx("%%s: %(refname)s_unmarshal", __func__);
    goto error;
  }
  evbuffer_free(tmp);
  return (0);
error:
  if (tmp != NULL)
    evbuffer_free(tmp);
  %(refname)s_clear(msg->%(name)s_data[off]);
  return (-1);
}""" % self.GetTranslation()

        return code.split('\n')

    def CodeAdd(self):
        # Appends a freshly-new()ed element, doubling capacity as needed.
        code = \
"""%(ctype)s
%(parent_name)s_%(name)s_add(struct %(parent_name)s *msg)
{
  if (++msg->%(name)s_length >= msg->%(name)s_num_allocated) {
    int tobe_allocated = msg->%(name)s_num_allocated;
    %(ctype)s* new_data = NULL;
    tobe_allocated = !tobe_allocated ? 1 : tobe_allocated << 1;
    new_data = (%(ctype)s*) realloc(msg->%(name)s_data,
        tobe_allocated * sizeof(%(ctype)s));
    if (new_data == NULL)
      goto error;
    msg->%(name)s_data = new_data;
    msg->%(name)s_num_allocated = tobe_allocated;
  }
  msg->%(name)s_data[msg->%(name)s_length - 1] = %(refname)s_new();
  if (msg->%(name)s_data[msg->%(name)s_length - 1] == NULL)
    goto error;
  msg->%(name)s_set = 1;
  return (msg->%(name)s_data[msg->%(name)s_length - 1]);
error:
  --msg->%(name)s_length;
  return (NULL);
}
""" % self.GetTranslation()
        return code.split('\n')

    def CodeComplete(self, structname):
        code = []
        translate = self.GetTranslation()

        # NOTE(review): latent bug -- this append uses %(structname)s
        # before translate["structname"] is set two lines below, so an
        # *optional* array entry raises KeyError here.  Left as found;
        # confirm against upstream before fixing.
        if self.Optional():
            code.append( 'if (%(structname)s->%(name)s_set)' % translate)

        translate["structname"] = structname
        tmp = """{
  int i;
  for (i = 0; i < %(structname)s->%(name)s_length; ++i) {
    if (%(refname)s_complete(%(structname)s->%(name)s_data[i]) == -1)
      return (-1);
  }
}""" % translate
        code.extend(tmp.split('\n'))

        return code

    def CodeUnmarshal(self, buf, tag_name, var_name):
        translate = self.GetTranslation()
        translate["var_name"] = var_name
        translate["buf"] = buf
translate["tag_name"] = tag_name code = """if (%(parent_name)s_%(name)s_add(%(var_name)s) == NULL) return (-1); if (evtag_unmarshal_%(refname)s(%(buf)s, %(tag_name)s, %(var_name)s->%(name)s_data[%(var_name)s->%(name)s_length - 1]) == -1) { --%(var_name)s->%(name)s_length; event_warnx("%%s: failed to unmarshal %(name)s", __func__); return (-1); }""" % translate return code.split('\n') def CodeMarshal(self, buf, tag_name, var_name): code = ['{', ' int i;', ' for (i = 0; i < %s->%s_length; ++i) {' % ( var_name, self._name), ' evtag_marshal_%s(%s, %s, %s->%s_data[i]);' % ( self._refname, buf, tag_name, var_name, self._name), ' }', '}' ] return code def CodeClear(self, structname): code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()), ' int i;', ' for (i = 0; i < %s->%s_length; ++i) {' % ( structname, self.Name()), ' %s_free(%s->%s_data[i]);' % ( self._refname, structname, self.Name()), ' }', ' free(%s->%s_data);' % (structname, self.Name()), ' %s->%s_data = NULL;' % (structname, self.Name()), ' %s->%s_set = 0;' % (structname, self.Name()), ' %s->%s_length = 0;' % (structname, self.Name()), ' %s->%s_num_allocated = 0;' % (structname, self.Name()), '}' ] return code def CodeNew(self, name): code = ['%s->%s_data = NULL;' % (name, self._name), '%s->%s_length = 0;' % (name, self._name), '%s->%s_num_allocated = 0;' % (name, self._name)] return code def CodeFree(self, name): code = ['if (%s->%s_data != NULL) {' % (name, self._name), ' int i;', ' for (i = 0; i < %s->%s_length; ++i) {' % ( name, self._name), ' %s_free(%s->%s_data[i]); ' % ( self._refname, name, self._name), ' %s->%s_data[i] = NULL;' % (name, self._name), ' }', ' free(%s->%s_data);' % (name, self._name), ' %s->%s_data = NULL;' % (name, self._name), ' %s->%s_length = 0;' % (name, self._name), ' %s->%s_num_allocated = 0;' % (name, self._name), '}' ] return code def Declaration(self): dcl = ['struct %s **%s_data;' % (self._refname, self._name), 'int %s_length;' % self._name, 'int %s_num_allocated;' % 
self._name ] return dcl def NormalizeLine(line): global white global cppcomment line = cppcomment.sub('', line) line = line.strip() line = white.sub(' ', line) return line def ProcessOneEntry(newstruct, entry): optional = 0 array = 0 entry_type = '' name = '' tag = '' tag_set = None separator = '' fixed_length = '' tokens = entry.split(' ') while tokens: token = tokens[0] tokens = tokens[1:] if not entry_type: if not optional and token == 'optional': optional = 1 continue if not array and token == 'array': array = 1 continue if not entry_type: entry_type = token continue if not name: res = re.match(r'^([^\[\]]+)(\[.*\])?$', token) if not res: print >>sys.stderr, 'Cannot parse name: \"%s\" around %d' % ( entry, line_count) sys.exit(1) name = res.group(1) fixed_length = res.group(2) if fixed_length: fixed_length = fixed_length[1:-1] continue if not separator: separator = token if separator != '=': print >>sys.stderr, 'Expected "=" after name \"%s\" got %s' % ( name, token) sys.exit(1) continue if not tag_set: tag_set = 1 if not re.match(r'^(0x)?[0-9]+$', token): print >>sys.stderr, 'Expected tag number: \"%s\"' % entry sys.exit(1) tag = int(token, 0) continue print >>sys.stderr, 'Cannot parse \"%s\"' % entry sys.exit(1) if not tag_set: print >>sys.stderr, 'Need tag number: \"%s\"' % entry sys.exit(1) # Create the right entry if entry_type == 'bytes': if fixed_length: newentry = EntryBytes(entry_type, name, tag, fixed_length) else: newentry = EntryVarBytes(entry_type, name, tag) elif entry_type == 'int' and not fixed_length: newentry = EntryInt(entry_type, name, tag) elif entry_type == 'string' and not fixed_length: newentry = EntryString(entry_type, name, tag) else: res = re.match(r'^struct\[(%s)\]$' % _STRUCT_RE, entry_type, re.IGNORECASE) if res: # References another struct defined in our file newentry = EntryStruct(entry_type, name, tag, res.group(1)) else: print >>sys.stderr, 'Bad type: "%s" in "%s"' % (entry_type, entry) sys.exit(1) structs = [] if optional: 
newentry.MakeOptional() if array: newentry.MakeArray() newentry.SetStruct(newstruct) newentry.SetLineCount(line_count) newentry.Verify() if array: # We need to encapsulate this entry into a struct newname = newentry.Name()+ '_array' # Now borgify the new entry. newentry = EntryArray(newentry) newentry.SetStruct(newstruct) newentry.SetLineCount(line_count) newentry.MakeArray() newstruct.AddEntry(newentry) return structs def ProcessStruct(data): tokens = data.split(' ') # First three tokens are: 'struct' 'name' '{' newstruct = Struct(tokens[1]) inside = ' '.join(tokens[3:-1]) tokens = inside.split(';') structs = [] for entry in tokens: entry = NormalizeLine(entry) if not entry: continue # It's possible that new structs get defined in here structs.extend(ProcessOneEntry(newstruct, entry)) structs.append(newstruct) return structs def GetNextStruct(file): global line_count global cppdirect got_struct = 0 processed_lines = [] have_c_comment = 0 data = '' while 1: line = file.readline() if not line: break line_count += 1 line = line[:-1] if not have_c_comment and re.search(r'/\*', line): if re.search(r'/\*.*\*/', line): line = re.sub(r'/\*.*\*/', '', line) else: line = re.sub(r'/\*.*$', '', line) have_c_comment = 1 if have_c_comment: if not re.search(r'\*/', line): continue have_c_comment = 0 line = re.sub(r'^.*\*/', '', line) line = NormalizeLine(line) if not line: continue if not got_struct: if re.match(r'#include ["<].*[>"]', line): cppdirect.append(line) continue if re.match(r'^#(if( |def)|endif)', line): cppdirect.append(line) continue if re.match(r'^#define', line): headerdirect.append(line) continue if not re.match(r'^struct %s {$' % _STRUCT_RE, line, re.IGNORECASE): print >>sys.stderr, 'Missing struct on line %d: %s' % ( line_count, line) sys.exit(1) else: got_struct = 1 data += line continue # We are inside the struct tokens = line.split('}') if len(tokens) == 1: data += ' ' + line continue if len(tokens[1]): print >>sys.stderr, 'Trailing garbage after struct on 
line %d' % ( line_count ) sys.exit(1) # We found the end of the struct data += ' %s}' % tokens[0] break # Remove any comments, that might be in there data = re.sub(r'/\*.*\*/', '', data) return data def Parse(file): """ Parses the input file and returns C code and corresponding header file. """ entities = [] while 1: # Just gets the whole struct nicely formatted data = GetNextStruct(file) if not data: break entities.extend(ProcessStruct(data)) return entities def GuardName(name): name = '_'.join(name.split('.')) name = '_'.join(name.split('/')) guard = '_'+name.upper()+'_' return guard def HeaderPreamble(name): guard = GuardName(name) pre = ( '/*\n' ' * Automatically generated from %s\n' ' */\n\n' '#ifndef %s\n' '#define %s\n\n' ) % ( name, guard, guard) # insert stdint.h - let's hope everyone has it pre += ( '#include <event-config.h>\n' '#ifdef _EVENT_HAVE_STDINT_H\n' '#include <stdint.h>\n' '#endif\n' ) for statement in headerdirect: pre += '%s\n' % statement if headerdirect: pre += '\n' pre += ( '#define EVTAG_HAS(msg, member) ((msg)->member##_set == 1)\n' '#ifdef __GNUC__\n' '#define EVTAG_ASSIGN(msg, member, args...) ' '(*(msg)->base->member##_assign)(msg, ## args)\n' '#define EVTAG_GET(msg, member, args...) ' '(*(msg)->base->member##_get)(msg, ## args)\n' '#else\n' '#define EVTAG_ASSIGN(msg, member, ...) ' '(*(msg)->base->member##_assign)(msg, ## __VA_ARGS__)\n' '#define EVTAG_GET(msg, member, ...) ' '(*(msg)->base->member##_get)(msg, ## __VA_ARGS__)\n' '#endif\n' '#define EVTAG_ADD(msg, member) (*(msg)->base->member##_add)(msg)\n' '#define EVTAG_LEN(msg, member) ((msg)->member##_length)\n' ) return pre def HeaderPostamble(name): guard = GuardName(name) return '#endif /* %s */' % guard def BodyPreamble(name): global _NAME global _VERSION header_file = '.'.join(name.split('.')[:-1]) + '.gen.h' pre = ( '/*\n' ' * Automatically generated from %s\n' ' * by %s/%s. 
DO NOT EDIT THIS FILE.\n' ' */\n\n' ) % (name, _NAME, _VERSION) pre += ( '#include <sys/types.h>\n' '#ifdef _EVENT_HAVE_SYS_TIME_H\n' '#include <sys/time.h>\n' '#endif\n' '#include <stdlib.h>\n' '#include <string.h>\n' '#include <assert.h>\n' '#define EVENT_NO_STRUCT\n' '#include <event.h>\n\n' '#ifdef _EVENT___func__\n' '#define __func__ _EVENT___func__\n' '#endif\n' ) for statement in cppdirect: pre += '%s\n' % statement pre += '\n#include "%s"\n\n' % header_file pre += 'void event_err(int eval, const char *fmt, ...);\n' pre += 'void event_warn(const char *fmt, ...);\n' pre += 'void event_errx(int eval, const char *fmt, ...);\n' pre += 'void event_warnx(const char *fmt, ...);\n\n' return pre def main(argv): if len(argv) < 2 or not argv[1]: print >>sys.stderr, 'Need RPC description file as first argument.' sys.exit(1) filename = argv[1] ext = filename.split('.')[-1] if ext != 'rpc': print >>sys.stderr, 'Unrecognized file extension: %s' % ext sys.exit(1) print >>sys.stderr, 'Reading \"%s\"' % filename fp = open(filename, 'r') entities = Parse(fp) fp.close() header_file = '.'.join(filename.split('.')[:-1]) + '.gen.h' impl_file = '.'.join(filename.split('.')[:-1]) + '.gen.c' print >>sys.stderr, '... creating "%s"' % header_file header_fp = open(header_file, 'w') print >>header_fp, HeaderPreamble(filename) # Create forward declarations: allows other structs to reference # each other for entry in entities: entry.PrintForwardDeclaration(header_fp) print >>header_fp, '' for entry in entities: entry.PrintTags(header_fp) entry.PrintDeclaration(header_fp) print >>header_fp, HeaderPostamble(filename) header_fp.close() print >>sys.stderr, '... creating "%s"' % impl_file impl_fp = open(impl_file, 'w') print >>impl_fp, BodyPreamble(filename) for entry in entities: entry.PrintCode(impl_fp) impl_fp.close() if __name__ == '__main__': main(sys.argv)
Python
#!/usr/bin/env python # # Copyright (c) 2005 Niels Provos <provos@citi.umich.edu> # All rights reserved. # # Generates marshaling code based on libevent. import sys import re # _NAME = "event_rpcgen.py" _VERSION = "0.1" _STRUCT_RE = '[a-z][a-z_0-9]*' # Globals line_count = 0 white = re.compile(r'^\s+') cppcomment = re.compile(r'\/\/.*$') headerdirect = [] cppdirect = [] # Holds everything that makes a struct class Struct: def __init__(self, name): self._name = name self._entries = [] self._tags = {} print >>sys.stderr, ' Created struct: %s' % name def AddEntry(self, entry): if self._tags.has_key(entry.Tag()): print >>sys.stderr, ( 'Entry "%s" duplicates tag number ' '%d from "%s" around line %d' ) % ( entry.Name(), entry.Tag(), self._tags[entry.Tag()], line_count) sys.exit(1) self._entries.append(entry) self._tags[entry.Tag()] = entry.Name() print >>sys.stderr, ' Added entry: %s' % entry.Name() def Name(self): return self._name def EntryTagName(self, entry): """Creates the name inside an enumeration for distinguishing data types.""" name = "%s_%s" % (self._name, entry.Name()) return name.upper() def PrintIdented(self, file, ident, code): """Takes an array, add indentation to each entry and prints it.""" for entry in code: print >>file, '%s%s' % (ident, entry) def PrintTags(self, file): """Prints the tag definitions for a structure.""" print >>file, '/* Tag definition for %s */' % self._name print >>file, 'enum %s_ {' % self._name.lower() for entry in self._entries: print >>file, ' %s=%d,' % (self.EntryTagName(entry), entry.Tag()) print >>file, ' %s_MAX_TAGS' % (self._name.upper()) print >>file, '};\n' def PrintForwardDeclaration(self, file): print >>file, 'struct %s;' % self._name def PrintDeclaration(self, file): print >>file, '/* Structure declaration for %s */' % self._name print >>file, 'struct %s_access_ {' % self._name for entry in self._entries: dcl = entry.AssignDeclaration('(*%s_assign)' % entry.Name()) dcl.extend( entry.GetDeclaration('(*%s_get)' % 
entry.Name())) if entry.Array(): dcl.extend( entry.AddDeclaration('(*%s_add)' % entry.Name())) self.PrintIdented(file, ' ', dcl) print >>file, '};\n' print >>file, 'struct %s {' % self._name print >>file, ' struct %s_access_ *base;\n' % self._name for entry in self._entries: dcl = entry.Declaration() self.PrintIdented(file, ' ', dcl) print >>file, '' for entry in self._entries: print >>file, ' ev_uint8_t %s_set;' % entry.Name() print >>file, '};\n' print >>file, \ """struct %(name)s *%(name)s_new(void); void %(name)s_free(struct %(name)s *); void %(name)s_clear(struct %(name)s *); void %(name)s_marshal(struct evbuffer *, const struct %(name)s *); int %(name)s_unmarshal(struct %(name)s *, struct evbuffer *); int %(name)s_complete(struct %(name)s *); void evtag_marshal_%(name)s(struct evbuffer *, ev_uint32_t, const struct %(name)s *); int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t, struct %(name)s *);""" % { 'name' : self._name } # Write a setting function of every variable for entry in self._entries: self.PrintIdented(file, '', entry.AssignDeclaration( entry.AssignFuncName())) self.PrintIdented(file, '', entry.GetDeclaration( entry.GetFuncName())) if entry.Array(): self.PrintIdented(file, '', entry.AddDeclaration( entry.AddFuncName())) print >>file, '/* --- %s done --- */\n' % self._name def PrintCode(self, file): print >>file, ('/*\n' ' * Implementation of %s\n' ' */\n') % self._name print >>file, \ 'static struct %(name)s_access_ __%(name)s_base = {' % \ { 'name' : self._name } for entry in self._entries: self.PrintIdented(file, ' ', entry.CodeBase()) print >>file, '};\n' # Creation print >>file, ( 'struct %(name)s *\n' '%(name)s_new(void)\n' '{\n' ' struct %(name)s *tmp;\n' ' if ((tmp = malloc(sizeof(struct %(name)s))) == NULL) {\n' ' event_warn("%%s: malloc", __func__);\n' ' return (NULL);\n' ' }\n' ' tmp->base = &__%(name)s_base;\n') % { 'name' : self._name } for entry in self._entries: self.PrintIdented(file, ' ', entry.CodeNew('tmp')) print 
>>file, ' tmp->%s_set = 0;\n' % entry.Name() print >>file, ( ' return (tmp);\n' '}\n') # Adding for entry in self._entries: if entry.Array(): self.PrintIdented(file, '', entry.CodeAdd()) print >>file, '' # Assigning for entry in self._entries: self.PrintIdented(file, '', entry.CodeAssign()) print >>file, '' # Getting for entry in self._entries: self.PrintIdented(file, '', entry.CodeGet()) print >>file, '' # Clearing print >>file, ( 'void\n' '%(name)s_clear(struct %(name)s *tmp)\n' '{' ) % { 'name' : self._name } for entry in self._entries: self.PrintIdented(file, ' ', entry.CodeClear('tmp')) print >>file, '}\n' # Freeing print >>file, ( 'void\n' '%(name)s_free(struct %(name)s *tmp)\n' '{' ) % { 'name' : self._name } for entry in self._entries: self.PrintIdented(file, ' ', entry.CodeFree('tmp')) print >>file, (' free(tmp);\n' '}\n') # Marshaling print >>file, ('void\n' '%(name)s_marshal(struct evbuffer *evbuf, ' 'const struct %(name)s *tmp)' '{') % { 'name' : self._name } for entry in self._entries: indent = ' ' # Optional entries do not have to be set if entry.Optional(): indent += ' ' print >>file, ' if (tmp->%s_set) {' % entry.Name() self.PrintIdented( file, indent, entry.CodeMarshal('evbuf', self.EntryTagName(entry), 'tmp')) if entry.Optional(): print >>file, ' }' print >>file, '}\n' # Unmarshaling print >>file, ('int\n' '%(name)s_unmarshal(struct %(name)s *tmp, ' ' struct evbuffer *evbuf)\n' '{\n' ' ev_uint32_t tag;\n' ' while (EVBUFFER_LENGTH(evbuf) > 0) {\n' ' if (evtag_peek(evbuf, &tag) == -1)\n' ' return (-1);\n' ' switch (tag) {\n' ) % { 'name' : self._name } for entry in self._entries: print >>file, ' case %s:\n' % self.EntryTagName(entry) if not entry.Array(): print >>file, ( ' if (tmp->%s_set)\n' ' return (-1);' ) % (entry.Name()) self.PrintIdented( file, ' ', entry.CodeUnmarshal('evbuf', self.EntryTagName(entry), 'tmp')) print >>file, ( ' tmp->%s_set = 1;\n' % entry.Name() + ' break;\n' ) print >>file, ( ' default:\n' ' return -1;\n' ' }\n' ' }\n' ) # 
Check if it was decoded completely print >>file, ( ' if (%(name)s_complete(tmp) == -1)\n' ' return (-1);' ) % { 'name' : self._name } # Successfully decoded print >>file, ( ' return (0);\n' '}\n') # Checking if a structure has all the required data print >>file, ( 'int\n' '%(name)s_complete(struct %(name)s *msg)\n' '{' ) % { 'name' : self._name } for entry in self._entries: self.PrintIdented( file, ' ', entry.CodeComplete('msg')) print >>file, ( ' return (0);\n' '}\n' ) # Complete message unmarshaling print >>file, ( 'int\n' 'evtag_unmarshal_%(name)s(struct evbuffer *evbuf, ' 'ev_uint32_t need_tag, struct %(name)s *msg)\n' '{\n' ' ev_uint32_t tag;\n' ' int res = -1;\n' '\n' ' struct evbuffer *tmp = evbuffer_new();\n' '\n' ' if (evtag_unmarshal(evbuf, &tag, tmp) == -1' ' || tag != need_tag)\n' ' goto error;\n' '\n' ' if (%(name)s_unmarshal(msg, tmp) == -1)\n' ' goto error;\n' '\n' ' res = 0;\n' '\n' ' error:\n' ' evbuffer_free(tmp);\n' ' return (res);\n' '}\n' ) % { 'name' : self._name } # Complete message marshaling print >>file, ( 'void\n' 'evtag_marshal_%(name)s(struct evbuffer *evbuf, ev_uint32_t tag, ' 'const struct %(name)s *msg)\n' '{\n' ' struct evbuffer *_buf = evbuffer_new();\n' ' assert(_buf != NULL);\n' ' evbuffer_drain(_buf, -1);\n' ' %(name)s_marshal(_buf, msg);\n' ' evtag_marshal(evbuf, tag, EVBUFFER_DATA(_buf), ' 'EVBUFFER_LENGTH(_buf));\n' ' evbuffer_free(_buf);\n' '}\n' ) % { 'name' : self._name } class Entry: def __init__(self, type, name, tag): self._type = type self._name = name self._tag = int(tag) self._ctype = type self._optional = 0 self._can_be_array = 0 self._array = 0 self._line_count = -1 self._struct = None self._refname = None def GetTranslation(self): return { "parent_name" : self._struct.Name(), "name" : self._name, "ctype" : self._ctype, "refname" : self._refname } def SetStruct(self, struct): self._struct = struct def LineCount(self): assert self._line_count != -1 return self._line_count def SetLineCount(self, number): 
self._line_count = number def Array(self): return self._array def Optional(self): return self._optional def Tag(self): return self._tag def Name(self): return self._name def Type(self): return self._type def MakeArray(self, yes=1): self._array = yes def MakeOptional(self): self._optional = 1 def GetFuncName(self): return '%s_%s_get' % (self._struct.Name(), self._name) def GetDeclaration(self, funcname): code = [ 'int %s(struct %s *, %s *);' % ( funcname, self._struct.Name(), self._ctype ) ] return code def CodeGet(self): code = ( 'int', '%(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, ' '%(ctype)s *value)', '{', ' if (msg->%(name)s_set != 1)', ' return (-1);', ' *value = msg->%(name)s_data;', ' return (0);', '}' ) code = '\n'.join(code) code = code % self.GetTranslation() return code.split('\n') def AssignFuncName(self): return '%s_%s_assign' % (self._struct.Name(), self._name) def AddFuncName(self): return '%s_%s_add' % (self._struct.Name(), self._name) def AssignDeclaration(self, funcname): code = [ 'int %s(struct %s *, const %s);' % ( funcname, self._struct.Name(), self._ctype ) ] return code def CodeAssign(self): code = [ 'int', '%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,' ' const %(ctype)s value)', '{', ' msg->%(name)s_set = 1;', ' msg->%(name)s_data = value;', ' return (0);', '}' ] code = '\n'.join(code) code = code % self.GetTranslation() return code.split('\n') def CodeClear(self, structname): code = [ '%s->%s_set = 0;' % (structname, self.Name()) ] return code def CodeComplete(self, structname): if self.Optional(): return [] code = [ 'if (!%s->%s_set)' % (structname, self.Name()), ' return (-1);' ] return code def CodeFree(self, name): return [] def CodeBase(self): code = [ '%(parent_name)s_%(name)s_assign,', '%(parent_name)s_%(name)s_get,' ] if self.Array(): code.append('%(parent_name)s_%(name)s_add,') code = '\n'.join(code) code = code % self.GetTranslation() return code.split('\n') def Verify(self): if self.Array() and not 
self._can_be_array: print >>sys.stderr, ( 'Entry "%s" cannot be created as an array ' 'around line %d' ) % (self._name, self.LineCount()) sys.exit(1) if not self._struct: print >>sys.stderr, ( 'Entry "%s" does not know which struct it belongs to ' 'around line %d' ) % (self._name, self.LineCount()) sys.exit(1) if self._optional and self._array: print >>sys.stderr, ( 'Entry "%s" has illegal combination of ' 'optional and array around line %d' ) % ( self._name, self.LineCount() ) sys.exit(1) class EntryBytes(Entry): def __init__(self, type, name, tag, length): # Init base class Entry.__init__(self, type, name, tag) self._length = length self._ctype = 'ev_uint8_t' def GetDeclaration(self, funcname): code = [ 'int %s(struct %s *, %s **);' % ( funcname, self._struct.Name(), self._ctype ) ] return code def AssignDeclaration(self, funcname): code = [ 'int %s(struct %s *, const %s *);' % ( funcname, self._struct.Name(), self._ctype ) ] return code def Declaration(self): dcl = ['ev_uint8_t %s_data[%s];' % (self._name, self._length)] return dcl def CodeGet(self): name = self._name code = [ 'int', '%s_%s_get(struct %s *msg, %s **value)' % ( self._struct.Name(), name, self._struct.Name(), self._ctype), '{', ' if (msg->%s_set != 1)' % name, ' return (-1);', ' *value = msg->%s_data;' % name, ' return (0);', '}' ] return code def CodeAssign(self): name = self._name code = [ 'int', '%s_%s_assign(struct %s *msg, const %s *value)' % ( self._struct.Name(), name, self._struct.Name(), self._ctype), '{', ' msg->%s_set = 1;' % name, ' memcpy(msg->%s_data, value, %s);' % ( name, self._length), ' return (0);', '}' ] return code def CodeUnmarshal(self, buf, tag_name, var_name): code = [ 'if (evtag_unmarshal_fixed(%s, %s, ' % (buf, tag_name) + '%s->%s_data, ' % (var_name, self._name) + 'sizeof(%s->%s_data)) == -1) {' % ( var_name, self._name), ' event_warnx("%%s: failed to unmarshal %s", __func__);' % ( self._name ), ' return (-1);', '}' ] return code def CodeMarshal(self, buf, tag_name, 
var_name): code = ['evtag_marshal(%s, %s, %s->%s_data, sizeof(%s->%s_data));' % ( buf, tag_name, var_name, self._name, var_name, self._name )] return code def CodeClear(self, structname): code = [ '%s->%s_set = 0;' % (structname, self.Name()), 'memset(%s->%s_data, 0, sizeof(%s->%s_data));' % ( structname, self._name, structname, self._name)] return code def CodeNew(self, name): code = ['memset(%s->%s_data, 0, sizeof(%s->%s_data));' % ( name, self._name, name, self._name)] return code def Verify(self): if not self._length: print >>sys.stderr, 'Entry "%s" needs a length around line %d' % ( self._name, self.LineCount() ) sys.exit(1) Entry.Verify(self) class EntryInt(Entry): def __init__(self, type, name, tag): # Init base class Entry.__init__(self, type, name, tag) self._ctype = 'ev_uint32_t' def CodeUnmarshal(self, buf, tag_name, var_name): code = ['if (evtag_unmarshal_int(%s, %s, &%s->%s_data) == -1) {' % ( buf, tag_name, var_name, self._name), ' event_warnx("%%s: failed to unmarshal %s", __func__);' % ( self._name ), ' return (-1);', '}' ] return code def CodeMarshal(self, buf, tag_name, var_name): code = ['evtag_marshal_int(%s, %s, %s->%s_data);' % ( buf, tag_name, var_name, self._name)] return code def Declaration(self): dcl = ['ev_uint32_t %s_data;' % self._name] return dcl def CodeNew(self, name): code = ['%s->%s_data = 0;' % (name, self._name)] return code class EntryString(Entry): def __init__(self, type, name, tag): # Init base class Entry.__init__(self, type, name, tag) self._ctype = 'char *' def CodeAssign(self): name = self._name code = """int %(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg, const %(ctype)s value) { if (msg->%(name)s_data != NULL) free(msg->%(name)s_data); if ((msg->%(name)s_data = strdup(value)) == NULL) return (-1); msg->%(name)s_set = 1; return (0); }""" % self.GetTranslation() return code.split('\n') def CodeUnmarshal(self, buf, tag_name, var_name): code = ['if (evtag_unmarshal_string(%s, %s, &%s->%s_data) == -1) {' % ( 
buf, tag_name, var_name, self._name), ' event_warnx("%%s: failed to unmarshal %s", __func__);' % ( self._name ), ' return (-1);', '}' ] return code def CodeMarshal(self, buf, tag_name, var_name): code = ['evtag_marshal_string(%s, %s, %s->%s_data);' % ( buf, tag_name, var_name, self._name)] return code def CodeClear(self, structname): code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()), ' free (%s->%s_data);' % (structname, self.Name()), ' %s->%s_data = NULL;' % (structname, self.Name()), ' %s->%s_set = 0;' % (structname, self.Name()), '}' ] return code def CodeNew(self, name): code = ['%s->%s_data = NULL;' % (name, self._name)] return code def CodeFree(self, name): code = ['if (%s->%s_data != NULL)' % (name, self._name), ' free (%s->%s_data); ' % (name, self._name)] return code def Declaration(self): dcl = ['char *%s_data;' % self._name] return dcl class EntryStruct(Entry): def __init__(self, type, name, tag, refname): # Init base class Entry.__init__(self, type, name, tag) self._can_be_array = 1 self._refname = refname self._ctype = 'struct %s*' % refname def CodeGet(self): name = self._name code = [ 'int', '%s_%s_get(struct %s *msg, %s *value)' % ( self._struct.Name(), name, self._struct.Name(), self._ctype), '{', ' if (msg->%s_set != 1) {' % name, ' msg->%s_data = %s_new();' % (name, self._refname), ' if (msg->%s_data == NULL)' % name, ' return (-1);', ' msg->%s_set = 1;' % name, ' }', ' *value = msg->%s_data;' % name, ' return (0);', '}' ] return code def CodeAssign(self): name = self._name code = """int %(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg, const %(ctype)s value) { struct evbuffer *tmp = NULL; if (msg->%(name)s_set) { %(refname)s_clear(msg->%(name)s_data); msg->%(name)s_set = 0; } else { msg->%(name)s_data = %(refname)s_new(); if (msg->%(name)s_data == NULL) { event_warn("%%s: %(refname)s_new()", __func__); goto error; } } if ((tmp = evbuffer_new()) == NULL) { event_warn("%%s: evbuffer_new()", __func__); goto error; } 
%(refname)s_marshal(tmp, value); if (%(refname)s_unmarshal(msg->%(name)s_data, tmp) == -1) { event_warnx("%%s: %(refname)s_unmarshal", __func__); goto error; } msg->%(name)s_set = 1; evbuffer_free(tmp); return (0); error: if (tmp != NULL) evbuffer_free(tmp); if (msg->%(name)s_data != NULL) { %(refname)s_free(msg->%(name)s_data); msg->%(name)s_data = NULL; } return (-1); }""" % self.GetTranslation() return code.split('\n') def CodeComplete(self, structname): if self.Optional(): code = [ 'if (%s->%s_set && %s_complete(%s->%s_data) == -1)' % ( structname, self.Name(), self._refname, structname, self.Name()), ' return (-1);' ] else: code = [ 'if (%s_complete(%s->%s_data) == -1)' % ( self._refname, structname, self.Name()), ' return (-1);' ] return code def CodeUnmarshal(self, buf, tag_name, var_name): code = ['%s->%s_data = %s_new();' % ( var_name, self._name, self._refname), 'if (%s->%s_data == NULL)' % (var_name, self._name), ' return (-1);', 'if (evtag_unmarshal_%s(%s, %s, %s->%s_data) == -1) {' % ( self._refname, buf, tag_name, var_name, self._name), ' event_warnx("%%s: failed to unmarshal %s", __func__);' % ( self._name ), ' return (-1);', '}' ] return code def CodeMarshal(self, buf, tag_name, var_name): code = ['evtag_marshal_%s(%s, %s, %s->%s_data);' % ( self._refname, buf, tag_name, var_name, self._name)] return code def CodeClear(self, structname): code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()), ' %s_free(%s->%s_data);' % ( self._refname, structname, self.Name()), ' %s->%s_data = NULL;' % (structname, self.Name()), ' %s->%s_set = 0;' % (structname, self.Name()), '}' ] return code def CodeNew(self, name): code = ['%s->%s_data = NULL;' % (name, self._name)] return code def CodeFree(self, name): code = ['if (%s->%s_data != NULL)' % (name, self._name), ' %s_free(%s->%s_data); ' % ( self._refname, name, self._name)] return code def Declaration(self): dcl = ['%s %s_data;' % (self._ctype, self._name)] return dcl class EntryVarBytes(Entry): def 
__init__(self, type, name, tag): # Init base class Entry.__init__(self, type, name, tag) self._ctype = 'ev_uint8_t *' def GetDeclaration(self, funcname): code = [ 'int %s(struct %s *, %s *, ev_uint32_t *);' % ( funcname, self._struct.Name(), self._ctype ) ] return code def AssignDeclaration(self, funcname): code = [ 'int %s(struct %s *, const %s, ev_uint32_t);' % ( funcname, self._struct.Name(), self._ctype ) ] return code def CodeAssign(self): name = self._name code = [ 'int', '%s_%s_assign(struct %s *msg, ' 'const %s value, ev_uint32_t len)' % ( self._struct.Name(), name, self._struct.Name(), self._ctype), '{', ' if (msg->%s_data != NULL)' % name, ' free (msg->%s_data);' % name, ' msg->%s_data = malloc(len);' % name, ' if (msg->%s_data == NULL)' % name, ' return (-1);', ' msg->%s_set = 1;' % name, ' msg->%s_length = len;' % name, ' memcpy(msg->%s_data, value, len);' % name, ' return (0);', '}' ] return code def CodeGet(self): name = self._name code = [ 'int', '%s_%s_get(struct %s *msg, %s *value, ev_uint32_t *plen)' % ( self._struct.Name(), name, self._struct.Name(), self._ctype), '{', ' if (msg->%s_set != 1)' % name, ' return (-1);', ' *value = msg->%s_data;' % name, ' *plen = msg->%s_length;' % name, ' return (0);', '}' ] return code def CodeUnmarshal(self, buf, tag_name, var_name): code = ['if (evtag_payload_length(%s, &%s->%s_length) == -1)' % ( buf, var_name, self._name), ' return (-1);', # We do not want DoS opportunities 'if (%s->%s_length > EVBUFFER_LENGTH(%s))' % ( var_name, self._name, buf), ' return (-1);', 'if ((%s->%s_data = malloc(%s->%s_length)) == NULL)' % ( var_name, self._name, var_name, self._name), ' return (-1);', 'if (evtag_unmarshal_fixed(%s, %s, %s->%s_data, ' '%s->%s_length) == -1) {' % ( buf, tag_name, var_name, self._name, var_name, self._name), ' event_warnx("%%s: failed to unmarshal %s", __func__);' % ( self._name ), ' return (-1);', '}' ] return code def CodeMarshal(self, buf, tag_name, var_name): code = ['evtag_marshal(%s, %s, 
%s->%s_data, %s->%s_length);' % ( buf, tag_name, var_name, self._name, var_name, self._name)] return code def CodeClear(self, structname): code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()), ' free (%s->%s_data);' % (structname, self.Name()), ' %s->%s_data = NULL;' % (structname, self.Name()), ' %s->%s_length = 0;' % (structname, self.Name()), ' %s->%s_set = 0;' % (structname, self.Name()), '}' ] return code def CodeNew(self, name): code = ['%s->%s_data = NULL;' % (name, self._name), '%s->%s_length = 0;' % (name, self._name) ] return code def CodeFree(self, name): code = ['if (%s->%s_data != NULL)' % (name, self._name), ' free (%s->%s_data); ' % (name, self._name)] return code def Declaration(self): dcl = ['ev_uint8_t *%s_data;' % self._name, 'ev_uint32_t %s_length;' % self._name] return dcl class EntryArray(Entry): def __init__(self, entry): # Init base class Entry.__init__(self, entry._type, entry._name, entry._tag) self._entry = entry self._refname = entry._refname self._ctype = 'struct %s *' % self._refname def GetDeclaration(self, funcname): """Allows direct access to elements of the array.""" translate = self.GetTranslation() translate["funcname"] = funcname code = [ 'int %(funcname)s(struct %(parent_name)s *, int, %(ctype)s *);' % translate ] return code def AssignDeclaration(self, funcname): code = [ 'int %s(struct %s *, int, const %s);' % ( funcname, self._struct.Name(), self._ctype ) ] return code def AddDeclaration(self, funcname): code = [ '%s %s(struct %s *);' % ( self._ctype, funcname, self._struct.Name() ) ] return code def CodeGet(self): code = """int %(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, int offset, %(ctype)s *value) { if (!msg->%(name)s_set || offset < 0 || offset >= msg->%(name)s_length) return (-1); *value = msg->%(name)s_data[offset]; return (0); }""" % self.GetTranslation() return code.split('\n') def CodeAssign(self): code = """int %(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg, int off, const 
%(ctype)s value) { struct evbuffer *tmp = NULL; if (!msg->%(name)s_set || off < 0 || off >= msg->%(name)s_length) return (-1); %(refname)s_clear(msg->%(name)s_data[off]); if ((tmp = evbuffer_new()) == NULL) { event_warn("%%s: evbuffer_new()", __func__); goto error; } %(refname)s_marshal(tmp, value); if (%(refname)s_unmarshal(msg->%(name)s_data[off], tmp) == -1) { event_warnx("%%s: %(refname)s_unmarshal", __func__); goto error; } evbuffer_free(tmp); return (0); error: if (tmp != NULL) evbuffer_free(tmp); %(refname)s_clear(msg->%(name)s_data[off]); return (-1); }""" % self.GetTranslation() return code.split('\n') def CodeAdd(self): code = \ """%(ctype)s %(parent_name)s_%(name)s_add(struct %(parent_name)s *msg) { if (++msg->%(name)s_length >= msg->%(name)s_num_allocated) { int tobe_allocated = msg->%(name)s_num_allocated; %(ctype)s* new_data = NULL; tobe_allocated = !tobe_allocated ? 1 : tobe_allocated << 1; new_data = (%(ctype)s*) realloc(msg->%(name)s_data, tobe_allocated * sizeof(%(ctype)s)); if (new_data == NULL) goto error; msg->%(name)s_data = new_data; msg->%(name)s_num_allocated = tobe_allocated; } msg->%(name)s_data[msg->%(name)s_length - 1] = %(refname)s_new(); if (msg->%(name)s_data[msg->%(name)s_length - 1] == NULL) goto error; msg->%(name)s_set = 1; return (msg->%(name)s_data[msg->%(name)s_length - 1]); error: --msg->%(name)s_length; return (NULL); } """ % self.GetTranslation() return code.split('\n') def CodeComplete(self, structname): code = [] translate = self.GetTranslation() if self.Optional(): code.append( 'if (%(structname)s->%(name)s_set)' % translate) translate["structname"] = structname tmp = """{ int i; for (i = 0; i < %(structname)s->%(name)s_length; ++i) { if (%(refname)s_complete(%(structname)s->%(name)s_data[i]) == -1) return (-1); } }""" % translate code.extend(tmp.split('\n')) return code def CodeUnmarshal(self, buf, tag_name, var_name): translate = self.GetTranslation() translate["var_name"] = var_name translate["buf"] = buf 
translate["tag_name"] = tag_name code = """if (%(parent_name)s_%(name)s_add(%(var_name)s) == NULL) return (-1); if (evtag_unmarshal_%(refname)s(%(buf)s, %(tag_name)s, %(var_name)s->%(name)s_data[%(var_name)s->%(name)s_length - 1]) == -1) { --%(var_name)s->%(name)s_length; event_warnx("%%s: failed to unmarshal %(name)s", __func__); return (-1); }""" % translate return code.split('\n') def CodeMarshal(self, buf, tag_name, var_name): code = ['{', ' int i;', ' for (i = 0; i < %s->%s_length; ++i) {' % ( var_name, self._name), ' evtag_marshal_%s(%s, %s, %s->%s_data[i]);' % ( self._refname, buf, tag_name, var_name, self._name), ' }', '}' ] return code def CodeClear(self, structname): code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()), ' int i;', ' for (i = 0; i < %s->%s_length; ++i) {' % ( structname, self.Name()), ' %s_free(%s->%s_data[i]);' % ( self._refname, structname, self.Name()), ' }', ' free(%s->%s_data);' % (structname, self.Name()), ' %s->%s_data = NULL;' % (structname, self.Name()), ' %s->%s_set = 0;' % (structname, self.Name()), ' %s->%s_length = 0;' % (structname, self.Name()), ' %s->%s_num_allocated = 0;' % (structname, self.Name()), '}' ] return code def CodeNew(self, name): code = ['%s->%s_data = NULL;' % (name, self._name), '%s->%s_length = 0;' % (name, self._name), '%s->%s_num_allocated = 0;' % (name, self._name)] return code def CodeFree(self, name): code = ['if (%s->%s_data != NULL) {' % (name, self._name), ' int i;', ' for (i = 0; i < %s->%s_length; ++i) {' % ( name, self._name), ' %s_free(%s->%s_data[i]); ' % ( self._refname, name, self._name), ' %s->%s_data[i] = NULL;' % (name, self._name), ' }', ' free(%s->%s_data);' % (name, self._name), ' %s->%s_data = NULL;' % (name, self._name), ' %s->%s_length = 0;' % (name, self._name), ' %s->%s_num_allocated = 0;' % (name, self._name), '}' ] return code def Declaration(self): dcl = ['struct %s **%s_data;' % (self._refname, self._name), 'int %s_length;' % self._name, 'int %s_num_allocated;' % 
self._name ] return dcl def NormalizeLine(line): global white global cppcomment line = cppcomment.sub('', line) line = line.strip() line = white.sub(' ', line) return line def ProcessOneEntry(newstruct, entry): optional = 0 array = 0 entry_type = '' name = '' tag = '' tag_set = None separator = '' fixed_length = '' tokens = entry.split(' ') while tokens: token = tokens[0] tokens = tokens[1:] if not entry_type: if not optional and token == 'optional': optional = 1 continue if not array and token == 'array': array = 1 continue if not entry_type: entry_type = token continue if not name: res = re.match(r'^([^\[\]]+)(\[.*\])?$', token) if not res: print >>sys.stderr, 'Cannot parse name: \"%s\" around %d' % ( entry, line_count) sys.exit(1) name = res.group(1) fixed_length = res.group(2) if fixed_length: fixed_length = fixed_length[1:-1] continue if not separator: separator = token if separator != '=': print >>sys.stderr, 'Expected "=" after name \"%s\" got %s' % ( name, token) sys.exit(1) continue if not tag_set: tag_set = 1 if not re.match(r'^(0x)?[0-9]+$', token): print >>sys.stderr, 'Expected tag number: \"%s\"' % entry sys.exit(1) tag = int(token, 0) continue print >>sys.stderr, 'Cannot parse \"%s\"' % entry sys.exit(1) if not tag_set: print >>sys.stderr, 'Need tag number: \"%s\"' % entry sys.exit(1) # Create the right entry if entry_type == 'bytes': if fixed_length: newentry = EntryBytes(entry_type, name, tag, fixed_length) else: newentry = EntryVarBytes(entry_type, name, tag) elif entry_type == 'int' and not fixed_length: newentry = EntryInt(entry_type, name, tag) elif entry_type == 'string' and not fixed_length: newentry = EntryString(entry_type, name, tag) else: res = re.match(r'^struct\[(%s)\]$' % _STRUCT_RE, entry_type, re.IGNORECASE) if res: # References another struct defined in our file newentry = EntryStruct(entry_type, name, tag, res.group(1)) else: print >>sys.stderr, 'Bad type: "%s" in "%s"' % (entry_type, entry) sys.exit(1) structs = [] if optional: 
newentry.MakeOptional() if array: newentry.MakeArray() newentry.SetStruct(newstruct) newentry.SetLineCount(line_count) newentry.Verify() if array: # We need to encapsulate this entry into a struct newname = newentry.Name()+ '_array' # Now borgify the new entry. newentry = EntryArray(newentry) newentry.SetStruct(newstruct) newentry.SetLineCount(line_count) newentry.MakeArray() newstruct.AddEntry(newentry) return structs def ProcessStruct(data): tokens = data.split(' ') # First three tokens are: 'struct' 'name' '{' newstruct = Struct(tokens[1]) inside = ' '.join(tokens[3:-1]) tokens = inside.split(';') structs = [] for entry in tokens: entry = NormalizeLine(entry) if not entry: continue # It's possible that new structs get defined in here structs.extend(ProcessOneEntry(newstruct, entry)) structs.append(newstruct) return structs def GetNextStruct(file): global line_count global cppdirect got_struct = 0 processed_lines = [] have_c_comment = 0 data = '' while 1: line = file.readline() if not line: break line_count += 1 line = line[:-1] if not have_c_comment and re.search(r'/\*', line): if re.search(r'/\*.*\*/', line): line = re.sub(r'/\*.*\*/', '', line) else: line = re.sub(r'/\*.*$', '', line) have_c_comment = 1 if have_c_comment: if not re.search(r'\*/', line): continue have_c_comment = 0 line = re.sub(r'^.*\*/', '', line) line = NormalizeLine(line) if not line: continue if not got_struct: if re.match(r'#include ["<].*[>"]', line): cppdirect.append(line) continue if re.match(r'^#(if( |def)|endif)', line): cppdirect.append(line) continue if re.match(r'^#define', line): headerdirect.append(line) continue if not re.match(r'^struct %s {$' % _STRUCT_RE, line, re.IGNORECASE): print >>sys.stderr, 'Missing struct on line %d: %s' % ( line_count, line) sys.exit(1) else: got_struct = 1 data += line continue # We are inside the struct tokens = line.split('}') if len(tokens) == 1: data += ' ' + line continue if len(tokens[1]): print >>sys.stderr, 'Trailing garbage after struct on 
line %d' % ( line_count ) sys.exit(1) # We found the end of the struct data += ' %s}' % tokens[0] break # Remove any comments, that might be in there data = re.sub(r'/\*.*\*/', '', data) return data def Parse(file): """ Parses the input file and returns C code and corresponding header file. """ entities = [] while 1: # Just gets the whole struct nicely formatted data = GetNextStruct(file) if not data: break entities.extend(ProcessStruct(data)) return entities def GuardName(name): name = '_'.join(name.split('.')) name = '_'.join(name.split('/')) guard = '_'+name.upper()+'_' return guard def HeaderPreamble(name): guard = GuardName(name) pre = ( '/*\n' ' * Automatically generated from %s\n' ' */\n\n' '#ifndef %s\n' '#define %s\n\n' ) % ( name, guard, guard) # insert stdint.h - let's hope everyone has it pre += ( '#include <event-config.h>\n' '#ifdef _EVENT_HAVE_STDINT_H\n' '#include <stdint.h>\n' '#endif\n' ) for statement in headerdirect: pre += '%s\n' % statement if headerdirect: pre += '\n' pre += ( '#define EVTAG_HAS(msg, member) ((msg)->member##_set == 1)\n' '#ifdef __GNUC__\n' '#define EVTAG_ASSIGN(msg, member, args...) ' '(*(msg)->base->member##_assign)(msg, ## args)\n' '#define EVTAG_GET(msg, member, args...) ' '(*(msg)->base->member##_get)(msg, ## args)\n' '#else\n' '#define EVTAG_ASSIGN(msg, member, ...) ' '(*(msg)->base->member##_assign)(msg, ## __VA_ARGS__)\n' '#define EVTAG_GET(msg, member, ...) ' '(*(msg)->base->member##_get)(msg, ## __VA_ARGS__)\n' '#endif\n' '#define EVTAG_ADD(msg, member) (*(msg)->base->member##_add)(msg)\n' '#define EVTAG_LEN(msg, member) ((msg)->member##_length)\n' ) return pre def HeaderPostamble(name): guard = GuardName(name) return '#endif /* %s */' % guard def BodyPreamble(name): global _NAME global _VERSION header_file = '.'.join(name.split('.')[:-1]) + '.gen.h' pre = ( '/*\n' ' * Automatically generated from %s\n' ' * by %s/%s. 
DO NOT EDIT THIS FILE.\n' ' */\n\n' ) % (name, _NAME, _VERSION) pre += ( '#include <sys/types.h>\n' '#ifdef _EVENT_HAVE_SYS_TIME_H\n' '#include <sys/time.h>\n' '#endif\n' '#include <stdlib.h>\n' '#include <string.h>\n' '#include <assert.h>\n' '#define EVENT_NO_STRUCT\n' '#include <event.h>\n\n' '#ifdef _EVENT___func__\n' '#define __func__ _EVENT___func__\n' '#endif\n' ) for statement in cppdirect: pre += '%s\n' % statement pre += '\n#include "%s"\n\n' % header_file pre += 'void event_err(int eval, const char *fmt, ...);\n' pre += 'void event_warn(const char *fmt, ...);\n' pre += 'void event_errx(int eval, const char *fmt, ...);\n' pre += 'void event_warnx(const char *fmt, ...);\n\n' return pre def main(argv): if len(argv) < 2 or not argv[1]: print >>sys.stderr, 'Need RPC description file as first argument.' sys.exit(1) filename = argv[1] ext = filename.split('.')[-1] if ext != 'rpc': print >>sys.stderr, 'Unrecognized file extension: %s' % ext sys.exit(1) print >>sys.stderr, 'Reading \"%s\"' % filename fp = open(filename, 'r') entities = Parse(fp) fp.close() header_file = '.'.join(filename.split('.')[:-1]) + '.gen.h' impl_file = '.'.join(filename.split('.')[:-1]) + '.gen.c' print >>sys.stderr, '... creating "%s"' % header_file header_fp = open(header_file, 'w') print >>header_fp, HeaderPreamble(filename) # Create forward declarations: allows other structs to reference # each other for entry in entities: entry.PrintForwardDeclaration(header_fp) print >>header_fp, '' for entry in entities: entry.PrintTags(header_fp) entry.PrintDeclaration(header_fp) print >>header_fp, HeaderPostamble(filename) header_fp.close() print >>sys.stderr, '... creating "%s"' % impl_file impl_fp = open(impl_file, 'w') print >>impl_fp, BodyPreamble(filename) for entry in entities: entry.PrintCode(impl_fp) impl_fp.close() if __name__ == '__main__': main(sys.argv)
Python
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages try: __import__('pkg_resources').declare_namespace(__name__) except ImportError: from pkgutil import extend_path __path__ = extend_path(__path__, __name__)
Python
# -*- coding: utf-8 -*- from Products.CMFPlone import PloneMessageFactory as _ from Products.CMFCore.utils import getToolByName from zope import interface from Products.PloneboardNotify.interfaces import ILocalBoardNotify def _getAllValidEmailsFromGroup(putils, acl_users, group): """Look at every user in the group, return all valid emails""" return [m.getProperty('email') for m in group.getGroupMembers() if putils.validateSingleEmailAddress(m.getProperty('email'))] def _getConfiguration(object): """Return the local or global configuration settings for notify""" # BBB: the best is to refactor this using adapters if not ILocalBoardNotify.providedBy(object): ploneboard_notify_properties = getToolByName(object,'portal_properties')['ploneboard_notify_properties'] sendto_all = ploneboard_notify_properties.sendto_all sendto_values = ploneboard_notify_properties.sendto_values else: # Local configuration sendto_all = object.getProperty('forum_sendto_all', False) sendto_values = object.getProperty('forum_sendto_values', []) return sendto_all, sendto_values def _getSendToValues(object): """Load the portal configuration for the notify system and obtain a list of emails. If the sendto_all is True, the mail will be sent to all members of the Plone site. The sendto_values value is used to look for name of groups, then name on users in the portal and finally for normal emails. 
@return a tuple with (cc emails, bcc emails) inside """ sendto_all, sendto_values = _getConfiguration(object) acl_users = getToolByName(object, 'acl_users') mtool = getToolByName(object, 'portal_membership') putils = getToolByName(object, 'plone_utils') emails = [] emails_bcc = [] if sendto_all: users = acl_users.getUsers() emails_bcc.extend([m.getProperty('email') for m in users if putils.validateSingleEmailAddress(m.getProperty('email'))]) for entry in sendto_values: if entry.startswith("#"): # I also support comment inside the emails data continue inBcc = False if entry.endswith("|bcc") or entry.endswith("|BCC"): entry = entry[:-4] inBcc = True group = acl_users.getGroupById(entry) # 1 - is a group? if group: if inBcc: emails_bcc.extend(_getAllValidEmailsFromGroup(putils, acl_users, group)) else: emails.extend(_getAllValidEmailsFromGroup(putils, acl_users, group)) continue # 2 - is a member? #user = acl_users.getUserById(entry) # BBB: seems not working... only on Plone 2.5? user = mtool.getMemberById(entry) if user: email = user.getProperty('email') if putils.validateSingleEmailAddress(email): if inBcc: emails_bcc.append(email) else: emails.append(email) continue # 3 - is a valid email address? 
if putils.validateSingleEmailAddress(entry): if inBcc: emails_bcc.append(entry) else: emails.append(entry) continue # 4 - don't know how to handle this print "Can't use the %s info to send notification" % entry emails = set(emails) emails_bcc = set(emails_bcc) return [x for x in emails if x not in emails_bcc], list(emails_bcc) def sendMail(object, event): """A Zope3 event for sending emails""" ploneboard_notify_properties = getToolByName(object,'portal_properties')['ploneboard_notify_properties'] debug_mode = ploneboard_notify_properties.debug_mode notify_encode = ploneboard_notify_properties.notify_encode portal = getToolByName(object,"portal_url").getPortalObject() portal_transforms = getToolByName(object, "portal_transforms") send_from = portal.getProperty('email_from_address') if send_from and type(send_from)==tuple: send_from = send_from[0] # Conversation or comment? conversation = object.getConversation() forum = conversation.getForum() send_to, send_to_bcc = _getSendToValues(forum) if not send_to and not send_to_bcc: return translation_service = getToolByName(object,'translation_service') dummy = _(u"New comment added on the forum: ") msg_sbj = u"New comment added on the forum: " subject = translation_service.utranslate(domain='Products.PloneboardNotify', msgid=msg_sbj, default=msg_sbj, context=object) subject+= forum.Title().decode('utf-8') dummy = _(u"Argument is: ") msg_txt = u"Argument is: " text = translation_service.utranslate(domain='Products.PloneboardNotify', msgid=msg_txt, default=msg_txt, context=object) text+=conversation.Title().decode('utf-8')+"\n" dummy = _(u"The new message is:") msg_txt = u"The new message is:" text += translation_service.utranslate(domain='Products.PloneboardNotify', msgid=msg_txt, default=msg_txt, context=object) try: data_body_to_plaintext = portal_transforms.convert("html_to_web_intelligent_plain_text", object.REQUEST.form['text']) except: # Probably Plone 2.5.x data_body_to_plaintext = 
portal_transforms.convert("html_to_text", object.REQUEST.form['text']) body_to_plaintext = data_body_to_plaintext.getData() text += "\n" + body_to_plaintext.decode('utf-8') text += "\n" + object.absolute_url() mail_host = getToolByName(object, 'MailHost') if notify_encode: text = text.encode(notify_encode) try: if debug_mode: object.plone_log("Notification from message subject: %s" % subject) object.plone_log("Notification from message text:\n%s" % text) object.plone_log("Notification from message sent to %s (and to %s in bcc)" % (", ".join(send_to) or 'no-one', ", ".join(send_to_bcc) or 'no-one')) else: mail_host.secureSend(text, mto=send_to, mfrom=send_from, subject=subject, charset=notify_encode, mbcc=send_to_bcc) except Exception, inst: putils = getToolByName(object,'plone_utils') putils.addPortalMessage(_(u'Not able to send notifications')) object.plone_log("Error sending notification: %s" % str(inst))
Python
# -*- coding: utf-8 -*- __author__ = """RedTurtle Technology""" __docformat__ = 'plaintext' import string from Products.CMFCore.utils import getToolByName from Products.CMFPlone.utils import getFSVersionTuple from StringIO import StringIO from Products.PloneboardNotify import config def install(self, reinstall=False): out = StringIO() configPortalSetup(self, out) print >> out, "Successfully installed" return out.getvalue() def configPortalSetup(self, out): """Run GenericSetup steps""" portal_setup=getToolByName(self, "portal_setup") if getFSVersionTuple()[:3]>=(3,0,0): portal_setup.runAllImportStepsFromProfile( "profile-Products.%s:default" % config.PROJECTNAME, purge_old=False) else: plone_base_profileid = "profile-CMFPlone:plone" portal_setup.setImportContext(plone_base_profileid) portal_setup.setImportContext("profile-Products.%s:default" % config.PROJECTNAME) portal_setup.runAllImportSteps(purge_old=False) portal_setup.setImportContext(plone_base_profileid)
Python
# -*- coding: utf-8 -*- from Products.Five.browser import BrowserView # from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile # Plone 2.5 compatibility from Products.CMFCore.utils import getToolByName from Products.Ploneboard.interfaces import IPloneboard, IForum from Products.PloneboardNotify.interfaces import ILocalBoardNotify class PloneboardNotificationSystemView(BrowserView): """View for managing Ploneboard notification system in control panel""" def __init__(self, context, request): BrowserView.__init__(self, context, request) request.set('disable_border', True) self.portal_properties = getToolByName(context, 'portal_properties') def __call__(self): request = self.request if request.form.get("pbn_save"): self._updateConfiguration(request.form) request.response.redirect(self.context.absolute_url()+"/@@ploneboard_notification") return self.template() template = ZopeTwoPageTemplateFile("ploneboard_notification.pt") def _updateConfiguration(self, form): """Update saved configuration data""" ploneboard_notify_properties = self.portal_properties['ploneboard_notify_properties'] sendto_values = [x.strip() for x in form.get("sendto_values").replace("\r","").split("\n") if x] if form.get("sendto_all"): sendto_all = True else: sendto_all = False ploneboard_notify_properties.sendto_all = sendto_all ploneboard_notify_properties.sendto_values = sendto_values @property def portal_boards(self): """Perform a catalog search for all ploneboard objects in the portal""" # BBB: get rid of object_implements as soon as Plone 2.5 support will be dropped catalog = getToolByName(self.context, 'portal_catalog') return catalog(object_provides=IPloneboard.__identifier__, object_implements=IPloneboard.__identifier__,) def getForums(self, area_brain): """Return all forums inside the area forum passed""" catalog = getToolByName(self.context, 'portal_catalog') return 
catalog(object_provides=IForum.__identifier__, object_implements=IForum.__identifier__, path='/'.join(area_brain.getPhysicalPath())) def isLocalEnabled(self, forum_brain): """Check is the Forum use local configuration, so if provides ILocalBoardNotify""" forum = forum_brain.getObject() return ILocalBoardNotify.providedBy(forum) def load_sendto_values(self): """Load the global ploneboard_notify_properties value""" return "\n".join(self.portal_properties['ploneboard_notify_properties'].sendto_values) def load_sendto_all(self): """Load the sendto_all value""" return self.portal_properties['ploneboard_notify_properties'].sendto_all
Python
# -*- coding: utf-8 -*- import zope.interface from Products.Five.browser import BrowserView # from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile # Plone 2.5 compatibility from Products.CMFCore.utils import getToolByName from Products.Ploneboard.interfaces import IPloneboard, IForum from Products.PloneboardNotify.interfaces import ILocalBoardNotify class PloneboardNotificationSystemView(BrowserView): """View for managing Ploneboard notification system in control panel""" def __init__(self, context, request): BrowserView.__init__(self, context, request) request.set('disable_border', True) def __call__(self): request = self.request if request.form.get("pbn_save"): self._updateConfiguration(request.form) request.response.redirect(self.context.absolute_url()+"/@@ploneboard_notification") return self.template() template = ZopeTwoPageTemplateFile("ploneboard_notification_forum.pt") def _resetLocalConfiguration(self): """Remove no more used properties from the context""" context = self.context context.manage_delProperties(['forum_sendto_values','forum_sendto_all']) # zope.interface.noLongerProvides(context, ILocalBoardNotify) # Do not use until Plone 2.5 support will be dropped zope.interface.directlyProvides(context, zope.interface.directlyProvidedBy(context)-ILocalBoardNotify) def _addNeededProperties(self, context): """Add the properties forum_sendto_values and forum_sendto_all if not existings""" if not context.hasProperty('forum_sendto_values'): context.manage_addProperty('forum_sendto_values', [], 'lines') if not context.hasProperty('forum_sendto_all'): context.manage_addProperty('forum_sendto_all', False, 'boolean') def _updateConfiguration(self, form): """Update saved configuration data""" context = self.context sendto_values = [x.strip() for x in form.get("sendto_values").replace("\r","").split("\n") if x] if form.get("sendto_all"): sendto_all = True else: sendto_all = 
False if not sendto_all and not sendto_values: # Empty values remove properties AND the provided interface self._resetLocalConfiguration() else: zope.interface.directlyProvides(context, ILocalBoardNotify) self._addNeededProperties(context) context.manage_changeProperties(forum_sendto_values=sendto_values, forum_sendto_all=sendto_all) def load_sendto_values(self): """Load the local forum_sendto_values value""" context = self.context return "\n".join(context.getProperty('forum_sendto_values', [])) def load_sendto_all(self): """Load the sendto_all value""" context = self.context return context.getProperty('forum_sendto_all', False)
Python
from zope.interface import Interface class ILocalBoardNotify(Interface): """ Marker interface for Ploneboard objects that use local notification parameters """
Python
def initialize(context): """Initializer called when used as a Zope 2 product."""
Python