repo_name
stringlengths
5
100
path
stringlengths
4
294
copies
stringclasses
990 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
shinate/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/common/checkout/diff_parser_unittest.py
124
8145
# Copyright (C) 2009 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import cStringIO as StringIO import unittest2 as unittest import diff_parser import re from webkitpy.common.checkout.diff_test_data import DIFF_TEST_DATA class DiffParserTest(unittest.TestCase): maxDiff = None def test_diff_parser(self, parser = None): if not parser: parser = diff_parser.DiffParser(DIFF_TEST_DATA.splitlines()) self.assertEqual(3, len(parser.files)) self.assertTrue('WebCore/rendering/style/StyleFlexibleBoxData.h' in parser.files) diff = parser.files['WebCore/rendering/style/StyleFlexibleBoxData.h'] self.assertEqual(7, len(diff.lines)) # The first two unchaged lines. self.assertEqual((47, 47), diff.lines[0][0:2]) self.assertEqual('', diff.lines[0][2]) self.assertEqual((48, 48), diff.lines[1][0:2]) self.assertEqual(' unsigned align : 3; // EBoxAlignment', diff.lines[1][2]) # The deleted line self.assertEqual((50, 0), diff.lines[3][0:2]) self.assertEqual(' unsigned orient: 1; // EBoxOrient', diff.lines[3][2]) # The first file looks OK. Let's check the next, more complicated file. self.assertTrue('WebCore/rendering/style/StyleRareInheritedData.cpp' in parser.files) diff = parser.files['WebCore/rendering/style/StyleRareInheritedData.cpp'] # There are 3 chunks. self.assertEqual(7 + 7 + 9, len(diff.lines)) # Around an added line. self.assertEqual((60, 61), diff.lines[9][0:2]) self.assertEqual((0, 62), diff.lines[10][0:2]) self.assertEqual((61, 63), diff.lines[11][0:2]) # Look through the last chunk, which contains both add's and delete's. self.assertEqual((81, 83), diff.lines[14][0:2]) self.assertEqual((82, 84), diff.lines[15][0:2]) self.assertEqual((83, 85), diff.lines[16][0:2]) self.assertEqual((84, 0), diff.lines[17][0:2]) self.assertEqual((0, 86), diff.lines[18][0:2]) self.assertEqual((0, 87), diff.lines[19][0:2]) self.assertEqual((85, 88), diff.lines[20][0:2]) self.assertEqual((86, 89), diff.lines[21][0:2]) self.assertEqual((87, 90), diff.lines[22][0:2]) # Check if a newly added file is correctly handled. 
diff = parser.files['LayoutTests/platform/mac/fast/flexbox/box-orient-button-expected.checksum'] self.assertEqual(1, len(diff.lines)) self.assertEqual((0, 1), diff.lines[0][0:2]) def test_diff_converter(self): comment_lines = [ "Hey guys,\n", "\n", "See my awesome patch below!\n", "\n", " - Cool Hacker\n", "\n", ] revision_lines = [ "Subversion Revision 289799\n", ] svn_diff_lines = [ "Index: Tools/Scripts/webkitpy/common/checkout/diff_parser.py\n", "===================================================================\n", "--- Tools/Scripts/webkitpy/common/checkout/diff_parser.py\n", "+++ Tools/Scripts/webkitpy/common/checkout/diff_parser.py\n", "@@ -59,6 +59,7 @@ def git_diff_to_svn_diff(line):\n", ] self.assertEqual(diff_parser.get_diff_converter(svn_diff_lines), diff_parser.svn_diff_to_svn_diff) self.assertEqual(diff_parser.get_diff_converter(comment_lines + svn_diff_lines), diff_parser.svn_diff_to_svn_diff) self.assertEqual(diff_parser.get_diff_converter(revision_lines + svn_diff_lines), diff_parser.svn_diff_to_svn_diff) git_diff_lines = [ "diff --git a/Tools/Scripts/webkitpy/common/checkout/diff_parser.py b/Tools/Scripts/webkitpy/common/checkout/diff_parser.py\n", "index 3c5b45b..0197ead 100644\n", "--- a/Tools/Scripts/webkitpy/common/checkout/diff_parser.py\n", "+++ b/Tools/Scripts/webkitpy/common/checkout/diff_parser.py\n", "@@ -59,6 +59,7 @@ def git_diff_to_svn_diff(line):\n", ] self.assertEqual(diff_parser.get_diff_converter(git_diff_lines), diff_parser.git_diff_to_svn_diff) self.assertEqual(diff_parser.get_diff_converter(comment_lines + git_diff_lines), diff_parser.git_diff_to_svn_diff) self.assertEqual(diff_parser.get_diff_converter(revision_lines + git_diff_lines), diff_parser.git_diff_to_svn_diff) def test_git_mnemonicprefix(self): p = re.compile(r' ([a|b])/') prefixes = [ { 'a' : 'i', 'b' : 'w' }, # git-diff (compares the (i)ndex and the (w)ork tree) { 'a' : 'c', 'b' : 'w' }, # git-diff HEAD (compares a (c)ommit and the (w)ork tree) { 'a' : 'c', 'b' : 
'i' }, # git diff --cached (compares a (c)ommit and the (i)ndex) { 'a' : 'o', 'b' : 'w' }, # git-diff HEAD:file1 file2 (compares an (o)bject and a (w)ork tree entity) { 'a' : '1', 'b' : '2' }, # git diff --no-index a b (compares two non-git things (1) and (2)) ] for prefix in prefixes: patch = p.sub(lambda x: " %s/" % prefix[x.group(1)], DIFF_TEST_DATA) self.test_diff_parser(diff_parser.DiffParser(patch.splitlines())) def test_git_diff_to_svn_diff(self): output = """\ Index: Tools/Scripts/webkitpy/common/checkout/diff_parser.py =================================================================== --- Tools/Scripts/webkitpy/common/checkout/diff_parser.py +++ Tools/Scripts/webkitpy/common/checkout/diff_parser.py @@ -59,6 +59,7 @@ def git_diff_to_svn_diff(line): A B C +D E F """ inputfmt = StringIO.StringIO("""\ diff --git a/Tools/Scripts/webkitpy/common/checkout/diff_parser.py b/Tools/Scripts/webkitpy/common/checkout/diff_parser.py index 2ed552c4555db72df16b212547f2c125ae301a04..72870482000c0dba64ce4300ed782c03ee79b74f 100644 --- a/Tools/Scripts/webkitpy/common/checkout/diff_parser.py +++ b/Tools/Scripts/webkitpy/common/checkout/diff_parser.py @@ -59,6 +59,7 @@ def git_diff_to_svn_diff(line): A B C +D E F """) shortfmt = StringIO.StringIO("""\ diff --git a/Tools/Scripts/webkitpy/common/checkout/diff_parser.py b/Tools/Scripts/webkitpy/common/checkout/diff_parser.py index b48b162..f300960 100644 --- a/Tools/Scripts/webkitpy/common/checkout/diff_parser.py +++ b/Tools/Scripts/webkitpy/common/checkout/diff_parser.py @@ -59,6 +59,7 @@ def git_diff_to_svn_diff(line): A B C +D E F """) self.assertMultiLineEqual(output, ''.join(diff_parser.git_diff_to_svn_diff(x) for x in shortfmt.readlines())) self.assertMultiLineEqual(output, ''.join(diff_parser.git_diff_to_svn_diff(x) for x in inputfmt.readlines()))
bsd-3-clause
stwunsch/gnuradio
gr-blocks/python/blocks/qa_sample_and_hold.py
47
1754
#!/usr/bin/env python # # Copyright 2013 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # import time from gnuradio import gr, gr_unittest, blocks class test_sample_and_hold(gr_unittest.TestCase): def setUp(self): self.tb = gr.top_block() def tearDown(self): self.tb = None def test_001(self): src_data = 10*[0,1,2,3,4,5,6,7,8,9,8,7,6,5,4,3,2,1] ctrl_data = 10*[1,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0] expected_result = 10*(0,0,0,0,4,5,6,7,8,9,9,9,9,9,9,9,9,9) src = blocks.vector_source_f(src_data) ctrl = blocks.vector_source_b(ctrl_data) op = blocks.sample_and_hold_ff() dst = blocks.vector_sink_f() self.tb.connect(src, (op,0)) self.tb.connect(ctrl, (op,1)) self.tb.connect(op, dst) self.tb.run() result = dst.data() self.assertFloatTuplesAlmostEqual(expected_result, result, places=6) if __name__ == '__main__': gr_unittest.run(test_sample_and_hold, "test_sample_and_hold.xml")
gpl-3.0
RentennaDev/partial
partial/bundle.py
1
2283
import os import time from lxml import etree from partial.bundleComponent import CompilationException from partial.coffee import CoffeeBundleComponent from partial.sass import SassBundleComponent from partial.templating import TemplateBundleComponent COMPILERS = { 'template': TemplateBundleComponent(), 'sass': SassBundleComponent(), 'coffee': CoffeeBundleComponent(), } class Bundle(object): def __init__(self, path, server): self.path = path self.name = os.path.split(path)[-1].replace(".part.xml", "") self.server = server self.megamodule = None self.compiled = {} self.component = {} self.requirements = [] self.compile() def compile(self): self.compiled = {} self.component = {} try: tree = etree.parse(open(self.path)) except Exception: print "failed parsing xml" return self.megamodule = tree.xpath('/partial/@megamodule')[0] for type in COMPILERS.keys(): component = tree.xpath('/partial/%s/text()' % type) if component: rawSrc = component[0] compiled = self.compileComponent(type, rawSrc) if compiled: self.compiled.append(compiled) self.requirements = self.computeRequirements(tree) def computeRequirements(self, tree): return set(tree.xpath('/partial/require/text()')) def compileComponent(self, type, src): start = time.time() if type in COMPILERS: compiler = COMPILERS[type] else: print "type not recognized" return try: self.component[type] = compiler.read(src, self.name, self.megamodule) if not self.server: self.compiled[type] = compiler.compile(self.component[type], self.name, self.megamodule) end = time.time() print "compiled %s %s in %s" % (self.name, type, (end-start)) except CompilationException: print "compilation error!" def satisfied(self, available): for requirement in self.requirements: if requirement not in available: return False return True
mit
rvs/gpdb
gpMgmt/bin/ext/yaml/constructor.py
117
25056
__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor', 'ConstructorError'] from error import * from nodes import * import datetime try: set except NameError: from sets import Set as set import binascii, re, sys, types class ConstructorError(MarkedYAMLError): pass class BaseConstructor(object): yaml_constructors = {} yaml_multi_constructors = {} def __init__(self): self.constructed_objects = {} self.recursive_objects = {} self.state_generators = [] self.deep_construct = False def check_data(self): # If there are more documents available? return self.check_node() def get_data(self): # Construct and return the next document. if self.check_node(): return self.construct_document(self.get_node()) def construct_document(self, node): data = self.construct_object(node) while self.state_generators: state_generators = self.state_generators self.state_generators = [] for generator in state_generators: for dummy in generator: pass self.constructed_objects = {} self.recursive_objects = {} self.deep_construct = False return data def construct_object(self, node, deep=False): if deep: old_deep = self.deep_construct self.deep_construct = True if node in self.constructed_objects: return self.constructed_objects[node] if node in self.recursive_objects: raise ConstructorError(None, None, "found unconstructable recursive node", node.start_mark) self.recursive_objects[node] = None constructor = None state_constructor = None tag_suffix = None if node.tag in self.yaml_constructors: constructor = self.yaml_constructors[node.tag] else: for tag_prefix in self.yaml_multi_constructors: if node.tag.startswith(tag_prefix): tag_suffix = node.tag[len(tag_prefix):] constructor = self.yaml_multi_constructors[tag_prefix] break else: if None in self.yaml_multi_constructors: tag_suffix = node.tag constructor = self.yaml_multi_constructors[None] elif None in self.yaml_constructors: constructor = self.yaml_constructors[None] elif isinstance(node, ScalarNode): constructor = 
self.__class__.construct_scalar elif isinstance(node, SequenceNode): constructor = self.__class__.construct_sequence elif isinstance(node, MappingNode): constructor = self.__class__.construct_mapping if tag_suffix is None: data = constructor(self, node) else: data = constructor(self, tag_suffix, node) if isinstance(data, types.GeneratorType): generator = data data = generator.next() if self.deep_construct: for dummy in generator: pass else: self.state_generators.append(generator) self.constructed_objects[node] = data del self.recursive_objects[node] if deep: self.deep_construct = old_deep return data def construct_scalar(self, node): if not isinstance(node, ScalarNode): raise ConstructorError(None, None, "expected a scalar node, but found %s" % node.id, node.start_mark) return node.value def construct_sequence(self, node, deep=False): if not isinstance(node, SequenceNode): raise ConstructorError(None, None, "expected a sequence node, but found %s" % node.id, node.start_mark) return [self.construct_object(child, deep=deep) for child in node.value] def construct_mapping(self, node, deep=False): if not isinstance(node, MappingNode): raise ConstructorError(None, None, "expected a mapping node, but found %s" % node.id, node.start_mark) mapping = {} for key_node, value_node in node.value: key = self.construct_object(key_node, deep=deep) try: hash(key) except TypeError, exc: raise ConstructorError("while constructing a mapping", node.start_mark, "found unacceptable key (%s)" % exc, key_node.start_mark) value = self.construct_object(value_node, deep=deep) mapping[key] = value return mapping def construct_pairs(self, node, deep=False): if not isinstance(node, MappingNode): raise ConstructorError(None, None, "expected a mapping node, but found %s" % node.id, node.start_mark) pairs = [] for key_node, value_node in node.value: key = self.construct_object(key_node, deep=deep) value = self.construct_object(value_node, deep=deep) pairs.append((key, value)) return pairs def 
add_constructor(cls, tag, constructor): if not 'yaml_constructors' in cls.__dict__: cls.yaml_constructors = cls.yaml_constructors.copy() cls.yaml_constructors[tag] = constructor add_constructor = classmethod(add_constructor) def add_multi_constructor(cls, tag_prefix, multi_constructor): if not 'yaml_multi_constructors' in cls.__dict__: cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() cls.yaml_multi_constructors[tag_prefix] = multi_constructor add_multi_constructor = classmethod(add_multi_constructor) class SafeConstructor(BaseConstructor): def construct_scalar(self, node): if isinstance(node, MappingNode): for key_node, value_node in node.value: if key_node.tag == u'tag:yaml.org,2002:value': return self.construct_scalar(value_node) return BaseConstructor.construct_scalar(self, node) def flatten_mapping(self, node): merge = [] index = 0 while index < len(node.value): key_node, value_node = node.value[index] if key_node.tag == u'tag:yaml.org,2002:merge': del node.value[index] if isinstance(value_node, MappingNode): self.flatten_mapping(value_node) merge.extend(value_node.value) elif isinstance(value_node, SequenceNode): submerge = [] for subnode in value_node.value: if not isinstance(subnode, MappingNode): raise ConstructorError("while constructing a mapping", node.start_mark, "expected a mapping for merging, but found %s" % subnode.id, subnode.start_mark) self.flatten_mapping(subnode) submerge.append(subnode.value) submerge.reverse() for value in submerge: merge.extend(value) else: raise ConstructorError("while constructing a mapping", node.start_mark, "expected a mapping or list of mappings for merging, but found %s" % value_node.id, value_node.start_mark) elif key_node.tag == u'tag:yaml.org,2002:value': key_node.tag = u'tag:yaml.org,2002:str' index += 1 else: index += 1 if merge: node.value = merge + node.value def construct_mapping(self, node, deep=False): if isinstance(node, MappingNode): self.flatten_mapping(node) return 
BaseConstructor.construct_mapping(self, node, deep=deep) def construct_yaml_null(self, node): self.construct_scalar(node) return None bool_values = { u'yes': True, u'no': False, u'true': True, u'false': False, u'on': True, u'off': False, } def construct_yaml_bool(self, node): value = self.construct_scalar(node) return self.bool_values[value.lower()] def construct_yaml_int(self, node): value = str(self.construct_scalar(node)) value = value.replace('_', '') sign = +1 if value[0] == '-': sign = -1 if value[0] in '+-': value = value[1:] if value == '0': return 0 elif value.startswith('0b'): return sign*int(value[2:], 2) elif value.startswith('0x'): return sign*int(value[2:], 16) elif value[0] == '0': return sign*int(value, 8) elif ':' in value: digits = [int(part) for part in value.split(':')] digits.reverse() base = 1 value = 0 for digit in digits: value += digit*base base *= 60 return sign*value else: return sign*int(value) inf_value = 1e300 while inf_value != inf_value*inf_value: inf_value *= inf_value nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99). def construct_yaml_float(self, node): value = str(self.construct_scalar(node)) value = value.replace('_', '').lower() sign = +1 if value[0] == '-': sign = -1 if value[0] in '+-': value = value[1:] if value == '.inf': return sign*self.inf_value elif value == '.nan': return self.nan_value elif ':' in value: digits = [float(part) for part in value.split(':')] digits.reverse() base = 1 value = 0.0 for digit in digits: value += digit*base base *= 60 return sign*value else: return sign*float(value) def construct_yaml_binary(self, node): value = self.construct_scalar(node) try: return str(value).decode('base64') except (binascii.Error, UnicodeEncodeError), exc: raise ConstructorError(None, None, "failed to decode base64 data: %s" % exc, node.start_mark) timestamp_regexp = re.compile( ur'''^(?P<year>[0-9][0-9][0-9][0-9]) -(?P<month>[0-9][0-9]?) -(?P<day>[0-9][0-9]?) 
(?:(?:[Tt]|[ \t]+) (?P<hour>[0-9][0-9]?) :(?P<minute>[0-9][0-9]) :(?P<second>[0-9][0-9]) (?:\.(?P<fraction>[0-9]*))? (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?) (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X) def construct_yaml_timestamp(self, node): value = self.construct_scalar(node) match = self.timestamp_regexp.match(node.value) values = match.groupdict() year = int(values['year']) month = int(values['month']) day = int(values['day']) if not values['hour']: return datetime.date(year, month, day) hour = int(values['hour']) minute = int(values['minute']) second = int(values['second']) fraction = 0 if values['fraction']: fraction = int(values['fraction'][:6].ljust(6, '0')) delta = None if values['tz_sign']: tz_hour = int(values['tz_hour']) tz_minute = int(values['tz_minute'] or 0) delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute) if values['tz_sign'] == '-': delta = -delta data = datetime.datetime(year, month, day, hour, minute, second, fraction) if delta: data -= delta return data def construct_yaml_omap(self, node): # Note: we do not check for duplicate keys, because it's too # CPU-expensive. omap = [] yield omap if not isinstance(node, SequenceNode): raise ConstructorError("while constructing an ordered map", node.start_mark, "expected a sequence, but found %s" % node.id, node.start_mark) for subnode in node.value: if not isinstance(subnode, MappingNode): raise ConstructorError("while constructing an ordered map", node.start_mark, "expected a mapping of length 1, but found %s" % subnode.id, subnode.start_mark) if len(subnode.value) != 1: raise ConstructorError("while constructing an ordered map", node.start_mark, "expected a single mapping item, but found %d items" % len(subnode.value), subnode.start_mark) key_node, value_node = subnode.value[0] key = self.construct_object(key_node) value = self.construct_object(value_node) omap.append((key, value)) def construct_yaml_pairs(self, node): # Note: the same code as `construct_yaml_omap`. 
pairs = [] yield pairs if not isinstance(node, SequenceNode): raise ConstructorError("while constructing pairs", node.start_mark, "expected a sequence, but found %s" % node.id, node.start_mark) for subnode in node.value: if not isinstance(subnode, MappingNode): raise ConstructorError("while constructing pairs", node.start_mark, "expected a mapping of length 1, but found %s" % subnode.id, subnode.start_mark) if len(subnode.value) != 1: raise ConstructorError("while constructing pairs", node.start_mark, "expected a single mapping item, but found %d items" % len(subnode.value), subnode.start_mark) key_node, value_node = subnode.value[0] key = self.construct_object(key_node) value = self.construct_object(value_node) pairs.append((key, value)) def construct_yaml_set(self, node): data = set() yield data value = self.construct_mapping(node) data.update(value) def construct_yaml_str(self, node): value = self.construct_scalar(node) try: return value.encode('ascii') except UnicodeEncodeError: return value def construct_yaml_seq(self, node): data = [] yield data data.extend(self.construct_sequence(node)) def construct_yaml_map(self, node): data = {} yield data value = self.construct_mapping(node) data.update(value) def construct_yaml_object(self, node, cls): data = cls.__new__(cls) yield data if hasattr(data, '__setstate__'): state = self.construct_mapping(node, deep=True) data.__setstate__(state) else: state = self.construct_mapping(node) data.__dict__.update(state) def construct_undefined(self, node): raise ConstructorError(None, None, "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'), node.start_mark) SafeConstructor.add_constructor( u'tag:yaml.org,2002:null', SafeConstructor.construct_yaml_null) SafeConstructor.add_constructor( u'tag:yaml.org,2002:bool', SafeConstructor.construct_yaml_bool) SafeConstructor.add_constructor( u'tag:yaml.org,2002:int', SafeConstructor.construct_yaml_int) SafeConstructor.add_constructor( u'tag:yaml.org,2002:float', 
SafeConstructor.construct_yaml_float) SafeConstructor.add_constructor( u'tag:yaml.org,2002:binary', SafeConstructor.construct_yaml_binary) SafeConstructor.add_constructor( u'tag:yaml.org,2002:timestamp', SafeConstructor.construct_yaml_timestamp) SafeConstructor.add_constructor( u'tag:yaml.org,2002:omap', SafeConstructor.construct_yaml_omap) SafeConstructor.add_constructor( u'tag:yaml.org,2002:pairs', SafeConstructor.construct_yaml_pairs) SafeConstructor.add_constructor( u'tag:yaml.org,2002:set', SafeConstructor.construct_yaml_set) SafeConstructor.add_constructor( u'tag:yaml.org,2002:str', SafeConstructor.construct_yaml_str) SafeConstructor.add_constructor( u'tag:yaml.org,2002:seq', SafeConstructor.construct_yaml_seq) SafeConstructor.add_constructor( u'tag:yaml.org,2002:map', SafeConstructor.construct_yaml_map) SafeConstructor.add_constructor(None, SafeConstructor.construct_undefined) class Constructor(SafeConstructor): def construct_python_str(self, node): return self.construct_scalar(node).encode('utf-8') def construct_python_unicode(self, node): return self.construct_scalar(node) def construct_python_long(self, node): return long(self.construct_yaml_int(node)) def construct_python_complex(self, node): return complex(self.construct_scalar(node)) def construct_python_tuple(self, node): return tuple(self.construct_sequence(node)) def find_python_module(self, name, mark): if not name: raise ConstructorError("while constructing a Python module", mark, "expected non-empty name appended to the tag", mark) try: __import__(name) except ImportError, exc: raise ConstructorError("while constructing a Python module", mark, "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark) return sys.modules[name] def find_python_name(self, name, mark): if not name: raise ConstructorError("while constructing a Python object", mark, "expected non-empty name appended to the tag", mark) if u'.' 
in name: # Python 2.4 only #module_name, object_name = name.rsplit('.', 1) items = name.split('.') object_name = items.pop() module_name = '.'.join(items) else: module_name = '__builtin__' object_name = name try: __import__(module_name) except ImportError, exc: raise ConstructorError("while constructing a Python object", mark, "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark) module = sys.modules[module_name] if not hasattr(module, object_name): raise ConstructorError("while constructing a Python object", mark, "cannot find %r in the module %r" % (object_name.encode('utf-8'), module.__name__), mark) return getattr(module, object_name) def construct_python_name(self, suffix, node): value = self.construct_scalar(node) if value: raise ConstructorError("while constructing a Python name", node.start_mark, "expected the empty value, but found %r" % value.encode('utf-8'), node.start_mark) return self.find_python_name(suffix, node.start_mark) def construct_python_module(self, suffix, node): value = self.construct_scalar(node) if value: raise ConstructorError("while constructing a Python module", node.start_mark, "expected the empty value, but found %r" % value.encode('utf-8'), node.start_mark) return self.find_python_module(suffix, node.start_mark) class classobj: pass def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False): if not args: args = [] if not kwds: kwds = {} cls = self.find_python_name(suffix, node.start_mark) if newobj and isinstance(cls, type(self.classobj)) \ and not args and not kwds: instance = self.classobj() instance.__class__ = cls return instance elif newobj and isinstance(cls, type): return cls.__new__(cls, *args, **kwds) else: return cls(*args, **kwds) def set_python_instance_state(self, instance, state): if hasattr(instance, '__setstate__'): instance.__setstate__(state) else: slotstate = {} if isinstance(state, tuple) and len(state) == 2: state, slotstate = state if hasattr(instance, '__dict__'): 
instance.__dict__.update(state) elif state: slotstate.update(state) for key, value in slotstate.items(): setattr(object, key, value) def construct_python_object(self, suffix, node): # Format: # !!python/object:module.name { ... state ... } instance = self.make_python_instance(suffix, node, newobj=True) yield instance deep = hasattr(instance, '__setstate__') state = self.construct_mapping(node, deep=deep) self.set_python_instance_state(instance, state) def construct_python_object_apply(self, suffix, node, newobj=False): # Format: # !!python/object/apply # (or !!python/object/new) # args: [ ... arguments ... ] # kwds: { ... keywords ... } # state: ... state ... # listitems: [ ... listitems ... ] # dictitems: { ... dictitems ... } # or short format: # !!python/object/apply [ ... arguments ... ] # The difference between !!python/object/apply and !!python/object/new # is how an object is created, check make_python_instance for details. if isinstance(node, SequenceNode): args = self.construct_sequence(node, deep=True) kwds = {} state = {} listitems = [] dictitems = {} else: value = self.construct_mapping(node, deep=True) args = value.get('args', []) kwds = value.get('kwds', {}) state = value.get('state', {}) listitems = value.get('listitems', []) dictitems = value.get('dictitems', {}) instance = self.make_python_instance(suffix, node, args, kwds, newobj) if state: self.set_python_instance_state(instance, state) if listitems: instance.extend(listitems) if dictitems: for key in dictitems: instance[key] = dictitems[key] return instance def construct_python_object_new(self, suffix, node): return self.construct_python_object_apply(suffix, node, newobj=True) Constructor.add_constructor( u'tag:yaml.org,2002:python/none', Constructor.construct_yaml_null) Constructor.add_constructor( u'tag:yaml.org,2002:python/bool', Constructor.construct_yaml_bool) Constructor.add_constructor( u'tag:yaml.org,2002:python/str', Constructor.construct_python_str) Constructor.add_constructor( 
u'tag:yaml.org,2002:python/unicode', Constructor.construct_python_unicode) Constructor.add_constructor( u'tag:yaml.org,2002:python/int', Constructor.construct_yaml_int) Constructor.add_constructor( u'tag:yaml.org,2002:python/long', Constructor.construct_python_long) Constructor.add_constructor( u'tag:yaml.org,2002:python/float', Constructor.construct_yaml_float) Constructor.add_constructor( u'tag:yaml.org,2002:python/complex', Constructor.construct_python_complex) Constructor.add_constructor( u'tag:yaml.org,2002:python/list', Constructor.construct_yaml_seq) Constructor.add_constructor( u'tag:yaml.org,2002:python/tuple', Constructor.construct_python_tuple) Constructor.add_constructor( u'tag:yaml.org,2002:python/dict', Constructor.construct_yaml_map) Constructor.add_multi_constructor( u'tag:yaml.org,2002:python/name:', Constructor.construct_python_name) Constructor.add_multi_constructor( u'tag:yaml.org,2002:python/module:', Constructor.construct_python_module) Constructor.add_multi_constructor( u'tag:yaml.org,2002:python/object:', Constructor.construct_python_object) Constructor.add_multi_constructor( u'tag:yaml.org,2002:python/object/apply:', Constructor.construct_python_object_apply) Constructor.add_multi_constructor( u'tag:yaml.org,2002:python/object/new:', Constructor.construct_python_object_new)
apache-2.0
rosmo/ansible
lib/ansible/module_utils/opennebula.py
77
10446
# # Copyright 2018 www.privaz.io Valletech AB
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)

import time
import ssl
from os import environ
from ansible.module_utils.six import string_types
from ansible.module_utils.basic import AnsibleModule

# Tracks whether the optional pyone dependency could be imported.
# Checked in create_one_client() before any XML-RPC call is attempted.
HAS_PYONE = True

try:
    from pyone import OneException
    from pyone.server import OneServer
except ImportError:
    OneException = Exception
    HAS_PYONE = False


class OpenNebulaModule:
    """
    Base class for all OpenNebula Ansible Modules.
    This is basically a wrapper of the common arguments, the pyone client and
    some utility methods.
    """

    common_args = dict(
        api_url=dict(type='str', aliases=['api_endpoint'], default=environ.get("ONE_URL")),
        api_username=dict(type='str', default=environ.get("ONE_USERNAME")),
        api_password=dict(type='str', no_log=True, aliases=['api_token'], default=environ.get("ONE_PASSWORD")),
        validate_certs=dict(default=True, type='bool'),
        wait_timeout=dict(type='int', default=300),
    )

    def __init__(self, argument_spec, supports_check_mode=False, mutually_exclusive=None):

        # Merge into a *copy* of the shared spec. Updating
        # OpenNebulaModule.common_args in place would leak one module's
        # argument_spec into every other module using this base class.
        module_args = dict(OpenNebulaModule.common_args)
        module_args.update(argument_spec)

        self.module = AnsibleModule(argument_spec=module_args,
                                    supports_check_mode=supports_check_mode,
                                    mutually_exclusive=mutually_exclusive)
        self.result = dict(changed=False,
                           original_message='',
                           message='')
        self.one = self.create_one_client()

        self.resolved_parameters = self.resolve_parameters()

    def create_one_client(self):
        """
        Creates an XMLRPC client to OpenNebula.
        Fails the module (via self.fail) when pyone is missing or any of the
        required connection parameters cannot be resolved.

        Returns: the new xmlrpc client.
        """

        # context required for not validating SSL, old python versions won't validate anyway.
        if hasattr(ssl, '_create_unverified_context'):
            no_ssl_validation_context = ssl._create_unverified_context()
        else:
            no_ssl_validation_context = None

        # Check if the module can run
        if not HAS_PYONE:
            self.fail("pyone is required for this module")

        if self.module.params.get("api_url"):
            url = self.module.params.get("api_url")
        else:
            self.fail("Either api_url or the environment variable ONE_URL must be provided")

        if self.module.params.get("api_username"):
            username = self.module.params.get("api_username")
        else:
            self.fail("Either api_username or the environment variable ONE_USERNAME must be provided")

        if self.module.params.get("api_password"):
            password = self.module.params.get("api_password")
        else:
            self.fail("Either api_password or the environment variable ONE_PASSWORD must be provided")

        session = "%s:%s" % (username, password)

        # PYTHONHTTPSVERIFY in the environment overrides our own switch; only
        # disable validation when the user asked for it and no override is set.
        if not self.module.params.get("validate_certs") and "PYTHONHTTPSVERIFY" not in environ:
            return OneServer(url, session=session, context=no_ssl_validation_context)
        else:
            return OneServer(url, session)

    def close_one_client(self):
        """
        Close the pyone session.
        """
        self.one.server_close()

    def fail(self, msg):
        """
        Utility failure method, will ensure pyone is properly closed before failing.

        Args:
            msg: human readable failure reason.
        """
        # self.one may not exist yet if the failure happened while connecting.
        if hasattr(self, 'one'):
            self.close_one_client()
        self.module.fail_json(msg=msg)

    def exit(self):
        """
        Utility exit method, will ensure pyone is properly closed before exiting.
        """
        if hasattr(self, 'one'):
            self.close_one_client()
        self.module.exit_json(**self.result)

    def resolve_parameters(self):
        """
        This method resolves parameters provided by a secondary ID to the primary ID.
        For example if cluster_name is present, cluster_id will be introduced by performing
        the required resolution.

        Returns: a copy of the parameters that includes the resolved parameters.
        """

        resolved_params = dict(self.module.params)

        if 'cluster_name' in self.module.params:
            clusters = self.one.clusterpool.info()
            for cluster in clusters.CLUSTER:
                if cluster.NAME == self.module.params.get('cluster_name'):
                    resolved_params['cluster_id'] = cluster.ID

        return resolved_params

    def is_parameter(self, name):
        """
        Utility method to check if a parameter was provided or is resolved.

        Args:
            name: the parameter to check
        """
        if name in self.resolved_parameters:
            return self.get_parameter(name) is not None
        else:
            return False

    def get_parameter(self, name):
        """
        Utility method for accessing parameters that includes resolved ID
        parameters from provided Name parameters.
        """
        return self.resolved_parameters.get(name)

    def get_host_by_name(self, name):
        '''
        Returns a host given its name.

        Args:
            name: the name of the host

        Returns: the host object or None if the host is absent.
        '''
        hosts = self.one.hostpool.info()
        for h in hosts.HOST:
            if h.NAME == name:
                return h
        return None

    def get_cluster_by_name(self, name):
        """
        Returns a cluster given its name.

        Args:
            name: the name of the cluster

        Returns: the cluster object or None if the cluster is absent.
        """
        clusters = self.one.clusterpool.info()
        for c in clusters.CLUSTER:
            if c.NAME == name:
                return c
        return None

    def get_template_by_name(self, name):
        '''
        Returns a template given its name.

        Args:
            name: the name of the template

        Returns: the template object or None if the template is absent.
        '''
        templates = self.one.templatepool.info()
        for t in templates.TEMPLATE:
            if t.NAME == name:
                return t
        return None

    def cast_template(self, template):
        """
        OpenNebula handles all template elements as strings.
        At some point there is a cast being performed on types provided by the user.
        This function mimics that transformation so that required template updates
        are detected properly. Additionally an array will be converted to a comma
        separated list, which works for labels and hopefully for something more.

        Args:
            template: the template to transform (mutated in place)

        Returns: None; the transformed template has data casts applied.
        """

        # TODO: check formally available data types in templates
        # TODO: some arrays might be converted to space separated

        for key in template:
            value = template[key]
            if isinstance(value, dict):
                self.cast_template(template[key])
            elif isinstance(value, list):
                template[key] = ', '.join(value)
            elif not isinstance(value, string_types):
                template[key] = str(value)

    def requires_template_update(self, current, desired):
        """
        This function will help decide if a template update is required or not.
        If a desired key is missing from the current dictionary an update is required.
        If the intersection of both dictionaries is not deep equal, an update is required.

        Args:
            current: current template as a dictionary
            desired: desired template as a dictionary

        Returns: True if a template update is required
        """

        if not desired:
            return False

        self.cast_template(desired)
        intersection = dict()
        for dkey in desired.keys():
            if dkey in current.keys():
                intersection[dkey] = current[dkey]
            else:
                # key only present in the desired template -> update needed
                return True
        return not (desired == intersection)

    def wait_for_state(self, element_name, state, state_name, target_states,
                       invalid_states=None, transition_states=None,
                       wait_timeout=None):
        """
        Args:
            element_name: the name of the object we are waiting for: HOST, VM, etc.
            state: lambda that returns the current state, will be queried until target state is reached
            state_name: lambda that returns the readable form of a given state
            target_states: states expected to be reached
            invalid_states: if any of this states is reached, fail
            transition_states: when used, these are the valid states during the transition.
            wait_timeout: timeout period in seconds. Defaults to the provided parameter.
        """
        if not wait_timeout:
            wait_timeout = self.module.params.get("wait_timeout")

        start_time = time.time()

        while (time.time() - start_time) < wait_timeout:
            current_state = state()

            # Guard against the default of None: "x in None" raises TypeError.
            if invalid_states and current_state in invalid_states:
                self.fail('invalid %s state %s' % (element_name, state_name(current_state)))

            if transition_states:
                if current_state not in transition_states:
                    self.fail('invalid %s transition state %s' % (element_name, state_name(current_state)))

            if current_state in target_states:
                return True

            time.sleep(self.one.server_retry_interval())

        self.fail(msg="Wait timeout has expired!")

    def run_module(self):
        """
        Trigger the start of the execution of the module.

        Returns: None; exits via self.fail on any OpenNebula error.
        """
        try:
            self.run(self.one, self.module, self.result)
        except OneException as e:
            self.fail(msg="OpenNebula Exception: %s" % e)

    def run(self, one, module, result):
        """
        To be implemented by subclass with the actual module actions.

        Args:
            one: the OpenNebula XMLRPC client
            module: the Ansible Module object
            result: the Ansible result
        """
        raise NotImplementedError("Method requires implementation")
gpl-3.0
dwadler/QGIS
python/plugins/processing/tests/ToolsTest.py
11
3325
# -*- coding: utf-8 -*-

"""
***************************************************************************
    ToolsTest
    ---------------------
    Date                 : July 2016
    Copyright            : (C) 2016 by Nyall Dawson
    Email                : nyall dot dawson at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

__author__ = 'Nyall Dawson'
__date__ = 'July 2016'
__copyright__ = '(C) 2016, Nyall Dawson'

# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'

import os
import shutil

from qgis.core import NULL, QgsVectorLayer
from qgis.testing import start_app, unittest

from processing.tests.TestData import points
from processing.tools import vector

testDataPath = os.path.join(os.path.dirname(__file__), 'testdata')

# A QGIS application instance is required before any layer can be created.
start_app()


class VectorTest(unittest.TestCase):
    """Tests for the processing.tools.vector helpers (values, convert_nulls)."""

    @classmethod
    def setUpClass(cls):
        # Directories registered here are removed when the suite finishes.
        cls.cleanup_paths = []

    @classmethod
    def tearDownClass(cls):
        for path in cls.cleanup_paths:
            shutil.rmtree(path)

    def testValues(self):
        # vector.values() accepts fields addressed by index or by name, in
        # any combination; the result dict is keyed by whatever was passed.
        test_data = points()
        test_layer = QgsVectorLayer(test_data, 'test', 'ogr')

        # field by index
        res = vector.values(test_layer, 1)
        self.assertEqual(res[1], [1, 2, 3, 4, 5, 6, 7, 8, 9])

        # field by name
        res = vector.values(test_layer, 'id')
        self.assertEqual(res['id'], [1, 2, 3, 4, 5, 6, 7, 8, 9])

        # two fields
        res = vector.values(test_layer, 1, 2)
        self.assertEqual(res[1], [1, 2, 3, 4, 5, 6, 7, 8, 9])
        self.assertEqual(res[2], [2, 1, 0, 2, 1, 0, 0, 0, 0])

        # two fields by name
        res = vector.values(test_layer, 'id', 'id2')
        self.assertEqual(res['id'], [1, 2, 3, 4, 5, 6, 7, 8, 9])
        self.assertEqual(res['id2'], [2, 1, 0, 2, 1, 0, 0, 0, 0])

        # two fields by name and index
        res = vector.values(test_layer, 'id', 2)
        self.assertEqual(res['id'], [1, 2, 3, 4, 5, 6, 7, 8, 9])
        self.assertEqual(res[2], [2, 1, 0, 2, 1, 0, 0, 0, 0])

    def testConvertNulls(self):
        # convert_nulls maps QGIS NULL to None (or to the supplied
        # replacement value); Python None values pass through unchanged.
        self.assertEqual(vector.convert_nulls([]), [])
        self.assertEqual(vector.convert_nulls([], '_'), [])
        self.assertEqual(vector.convert_nulls([NULL]), [None])
        self.assertEqual(vector.convert_nulls([NULL], '_'), ['_'])
        self.assertEqual(vector.convert_nulls([NULL], -1), [-1])
        self.assertEqual(vector.convert_nulls([1, 2, 3]), [1, 2, 3])
        self.assertEqual(vector.convert_nulls([1, None, 3]), [1, None, 3])
        self.assertEqual(vector.convert_nulls([1, NULL, 3, NULL]), [1, None, 3, None])
        self.assertEqual(vector.convert_nulls([1, NULL, 3, NULL], '_'), [1, '_', 3, '_'])


if __name__ == '__main__':
    unittest.main()
gpl-2.0
lifuzu/repo
pager.py
34
2008
# # Copyright (C) 2008 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import select import sys active = False def RunPager(globalConfig): global active if not os.isatty(0) or not os.isatty(1): return pager = _SelectPager(globalConfig) if pager == '' or pager == 'cat': return # This process turns into the pager; a child it forks will # do the real processing and output back to the pager. This # is necessary to keep the pager in control of the tty. # try: r, w = os.pipe() pid = os.fork() if not pid: os.dup2(w, 1) os.dup2(w, 2) os.close(r) os.close(w) active = True return os.dup2(r, 0) os.close(r) os.close(w) _BecomePager(pager) except Exception: print >>sys.stderr, "fatal: cannot start pager '%s'" % pager os.exit(255) def _SelectPager(globalConfig): try: return os.environ['GIT_PAGER'] except KeyError: pass pager = globalConfig.GetString('core.pager') if pager: return pager try: return os.environ['PAGER'] except KeyError: pass return 'less' def _BecomePager(pager): # Delaying execution of the pager until we have output # ready works around a long-standing bug in popularly # available versions of 'less', a better 'more'. # a, b, c = select.select([0], [], [0]) os.environ['LESS'] = 'FRSX' try: os.execvp(pager, [pager]) except OSError, e: os.execv('/bin/sh', ['sh', '-c', pager])
apache-2.0
PieCrafted/CVCFT
personal-computer/cvcft-map-BET/data/merge.py
7
1194
import os import time import sys import shutil from subprocess import call import pdb dir = sys.argv[1] under = len(sys.argv) == 3 and sys.argv[2] == "under" over = len(sys.argv) == 3 and sys.argv[2] == "over" if under: print "merging all tiles UNDER current tileset" if over: print "merging all tiles OVER current tileset" for x in range(-30, 30): print x for y in range(-30, 30): source = "%s/%d,%d.png" % (dir, x, y); dest = "master/%d,%d.png" % (x, y); if not os.path.exists(source): continue if not os.path.exists(dest): print source shutil.copyfile(source, dest) if under: os.utime(dest, (0, 0)) print "new %s" % source continue desttime = os.path.getmtime(dest) if not over and (under or os.path.getmtime(source) < desttime): # print "older: %s %d" % (source, os.path.getmtime(dest)) call("composite -compose Dst_Over %s %s %s" % (source, dest, dest), shell=True) # print "under %s" % source sourcetime = os.path.getmtime(source) os.utime(dest, (sourcetime, sourcetime)) else: call("composite -compose Over %s %s %s" % (source, dest, dest), shell=True) print "over %s" % source os.utime(dest, (desttime, desttime))
agpl-3.0
artnavsegda/avrnavsegda
xmega/unit_testing/rolling/src/ASF/common/services/gfx_mono/tools/dump_display_over_serial.py
73
4558
## # \file # # \brief Convert display data on a serial line to a graphical representation # # Copyright (C) 2011-2014 Atmel Corporation. All rights reserved. # # \page License # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. The name of Atmel may not be used to endorse or promote products derived # from this software without specific prior written permission. # # 4. This software may only be redistributed and used in connection with an # Atmel microcontroller product. # # THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE # EXPRESSLY AND SPECIFICALLY DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import sys
import serial
import os.path
import argparse


def scan_for_ports():
    # Probe the first 64 serial port indexes; ports that fail to open are
    # silently skipped. Prints and returns the (index, name) pairs found.
    available_ports = []

    for index in range(64):
        try:
            serial_port = serial.Serial(index)
            available_ports.append((index, serial_port.portstr))
            serial_port.close()
        except serial.SerialException:
            pass
        except IndexError as Error:
            pass

    for port_number, port_name in available_ports:
        print "%02d - %s" % (port_number, port_name)

    return available_ports


def dump_display_data(serial_port, baud_rate, output_file_name):
    # Ask the target (command "D") to stream its display contents in XPM
    # format over the serial link, and write the result to output_file_name.
    # Returns -1 on any setup error, None on success.
    try:
        output_file = open(output_file_name, 'w')
        port = serial.Serial(port = serial_port, baudrate = baud_rate, timeout = 1)
        # Re-open to make sure the port starts in a known state.
        port.close()
        port.open()
    except ValueError as e:
        print "error: invalid serial port parameters. %s" % (str(e))
        output_file.close()
        return -1
    except serial.SerialException as e:
        print "error: could not open serial port. %s" % (str(e))
        output_file.close()
        return -1
    except IOError as e:
        print "error: could not open output file. %s" % (str(e))
        return -1

    print "Display on %s: %u,8,N,1" % (port.name, port.baudrate)

    # "D" instructs the firmware to dump the display buffer.
    port.write("D")

    # Accumulate lines until the XPM terminator "};" arrives; the trailing
    # newline of every intermediate line is stripped.
    line = port.readline()
    display_data = ""
    while(line[:2] != "};") :
        display_data = display_data + line[:-1]
        line = port.readline()

    display_data = display_data + line

    port.close()

    print "Writing data to file %s" % (output_file_name)
    output_file.write(display_data)
    output_file.close()


def main():
    # Command-line entry point: parse options, optionally scan ports, then
    # dump the display data (asking before overwriting an existing file).
    parser = argparse.ArgumentParser(description="This script will try to "
            "open the given serial port, send a string to "
            "instruct the target device to dump the contents of "
            "the display to a serial link in XPM format. The "
            "received file is then written to 'display.xpm', "
            "unless a file is specified by the -o option.")

    parser.add_argument("-p", "--port", dest="serial_port",
            help="which serial port to open")
    parser.add_argument("-b", "--baud", dest="baudrate", type=int,
            help="baud rate to use for serial communication", default=19200)
    parser.add_argument("-o", "--output", dest="output_file",
            help="write XPM image to FILE. Default is display.xpm.",
            metavar="FILE", default="display.xpm")
    parser.add_argument("-s", "--scan", action="store_true", dest="scan_ports",
            help="scan for available serial ports and exit", default=False)

    arguments = parser.parse_args()

    if arguments.scan_ports:
        scan_for_ports()
        sys.exit()

    if arguments.serial_port is None:
        parser.print_usage()
        sys.exit()

    if os.path.exists(arguments.output_file):
        print "Warning: output file '%s' already exists" % (arguments.output_file)
        print "Do you want to write over file '%s'?" % (arguments.output_file)
        answer = raw_input("[yes/NO] ")
        if answer not in ("yes", "Yes", "YES"):
            sys.exit()

    dump_display_data(arguments.serial_port, arguments.baudrate,
            arguments.output_file)


if __name__ == "__main__":
    main()
lgpl-3.0
kumar303/addons-server
src/olympia/versions/tests/test_views.py
2
19947
# -*- coding: utf-8 -*-
import os

import six

from django.conf import settings
from django.utils.encoding import smart_text
from django.core.files import temp
from django.core.files.base import File as DjangoFile
from django.test.utils import override_settings
from django.utils.http import urlquote

from unittest import mock

from pyquery import PyQuery

from olympia import amo
from olympia.access import acl
from olympia.access.models import Group, GroupUser
from olympia.addons.models import Addon
from olympia.amo.templatetags.jinja_helpers import user_media_url
from olympia.amo.tests import TestCase, addon_factory
from olympia.amo.urlresolvers import reverse
from olympia.amo.utils import urlencode, urlparams
from olympia.files.models import File
from olympia.users.models import UserProfile


def decode_http_header_value(value):
    """
    Reverse the encoding that django applies to bytestrings in
    HttpResponse._convert_to_charset(). Needed to test header values that we
    explicitly pass as bytes such as filenames for content-disposition or
    xsendfile headers.
    """
    if six.PY3:
        return value.encode('latin-1').decode('utf-8')
    else:
        return value.decode('utf-8')


class TestViews(TestCase):
    # Tests for the addons.versions.update_info view (release notes as XHTML).

    def setUp(self):
        super(TestViews, self).setUp()
        self.addon = addon_factory(
            slug=u'my-addôn', file_kw={'size': 1024},
            version_kw={'version': '1.0'})
        self.version = self.addon.current_version
        self.addon.current_version.update(created=self.days_ago(3))

    def test_version_update_info(self):
        self.version.release_notes = {
            'en-US': u'Fix for an important bug',
            'fr': u'Quelque chose en français.\n\nQuelque chose d\'autre.'
        }
        self.version.save()

        file_ = self.version.files.all()[0]
        file_.update(platform=amo.PLATFORM_WIN.id)
        # Copy the file to create a new one attached to the same version.
        # This tests https://github.com/mozilla/addons-server/issues/8950
        file_.pk = None
        file_.platform = amo.PLATFORM_MAC.id
        file_.save()

        response = self.client.get(
            reverse('addons.versions.update_info',
                    args=(self.addon.slug, self.version.version)))
        assert response.status_code == 200
        assert response['Content-Type'] == 'application/xhtml+xml'

        # pyquery is annoying to use with XML and namespaces. Use the HTML
        # parser, but do check that xmlns attribute is present (required by
        # Firefox for the notes to be shown properly).
        doc = PyQuery(response.content, parser='html')
        assert doc('html').attr('xmlns') == 'http://www.w3.org/1999/xhtml'
        assert doc('p').html() == 'Fix for an important bug'

        # Test update info in another language.
        with self.activate(locale='fr'):
            response = self.client.get(
                reverse('addons.versions.update_info',
                        args=(self.addon.slug, self.version.version)))
            assert response.status_code == 200
            assert response['Content-Type'] == 'application/xhtml+xml'
            assert b'<br/>' in response.content, (
                'Should be using XHTML self-closing tags!')
            doc = PyQuery(response.content, parser='html')
            assert doc('html').attr('xmlns') == 'http://www.w3.org/1999/xhtml'
            assert doc('p').html() == (
                u"Quelque chose en français.<br/><br/>"
                u"Quelque chose d'autre.")

    def test_version_update_info_legacy_redirect(self):
        response = self.client.get('/versions/updateInfo/%s' % self.version.id,
                                   follow=True)
        url = reverse('addons.versions.update_info',
                      args=(self.version.addon.slug, self.version.version))
        self.assert3xx(response, url, 302)

    def test_version_update_info_legacy_redirect_deleted(self):
        self.version.delete()
        response = self.client.get(
            '/en-US/firefox/versions/updateInfo/%s' % self.version.id)
        assert response.status_code == 404

    def test_version_update_info_no_unlisted(self):
        self.version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
        response = self.client.get(
            reverse('addons.versions.update_info',
                    args=(self.addon.slug, self.version.version)))
        assert response.status_code == 404


class TestDownloadsBase(TestCase):
    # Shared fixtures and assertion helpers for all download-view tests below.
    fixtures = ['base/addon_5299_gcal', 'base/users']

    def setUp(self):
        super(TestDownloadsBase, self).setUp()
        self.addon = Addon.objects.get(id=5299)
        self.file = File.objects.get(id=33046)
        self.file_url = reverse('downloads.file', args=[self.file.id])
        self.latest_url = reverse('downloads.latest', args=[self.addon.slug])

    def assert_served_by_host(self, response, host, file_=None):
        # The view should redirect to the given host with the filehash
        # appended and echo the hash in X-Target-Digest.
        if not file_:
            file_ = self.file
        assert response.status_code == 302
        assert response.url == (
            urlparams('%s%s/%s' % (
                host, self.addon.id, urlquote(file_.filename)
            ), filehash=file_.hash))
        assert response['X-Target-Digest'] == file_.hash

    def assert_served_internally(self, response, guarded=True):
        # Internally-served files use the X-Sendfile mechanism rather than a
        # redirect; guarded files live in a separate protected path.
        assert response.status_code == 200
        file_path = (self.file.guarded_file_path if guarded
                     else self.file.file_path)
        assert response[settings.XSENDFILE_HEADER] == file_path

    def assert_served_locally(self, response, file_=None, attachment=False):
        path = user_media_url('addons')
        if attachment:
            path += '_attachments/'
        self.assert_served_by_host(response, path, file_)

    def assert_served_by_cdn(self, response, file_=None):
        assert response.url.startswith(settings.MEDIA_URL)
        assert response.url.startswith('http')
        self.assert_served_by_host(response, user_media_url('addons'), file_)


class TestDownloadsUnlistedVersions(TestDownloadsBase):
    # Access control for downloads of unlisted add-ons, per role (the acl
    # checks are patched to simulate each role combination).

    def setUp(self):
        super(TestDownloadsUnlistedVersions, self).setUp()
        self.make_addon_unlisted(self.addon)

    @mock.patch.object(acl, 'is_reviewer', lambda request, addon: False)
    @mock.patch.object(acl, 'check_unlisted_addons_reviewer', lambda x: False)
    @mock.patch.object(acl, 'check_addon_ownership',
                       lambda *args, **kwargs: False)
    def test_download_for_unlisted_addon_returns_404(self):
        """File downloading isn't allowed for unlisted addons."""
        assert self.client.get(self.file_url).status_code == 404
        assert self.client.get(self.latest_url).status_code == 404

    @mock.patch.object(acl, 'is_reviewer', lambda request, addon: False)
    @mock.patch.object(acl, 'check_unlisted_addons_reviewer', lambda x: False)
    @mock.patch.object(acl, 'check_addon_ownership',
                       lambda *args, **kwargs: True)
    def test_download_for_unlisted_addon_owner(self):
        """File downloading is allowed for addon owners."""
        self.assert_served_internally(self.client.get(self.file_url), False)
        assert self.client.get(self.latest_url).status_code == 404

    @mock.patch.object(acl, 'is_reviewer', lambda request, addon: True)
    @mock.patch.object(acl, 'check_unlisted_addons_reviewer', lambda x: False)
    @mock.patch.object(acl, 'check_addon_ownership',
                       lambda *args, **kwargs: False)
    def test_download_for_unlisted_addon_reviewer(self):
        """File downloading isn't allowed for reviewers."""
        assert self.client.get(self.file_url).status_code == 404
        assert self.client.get(self.latest_url).status_code == 404

    @mock.patch.object(acl, 'is_reviewer', lambda request, addon: False)
    @mock.patch.object(acl, 'check_unlisted_addons_reviewer', lambda x: True)
    @mock.patch.object(acl, 'check_addon_ownership',
                       lambda *args, **kwargs: False)
    def test_download_for_unlisted_addon_unlisted_reviewer(self):
        """File downloading is allowed for unlisted reviewers."""
        self.assert_served_internally(self.client.get(self.file_url), False)
        assert self.client.get(self.latest_url).status_code == 404


class TestDownloads(TestDownloadsBase):
    # Which backend (CDN / local media / internal) serves a file depending
    # on addon and file status.

    def test_file_404(self):
        r = self.client.get(reverse('downloads.file', args=[234]))
        assert r.status_code == 404

    def test_public(self):
        assert self.addon.status == amo.STATUS_APPROVED
        assert self.file.status == amo.STATUS_APPROVED
        self.assert_served_by_cdn(self.client.get(self.file_url))

    def test_public_addon_unreviewed_file(self):
        self.file.status = amo.STATUS_AWAITING_REVIEW
        self.file.save()
        self.assert_served_locally(self.client.get(self.file_url))

    def test_unreviewed_addon(self):
        self.addon.status = amo.STATUS_PENDING
        self.addon.save()
        self.assert_served_locally(self.client.get(self.file_url))

    def test_type_attachment(self):
        self.assert_served_by_cdn(self.client.get(self.file_url))
        url = reverse('downloads.file', args=[self.file.id, 'attachment'])
        self.assert_served_locally(self.client.get(url), attachment=True)

    def test_trailing_filename(self):
        url = self.file_url + self.file.filename
        self.assert_served_by_cdn(self.client.get(url))

    def test_null_datestatuschanged(self):
        self.file.update(datestatuschanged=None)
        self.assert_served_locally(self.client.get(self.file_url))

    def test_unicode_url(self):
        self.file.update(filename=u'图像浏览器-0.5-fx.xpi')
        self.assert_served_by_cdn(self.client.get(self.file_url))


class TestDisabledFileDownloads(TestDownloadsBase):
    # Disabled addons/files are 404 for anonymous/unprivileged users but are
    # served internally to authors, reviewers and admins.

    def test_admin_disabled_404(self):
        self.addon.update(status=amo.STATUS_DISABLED)
        assert self.client.get(self.file_url).status_code == 404

    def test_user_disabled_404(self):
        self.addon.update(disabled_by_user=True)
        assert self.client.get(self.file_url).status_code == 404

    def test_file_disabled_anon_404(self):
        self.file.update(status=amo.STATUS_DISABLED)
        assert self.client.get(self.file_url).status_code == 404

    def test_file_disabled_unprivileged_404(self):
        assert self.client.login(email='regular@mozilla.com')
        self.file.update(status=amo.STATUS_DISABLED)
        assert self.client.get(self.file_url).status_code == 404

    def test_file_disabled_ok_for_author(self):
        self.file.update(status=amo.STATUS_DISABLED)
        assert self.client.login(email='g@gmail.com')
        self.assert_served_internally(self.client.get(self.file_url))

    def test_file_disabled_ok_for_reviewer(self):
        self.file.update(status=amo.STATUS_DISABLED)
        self.client.login(email='reviewer@mozilla.com')
        self.assert_served_internally(self.client.get(self.file_url))

    def test_file_disabled_ok_for_admin(self):
        self.file.update(status=amo.STATUS_DISABLED)
        self.client.login(email='admin@mozilla.com')
        self.assert_served_internally(self.client.get(self.file_url))

    def test_admin_disabled_ok_for_author(self):
        self.addon.update(status=amo.STATUS_DISABLED)
        assert self.client.login(email='g@gmail.com')
        self.assert_served_internally(self.client.get(self.file_url))

    def test_admin_disabled_ok_for_admin(self):
        self.addon.update(status=amo.STATUS_DISABLED)
        self.client.login(email='admin@mozilla.com')
        self.assert_served_internally(self.client.get(self.file_url))

    def test_user_disabled_ok_for_author(self):
        self.addon.update(disabled_by_user=True)
        assert self.client.login(email='g@gmail.com')
        self.assert_served_internally(self.client.get(self.file_url))

    def test_user_disabled_ok_for_admin(self):
        self.addon.update(disabled_by_user=True)
        self.client.login(email='admin@mozilla.com')
        self.assert_served_internally(self.client.get(self.file_url))


class TestUnlistedDisabledFileDownloads(TestDisabledFileDownloads):
    # Re-runs the whole disabled-file suite with the addon unlisted and the
    # reviewer granted the unlisted-review permission.

    def setUp(self):
        # NOTE(review): calls super() with the *parent* class, which in
        # effect runs TestDownloadsBase.setUp — presumably intentional since
        # TestDisabledFileDownloads defines no setUp of its own; confirm.
        super(TestDisabledFileDownloads, self).setUp()
        self.make_addon_unlisted(self.addon)
        self.grant_permission(
            UserProfile.objects.get(email='reviewer@mozilla.com'),
            'Addons:ReviewUnlisted')


class TestDownloadsLatest(TestDownloadsBase):
    # Tests for the downloads.latest URL patterns (platform/type variants).

    def setUp(self):
        super(TestDownloadsLatest, self).setUp()
        self.platform = 5

    def test_404(self):
        url = reverse('downloads.latest', args=[123])
        assert self.client.get(url).status_code == 404

    def test_type_none(self):
        r = self.client.get(self.latest_url)
        assert r.status_code == 302
        url = '%s?%s' % (self.file.filename,
                         urlencode({'filehash': self.file.hash}))
        assert r['Location'].endswith(url), r['Location']

    def test_success(self):
        assert self.addon.current_version
        self.assert_served_by_cdn(self.client.get(self.latest_url))

    def test_platform(self):
        # We still match PLATFORM_ALL.
        url = reverse('downloads.latest',
                      kwargs={'addon_id': self.addon.slug, 'platform': 5})
        self.assert_served_by_cdn(self.client.get(url))

        # And now we match the platform in the url.
        self.file.platform = self.platform
        self.file.save()
        self.assert_served_by_cdn(self.client.get(url))

        # But we can't match platform=3.
        url = reverse('downloads.latest',
                      kwargs={'addon_id': self.addon.slug, 'platform': 3})
        assert self.client.get(url).status_code == 404

    def test_type(self):
        url = reverse('downloads.latest',
                      kwargs={'addon_id': self.addon.slug,
                              'type': 'attachment'})
        self.assert_served_locally(self.client.get(url), attachment=True)

    def test_platform_and_type(self):
        url = reverse('downloads.latest',
                      kwargs={'addon_id': self.addon.slug, 'platform': 5,
                              'type': 'attachment'})
        self.assert_served_locally(self.client.get(url), attachment=True)

    def test_trailing_filename(self):
        url = reverse('downloads.latest',
                      kwargs={'addon_id': self.addon.slug, 'platform': 5,
                              'type': 'attachment'})
        url += self.file.filename
        self.assert_served_locally(self.client.get(url), attachment=True)

    def test_platform_multiple_objects(self):
        f = File.objects.create(platform=3, version=self.file.version,
                                filename='unst.xpi', status=self.file.status)
        url = reverse('downloads.latest',
                      kwargs={'addon_id': self.addon.slug, 'platform': 3})
        self.assert_served_locally(self.client.get(url), file_=f)


@override_settings(XSENDFILE=True)
class TestDownloadSource(TestCase):
    # Access control for downloading a version's source archive.
    fixtures = ['base/addon_3615', 'base/admin']

    def setUp(self):
        super(TestDownloadSource, self).setUp()
        self.addon = Addon.objects.get(pk=3615)
        # Make sure non-ascii is ok.
        self.addon.update(slug=u'crosswarpex-확장')
        self.version = self.addon.current_version
        tdir = temp.gettempdir()
        self.source_file = temp.NamedTemporaryFile(suffix=".zip", dir=tdir)
        self.source_file.write(b'a' * (2 ** 21))
        self.source_file.seek(0)
        self.version.source = DjangoFile(self.source_file)
        self.version.save()
        self.filename = os.path.basename(self.version.source.path)
        self.user = UserProfile.objects.get(email="del@icio.us")
        self.group = Group.objects.create(
            name='Editors BinarySource',
            rules='Editors:BinarySource'
        )
        self.url = reverse('downloads.source', args=(self.version.pk, ))

    def test_owner_should_be_allowed(self):
        self.client.login(email=self.user.email)
        response = self.client.get(self.url)
        assert response.status_code == 200
        assert response[settings.XSENDFILE_HEADER]
        assert 'Content-Disposition' in response
        filename = smart_text(self.filename)
        content_disposition = response['Content-Disposition']
        assert filename in decode_http_header_value(content_disposition)
        expected_path = smart_text(self.version.source.path)
        xsendfile_header = decode_http_header_value(
            response[settings.XSENDFILE_HEADER])
        assert xsendfile_header == expected_path

    def test_anonymous_should_not_be_allowed(self):
        response = self.client.get(self.url)
        assert response.status_code == 404

    def test_deleted_version(self):
        self.version.delete()
        GroupUser.objects.create(user=self.user, group=self.group)
        self.client.login(email=self.user.email)
        response = self.client.get(self.url)
        assert response.status_code == 404

    def test_group_binarysource_should_be_allowed(self):
        GroupUser.objects.create(user=self.user, group=self.group)
        self.client.login(email=self.user.email)
        response = self.client.get(self.url)
        assert response.status_code == 200
        assert response[settings.XSENDFILE_HEADER]
        assert 'Content-Disposition' in response
        filename = smart_text(self.filename)
        content_disposition = response['Content-Disposition']
        assert filename in decode_http_header_value(content_disposition)
        expected_path = smart_text(self.version.source.path)
        xsendfile_header = decode_http_header_value(
            response[settings.XSENDFILE_HEADER])
        assert xsendfile_header == expected_path

    def test_no_source_should_go_in_404(self):
        self.version.source = None
        self.version.save()
        response = self.client.get(self.url)
        assert response.status_code == 404

    @mock.patch.object(acl, 'is_reviewer', lambda request, addon: False)
    @mock.patch.object(acl, 'check_unlisted_addons_reviewer', lambda x: False)
    @mock.patch.object(acl, 'check_addon_ownership',
                       lambda *args, **kwargs: False)
    def test_download_for_unlisted_addon_returns_404(self):
        """File downloading isn't allowed for unlisted addons."""
        self.make_addon_unlisted(self.addon)
        assert self.client.get(self.url).status_code == 404

    @mock.patch.object(acl, 'is_reviewer', lambda request, addon: False)
    @mock.patch.object(acl, 'check_unlisted_addons_reviewer', lambda x: False)
    @mock.patch.object(acl, 'check_addon_ownership',
                       lambda *args, **kwargs: True)
    def test_download_for_unlisted_addon_owner(self):
        """File downloading is allowed for addon owners."""
        self.make_addon_unlisted(self.addon)
        assert self.client.get(self.url).status_code == 200

    @mock.patch.object(acl, 'is_reviewer', lambda request, addon: True)
    @mock.patch.object(acl, 'check_unlisted_addons_reviewer', lambda x: False)
    @mock.patch.object(acl, 'check_addon_ownership',
                       lambda *args, **kwargs: False)
    def test_download_for_unlisted_addon_reviewer(self):
        """File downloading isn't allowed for reviewers."""
        self.make_addon_unlisted(self.addon)
        assert self.client.get(self.url).status_code == 404

    @mock.patch.object(acl, 'is_reviewer', lambda request, addon: False)
    @mock.patch.object(acl, 'check_unlisted_addons_reviewer', lambda x: True)
    @mock.patch.object(acl, 'check_addon_ownership',
                       lambda *args, **kwargs: False)
    def test_download_for_unlisted_addon_unlisted_reviewer(self):
        """File downloading is allowed for unlisted reviewers."""
        self.make_addon_unlisted(self.addon)
        assert self.client.get(self.url).status_code == 200
bsd-3-clause
daviddrysdale/python-phonenumbers
python/phonenumbers/shortdata/__init__.py
1
2185
"""Auto-generated file, do not edit by hand.""" # Copyright (C) 2010-2021 The Libphonenumber Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from ..phonemetadata import PhoneMetadata _AVAILABLE_REGION_CODES = ['AC','AD','AE','AF','AG','AI','AL','AM','AO','AR','AS','AT','AU','AW','AX','AZ','BA','BB','BD','BE','BF','BG','BH','BI','BJ','BL','BM','BN','BO','BQ','BR','BS','BT','BW','BY','BZ','CA','CC','CD','CF','CG','CH','CI','CK','CL','CM','CN','CO','CR','CU','CV','CW','CX','CY','CZ','DE','DJ','DK','DM','DO','DZ','EC','EE','EG','EH','ER','ES','ET','FI','FJ','FK','FM','FO','FR','GA','GB','GD','GE','GF','GG','GH','GI','GL','GM','GN','GP','GR','GT','GU','GW','GY','HK','HN','HR','HT','HU','ID','IE','IL','IM','IN','IQ','IR','IS','IT','JE','JM','JO','JP','KE','KG','KH','KI','KM','KN','KP','KR','KW','KY','KZ','LA','LB','LC','LI','LK','LR','LS','LT','LU','LV','LY','MA','MC','MD','ME','MF','MG','MH','MK','ML','MM','MN','MO','MP','MQ','MR','MS','MT','MU','MV','MW','MX','MY','MZ','NA','NC','NE','NF','NG','NI','NL','NO','NP','NR','NU','NZ','OM','PA','PE','PF','PG','PH','PK','PL','PM','PR','PS','PT','PW','PY','QA','RE','RO','RS','RU','RW','SA','SB','SC','SD','SE','SG','SH','SI','SJ','SK','SL','SM','SN','SO','SR','SS','ST','SV','SX','SY','SZ','TC','TD','TG','TH','TJ','TL','TM','TN','TO','TR','TT','TV','TW','TZ','UA','UG','US','UY','UZ','VA','VC','VE','VG','VI','VN','VU','WF','WS','XK','YE','YT','ZA','ZM','ZW'] def _load_region(code): __import__("region_%s" % code, globals(), locals(), 
fromlist=["PHONE_METADATA_%s" % code], level=1) for region_code in _AVAILABLE_REGION_CODES: PhoneMetadata.register_short_region_loader(region_code, _load_region)
apache-2.0
canavandl/bokeh
tests/glyphs/MultiLine.py
43
1073
import numpy as np from bokeh.document import Document from bokeh.models import ColumnDataSource, DataRange1d, Plot, LinearAxis, Grid from bokeh.models.glyphs import MultiLine from bokeh.plotting import show N = 9 x = np.linspace(-2, 2, N) y = x**2 xpts = np.array([-.09, -.12, .0, .12, .09]) ypts = np.array([-.1, .02, .1, .02, -.1]) source = ColumnDataSource(dict( xs=[xpts*(1+i/10.0)+xx for i, xx in enumerate(x)], ys=[ypts*(1+i/10.0)+yy for i, yy in enumerate(y)], ) ) xdr = DataRange1d() ydr = DataRange1d() plot = Plot( title=None, x_range=xdr, y_range=ydr, plot_width=300, plot_height=300, h_symmetry=False, v_symmetry=False, min_border=0, toolbar_location=None) glyph = MultiLine(xs="xs", ys="ys", line_color="#8073ac", line_width=2) plot.add_glyph(source, glyph) xaxis = LinearAxis() plot.add_layout(xaxis, 'below') yaxis = LinearAxis() plot.add_layout(yaxis, 'left') plot.add_layout(Grid(dimension=0, ticker=xaxis.ticker)) plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker)) doc = Document() doc.add(plot) show(plot)
bsd-3-clause
b1quint/samfp
samfp/mkcube.py
1
7108
#!/usr/bin/env python # -*- coding: utf8 -*- """ SAMI Make Cube This file gets several FITS images and put them together inside a single FITS file with three dimensions (data-cube). Todo ---- - Treat error case multiple extensions. """ import astropy.io.fits as pyfits import argparse import itertools import numpy as np import pandas as pd from . import io from .tools import version logger = io.get_logger("MakeCube") __author__ = 'Bruno Quint' def main(): # Parsing Arguments ------------------------------------------------------- parser = argparse.ArgumentParser( description="Build a data-cube from image files.") parser.add_argument('-a', '--algorithm', metavar='algorithm', type=str, default='average', help="Algorithm used when combining images per " "frame (average | median | sum)") parser.add_argument('-b', '--binning', type=int, nargs=2, default=(1, 1), help='New binning to be applied to the data-cube') parser.add_argument('-d', '--debug', action='store_true', help="Run debug mode.") parser.add_argument('-o', '--output', metavar='output', type=str, default="cube.fits", help="Name of the output cube.") parser.add_argument('-q', '--quiet', action='store_true', help="Run quietly.") parser.add_argument('files', metavar='files', type=str, nargs='+', help="input filenames.") parsed_args = parser.parse_args() if parsed_args.quiet: logger.setLevel('NOTSET') elif parsed_args.debug: logger.setLevel('DEBUG') else: logger.setLevel('INFO') logger.info("") logger.info("SAM-FP Tools: mkcube") logger.info("by Bruno Quint (bquint@ctio.noao.edu)") logger.info("version {:s}".format(version.__str__)) logger.info("Starting program.") logger.info("") make_cube(parsed_args.files, output=parsed_args.output, combine_algorithm=parsed_args.algorithm, binning=parsed_args.binning) def make_cube(list_of_files, z_key='FAPEROTZ', combine_algorithm='average', output='cube.fits', binning=(1, 1)): """ Stack FITS images within a single FITS data-cube. 
Parameters ---------- list_of_files : list A list of strings containing the path to the input fits files. z_key : str The wildcard name responsible to store the FP gap size in *bcv* units. combine_algorithm : string The algorithm used to combine several images into a single frame (average|median|sum) output : str Name of the output data-cube. binning : list or tuple Binning to be applied to the data-cube when mounting it. """ assert isinstance(list_of_files, list) list_of_files.sort() logger.debug('Create table') df = pd.DataFrame(columns=['filename', 'nrows', 'ncols', 'z']) logger.debug('Filling the table') for f in list_of_files: logger.debug('Read %s file' % f) hdr = pyfits.getheader(f) ds = pd.Series({ 'filename': f, 'nrows': int(hdr['naxis1']), 'ncols': int(hdr['naxis2']), 'z': int(hdr[z_key].strip()) }) df = df.append(ds, ignore_index=True) logger.debug('%d files with different number of rows' % len( df['nrows'].unique())) logger.debug('%d files with different number of columns' % len( df['ncols'].unique())) logger.debug('%d files with different Z' % len(df['z'].unique())) if len(df['nrows'].unique()) is not 1: raise ( IOError, 'Height mismatch for %d files' % len(df['nrows'].unique())) if len(df['ncols'].unique()) is not 1: raise ( IOError, 'Width mismatch for %d files' % len(df['ncols'].unique())) nrows = int(df['nrows'].unique() // binning[0]) ncols = int(df['ncols'].unique() // binning[1]) nchan = len(df['z'].unique()) nrows = int(nrows) ncols = int(ncols) nchan = int(nchan) logger.info('Creating data-cube with shape') logger.info('[%d, %d, %d]' % (nrows, ncols, nchan)) cube = np.zeros((nchan, ncols, nrows)) z_array = df['z'].unique() z_array = np.array(z_array, dtype=np.float64) z_array.sort() z_array = z_array[::-1] # Reverse array so lambda increases inside the cube combine_algorithm = combine_algorithm.lower() if combine_algorithm in ['mean', 'average']: combine = np.mean elif combine_algorithm in ['median']: combine = np.median elif combine_algorithm 
in ['sum']: combine = np.sum else: raise ValueError('"combine_algorith" kwarg must be average/median/sum') logger.info('Filling data-cube') x, y = range(binning[0]), range(binning[1]) # Build data-cube for i in range(z_array.size): logger.debug('Processing channel %03d - z = %.2f' % (i + 1, z_array[i])) files = df[df['z'] == z_array[i]]['filename'].tolist() temp_cube = np.zeros((len(files), ncols, nrows)) # Build temporary data-cube for each frame before combine it for j in range(len(files)): temp_image = pyfits.getdata(files[j]) # Binning images --- for (m, n) in itertools.product(x, y): temp_cube[j] += temp_image[n::binning[1], m::binning[0]] cube[i] = combine(temp_cube, axis=0) logger.info('Find Z solution') z = np.arange(z_array.size) + 1 z = np.array(z, dtype=np.float64) p = np.polyfit(z, z_array, deg=1) delta_z = p[0] z_zero = np.polyval(p, 1) hdr.set('CRPIX3', 1, 'Reference channel') hdr.set('CRVAL3', z_zero, 'Reference channel value') hdr.set('CUNIT3', 'bcv', 'Units in Z') hdr.set('CDELT3', delta_z, 'Average increment in Z') hdr.set('CR3_3', delta_z, 'Average increment in Z') hdr.set('C3_3', delta_z, 'Average increment in Z') # Saving filenames in the header --- hdr.add_history('Cube mounted using `mkcube`') for i in range(z_array.size): files = df[df['z'] == z_array[i]]['filename'].tolist() for j in range(len(files)): hdr.append(('CHAN_%03d' % (i + 1), files[j], 'z = %+04d' % z_array[i])) hdr.add_blank('', after='CHAN_%03d' % (i + 1)) hdr.add_blank('', before='CHAN_001') hdr.add_blank('--- Channels and Files ---', before='CHAN_001') output = io.safe_save(output, verbose=True) logger.info('Writing file to {:s}'.format(output)) pyfits.writeto(output, cube, hdr, overwrite=True) logger.debug( pd.DataFrame( data={ 'x': z, 'y': z_array, 'fit_y': np.polyval(p, z), 'round_fit': np.round(np.polyval(p, z)) } ) ) logger.debug(p) return if __name__ == '__main__': main()
bsd-3-clause
eerwitt/tensorflow
tensorflow/contrib/learn/python/learn/tests/dataframe/binary_transform_test.py
18
3504
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for binary transforms.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import sys # TODO: #6568 Remove this hack that makes dlopen() not crash. if hasattr(sys, "getdlopenflags") and hasattr(sys, "setdlopenflags"): import ctypes sys.setdlopenflags(sys.getdlopenflags() | ctypes.RTLD_GLOBAL) import numpy as np from tensorflow.contrib.learn.python.learn.dataframe import tensorflow_dataframe as df from tensorflow.contrib.learn.python.learn.dataframe.transforms.binary_transforms import BINARY_TRANSFORMS from tensorflow.python.client import session as session_lib from tensorflow.python.platform import test as test_lib from tensorflow.python.training import coordinator from tensorflow.python.training import queue_runner_impl NUMPY_ARRAY_SIZE = 100 SCALAR = 50.0 TEST_NAME_PREFIX = "testBinaryOp_" class BinaryTransformTestCase(test_lib.TestCase): """Test class for binary transforms.""" @classmethod def add_test_case(cls, fn_name, op): def _test(self): rng = np.arange( -NUMPY_ARRAY_SIZE // 2, NUMPY_ARRAY_SIZE // 2, dtype="float32") frame = df.TensorFlowDataFrame.from_numpy( rng, batch_size=len(rng), shuffle=False) frame["sqr"] = frame["value"].square() self.assertTrue(hasattr(frame["value"], fn_name)) frame["series_result"] = 
getattr(frame["value"], fn_name)(frame["sqr"]) frame["scalar_result"] = getattr(frame["value"], fn_name)(SCALAR) frame_built = frame.build() expected_series_tensor = op(frame_built["value"], frame_built["sqr"]) actual_series_tensor = frame_built["series_result"] expected_scalar_tensor = op(frame_built["value"], SCALAR) actual_scalar_tensor = frame_built["scalar_result"] session = session_lib.Session() coord = coordinator.Coordinator() threads = queue_runner_impl.start_queue_runners(sess=session, coord=coord) actual_series, expected_series, actual_scalar, expected_scalar = ( session.run([ actual_series_tensor, expected_series_tensor, actual_scalar_tensor, expected_scalar_tensor ])) coord.request_stop() coord.join(threads) np.testing.assert_almost_equal(expected_series, actual_series) np.testing.assert_almost_equal(expected_scalar, actual_scalar) setattr(cls, "{}{}".format(TEST_NAME_PREFIX, op.__name__), _test) for bt in BINARY_TRANSFORMS: BinaryTransformTestCase.add_test_case(*bt) # Check that the number of test methods matches the number of binary transforms. test_methods = [ test for test in dir(BinaryTransformTestCase) if test.startswith(TEST_NAME_PREFIX) ] assert len(test_methods) == len(BINARY_TRANSFORMS) if __name__ == "__main__": test_lib.main()
apache-2.0
fredzannarbor/pagekicker-community
scripts_python_3/bin/dev_Flickr_single.py
2
2970
#!/usr/local/bin/python ################################################### #Inputs: #1) a single seed #2) a uuid #Outputs: #1) url_list.txt - a list of images that are retrieved from Flickr import os, sys, json, flickrapi, codecs #================================================= def callTheApi(api_key, seed, per_page_num): #This calls the API with a search term or 'seed' flickr = flickrapi.FlickrAPI(api_key) json_photos = json.loads(flickr.photos_search(text=seed, \ per_page=str(per_page_num), format = 'json', nojsoncallback=1, \ license = '4,6' )); #print json_photos return json_photos #================================================= def getFlickrUsername(api_key, pic_owner): #calls API to retrieve username for photo flickr = flickrapi.FlickrAPI(api_key) user_info = json.loads(flickr.people_getInfo(user_id = pic_owner, \ format = 'json', nojsoncallback = 1)) #Filters out those users who don't have their real name available #and uses their username instead if 'realname' in user_info['person']: username = user_info['person']['realname']['_content'] if username == '': username = user_info['person']['path_alias'] else: username = user_info['person']['path_alias'] return username #================================================= def parser(json_photos, per_page_num, api_key, seed, savepath): #here we can do a quick dirty parsing now #ideally, we go back and make a class that will hold all the data #filters out null results if json_photos['photos']['total'] != '0': f = codecs.open(savepath+'/url_list.txt', encoding = 'utf-8', mode = 'w+') for pic in range(per_page_num): pic_title = json_photos["photos"]['photo'][int(pic)]['title'] pic_owner = json_photos['photos']['photo'][int(pic)]['owner'] pic_id = json_photos['photos']['photo'][int(pic)]['id'] pic_farm = json_photos['photos']['photo'][int(pic)]['farm'] pic_server = json_photos['photos']['photo'][int(pic)]['server'] pic_secret = json_photos['photos']['photo'][int(pic)]['secret'] username = 
getFlickrUsername(api_key, pic_owner) url = "http://farm" + str(pic_farm) + ".static.flickr.com/"\ + pic_server + "/" + pic_id + "_" + pic_secret + ".jpg" f.write(seed + ': "' + pic_title + '", An image by Flickr user: ' + username + ' : ' + url + '\n') f.close() #================================================= def main(): #working_path = '/opt/bitnami/apache2/htdocs/pk-main/development/\ #scripts/tmp' seed = str(sys.argv[1]) uuid = str(sys.argv[2]) uuid_path = '/opt/bitnami/apache2/htdocs/pk-main/development/scripts/'\ +'tmp' savepath = uuid_path + '/' + uuid + '/fetched' os.chdir(uuid_path + '/' + uuid) api_key = 'e7a1dbf3d545efe6dfe297f3745c1dbd' per_page_num = 10 json_photos = callTheApi(api_key, seed, per_page_num); parser(json_photos, per_page_num, api_key, seed, savepath) #================================================= if __name__ == '__main__': main()
apache-2.0
tempbottle/ironpython3
Src/Scripts/generate_dict_views.py
3
4294
##################################################################################### # # Copyright (c) Microsoft Corporation. All rights reserved. # # This source code is subject to terms and conditions of the Apache License, Version 2.0. A # copy of the license can be found in the License.html file at the root of this distribution. If # you cannot locate the Apache License, Version 2.0, please send an email to # ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound # by the terms of the Apache License, Version 2.0. # # You must not remove this notice, or any other, from this software. # # ##################################################################################### from generate import generate view_types = ['DictionaryKeyView', 'DictionaryItemView'] set_types = ['SetCollection', 'FrozenSetCollection'] ops = [ ('|', 'Union'), ('&', 'Intersection'), ('^', 'SymmetricDifference'), ('-', 'Difference'), ] comps = [ ('==', 'xs.Count == ys.Count && xs.IsSubset(ys)'), ('!=', 'xs.Count != ys.Count || !xs.IsSubset(ys)'), ('>', 'ys.IsStrictSubset(xs)'), ('<', 'xs.IsStrictSubset(ys)'), ('>=', 'ys.IsSubset(xs)'), ('<=', 'xs.IsSubset(ys)'), ] def equality(comp): return 'true' if comp != '!=' and '=' in comp else 'false' def inequality(comp): return 'true' if '=' not in comp or comp == '!=' else 'false' def gen_ops(ty): def _gen_ops(cw): for op, op_name in ops: for format_args in [ (op, ty + ' x', 'IEnumerable y'), (op, 'IEnumerable y', ty + ' x'), ]: cw.enter_block('public static SetCollection operator %s(%s, %s)' % format_args) cw.writeline('return new SetCollection(SetStorage.%s(' % op_name) cw.indent() cw.writeline('SetStorage.GetItemsWorker(x.GetEnumerator()),') cw.writeline('SetStorage.GetItems(y)') cw.dedent() cw.writeline('));') cw.exit_block() cw.writeline() return _gen_ops def gen_comps(ty): view_types_sorted = [ty] + [x for x in view_types if x != ty] def _gen_comps(cw): cw.enter_block('public override bool Equals(object 
obj)') cw.enter_block('if (obj == null)') cw.writeline('return false;') cw.exit_block() enter_block = cw.enter_block for check in view_types_sorted + set_types: enter_block('if (obj is %s)' % check) enter_block = cw.else_block cw.writeline('return this == (%s)obj;' % check) cw.exit_block() cw.writeline('return false;') cw.exit_block() cw.writeline() for right in view_types_sorted + set_types: for comp, expr in comps: cw.enter_block('public static bool operator %s(%s x, %s y)' % (comp, ty, right)) if right == ty: cw.enter_block('if (object.ReferenceEquals(x._dict, y._dict))') cw.writeline('return %s;' % equality(comp)) cw.exit_block() elif right in view_types: cw.enter_block('if (object.ReferenceEquals(x._dict, y._dict))') cw.writeline('return %s;' % inequality(comp)) cw.exit_block() xs = 'SetStorage.GetItemsWorker(x.GetEnumerator())' if right in view_types: ys = 'SetStorage.GetItemsWorker(y.GetEnumerator())' else: ys = 'y._items' cw.writeline('SetStorage xs = %s;' % xs) cw.writeline('SetStorage ys = %s;' % ys) cw.writeline('return %s;' % expr) cw.exit_block() cw.writeline() return _gen_comps def main(): generators = [ ('Set Operations (Keys)', gen_ops('DictionaryKeyView')), ('Set Comparison Operations (Keys)', gen_comps('DictionaryKeyView')), ('Set Operations (Items)', gen_ops('DictionaryItemView')), ('Set Comparison Operations (Items)', gen_comps('DictionaryItemView')), ] return generate(*generators) if __name__ == '__main__': main()
apache-2.0
googlearchive/appengine-try-python-flask
lib/werkzeug/debug/__init__.py
310
7800
# -*- coding: utf-8 -*- """ werkzeug.debug ~~~~~~~~~~~~~~ WSGI application traceback debugger. :copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import json import mimetypes from os.path import join, dirname, basename, isfile from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response from werkzeug.debug.tbtools import get_current_traceback, render_console_html from werkzeug.debug.console import Console from werkzeug.security import gen_salt #: import this here because it once was documented as being available #: from this module. In case there are users left ... from werkzeug.debug.repr import debug_repr class _ConsoleFrame(object): """Helper class so that we can reuse the frame console code for the standalone console. """ def __init__(self, namespace): self.console = Console(namespace) self.id = 0 class DebuggedApplication(object): """Enables debugging support for a given application:: from werkzeug.debug import DebuggedApplication from myapp import app app = DebuggedApplication(app, evalex=True) The `evalex` keyword argument allows evaluating expressions in a traceback's frame context. .. versionadded:: 0.9 The `lodgeit_url` parameter was deprecated. :param app: the WSGI application to run debugged. :param evalex: enable exception evaluation feature (interactive debugging). This requires a non-forking server. :param request_key: The key that points to the request object in ths environment. This parameter is ignored in current versions. :param console_path: the URL for a general purpose console. :param console_init_func: the function that is executed before starting the general purpose console. The return value is used as initial namespace. :param show_hidden_frames: by default hidden traceback frames are skipped. You can show them by setting this parameter to `True`. 
""" # this class is public __module__ = 'werkzeug' def __init__(self, app, evalex=False, request_key='werkzeug.request', console_path='/console', console_init_func=None, show_hidden_frames=False, lodgeit_url=None): if lodgeit_url is not None: from warnings import warn warn(DeprecationWarning('Werkzeug now pastes into gists.')) if not console_init_func: console_init_func = dict self.app = app self.evalex = evalex self.frames = {} self.tracebacks = {} self.request_key = request_key self.console_path = console_path self.console_init_func = console_init_func self.show_hidden_frames = show_hidden_frames self.secret = gen_salt(20) def debug_application(self, environ, start_response): """Run the application and conserve the traceback frames.""" app_iter = None try: app_iter = self.app(environ, start_response) for item in app_iter: yield item if hasattr(app_iter, 'close'): app_iter.close() except Exception: if hasattr(app_iter, 'close'): app_iter.close() traceback = get_current_traceback(skip=1, show_hidden_frames= self.show_hidden_frames, ignore_system_exceptions=True) for frame in traceback.frames: self.frames[frame.id] = frame self.tracebacks[traceback.id] = traceback try: start_response('500 INTERNAL SERVER ERROR', [ ('Content-Type', 'text/html; charset=utf-8'), # Disable Chrome's XSS protection, the debug # output can cause false-positives. ('X-XSS-Protection', '0'), ]) except Exception: # if we end up here there has been output but an error # occurred. in that situation we can do nothing fancy any # more, better log something into the error log and fall # back gracefully. 
environ['wsgi.errors'].write( 'Debugging middleware caught exception in streamed ' 'response at a point where response headers were already ' 'sent.\n') else: yield traceback.render_full(evalex=self.evalex, secret=self.secret) \ .encode('utf-8', 'replace') traceback.log(environ['wsgi.errors']) def execute_command(self, request, command, frame): """Execute a command in a console.""" return Response(frame.console.eval(command), mimetype='text/html') def display_console(self, request): """Display a standalone shell.""" if 0 not in self.frames: self.frames[0] = _ConsoleFrame(self.console_init_func()) return Response(render_console_html(secret=self.secret), mimetype='text/html') def paste_traceback(self, request, traceback): """Paste the traceback and return a JSON response.""" rv = traceback.paste() return Response(json.dumps(rv), mimetype='application/json') def get_source(self, request, frame): """Render the source viewer.""" return Response(frame.render_source(), mimetype='text/html') def get_resource(self, request, filename): """Return a static resource from the shared folder.""" filename = join(dirname(__file__), 'shared', basename(filename)) if isfile(filename): mimetype = mimetypes.guess_type(filename)[0] \ or 'application/octet-stream' f = open(filename, 'rb') try: return Response(f.read(), mimetype=mimetype) finally: f.close() return Response('Not Found', status=404) def __call__(self, environ, start_response): """Dispatch the requests.""" # important: don't ever access a function here that reads the incoming # form data! Otherwise the application won't have access to that data # any more! 
request = Request(environ) response = self.debug_application if request.args.get('__debugger__') == 'yes': cmd = request.args.get('cmd') arg = request.args.get('f') secret = request.args.get('s') traceback = self.tracebacks.get(request.args.get('tb', type=int)) frame = self.frames.get(request.args.get('frm', type=int)) if cmd == 'resource' and arg: response = self.get_resource(request, arg) elif cmd == 'paste' and traceback is not None and \ secret == self.secret: response = self.paste_traceback(request, traceback) elif cmd == 'source' and frame and self.secret == secret: response = self.get_source(request, frame) elif self.evalex and cmd is not None and frame is not None and \ self.secret == secret: response = self.execute_command(request, cmd, frame) elif self.evalex and self.console_path is not None and \ request.path == self.console_path: response = self.display_console(request) return response(environ, start_response)
apache-2.0
translate/translate
translate/storage/test_properties.py
2
61156
from io import BytesIO from pytest import mark, raises from translate.misc.multistring import multistring from translate.storage import properties, test_monolingual # Note that DialectJava delimitors are ["=", ":", " "] def test_find_delimiter_pos_simple(): """Simple tests to find the various delimiters""" assert properties.DialectJava.find_delimiter("key=value") == ("=", 3) assert properties.DialectJava.find_delimiter("key:value") == (":", 3) assert properties.DialectJava.find_delimiter("key value") == (" ", 3) # NOTE this is valid in Java properties, the key is then the empty string assert properties.DialectJava.find_delimiter("= value") == ("=", 0) def test_find_delimiter_pos_multiple(): """Find delimiters when multiple potential delimiters are involved""" assert properties.DialectJava.find_delimiter("key=value:value") == ("=", 3) assert properties.DialectJava.find_delimiter("key:value=value") == (":", 3) assert properties.DialectJava.find_delimiter("key value=value") == (" ", 3) def test_find_delimiter_pos_none(): """Find delimiters when there isn't one""" assert properties.DialectJava.find_delimiter("key") == (None, -1) assert properties.DialectJava.find_delimiter("key\\=\\:\\ ") == (None, -1) def test_find_delimiter_pos_whitespace(): """Find delimiters when whitespace is involved""" assert properties.DialectJava.find_delimiter("key = value") == ("=", 4) assert properties.DialectJava.find_delimiter("key : value") == (":", 4) assert properties.DialectJava.find_delimiter("key value") == (" ", 3) assert properties.DialectJava.find_delimiter("key value = value") == (" ", 3) assert properties.DialectJava.find_delimiter("key value value") == (" ", 3) assert properties.DialectJava.find_delimiter(" key = value") == ("=", 5) def test_find_delimiter_pos_escapes(): """Find delimiters when potential earlier delimiters are escaped""" assert properties.DialectJava.find_delimiter("key\\:=value") == ("=", 5) assert properties.DialectJava.find_delimiter("key\\=: value") == 
(":", 5) assert properties.DialectJava.find_delimiter("key\\ value") == (" ", 5) assert properties.DialectJava.find_delimiter("key\\ key\\ key\\: = value") == ( "=", 16, ) def test_is_line_continuation(): assert not properties.is_line_continuation("") assert not properties.is_line_continuation("some text") assert properties.is_line_continuation("""some text\\""") assert not properties.is_line_continuation("""some text\\\\""") # Escaped \ assert properties.is_line_continuation( """some text\\\\\\""" ) # Odd num. \ is line continuation assert properties.is_line_continuation("""\\\\\\""") def test_key_strip(): assert properties._key_strip("key") == "key" assert properties._key_strip(" key") == "key" assert properties._key_strip("\\ key") == "\\ key" assert properties._key_strip("key ") == "key" assert properties._key_strip("key\\ ") == "key\\ " def test_is_comment_one_line(): assert properties.is_comment_one_line("# comment") assert properties.is_comment_one_line("! comment") assert properties.is_comment_one_line("// comment") assert properties.is_comment_one_line(" # comment") assert properties.is_comment_one_line("/* comment */") assert not properties.is_comment_one_line("not = comment_line /* comment */") assert not properties.is_comment_one_line("/* comment ") def test_is_comment_start(): assert properties.is_comment_start("/* comment") assert not properties.is_comment_start("/* comment */") def test_is_comment_end(): assert properties.is_comment_end(" comment */") assert not properties.is_comment_end("/* comment */") class TestPropUnit(test_monolingual.TestMonolingualUnit): UnitClass = properties.propunit def test_rich_get(self): pass def test_rich_set(self): pass class TestGwtProp(test_monolingual.TestMonolingualStore): StoreClass = properties.gwtfile def propparse( self, propsource, personality="gwt", encoding=None, sourcelanguage=None, targetlanguage=None, ): """helper that parses properties source without requiring files""" dummyfile = 
BytesIO(propsource.encode()) propfile = properties.propfile(None, personality, encoding) if sourcelanguage: propfile.sourcelanguage = sourcelanguage if targetlanguage: propfile.targetlanguage = targetlanguage propsrc = dummyfile.read() dummyfile.close() propfile.parse(propsrc) propfile.makeindex() return propfile def propregen(self, propsource): """helper that converts properties source to propfile object and back""" return self.propparse(propsource).__bytes__() def test_quotes(self): """checks that quotes are parsed and saved correctly""" propsource = "test_me=I can ''code''!" propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "I can 'code'!" propunit.source = "I 'can' code!" assert bytes(propfile).decode() == "test_me=I ''can'' code!\n" def test_simpledefinition(self): """checks that a simple properties definition is parsed correctly""" propsource = "test_me=I can code!" propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "I can code!" def test_doubledefinition(self): """checks that a double properties definition is parsed correctly""" propsource = "test_me=I can code!\ntest_me[one]=I can code single!" propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source.strings == ["I can code single!", "I can code!"] assert propunit.value == ["I can code!", "I can code single!"] propunit.value = ["I can code double!", "I can code single!"] assert propunit.value == ["I can code double!", "I can code single!"] assert propunit.source.strings == ["I can code single!", "I can code double!"] # propunit.value = ["I can code single!", "I can code!" 
] # assert propunit.value == ["I can code single!", "I can code!"] def test_doubledefinition_source(self): """checks that a double properties definition can be regenerated as source""" propsource = "test_me=I can code!\ntest_me[one]=I can code single!" propregen = self.propregen(propsource).decode() assert propsource + "\n" == propregen def test_reduce(self): """checks that if the target language has less plural form the generated properties file is correct""" propsource = "test_me=I can code!\ntest_me[one]=I can code single!" propfile = self.propparse( propsource, "gwt", None, "en", "ja" ) # Only "other" plural form print(propfile) print(str(propfile)) assert b"test_me=I can code!\n" == propfile.__bytes__() def test_increase(self): """checks that if the target language has more plural form the generated properties file is correct""" propsource = "test_me=I can code!\ntest_me[one]=I can code single!" propfile = self.propparse( propsource, "gwt", None, "en", "ar" ) # All plural forms assert len(propfile.units) == 1 propunit = propfile.units[0] assert isinstance(propunit.target, multistring) assert propunit.target.strings == ["", "", "", "", "", ""] assert ( b"test_me=I can code!\ntest_me[none]=\ntest_me[one]=I can code single!\n" + b"test_me[two]=\ntest_me[few]=\ntest_me[many]=\n" == propfile.__bytes__() ) propunit.target = { "other": "other", "one": "one", "zero": "zero", "few": "few", "two": "two", "many": "many", } assert isinstance(propunit.target, multistring) assert propunit.target.strings == ["zero", "one", "two", "few", "many", "other"] assert ( b"test_me=other\ntest_me[none]=zero\ntest_me[one]=one\n" + b"test_me[two]=two\ntest_me[few]=few\ntest_me[many]=many\n" == propfile.__bytes__() ) propunit.target = multistring(["zero", "one", "two", "few", "many", "other"]) assert isinstance(propunit.target, multistring) assert propunit.target.strings == ["zero", "one", "two", "few", "many", "other"] assert ( b"test_me=other\ntest_me[none]=zero\ntest_me[one]=one\n" + 
b"test_me[two]=two\ntest_me[few]=few\ntest_me[many]=many\n" == propfile.__bytes__() ) propunit.target = ["zero", "one", "two", "few", "many", "other"] assert isinstance(propunit.target, multistring) assert propunit.target.strings == ["zero", "one", "two", "few", "many", "other"] assert ( b"test_me=other\ntest_me[none]=zero\ntest_me[one]=one\n" + b"test_me[two]=two\ntest_me[few]=few\ntest_me[many]=many\n" == propfile.__bytes__() ) propunit.source = ["zero", "one", "two", "few", "many", "other"] assert isinstance(propunit.target, multistring) assert propunit.target.strings == ["zero", "one", "two", "few", "many", "other"] assert ( b"test_me=other\ntest_me[none]=zero\ntest_me[one]=one\n" + b"test_me[two]=two\ntest_me[few]=few\ntest_me[many]=many\n" == propfile.__bytes__() ) class TestProp(test_monolingual.TestMonolingualStore): StoreClass = properties.propfile def propparse(self, propsource, personality="java", encoding=None): """helper that parses properties source without requiring files""" dummyfile = BytesIO( propsource.encode() if isinstance(propsource, str) else propsource ) propfile = properties.propfile(dummyfile, personality, encoding) return propfile def propregen(self, propsource, encoding=None): """helper that converts properties source to propfile object and back""" return bytes(self.propparse(propsource, encoding=encoding)).decode("utf-8") def test_simpledefinition(self): """checks that a simple properties definition is parsed correctly""" propsource = "test_me=I can code!" propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "I can code!" def test_simpledefinition_source(self): """checks that a simple properties definition can be regenerated as source""" propsource = "test_me=I can code!" 
propregen = self.propregen(propsource) assert propsource + "\n" == propregen def test_controlutf8_source(self): """checks that a control characters are parsed correctly""" propsource = "test_me=\\\\\\n" propregen = self.propregen(propsource, encoding="utf-8") assert propsource + "\n" == propregen def test_control_source(self): """checks that a control characters are parsed correctly""" propsource = "test_me=\\\\\\n" propregen = self.propregen(propsource) assert propsource + "\n" == propregen def test_unicode_escaping(self): """check that escaped unicode is converted properly""" propsource = "unicode=\u0411\u0416\u0419\u0428" messagevalue = "\u0411\u0416\u0419\u0428".encode() propfile = self.propparse(propsource, personality="mozilla") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "unicode" assert propunit.source == "БЖЙШ" regensource = bytes(propfile) assert messagevalue in regensource assert b"\\" not in regensource def test_newlines_startend(self): r"""check that we preserve \n that appear at start and end of properties""" propsource = "newlines=\\ntext\\n" propregen = self.propregen(propsource) assert propsource + "\n" == propregen def test_whitespace_handling(self): """check that we remove extra whitespace around property""" whitespaces = ( ("key = value", "key", "value"), # Standard for baseline (" key = value", "key", "value"), # Extra \s before key and value ( "\\ key\\ = value", "\\ key\\ ", "value", ), # extra space at start and end of key ( "key = \\ value ", "key", " value ", ), # extra space at start end end of value ) for propsource, key, value in whitespaces: propfile = self.propparse(propsource) propunit = propfile.units[0] print(repr(propsource), repr(propunit.name), repr(propunit.source)) assert propunit.name == key assert propunit.source == value # let's reparse the output to ensure good serialisation->parsing roundtrip: propfile = self.propparse(str(propunit)) propunit = propfile.units[0] assert 
propunit.name == key assert propunit.source == value def test_key_value_delimiters_simple(self): """ test that we can handle colon, equals and space delimiter between key and value. We don't test any space removal or escaping """ delimiters = [":", "=", " "] for delimiter in delimiters: propsource = "key%svalue" % delimiter print(f"source: '{propsource}'\ndelimiter: '{delimiter}'") propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "key" assert propunit.source == "value" def test_comments(self): """checks that we handle # and ! comments""" markers = ["#", "!"] for comment_marker in markers: propsource = ( """%s A comment key=value """ % comment_marker ) propfile = self.propparse(propsource) print(repr(propsource)) print("Comment marker: '%s'" % comment_marker) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.comments == ["%s A comment" % comment_marker] def test_latin1(self): """checks that we handle non-escaped latin1 text""" prop_source = "key=valú".encode("latin1") prop_store = self.propparse(prop_source) assert len(prop_store.units) == 1 unit = prop_store.units[0] assert unit.source == "valú" def test_fullspec_delimiters(self): """test the full definiation as found in Java docs""" proplist = [ "Truth = Beauty\n", " Truth:Beauty", "Truth :Beauty", "Truth Beauty", ] for propsource in proplist: propfile = self.propparse(propsource) propunit = propfile.units[0] print(propunit) assert propunit.name == "Truth" assert propunit.source == "Beauty" def test_fullspec_escaped_key(self): """Escaped delimeters can be in the key""" prop_source = "\\:\\=" prop_store = self.propparse(prop_source) assert len(prop_store.units) == 1 unit = prop_store.units[0] print(unit) assert unit.name == "\\:\\=" def test_fullspec_line_continuation(self): """Whitespace delimiter and pre whitespace in line continuation are dropped""" prop_source = r"""fruits apple, banana, pear, \ cantaloupe, 
watermelon, \ kiwi, mango """ prop_store = self.propparse(prop_source) print(prop_store) assert len(prop_store.units) == 1 unit = prop_store.units[0] print(unit) assert properties.DialectJava.find_delimiter(prop_source) == (" ", 6) assert unit.name == "fruits" assert unit.source == "apple, banana, pear, cantaloupe, watermelon, kiwi, mango" def test_fullspec_key_without_value(self): """A key can have no value in which case the value is the empty string""" prop_source = "cheeses" prop_store = self.propparse(prop_source) assert len(prop_store.units) == 1 unit = prop_store.units[0] print(unit) assert unit.name == "cheeses" assert unit.source == "" def test_mac_strings(self): """test various items used in Mac OS X strings files""" propsource = r""""I am a \"key\"" = "I am a \"value\"";""".encode("utf-16") propfile = self.propparse(propsource, personality="strings") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == 'I am a "key"' assert propunit.source == 'I am a "value"' def test_utf_16_save(self): """test saving of utf-16 java properties files""" propsource = """key=zkouška\n""".encode("utf-16") propfile = self.propparse(propsource, personality="java-utf16") assert propfile.encoding == "utf-16" assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "key" assert propunit.source == "zkouška" assert bytes(propfile) == propsource def test_mac_multiline_strings(self): """test can read multiline items used in Mac OS X strings files""" propsource = ( r""""I am a \"key\"" = "I am a \"value\" """ + '\n nextline";' ).encode("utf-16") propfile = self.propparse(propsource, personality="strings") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == 'I am a "key"' assert propunit.source == 'I am a "value" nextline' def test_mac_strings_unicode(self): """Ensure we can handle Unicode""" propsource = """"I am a “key”" = "I am a “value”";""".encode("utf-16") propfile = 
self.propparse(propsource, personality="strings") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "I am a “key”" assert propfile.personality.encode(propunit.source) == "I am a “value”" def test_mac_strings_utf8(self): """Ensure we can handle Unicode""" propsource = """"I am a “key”" = "I am a “value”";""".encode() propfile = self.propparse(propsource, personality="strings-utf8") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "I am a “key”" assert propfile.personality.encode(propunit.source) == "I am a “value”" def test_mac_strings_newlines(self): r"""test newlines \n within a strings files""" propsource = r""""key" = "value\nvalue";""".encode("utf-16") propfile = self.propparse(propsource, personality="strings") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "key" assert propunit.source == "value\nvalue" assert propfile.personality.encode(propunit.source) == r"value\nvalue" def test_mac_strings_comments(self): """test .string comment types""" propsource = """/* Comment */ // Comment "key" = "value";""".encode( "utf-16" ) propfile = self.propparse(propsource, personality="strings") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "key" assert propunit.source == "value" assert propunit.getnotes() == "/* Comment */\n// Comment" def test_mac_strings_multilines_comments(self): """test .string multiline comments""" propsource = ("/* Foo\n" "Bar\n" "Baz */\n" '"key" = "value";').encode("utf-16") propfile = self.propparse(propsource, personality="strings") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "key" assert propunit.source == "value" assert propunit.getnotes() == "/* Foo\nBar\nBaz */" def test_mac_strings_comments_dropping(self): """.string generic (and unuseful) comments should be dropped""" propsource = """/* No comment provided by engineer. 
*/ "key" = "value";""".encode( "utf-16" ) propfile = self.propparse(propsource, personality="strings") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "key" assert propunit.source == "value" assert propunit.getnotes() == "" def test_mac_strings_quotes(self): """test that parser unescapes characters used as wrappers""" propsource = r'"key with \"quotes\"" = "value with \"quotes\"";'.encode( "utf-16" ) propfile = self.propparse(propsource, personality="strings") propunit = propfile.units[0] assert propunit.name == 'key with "quotes"' assert propunit.value == 'value with "quotes"' def test_mac_strings_equals(self): """test that equal signs inside keys/values are not mixed with delimiter""" propsource = '"key with = sign" = "value with = sign";'.encode("utf-16") propfile = self.propparse(propsource, personality="strings") propunit = propfile.units[0] assert propunit.name == "key with = sign" assert propunit.value == "value with = sign" def test_mac_strings_serialization(self): """test that serializer quotes mac strings properly""" propsource = r'"key with \"quotes\"" = "value with \"quotes\"";'.encode( "utf-16" ) propfile = self.propparse(propsource, personality="strings") # we don't care about leading and trailing newlines and zero bytes # in the assert, we just want to make sure that # - all quotes are in place # - quotes inside are escaped # - for the sake of beauty a pair of spaces encloses the equal mark # - every line ends with ";" assert bytes(propfile).strip(b"\n\x00") == propsource.strip(b"\n\x00") def test_override_encoding(self): """test that we can override the encoding of a properties file""" propsource = "key = value".encode("cp1252") propfile = self.propparse(propsource, personality="strings", encoding="cp1252") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "key" assert propunit.source == "value" def test_trailing_comments(self): """test that we handle non-unit data at the end of 
a file""" propsource = "key = value\n# END" propfile = self.propparse(propsource) assert len(propfile.units) == 2 propunit = propfile.units[1] assert propunit.name == "" assert propunit.source == "" assert propunit.getnotes() == "# END" def test_utf16_byte_order_mark(self): """test that BOM appears in the resulting text once only""" propsource = "key1 = value1\nkey2 = value2\n".encode("utf-16") propfile = self.propparse(propsource, encoding="utf-16") result = bytes(propfile) bom = propsource[:2] assert result.startswith(bom) assert bom not in result[2:] def test_raise_ioerror_if_cannot_detect_encoding(self): """Test that IOError is thrown if file encoding cannot be detected.""" propsource = "key = ąćęłńóśźż".encode("cp1250") with raises(IOError): self.propparse(propsource, personality="strings") def test_utf8_byte_order_mark(self): """test that BOM handling works fine with newlines""" propsource = "\n\n\nkey1 = value1\n\nkey2 = value2\n".encode("utf-8-sig") propfile = self.propparse(propsource, personality="java-utf8") bom = propsource[:3] result = bytes(propfile) assert result.startswith(bom) assert bom not in result[3:] assert b"None" not in result[3:] def test_joomla_set_target(self): """test various items used in Joomla files""" propsource = b"""COM_EXAMPLE_FOO="This is a test"\n""" proptarget = b"""COM_EXAMPLE_FOO="This is another test"\n""" propfile = self.propparse(propsource, personality="joomla") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "COM_EXAMPLE_FOO" assert propunit.source == "This is a test" assert bytes(propfile) == propsource propunit.target = "This is another test" assert bytes(propfile) == proptarget def test_joomla(self): """test various items used in Joomla files""" propsource = b"""; comment\nVALUE="I am a "_QQ_"value"_QQ_""\n""" propfile = self.propparse(propsource, personality="joomla") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "VALUE" assert 
propunit.source == 'I am a "value"' assert bytes(propfile) == propsource def test_joomla_escape(self): """test various items used in Joomla files""" propsource = b"""; comment\nVALUE="I am a "_QQ_"value"_QQ_"\\n"\n""" propfile = self.propparse(propsource, personality="joomla") assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "VALUE" assert propunit.source == 'I am a "value"\n' assert bytes(propfile) == propsource def test_serialize_missing_delimiter(self): propsource = b"key\n" propfile = self.propparse(propsource, personality="java-utf8") propunit = propfile.units[0] assert propunit.name == "key" assert propunit.value == "" assert propunit.delimiter == "" assert bytes(propfile) == propsource def test_serialize_missing_value(self): propsource = b"key=\n" propfile = self.propparse(propsource, personality="java-utf8") propunit = propfile.units[0] assert propunit.name == "key" assert propunit.value == "" assert bytes(propfile) == propsource def test_multi_comments(self): propsource = b"""# This is free software; you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation; either version 2.1 of # the License, or (at your option) any later version. # This contains the translations of the module in the default language # (generally English). job.log.begin=Starting job of type [{0}] """ propfile = self.propparse(propsource, personality="java-utf8") assert len(propfile.units) == 2 propunit = propfile.units[0] assert propunit.name == "" assert propunit.value == "" assert ( propunit.getnotes() == """# This is free software; you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation; either version 2.1 of # the License, or (at your option) any later version. 
""" ) propunit = propfile.units[1] assert propunit.name == "job.log.begin" assert propunit.value == "Starting job of type [{0}]" print(bytes(propfile)) print(propsource) assert bytes(propfile) == propsource class TestXWiki(test_monolingual.TestMonolingualStore): StoreClass = properties.xwikifile def propparse(self, propsource): """helper that parses properties source without requiring files""" dummyfile = BytesIO( propsource.encode() if isinstance(propsource, str) else propsource ) propfile = properties.xwikifile(dummyfile) return propfile def propregen(self, propsource): """helper that converts properties source to propfile object and back""" return bytes(self.propparse(propsource)).decode("utf-8") def test_simpledefinition(self): """checks that a simple properties definition is parsed correctly""" propsource = "test_me=I can code!" propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "I can code!" assert not propunit.missing def test_missing_definition(self): """checks that a simple missing properties definition is parsed correctly""" propsource = "### Missing: test_me=I can code!" propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "I can code!" assert propunit.missing propunit.target = "" assert propunit.missing propunit.target = "I can code!" 
assert not propunit.missing propunit.target = "Je peux coder" assert not propunit.missing # Check encoding propunit.target = "تىپتىكى خىزمەتنى باشلاش" expected_content = ( "test_me=\\u062A\\u0649\\u067E\\u062A\\u0649\\u0643\\u0649 " "\\u062E\\u0649\\u0632\\u0645\\u06D5\\u062A\\u0646\\u0649 " "\\u0628\\u0627\\u0634\\u0644\\u0627\\u0634" ) generatedcontent = BytesIO() propfile.serialize(generatedcontent) assert ( generatedcontent.getvalue().decode(propfile.encoding) == expected_content + "\n" ) def test_missing_definition_source(self): propsource = "### Missing: test_me=I can code!" propgen = self.propregen(propsource) assert propsource + "\n" == propgen def test_definition_with_simple_quote(self): propsource = "test_me=A 'quoted' translation" propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "A 'quoted' translation" assert not propunit.missing assert propunit.getoutput() == propsource + "\n" def test_definition_with_simple_quote_and_argument(self): propsource = "test_me=A ''quoted'' translation for {0}" propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "A 'quoted' translation for {0}" assert not propunit.missing assert propunit.getoutput() == propsource + "\n" def test_header_preserved(self): propsource = """# -----\n# Header\n# -----\n\ntest_me=I can code""" propgen = self.propregen(propsource) assert propgen == propsource + "\n" def test_blank_line_before_comment_preserved(self): propsource = """\n# My comment\ntest_me=I can code""" propgen = self.propregen(propsource) assert propgen == propsource + "\n" def test_deprecated_comments_preserved(self): propsource = """# Deprecated keys starts here. 
#@deprecatedstart job.log.label=Job log #@deprecatedend""" propfile = self.propparse(propsource) assert len(propfile.units) == 3 propunit = propfile.units[1] assert propunit.name == "job.log.label" assert propunit.source == "Job log" assert not propunit.missing propunit.missing = True expected_output = """# Deprecated keys starts here. #@deprecatedstart ### Missing: job.log.label=Job log #@deprecatedend """ propgen = bytes(propfile).decode("utf-8") assert propgen == expected_output class TestXWikiPageProperties(test_monolingual.TestMonolingualStore): StoreClass = properties.XWikiPageProperties FILE_SCHEME = ( properties.XWikiPageProperties.XML_HEADER + """<xwikidoc locale="%(language)s"> <translation>1</translation> <language>%(language)s</language> <title/> <content>%(content)s</content> </xwikidoc>""" ) def getcontent(self, content, language="en"): return self.FILE_SCHEME % {"content": content + "\n", "language": language} def propparse(self, propsource): """helper that parses properties source without requiring files""" dummyfile = BytesIO( propsource.encode() if isinstance(propsource, str) else propsource ) propfile = properties.XWikiPageProperties(dummyfile) return propfile def propregen(self, propsource): """helper that converts properties source to propfile object and back""" return bytes(self.propparse(propsource)).decode("utf-8") def test_simpledefinition(self): """checks that a simple properties definition is parsed correctly""" propsource = self.getcontent("test_me=I can code!") propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "I can code!" 
assert not propunit.missing generatedcontent = BytesIO() propfile.serialize(generatedcontent) assert ( generatedcontent.getvalue().decode(propfile.encoding) == propsource + "\n" ) # check translation and language attribute propfile.settargetlanguage("fr") propunit.target = "Je peux coder" expectedcontent = self.getcontent("test_me=Je peux coder", "fr") generatedcontent = BytesIO() propfile.serialize(generatedcontent) assert ( generatedcontent.getvalue().decode(propfile.encoding) == expectedcontent + "\n" ) def test_missing_definition(self): """checks that a simple missing properties definition is parsed correctly""" propsource = self.getcontent("### Missing: test_me=I can code!") propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "I can code!" assert propunit.missing propunit.target = "" assert propunit.missing propunit.target = "Je peux coder" assert not propunit.missing propunit.target = "تىپتىكى خىزمەتنى باشلاش" expected_content = self.getcontent("test_me=تىپتىكى خىزمەتنى باشلاش") generatedcontent = BytesIO() propfile.serialize(generatedcontent) assert ( generatedcontent.getvalue().decode(propfile.encoding) == expected_content + "\n" ) def test_missing_definition_source(self): propsource = self.getcontent("### Missing: test_me=I can code!") propgen = self.propregen(propsource) assert propsource + "\n" == propgen def test_definition_with_simple_quote(self): propsource = self.getcontent("test_me=A 'quoted' translation") propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "A 'quoted' translation" assert not propunit.missing generatedcontent = BytesIO() propfile.serialize(generatedcontent) assert ( generatedcontent.getvalue().decode(propfile.encoding) == propsource + "\n" ) def test_definition_with_simple_quote_and_argument(self): propsource = 
self.getcontent("test_me=A ''quoted'' translation for {0}") propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "A 'quoted' translation for {0}" assert not propunit.missing generatedcontent = BytesIO() propfile.serialize(generatedcontent) assert ( generatedcontent.getvalue().decode(propfile.encoding) == propsource + "\n" ) def test_definition_with_encoded_html(self): propsource = self.getcontent("test_me=A &amp; is represented with &amp;amp;") propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] assert propunit.name == "test_me" assert propunit.source == "A & is represented with &amp;" assert not propunit.missing generatedcontent = BytesIO() propfile.serialize(generatedcontent) assert ( generatedcontent.getvalue().decode(propfile.encoding) == propsource + "\n" ) def test_cleaning_attributes(self): """ Ensure that the XML is correctly formatted during serialization: it should not contain objects or attachments tags, and translation should be set to 1. """ ## Real XWiki files are containing multiple attributes on xwikidoc tag: we're not testing it there ## because ElementTree changed its implementation between Python 3.7 and 3.8 which changed the order of output of the attributes ## it makes it more difficult to assert it on multiple versions of Python. 
propsource = ( properties.XWikiPageProperties.XML_HEADER + """<xwikidoc reference="XWiki.AdminTranslations"> <web>XWiki</web> <name>AdminTranslations</name> <language/> <defaultLanguage>en</defaultLanguage> <translation>0</translation> <creator>xwiki:XWiki.Admin</creator> <parent>XWiki.WebHome</parent> <author>xwiki:XWiki.Admin</author> <contentAuthor>xwiki:XWiki.Admin</contentAuthor> <version>1.1</version> <title>AdminTranslations</title> <comment/> <minorEdit>false</minorEdit> <syntaxId>plain/1.0</syntaxId> <hidden>true</hidden> <content># Users Section test_me=I can code! </content> <object> <name>XWiki.AdminTranslations</name> <number>0</number> <className>XWiki.TranslationDocumentClass</className> <guid>554b2ee4-98dc-48ef-b436-ef0cf7d38c4f</guid> <class> <name>XWiki.TranslationDocumentClass</name> <customClass/> <customMapping/> <defaultViewSheet/> <defaultEditSheet/> <defaultWeb/> <nameField/> <validationScript/> <scope> <cache>0</cache> <disabled>0</disabled> <displayType>select</displayType> <freeText>forbidden</freeText> <multiSelect>0</multiSelect> <name>scope</name> <number>1</number> <prettyName>Scope</prettyName> <relationalStorage>0</relationalStorage> <separator> </separator> <separators>|, </separators> <size>1</size> <unmodifiable>0</unmodifiable> <values>GLOBAL|WIKI|USER|ON_DEMAND</values> <classType>com.xpn.xwiki.objects.classes.StaticListClass</classType> </scope> </class> <property> <scope>WIKI</scope> </property> </object> <attachment> <filename>XWikiLogo.png</filename> <mimetype>image/png</mimetype> <filesize>1390</filesize> <author>xwiki:XWiki.Admin</author> <version>1.1</version> <comment/> <content>something=toto</content> </attachment> </xwikidoc>""" ) propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] propfile.settargetlanguage("fr") assert propunit.name == "test_me" assert propunit.source == "I can code!" assert not propunit.missing propunit.target = "Je peux coder !" 
generatedcontent = BytesIO() propfile.serialize(generatedcontent) expected_xml = ( properties.XWikiPageProperties.XML_HEADER + """<xwikidoc reference="XWiki.AdminTranslations" locale="fr"> <web>XWiki</web> <name>AdminTranslations</name> <language>fr</language> <defaultLanguage>en</defaultLanguage> <translation>1</translation> <creator>xwiki:XWiki.Admin</creator> <parent>XWiki.WebHome</parent> <author>xwiki:XWiki.Admin</author> <contentAuthor>xwiki:XWiki.Admin</contentAuthor> <version>1.1</version> <title>AdminTranslations</title> <comment/> <minorEdit>false</minorEdit> <syntaxId>plain/1.0</syntaxId> <hidden>true</hidden> <content># Users Section test_me=Je peux coder ! </content> </xwikidoc>""" ) assert ( generatedcontent.getvalue().decode(propfile.encoding) == expected_xml + "\n" ) assert '<?xml version="1.1" encoding="UTF-8"?>\n\n<!--\n * See the NOTICE file distributed with this work for additional' in generatedcontent.getvalue().decode( propfile.encoding ) def test_translate_source(self): """ Ensure that the XML is correctly formatted during serialization: it should not contain objects or attachments tags, and translation should be set to 1. """ ## Real XWiki files are containing multiple attributes on xwikidoc tag: we're not testing it there ## because ElementTree changed its implementation between Python 3.7 and 3.8 which changed the order of output of the attributes ## it makes it more difficult to assert it on multiple versions of Python. 
propsource = ( properties.XWikiPageProperties.XML_HEADER + """<xwikidoc reference="XWiki.AdminTranslations"> <web>XWiki</web> <name>AdminTranslations</name> <language/> <defaultLanguage>en</defaultLanguage> <translation>0</translation> <creator>xwiki:XWiki.Admin</creator> <parent>XWiki.WebHome</parent> <author>xwiki:XWiki.Admin</author> <contentAuthor>xwiki:XWiki.Admin</contentAuthor> <version>1.1</version> <title>AdminTranslations</title> <comment/> <minorEdit>false</minorEdit> <syntaxId>plain/1.0</syntaxId> <hidden>true</hidden> <content># Users Section test_me=I can code! </content> <object> <name>XWiki.AdminTranslations</name> <number>0</number> <className>XWiki.TranslationDocumentClass</className> <guid>554b2ee4-98dc-48ef-b436-ef0cf7d38c4f</guid> <class> <name>XWiki.TranslationDocumentClass</name> <customClass/> <customMapping/> <defaultViewSheet/> <defaultEditSheet/> <defaultWeb/> <nameField/> <validationScript/> <scope> <cache>0</cache> <disabled>0</disabled> <displayType>select</displayType> <freeText>forbidden</freeText> <multiSelect>0</multiSelect> <name>scope</name> <number>1</number> <prettyName>Scope</prettyName> <relationalStorage>0</relationalStorage> <separator> </separator> <separators>|, </separators> <size>1</size> <unmodifiable>0</unmodifiable> <values>GLOBAL|WIKI|USER|ON_DEMAND</values> <classType>com.xpn.xwiki.objects.classes.StaticListClass</classType> </scope> </class> <property> <scope>WIKI</scope> </property> </object> <attachment> <filename>XWikiLogo.png</filename> <mimetype>image/png</mimetype> <filesize>1390</filesize> <author>xwiki:XWiki.Admin</author> <version>1.1</version> <comment/> <content>something=toto</content> </attachment> </xwikidoc>""" ) propfile = self.propparse(propsource) assert len(propfile.units) == 1 propunit = propfile.units[0] propfile.settargetlanguage("en") assert propunit.name == "test_me" assert propunit.source == "I can code!" 
assert not propunit.missing propunit.target = "I can change the translation source" generatedcontent = BytesIO() propfile.serialize(generatedcontent) expected_xml = ( properties.XWikiPageProperties.XML_HEADER + """<xwikidoc reference="XWiki.AdminTranslations"> <web>XWiki</web> <name>AdminTranslations</name> <language/> <defaultLanguage>en</defaultLanguage> <translation>0</translation> <creator>xwiki:XWiki.Admin</creator> <parent>XWiki.WebHome</parent> <author>xwiki:XWiki.Admin</author> <contentAuthor>xwiki:XWiki.Admin</contentAuthor> <version>1.1</version> <title>AdminTranslations</title> <comment/> <minorEdit>false</minorEdit> <syntaxId>plain/1.0</syntaxId> <hidden>true</hidden> <content># Users Section test_me=I can change the translation source </content> <object> <name>XWiki.AdminTranslations</name> <number>0</number> <className>XWiki.TranslationDocumentClass</className> <guid>554b2ee4-98dc-48ef-b436-ef0cf7d38c4f</guid> <class> <name>XWiki.TranslationDocumentClass</name> <customClass/> <customMapping/> <defaultViewSheet/> <defaultEditSheet/> <defaultWeb/> <nameField/> <validationScript/> <scope> <cache>0</cache> <disabled>0</disabled> <displayType>select</displayType> <freeText>forbidden</freeText> <multiSelect>0</multiSelect> <name>scope</name> <number>1</number> <prettyName>Scope</prettyName> <relationalStorage>0</relationalStorage> <separator> </separator> <separators>|, </separators> <size>1</size> <unmodifiable>0</unmodifiable> <values>GLOBAL|WIKI|USER|ON_DEMAND</values> <classType>com.xpn.xwiki.objects.classes.StaticListClass</classType> </scope> </class> <property> <scope>WIKI</scope> </property> </object> <attachment> <filename>XWikiLogo.png</filename> <mimetype>image/png</mimetype> <filesize>1390</filesize> <author>xwiki:XWiki.Admin</author> <version>1.1</version> <comment/> <content>something=toto</content> </attachment> </xwikidoc>""" ) assert ( generatedcontent.getvalue().decode(propfile.encoding) == expected_xml + "\n" ) assert '<?xml version="1.1" 
encoding="UTF-8"?>\n\n<!--\n * See the NOTICE file distributed with this work for additional' in generatedcontent.getvalue().decode( propfile.encoding ) class TestXWikiFullPage(test_monolingual.TestMonolingualStore): StoreClass = properties.XWikiFullPage FILE_SCHEME = ( properties.XWikiPageProperties.XML_HEADER + """<xwikidoc locale="%(language)s"> <translation>1</translation> <language>%(language)s</language> <title>%(title)s</title> <content>%(content)s</content> </xwikidoc>""" ) def getcontent(self, content, title, language="en"): return self.FILE_SCHEME % { "content": content, "title": title, "language": language, } def propparse(self, propsource): """helper that parses properties source without requiring files""" dummyfile = BytesIO( propsource.encode() if isinstance(propsource, str) else propsource ) propfile = properties.XWikiFullPage(dummyfile) propfile.settargetlanguage("en") return propfile def propregen(self, propsource): """helper that converts properties source to propfile object and back""" return bytes(self.propparse(propsource)).decode("utf-8") def test_simpledefinition(self): """checks that a simple properties definition is parsed correctly""" propsource = self.getcontent("I can code!", "This is a title") propfile = self.propparse(propsource) assert len(propfile.units) == 2 propunit = propfile.units[0] assert propunit.name == "content" assert propunit.source == "I can code!" assert not propunit.missing propunit.target = "A new code!" 
propunit = propfile.units[1] assert propunit.name == "title" assert propunit.source == "This is a title" assert not propunit.missing # Check encoding and language attribute propfile.settargetlanguage("fr") propunit.target = "تىپتىكى خىزمەتنى باشلاش" expected_content = self.getcontent( "A new code!", "تىپتىكى خىزمەتنى باشلاش", "fr" ) generatedcontent = BytesIO() propfile.serialize(generatedcontent) assert ( generatedcontent.getvalue().decode(propfile.encoding) == expected_content + "\n" ) def test_parse(self): """ Tests converting to a string and parsing the resulting string. In case of an XWiki Full Page new units are ignored unless they are using 'content' or 'title' ids. """ store = self.StoreClass() unit1 = store.addsourceunit("Test String") unit1.target = "Test String" unit2 = store.addsourceunit("Test String 2") unit2.target = "Test String 2" newstore = self.reparse(store) assert 0 == len(newstore.units) unit3 = properties.xwikiunit("Some content") unit3.name = "content" unit3.target = "Some content" store.addunit(unit3) unit4 = properties.xwikiunit("A title") unit4.name = "title" unit4.target = "Specific title" store.addunit(unit4) store.makeindex() newstore = self.reparse(store) assert 2 == len(newstore.units) assert newstore.units[0]._get_source_unit().name == store.units[2].name assert newstore.units[0]._get_source_unit().source == store.units[2].target assert newstore.units[1]._get_source_unit().name == store.units[3].name assert newstore.units[1]._get_source_unit().source == store.units[3].target def test_files(self): """ Tests saving to and loading from files In case of an XWiki Full Page new units are ignored. 
""" store = self.StoreClass() unit1 = store.addsourceunit("Test String") unit1.target = "Test String" unit2 = store.addsourceunit("Test String 2") unit2.target = "Test String 2" store.savefile(self.filename) newstore = self.StoreClass.parsefile(self.filename) assert 0 == len(newstore.units) unit3 = properties.xwikiunit("Some content") unit3.name = "content" unit3.target = "Some content" store.addunit(unit3) unit4 = properties.xwikiunit("A title") unit4.name = "title" unit4.target = "Specific title" store.addunit(unit4) store.makeindex() store.savefile(self.filename) newstore = self.StoreClass.parsefile(self.filename) assert 2 == len(newstore.units) assert newstore.units[0]._get_source_unit().name == store.units[2].name assert newstore.units[0]._get_source_unit().source == store.units[2].target assert newstore.units[1]._get_source_unit().name == store.units[3].name assert newstore.units[1]._get_source_unit().source == store.units[3].target def test_save(self): """ Tests that we can save directly back to the original file. In case of an XWiki Full Page new units are ignored. 
""" store = self.StoreClass() unit1 = store.addsourceunit("Test String") unit1.target = "Test String" unit2 = store.addsourceunit("Test String 2") unit2.target = "Test String 2" store.savefile(self.filename) store.save() newstore = self.StoreClass.parsefile(self.filename) assert 0 == len(newstore.units) unit3 = properties.xwikiunit("Some content") unit3.name = "content" unit3.target = "Some content" store.addunit(unit3) unit4 = properties.xwikiunit("A title") unit4.name = "title" unit4.target = "Specific title" store.addunit(unit4) store.makeindex() store.savefile(self.filename) store.save() newstore = self.StoreClass.parsefile(self.filename) assert 2 == len(newstore.units) assert newstore.units[0]._get_source_unit().name == store.units[2].name assert newstore.units[0]._get_source_unit().source == store.units[2].target assert newstore.units[1]._get_source_unit().name == store.units[3].name assert newstore.units[1]._get_source_unit().source == store.units[3].target def test_cleaning_attributes(self): """ Ensure that the XML is correctly formatted during serialization: it should not contain objects or attachments tags, and translation should be set to 1. """ ## Real XWiki files are containing multiple attributes on xwikidoc tag: we're not testing it there ## because ElementTree changed its implementation between Python 3.7 and 3.8 which changed the order of output of the attributes ## it makes it more difficult to assert it on multiple versions of Python. 
propsource = ( properties.XWikiPageProperties.XML_HEADER + """<xwikidoc reference="XWiki.AdminTranslations"> <web>XWiki</web> <name>AdminTranslations</name> <language/> <defaultLanguage>en</defaultLanguage> <translation>0</translation> <creator>xwiki:XWiki.Admin</creator> <parent>XWiki.WebHome</parent> <author>xwiki:XWiki.Admin</author> <contentAuthor>xwiki:XWiki.Admin</contentAuthor> <version>1.1</version> <title>Some page title</title> <comment/> <minorEdit>false</minorEdit> <syntaxId>plain/1.0</syntaxId> <hidden>true</hidden> <content>A Lorem Ipsum or whatever might be contained there. == A wiki title == Some other stuff. </content> <object> <name>XWiki.AdminTranslations</name> <number>0</number> <className>XWiki.TranslationDocumentClass</className> <guid>554b2ee4-98dc-48ef-b436-ef0cf7d38c4f</guid> <class> <name>XWiki.TranslationDocumentClass</name> <customClass/> <customMapping/> <defaultViewSheet/> <defaultEditSheet/> <defaultWeb/> <nameField/> <validationScript/> <scope> <cache>0</cache> <disabled>0</disabled> <displayType>select</displayType> <freeText>forbidden</freeText> <multiSelect>0</multiSelect> <name>scope</name> <number>1</number> <prettyName>Scope</prettyName> <relationalStorage>0</relationalStorage> <separator> </separator> <separators>|, </separators> <size>1</size> <unmodifiable>0</unmodifiable> <values>GLOBAL|WIKI|USER|ON_DEMAND</values> <classType>com.xpn.xwiki.objects.classes.StaticListClass</classType> </scope> </class> <property> <scope>WIKI</scope> </property> </object> <attachment> <filename>XWikiLogo.png</filename> <mimetype>image/png</mimetype> <filesize>1390</filesize> <author>xwiki:XWiki.Admin</author> <version>1.1</version> <comment/> <content>something=toto</content> </attachment> </xwikidoc>""" ) propfile = self.propparse(propsource) assert len(propfile.units) == 2 propunit = propfile.units[0] assert propunit.name == "content" assert ( propunit.source == """A Lorem Ipsum or whatever might be contained there. 
== A wiki title == Some other stuff. """ ) assert not propunit.missing propunit.target = """Un Lorem Ipsum ou quoi que ce soit qui puisse être là. == Un titre de wiki == D'autres trucs. """ propunit = propfile.units[1] assert propunit.name == "title" assert propunit.source == "Some page title" assert not propunit.missing propunit.target = "Un titre de page" generatedcontent = BytesIO() propfile.settargetlanguage("fr") propfile.serialize(generatedcontent) expected_xml = ( properties.XWikiPageProperties.XML_HEADER + """<xwikidoc reference="XWiki.AdminTranslations" locale="fr"> <web>XWiki</web> <name>AdminTranslations</name> <language>fr</language> <defaultLanguage>en</defaultLanguage> <translation>1</translation> <creator>xwiki:XWiki.Admin</creator> <parent>XWiki.WebHome</parent> <author>xwiki:XWiki.Admin</author> <contentAuthor>xwiki:XWiki.Admin</contentAuthor> <version>1.1</version> <title>Un titre de page</title> <comment/> <minorEdit>false</minorEdit> <syntaxId>plain/1.0</syntaxId> <hidden>true</hidden> <content>Un Lorem Ipsum ou quoi que ce soit qui puisse être là. == Un titre de wiki == D'autres trucs. </content> </xwikidoc>""" ) assert ( generatedcontent.getvalue().decode(propfile.encoding) == expected_xml + "\n" ) @mark.xfail(reason="removal not working in full page") def test_remove(self): super().test_remove()
gpl-2.0
pluyckx/kam
setup.py
1
2481
#!/usr/bin/python3
"""Installer for kam: installs the package, copies its configuration into
/etc/kam and registers the daemon with SysV init and/or systemd."""

import sys, os, shutil
from distutils.core import setup, Command
from distutils.command.install import install
import subprocess


class UpdateInit(install):
    """Custom ``install`` command.

    Backs up any existing configuration, copies the shipped config files
    into /etc/kam, runs the regular distutils install, then enables the
    service with whichever init system tools are available.
    """

    def run(self):
        # Preserve an existing configuration before overwriting it.
        if os.path.exists("/etc/kam/kam.conf"):
            shutil.copyfile("/etc/kam/kam.conf", "/etc/kam/kam.conf.back")

        if not os.path.exists("/etc/kam"):
            os.mkdir("/etc/kam")

        shutil.copyfile("kam.conf", "/etc/kam/kam.conf")
        shutil.copyfile("version", "/etc/kam/version")

        super().run()

        # Register the service with whichever init system is present.
        if has_bin("update-rc.d"):
            print("Update-rc.d")
            subprocess.call(["update-rc.d", "kam", "defaults"])
        if has_bin("systemctl"):
            print("Update systemd")
            subprocess.call(["systemctl", "enable", "kam.service"])


def check_version():
    """Exit with status 1 unless this script runs under Python 3."""
    if sys.version_info[0] != 3:
        print("This script needs python3!")
        sys.exit(1)


def check_psutil():
    """Exit with status 1 when required third-party packages are missing."""
    missing = []
    try:
        import psutil  # noqa: F401 -- availability check only
    except ImportError:
        # Was a bare `except:` -- only a missing module should count as
        # "not installed"; anything else (e.g. KeyboardInterrupt) must
        # propagate.
        missing.append("psutil")

    if missing:
        print("Please install the following packages for python3 first:")
        print(", ".join(missing))
        sys.exit(1)


def find_packages(relative_dir, packages, package_dir):
    """Recursively collect packages (directories containing __init__.py).

    Results are accumulated into the caller-supplied mutable *packages*
    list and *package_dir* mapping (dotted name -> relative path).
    """
    abs_dir = os.path.abspath(relative_dir)
    init_file = os.path.join(abs_dir, "__init__.py")
    if os.path.isfile(init_file):
        packages.append(relative_dir)
        package_dir[relative_dir.replace("/", ".")] = relative_dir

    for entry in os.listdir(abs_dir):
        if os.path.isdir(os.path.join(abs_dir, entry)):
            print("checking {0}".format(entry))
            find_packages(os.path.join(relative_dir, entry), packages, package_dir)


def has_bin(bin_name):
    """Return True when *bin_name* is an executable reachable on PATH.

    The previous implementation shelled out to ``locate``, which matches
    the name as a substring anywhere in any indexed path and depends on an
    up-to-date mlocate database -- both false positives and false negatives
    were possible. ``shutil.which`` performs the exact PATH lookup intended.
    """
    return shutil.which(bin_name) is not None


def main():
    check_version()
    check_psutil()

    packages = []
    package_dir = {}
    find_packages("kam", packages, package_dir)

    # Renamed from `install` to avoid shadowing the distutils `install`
    # class imported at the top of the file.
    install_cmd = UpdateInit

    setup(name="kam",
          version="1.0.0",
          description="Keep the machine alive on activity",
          url="http://github.com/pluyckx/kam",
          author="Philip Luyckx",
          author_email="philip.luyckx+kam@gmail.com",
          license="GPLV2",
          packages=packages,
          package_dir=package_dir,
          scripts=["kam/bin/kamd"],
          data_files=[
              ("/etc/kam", ["kam.conf", "version"]),
              ("/etc/init.d", ["kam/init/kam"]),
              ("/lib/systemd/system", ["kam/init/kam.service"])
          ],
          cmdclass={'install': install_cmd}
          )


if __name__ == "__main__":
    main()
    sys.exit(0)
gpl-2.0
lafranceinsoumise/api-django
agir/payments/actions/payments.py
1
5054
from datetime import datetime

from django.http.response import HttpResponseRedirect
from django.template import loader

from agir.people.models import Person
from agir.payments.models import Payment
from agir.payments.payment_modes import DEFAULT_MODE
from agir.payments.types import PAYMENT_TYPES


class PaymentException(Exception):
    """Raised when a payment status transition is not allowed."""

    pass


def create_payment(*, person=None, type, price, mode=DEFAULT_MODE, meta=None, **kwargs):
    """Create and persist a Payment for a person with the given type and price.

    When *person* is provided, their identity and location fields (and contact
    phone) are used as defaults for the corresponding payment fields; any
    explicit keyword argument takes precedence.

    :param person: person that is paying, must have all necessary fields (name and location)
    :param type: type of payment
    :param price: price as a decimal
    :param mode: the mode of payment, as found in module agir.payments.payment_modes
    :param meta: an arbitrary bundle of data that will be sent to the payment provider
    :return: the newly created :class:`Payment` instance (NOT an HTTP
        response -- use :func:`redirect_to_payment` to build one)
    """
    if meta is None:
        meta = {}

    # Person fields copied onto the payment as defaults when available.
    person_fields = [
        "first_name",
        "last_name",
        "email",
        "location_address1",
        "location_address2",
        "location_zip",
        "location_state",
        "location_city",
        "location_country",
    ]

    if person is not None:
        for f in person_fields:
            kwargs.setdefault(f, getattr(person, f))
        kwargs.setdefault("phone_number", person.contact_phone)

    return Payment.objects.create(
        person=person, type=type, mode=mode, price=price, meta=meta, **kwargs
    )


def change_payment_status(payment, status):
    """Dispatch to the transition function matching the target *status*.

    :raises ValueError: when *status* is not a reachable target status
    :raises PaymentException: when the transition itself is not allowed
    """
    if status == Payment.STATUS_REFUND:
        return refund_payment(payment)
    if status == Payment.STATUS_COMPLETED:
        return complete_payment(payment)
    if status == Payment.STATUS_REFUSED:
        return refuse_payment(payment)
    if status == Payment.STATUS_CANCELED:
        return cancel_payment(payment)

    raise ValueError("Ce statut n'existe pas ou n'est pas disponible.")


def complete_payment(payment):
    """Mark *payment* as completed; forbidden once canceled or refunded."""
    if payment.status == Payment.STATUS_CANCELED:
        raise PaymentException("Le paiement a déjà été annulé.")
    if payment.status == Payment.STATUS_REFUND:
        raise PaymentException("Le paiement a déjà été remboursé.")

    payment.status = Payment.STATUS_COMPLETED
    payment.save(update_fields=["status"])


def refuse_payment(payment):
    """Mark *payment* as refused; forbidden once canceled."""
    if payment.status == Payment.STATUS_CANCELED:
        raise PaymentException("Le paiement a déjà été annulé.")

    payment.status = Payment.STATUS_REFUSED
    payment.save(update_fields=["status"])


def cancel_payment(payment):
    """Mark *payment* as canceled; forbidden once completed."""
    if payment.status == Payment.STATUS_COMPLETED:
        raise PaymentException("Le paiement a déjà été confirmé.")

    payment.status = Payment.STATUS_CANCELED
    payment.save()


def refund_payment(payment):
    """Mark *payment* as refunded; only completed (or already refunded)
    payments may be refunded."""
    if payment.status not in (Payment.STATUS_COMPLETED, Payment.STATUS_REFUND):
        raise PaymentException("Impossible de rembourser un paiement non confirmé.")

    payment.status = Payment.STATUS_REFUND
    payment.save()


def redirect_to_payment(payment):
    """Return an HTTP redirect towards the payment's provider URL."""
    return HttpResponseRedirect(payment.get_payment_url())


def notify_status_change(payment):
    """Invoke the status listener registered for this payment type, if any."""
    # call the registered listener for this event type if there is one to notify it of the changes in status
    if payment.type in PAYMENT_TYPES and PAYMENT_TYPES[payment.type].status_listener:
        PAYMENT_TYPES[payment.type].status_listener(payment)


def default_description_context_generator(payment):
    """Default template context for rendering a payment description."""
    payment_type = PAYMENT_TYPES[payment.type]
    return {"payment": payment, "payment_type": payment_type}


def description_for_payment(payment):
    """Render the HTML description for *payment*.

    Uses the payment type's custom template / context generator when
    registered, falling back to the defaults otherwise.
    """
    if payment.type in PAYMENT_TYPES:
        payment_type = PAYMENT_TYPES[payment.type]
        template_name = (
            payment_type.description_template or "payments/default_description.html"
        )
        context_generator = (
            payment_type.description_context_generator
            or default_description_context_generator
        )
    else:
        template_name = "payments/default_description.html"
        context_generator = default_description_context_generator

    return loader.render_to_string(template_name, context_generator(payment))


def find_or_create_person_from_payment(payment):
    """Attach a Person to *payment*, creating one from payment.meta if needed.

    Does nothing when the payment already has a person or no email. When an
    existing person is found by email, optionally flags them as subscribed
    (per ``meta["subscribed"]``); otherwise a new non-insoumise person is
    created from the meta fields that match Person model fields.
    """
    if payment.person is None and payment.email is not None:
        try:
            payment.person = Person.objects.get_by_natural_key(payment.email)
            if payment.meta.get("subscribed"):
                payment.person.subscribed = True
                payment.person.save()
        except Person.DoesNotExist:
            # Keep only meta entries that correspond to actual Person fields.
            person_fields = [f.name for f in Person._meta.get_fields()]
            person_meta = {k: v for k, v in payment.meta.items() if k in person_fields}

            # Dates arrive serialized as dd/mm/YYYY strings in meta.
            if "date_of_birth" in person_meta:
                person_meta["date_of_birth"] = datetime.strptime(
                    person_meta["date_of_birth"], "%d/%m/%Y"
                ).date()

            payment.person = Person.objects.create_person(
                email=payment.email, is_insoumise=False, **person_meta
            )

        payment.save()
agpl-3.0
Johnzero/erp
openerp/addons/fg_schedule/fg_schedule.py
1
10893
# -*- encoding: utf-8 -*-
# OpenERP (6.x-era, Python 2) module: scheduling of product/material
# preparation tasks for projects.
# NOTE(review): `pooler` and `base64` are imported but not used in this
# file -- presumably leftovers; confirm before removing.
import pooler, time, base64
from osv import fields, osv

# Priority choices (labels are Chinese UI text: highest .. lowest).
AVAILABLE_PRIORITIES = [
    ('1', '最高'),
    ('2', '高'),
    ('3', '中'),
    ('4', '低'),
    ('5', '最低'),
]


class fg_jobcontent(osv.osv):
    # Work-item progress sheet: one record per job item, with executor,
    # order/finish dates and a draft/processing/done state.
    _name = "fg_jobcontent"
    _description = "工作项目进度表"
    _columns = {
        'name': fields.char('项目名称', size=128, select=True, required=True,),
        "executor": fields.many2one('res.users', '执行人', required=True, select=True,),
        'charge': fields.char('下单人', size=128,),
        "date_start": fields.date("下单时间", required=False,),
        "date_end": fields.date("实际完成时间",),
        'note': fields.text('备注', size=512),
        "accept": fields.char('对接人', size=128,),
        "explain": fields.text('说明(要求)',),
        "rate": fields.text('工作进度',),
        "end_time": fields.date('要求完成时间',),
        'jobstate': fields.selection([('draft', '未开始'), ('processing', '进行中'), ('done', '已完成')], '状态',),
    }
    _defaults = {
        # Default order date = today; default executor = current user.
        'date_start': fields.date.context_today,
        'executor': lambda obj, cr, uid, context: uid,
        'jobstate': 'draft',
    }


class task(osv.osv):
    # One advancement task within a schedule project. The state is kept in
    # two parallel fields: `state` (editable) and `stated` (a readonly
    # mirror read by the stage-cycling button).
    _name = "fg_project.task"
    _description = "产品物料推进任务"

    def _get_img(self, cr, uid, ids, name, arg, context=None):
        # Function field: look up the executor's employee photo through the
        # resource record linked to the executor's user id (parameterized
        # SQL, two lookups: resource_resource -> hr_employee).
        # NOTE(review): only a single record is handled -- the result dict
        # is keyed on ids[0]; confirm callers never pass multiple ids.
        res = {}
        image = None
        eid = self.read(cr, uid, ids, ['executor'])
        cr.execute('SELECT id FROM resource_resource WHERE user_id = %s', (eid[0]["executor"][0],))
        reid = cr.fetchone()
        obj = self.pool.get("resource.resource")  # NOTE(review): unused local
        objhr = self.pool.get("hr.employee")  # NOTE(review): unused local
        if reid:
            cr.execute('SELECT photo FROM hr_employee WHERE resource_id = %s', (reid[0],))
            image = cr.fetchone()
            if image: image = image[0]
        res[ids[0]] = image
        return res

    _columns = {
        'name': fields.char('任务', select=True, required=True, size=128),
        "project": fields.many2one('fg_schedule.project', '项目', select=True, required=True,),
        "executor": fields.many2one('res.users', '执行人', required=True, select=True),
        "executor_img": fields.function(_get_img, method=True, string='头像', type='binary', store=True,),
        "order": fields.char('下单人', size=128, select=True),
        "order_time": fields.date('下单时间',),
        'detil': fields.text('工作摘要', size=512,),
        "need_endtime": fields.date('截止时间'),
        "end_time": fields.date('完成时间', readonly=True,),
        "accept": fields.char('对接人', size=128,),
        'state': fields.selection([('draft', '未开始'), ('processing', '执行中'), ('cancelled', '取消(删除)'), ('done', '完成')], '推进情况', required=True),
        # Readonly mirror of `state`; see change_stage below.
        'stated': fields.selection([('draft', '未开始'), ('processing', '执行中'), ('cancelled', '取消(删除)'), ('done', '完成')], '推进情况', required=True, readonly=True),
        "explain": fields.text('项目说明(要求)',),
        "rate": fields.float('项目进度',),
        "note": fields.text('备注',),
        'img': fields.binary("效果展示", readonly=True,),
        # ------------------------------------------------------------------
        # One2many checklists: one relation per standard preparation item.
        'colour': fields.one2many('product.colour', 'colour_schedule', '产品颜色'),
        'barcode': fields.one2many('bar.code', 'barcode', '条形码申报'),
        'productbook': fields.one2many('product.book', 'book_schedule', '产品说明书'),
        'colorboard': fields.one2many('color.board', 'colorboard', '色板确认'),
        'accessorypurchaser': fields.one2many('accessory.purchaser', 'accessorypurchaser', '辅料采购'),
        'screenmaking': fields.one2many('screen.making', 'screenmaking', '网版制作'),
        'productsample': fields.one2many('product.sample', 'productsample', '产品打样'),
        'productpack': fields.one2many('product.pack', 'productpack', '产品包装'),
        'productshoot': fields.one2many('product.shoot', 'productshoot', '产品拍摄及修图'),
        'producttag': fields.one2many('product.tag', 'producttag', '吊牌,插卡,标签'),
        'productopp': fields.one2many('product.opp', 'productopp', 'OPP袋,低压袋'),
        'productcontainer': fields.one2many('product.container', 'productcontainer', '产品外箱'),
        'productposter': fields.one2many('product.poster', 'productposter', '海报'),
        'productelse': fields.one2many('product.else', 'productelse', '其他'),
    }
    _sql_constraints = [
        ('name', 'unique (name)', u'该任务已存在 !'),
    ]
    _defaults = {
        'need_endtime': fields.date.context_today,
        'order_time': fields.date.context_today,
        'state': 'draft',
        'stated': 'draft',
        'rate': 0.00,
        'executor': lambda obj, cr, uid, context: uid,
    }

    def change_stage(self, cr, uid, ids, *args):
        # Cycle the task state: draft -> processing -> done -> draft,
        # keeping `state` and `stated` in sync and stamping/clearing the
        # completion time and progress rate.
        state = self.read(cr, uid, ids, ["stated"], context=None)
        if not state: self.write(cr, uid, ids, {'state': 'draft', 'stated': 'draft', 'end_time': False, 'rate': 0})
        # NOTE(review): when read() returns an empty list the line above
        # runs but the next line still indexes state[0] and would raise
        # IndexError -- confirm whether the empty case can occur.
        if state[0]["stated"] == 'draft': self.write(cr, uid, ids, {'state': 'processing', 'stated': 'processing', 'end_time': False, 'rate': 0})
        elif state[0]["stated"] == 'processing': self.write(cr, uid, ids, {'state': 'done', 'stated': 'done', 'end_time': time.strftime("%Y-%m-%d %H:%M:%S"), 'rate': 100})
        elif state[0]["stated"] == 'done': self.write(cr, uid, ids, {'state': 'draft', 'stated': 'draft', 'end_time': False, 'rate': 0})
        return True

    def case_draft(self, cr, uid, ids, *args):
        # Reset to draft: clear completion time and progress.
        self.write(cr, uid, ids, {'state': 'draft', 'stated': 'draft', 'end_time': False, 'rate': 0})
        return True

    def case_cancelled(self, cr, uid, ids, *args):
        # "Cancel" actually deletes the records outright.
        self.unlink(cr, uid, ids)
        return True

    def case_processing(self, cr, uid, ids, *args):
        self.write(cr, uid, ids, {'state': 'processing', 'stated': 'processing', 'end_time': False, 'rate': 0})
        return True

    def case_done(self, cr, uid, ids, *args):
        # Mark done: stamp the completion time and force progress to 100%.
        self.write(cr, uid, ids, {'state': 'done', 'stated': 'done', 'end_time': time.strftime("%Y-%m-%d %H:%M:%S"), 'rate': 100})
        return True

    def button(self, cr, uid, ids, *args):
        # No-op button handler (placeholder).
        return True

task()


class project(osv.osv):
    # A schedule project; creating one pre-populates a fixed checklist of
    # fourteen standard tasks (colors, barcode, manual, ... , other).
    _name = "fg_schedule.project"
    _description = "配置项目"

    def create(self, cr, uid, vals, context={}):
        # NOTE(review): mutable default `context={}` is a classic Python
        # pitfall (shared between calls) -- should be `context=None`.
        result = super(project, self).create(cr, uid, vals, context=context)
        print result, '------------'  # debug output (Python 2 print statement)
        # Pre-create one fg_project.task per standard checklist item,
        # all starting as draft and assigned to the current user.
        obj = self.pool.get('fg_project.task')
        obj.create(cr, uid, {'name': '产品颜色', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '条形码申报', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '产品说明书', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '色板确认', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '辅料采购', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '网版制作', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '产品打样', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '产品包装', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '产品拍摄及修图', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '吊牌,插卡,标签', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': 'OPP袋,低压袋', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '产品外箱', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '海报', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        obj.create(cr, uid, {'name': '其他', 'project': result, 'executor': uid, 'state': 'draft', 'stated': 'draft', 'need_endtime': None}, context=context)
        return result

    _columns = {
        # Most fields become readonly once the project leaves draft state.
        'name': fields.char('产品名称', size=128, select=True, required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'to': fields.char('研发部对接人', size=128, select=True, readonly=True, states={'draft': [('readonly', False)]}),
        'charge': fields.char('项目负责人', size=128, readonly=True, states={'draft': [('readonly', False)]}),
        "date_start": fields.date("开始时间", required=True, readonly=True, states={'draft': [('readonly', False)]}),
        "date_end": fields.date("完成时间", readonly=True, states={'draft': [('readonly', False)]}),
        'note': fields.char('说明', readonly=True, states={'draft': [('readonly', False)]}, size=512),
        'img': fields.binary("产品图片", readonly=True, states={'draft': [('readonly', False)]}),
        'state': fields.selection([('draft', '开启'), ('done', '结束')], '项目状态',),
    }
    _defaults = {
        'date_start': fields.date.context_today,
        'state': lambda *a: 'draft',
    }
    _order = "date_start desc"
    _sql_constraints = [
        ('name', 'unique (name)', u'产品名称已存在 !'),
    ]

    def case_done(self, cr, uid, ids, *args):
        # Close the project, stamping the completion time.
        self.write(cr, uid, ids, {'state': 'done', 'date_end': time.strftime("%Y-%m-%d %H:%M:%S")})
        return True

project()


class conf_task(osv.osv):
    # Configurable task-name catalogue with a priority selection.
    _name = "conf_task"
    _description = "配置任务名称"
    _columns = {
        'name': fields.char('任务', size=128, select=True,),
        'priority': fields.selection(AVAILABLE_PRIORITIES, 'Priority', select=True),
    }

conf_task()
agpl-3.0
hiranya911/rest-coder
test/test_api.py
1
11136
#!/usr/bin/python import os import unittest import sys sys.path.append('../python-lib') from api import parse, APIDescriptionException class TestAPIDescriptionParser(unittest.TestCase): def load_api_description(self, name): path = os.path.join('../samples', name) return parse(path) def test_simple1(self): """ Test for basic attributes - mainly name, base and resources """ api = self.load_api_description('simple1.json') self.assertEqual(api.name, 'Starbucks') self.assertEqual(len(api.resources), 1) resource = api.resources[0] self.assertEqual(resource.name, 'AllOrders') self.assertEqual(resource.path, '/') self.assertEqual(len(resource.operations), 1) operation = resource.operations[0] self.assertEqual(operation.method, 'GET') self.assertIsNone(operation.input) output = operation.output self.assertEqual(output.status, 200) self.assertEqual(output.type.type.get_reference_name(), 'list(string)') self.assertEqual(len(output.contentType), 1) self.assertEqual(output.contentType[0], 'json') self.assertEqual(len(api.base), 1) self.assertEqual(api.base[0], 'http://test.com/starbucks') def test_simple2(self): """ Test for multiple operations in the same resource and named data type definitions. 
""" api = self.load_api_description('simple2.json') self.assertEqual(api.name, 'Starbucks') self.assertEqual(len(api.resources), 1) resource = api.resources[0] self.assertEqual(resource.name, 'AllOrders') self.assertEqual(resource.path, '/') self.assertEqual(len(resource.operations), 2) operation = resource.operations[0] self.assertEqual(operation.method, 'GET') self.assertIsNone(operation.input) output = operation.output self.assertEqual(output.status, 200) self.assertEqual(output.type.type.get_reference_name(), 'list(Order)') self.assertEqual(len(output.contentType), 1) self.assertEqual(output.contentType[0], 'json') operation = resource.operations[1] self.assertEqual(operation.method, 'POST') input = operation.input self.assertEqual(input.type.type.get_reference_name(), 'OrderRequest') self.assertEqual(input.contentType[0], 'json') output = operation.output self.assertEqual(output.status, 201) self.assertEqual(output.type.type.get_reference_name(), 'Order') self.assertEqual(len(output.contentType), 1) self.assertEqual(output.contentType[0], 'json') self.assertEqual(len(api.base), 1) self.assertEqual(api.base[0], 'http://test.com/starbucks') self.assertEqual(len(api.data_types), 2) self.assertEqual(len(api.data_types[0].fields), 5) self.assertEqual(len(api.data_types[1].fields), 2) self.assertFalse(api.data_types[1].fields[0].optional) self.assertTrue(api.data_types[1].fields[1].optional) def test_simple3(self): """ Test for multiple resources, multiple base URLs, detailed input/output definitions and errors. 
""" api = self.load_api_description('simple3.json') self.assertEqual(api.name, 'Starbucks') self.assertEqual(len(api.resources), 2) resource = api.resources[1] self.assertEqual(resource.name, 'AllOrders') self.assertEqual(resource.path, '/') self.assertEqual(len(resource.operations), 2) operation = resource.operations[0] self.assertEqual(operation.method, 'GET') self.assertIsNone(operation.input) output = operation.output self.assertEqual(output.status, 200) self.assertEqual(output.type.type.get_reference_name(), 'list(Order)') self.assertEqual(len(output.contentType), 1) self.assertEqual(output.contentType[0], 'json') operation = resource.operations[1] self.assertEqual(operation.method, 'POST') input = operation.input self.assertEqual(input.type.type.get_reference_name(), 'OrderRequest') self.assertEqual(input.contentType[0], 'json') output = operation.output self.assertEqual(output.status, 201) self.assertEqual(output.type.type.get_reference_name(), 'Order') self.assertEqual(len(output.contentType), 1) self.assertEqual(output.contentType[0], 'json') self.assertEqual(len(api.base), 2) self.assertEqual(api.base[0], 'http://test.com/starbucks') self.assertEqual(api.base[1], 'https://test.com/starbucks') self.assertEqual(len(api.data_types), 4) self.assertEqual(len(api.data_types[0].fields), 5) self.assertEqual(len(api.data_types[1].fields), 2) self.assertFalse(api.data_types[1].fields[0].optional) self.assertTrue(api.data_types[1].fields[1].optional) operation = api.resources[0].operations[0] self.assertEqual(len(operation.errors), 2) def test_simple4(self): """ Test for named input binding definitions and binding references in operation input definitions. Also checks for header definitions in the output section. 
""" api = self.load_api_description('simple4.json') self.assertEqual(api.name, 'Starbucks') self.assertEqual(len(api.resources), 2) resource = api.resources[1] self.assertEqual(resource.name, 'AllOrders') self.assertEqual(resource.path, '/') self.assertEqual(len(resource.operations), 2) operation = resource.operations[0] self.assertEqual(operation.method, 'GET') self.assertIsNone(operation.input) output = operation.output self.assertEqual(output.status, 200) self.assertEqual(output.type.type.get_reference_name(), 'list(Order)') self.assertEqual(len(output.contentType), 1) self.assertEqual(output.contentType[0], 'json') operation = resource.operations[1] self.assertEqual(operation.method, 'POST') input = operation.input self.assertEqual(input.type.type.get_reference_name(), 'OrderRequest') self.assertEqual(input.contentType[0], 'json') output = operation.output self.assertEqual(output.status, 201) self.assertEqual(output.type.type.get_reference_name(), 'Order') self.assertEqual(len(output.contentType), 1) self.assertEqual(output.contentType[0], 'json') self.assertEqual(len(output.headers), 1) header = output.headers[0] self.assertEqual(header.name, 'Location') self.assertEqual(header.type.type.get_reference_name(), 'href') self.assertEqual(header.type.ref, 'Order') self.assertEqual(len(api.base), 1) self.assertEqual(api.base[0], 'http://test.com/starbucks') self.assertEqual(len(api.data_types), 2) self.assertEqual(len(api.data_types[0].fields), 5) self.assertEqual(len(api.data_types[1].fields), 2) self.assertFalse(api.data_types[1].fields[0].optional) self.assertTrue(api.data_types[1].fields[1].optional) resource = api.resources[0] self.assertEqual(len(resource.input_bindings), 1) self.assertEqual(resource.input_bindings[0].id, 'orderIdBinding') self.assertEqual(len(resource.operations), 2) self.assertEqual(resource.operations[0].input.params[0].binding, 'orderIdBinding') self.assertEqual(resource.operations[1].input.params[0].binding, 'orderIdBinding') def 
test_simple5(self): """ Test for non functional attributes - license, community, ownership, sla etc """ api = self.load_api_description('simple5.json') self.assertEqual(api.name, 'Starbucks') self.assertEqual(api.license, 'apache2') self.assertEqual(api.community, 'http://community.test.com') self.assertEqual(len(api.ownership), 2) self.assertEqual(api.ownership[0].name, 'Peter Parker') self.assertEqual(api.ownership[1].name, 'Bruce Wayne') self.assertEqual(api.ownership[1].email, 'bat@jleague.com') self.assertEqual(api.ownership[1].ownerType, 'tech') self.assertEqual(len(api.sla), 3) sla = api.sla[2] self.assertEqual(sla.name, 'GOLD') self.assertEqual(sla.availability, 99.9) self.assertEqual(sla.rateLimit, 1000) self.assertEqual(sla.timeUnit, 'second') cost = sla.costModel self.assertEqual(cost.currency, 'USD') self.assertEqual(cost.unitPrice, 0.1) self.assertEqual(cost.requestsPerUnit, 1000) sla = api.sla[0] self.assertEqual(sla.name, 'FREE') self.assertIsNone(sla.costModel) def test_simple6(self): """ Test for anonymous bindings, anonymous types and nested type definitions. 
""" api = self.load_api_description('simple6.json') self.assertEqual(api.name, 'Starbucks') self.assertEqual(len(api.resources), 2) resource = api.resources[1] self.assertEqual(resource.name, 'AllOrders') self.assertEqual(resource.path, '/') self.assertEqual(len(resource.operations), 2) operation = resource.operations[0] self.assertEqual(operation.method, 'GET') self.assertIsNone(operation.input) output = operation.output self.assertEqual(output.status, 200) self.assertEqual(output.type.type.get_reference_name(), 'list(Order)') self.assertEqual(len(output.contentType), 1) self.assertEqual(output.contentType[0], 'json') operation = resource.operations[1] self.assertEqual(operation.method, 'POST') input = operation.input self.assertEqual(len(input.type.type.fields), 2) self.assertEqual(input.contentType[0], 'json') output = operation.output self.assertEqual(output.status, 201) self.assertEqual(output.type.type.get_reference_name(), 'Order') self.assertEqual(len(output.contentType), 1) self.assertEqual(output.contentType[0], 'json') self.assertEqual(len(api.base), 1) self.assertEqual(api.base[0], 'http://test.com/starbucks') self.assertEqual(len(api.data_types), 2) self.assertEqual(len(api.data_types[0].fields), 5) self.assertEqual(len(api.data_types[1].fields), 2) nested = api.data_types[1] field = nested.fields[1] self.assertEqual(len(field.type.type.fields), 1) resource = api.resources[0] self.assertEqual(len(resource.input_bindings), 1) self.assertEqual(resource.input_bindings[0].id, 'orderIdBinding') self.assertEqual(len(resource.operations), 2) binding = resource.operations[0].input.params[0].binding self.assertEqual(binding.mode, 'url') self.assertEqual(binding.name, 'orderId') self.assertEqual(binding.type.type.get_reference_name(), 'string') self.assertEqual(resource.operations[1].input.params[0].binding, 'orderIdBinding') def test_error1(self): """ Test for undefined type references """ try: api = self.load_api_description('error1.json') self.fail('No error 
thrown for undefined type') except APIDescriptionException: pass def test_error2(self): """ Test for undefined input bindings """ try: api = self.load_api_description('error2.json') self.fail('No error thrown for undefined binding') except APIDescriptionException: pass def test_error3(self): """ Test for undefined input segment """ try: api = self.load_api_description('error3.json') self.fail('No error thrown for undefined input segment') except APIDescriptionException: pass if __name__ == '__main__': unittest.main()
apache-2.0
KitKatXperience/platform_external_chromium_org
tools/deep_memory_profiler/lib/dump.py
24
15360
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import copy import datetime import logging import os import re import time from lib.bucket import BUCKET_ID from lib.exceptions import EmptyDumpException, InvalidDumpException from lib.exceptions import ObsoleteDumpVersionException, ParsingException from lib.pageframe import PageFrame from lib.range_dict import ExclusiveRangeDict from lib.symbol import proc_maps LOGGER = logging.getLogger('dmprof') # Heap Profile Dump versions # DUMP_DEEP_[1-4] are obsolete. # DUMP_DEEP_2+ distinct mmap regions and malloc chunks. # DUMP_DEEP_3+ don't include allocation functions in their stack dumps. # DUMP_DEEP_4+ support comments with '#' and global stats "nonprofiled-*". # DUMP_DEEP_[1-2] should be processed by POLICY_DEEP_1. # DUMP_DEEP_[3-4] should be processed by POLICY_DEEP_2 or POLICY_DEEP_3. DUMP_DEEP_1 = 'DUMP_DEEP_1' DUMP_DEEP_2 = 'DUMP_DEEP_2' DUMP_DEEP_3 = 'DUMP_DEEP_3' DUMP_DEEP_4 = 'DUMP_DEEP_4' DUMP_DEEP_OBSOLETE = (DUMP_DEEP_1, DUMP_DEEP_2, DUMP_DEEP_3, DUMP_DEEP_4) # DUMP_DEEP_5 doesn't separate sections for malloc and mmap. # malloc and mmap are identified in bucket files. # DUMP_DEEP_5 should be processed by POLICY_DEEP_4. DUMP_DEEP_5 = 'DUMP_DEEP_5' # DUMP_DEEP_6 adds a mmap list to DUMP_DEEP_5. 
DUMP_DEEP_6 = 'DUMP_DEEP_6' class Dump(object): """Represents a heap profile dump.""" _PATH_PATTERN = re.compile(r'^(.*)\.([0-9]+)\.([0-9]+)\.heap$') _HOOK_PATTERN = re.compile( r'^ ([ \(])([a-f0-9]+)([ \)])-([ \(])([a-f0-9]+)([ \)])\s+' r'(hooked|unhooked)\s+(.+)$', re.IGNORECASE) _HOOKED_PATTERN = re.compile(r'(?P<TYPE>.+ )?(?P<COMMITTED>[0-9]+) / ' '(?P<RESERVED>[0-9]+) @ (?P<BUCKETID>[0-9]+)') _UNHOOKED_PATTERN = re.compile(r'(?P<TYPE>.+ )?(?P<COMMITTED>[0-9]+) / ' '(?P<RESERVED>[0-9]+)') _OLD_HOOKED_PATTERN = re.compile(r'(?P<TYPE>.+) @ (?P<BUCKETID>[0-9]+)') _OLD_UNHOOKED_PATTERN = re.compile(r'(?P<TYPE>.+) (?P<COMMITTED>[0-9]+)') _TIME_PATTERN_FORMAT = re.compile( r'^Time: ([0-9]+/[0-9]+/[0-9]+ [0-9]+:[0-9]+:[0-9]+)(\.[0-9]+)?') _TIME_PATTERN_SECONDS = re.compile(r'^Time: ([0-9]+)$') def __init__(self, path, modified_time): self._path = path matched = self._PATH_PATTERN.match(path) self._pid = int(matched.group(2)) self._count = int(matched.group(3)) self._time = modified_time self._map = {} self._procmaps = ExclusiveRangeDict(ProcMapsEntryAttribute) self._stacktrace_lines = [] self._global_stats = {} # used only in apply_policy self._run_id = '' self._pagesize = 4096 self._pageframe_length = 0 self._pageframe_encoding = '' self._has_pagecount = False self._version = '' self._lines = [] @property def path(self): return self._path @property def count(self): return self._count @property def time(self): return self._time @property def iter_map(self): for region in sorted(self._map.iteritems()): yield region[0], region[1] def iter_procmaps(self): for begin, end, attr in self._map.iter_range(): yield begin, end, attr @property def iter_stacktrace(self): for line in self._stacktrace_lines: yield line def global_stat(self, name): return self._global_stats[name] @property def run_id(self): return self._run_id @property def pagesize(self): return self._pagesize @property def pageframe_length(self): return self._pageframe_length @property def pageframe_encoding(self): 
return self._pageframe_encoding @property def has_pagecount(self): return self._has_pagecount @staticmethod def load(path, log_header='Loading a heap profile dump: '): """Loads a heap profile dump. Args: path: A file path string to load. log_header: A preceding string for log messages. Returns: A loaded Dump object. Raises: ParsingException for invalid heap profile dumps. """ dump = Dump(path, os.stat(path).st_mtime) with open(path, 'r') as f: dump.load_file(f, log_header) return dump def load_file(self, f, log_header): self._lines = [line for line in f if line and not line.startswith('#')] try: self._version, ln = self._parse_version() self._parse_meta_information() if self._version == DUMP_DEEP_6: self._parse_mmap_list() self._parse_global_stats() self._extract_stacktrace_lines(ln) except EmptyDumpException: LOGGER.info('%s%s ...ignored an empty dump.' % (log_header, self._path)) except ParsingException, e: LOGGER.error('%s%s ...error %s' % (log_header, self._path, e)) raise else: LOGGER.info('%s%s (version:%s)' % (log_header, self._path, self._version)) def _parse_version(self): """Parses a version string in self._lines. Returns: A pair of (a string representing a version of the stacktrace dump, and an integer indicating a line number next to the version string). Raises: ParsingException for invalid dump versions. """ version = '' # Skip until an identifiable line. headers = ('STACKTRACES:\n', 'MMAP_STACKTRACES:\n', 'heap profile: ') if not self._lines: raise EmptyDumpException('Empty heap dump file.') (ln, found) = skip_while( 0, len(self._lines), lambda n: not self._lines[n].startswith(headers)) if not found: raise InvalidDumpException('No version header.') # Identify a version. 
if self._lines[ln].startswith('heap profile: '): version = self._lines[ln][13:].strip() if version in (DUMP_DEEP_5, DUMP_DEEP_6): (ln, _) = skip_while( ln, len(self._lines), lambda n: self._lines[n] != 'STACKTRACES:\n') elif version in DUMP_DEEP_OBSOLETE: raise ObsoleteDumpVersionException(version) else: raise InvalidDumpException('Invalid version: %s' % version) elif self._lines[ln] == 'STACKTRACES:\n': raise ObsoleteDumpVersionException(DUMP_DEEP_1) elif self._lines[ln] == 'MMAP_STACKTRACES:\n': raise ObsoleteDumpVersionException(DUMP_DEEP_2) return (version, ln) def _parse_global_stats(self): """Parses lines in self._lines as global stats.""" (ln, _) = skip_while( 0, len(self._lines), lambda n: self._lines[n] != 'GLOBAL_STATS:\n') global_stat_names = [ 'total', 'absent', 'file-exec', 'file-nonexec', 'anonymous', 'stack', 'other', 'nonprofiled-absent', 'nonprofiled-anonymous', 'nonprofiled-file-exec', 'nonprofiled-file-nonexec', 'nonprofiled-stack', 'nonprofiled-other', 'profiled-mmap', 'profiled-malloc'] for prefix in global_stat_names: (ln, _) = skip_while( ln, len(self._lines), lambda n: self._lines[n].split()[0] != prefix) words = self._lines[ln].split() self._global_stats[prefix + '_virtual'] = int(words[-2]) self._global_stats[prefix + '_committed'] = int(words[-1]) def _parse_meta_information(self): """Parses lines in self._lines for meta information.""" (ln, found) = skip_while( 0, len(self._lines), lambda n: self._lines[n] != 'META:\n') if not found: return ln += 1 while True: if self._lines[ln].startswith('Time:'): matched_seconds = self._TIME_PATTERN_SECONDS.match(self._lines[ln]) matched_format = self._TIME_PATTERN_FORMAT.match(self._lines[ln]) if matched_format: self._time = time.mktime(datetime.datetime.strptime( matched_format.group(1), '%Y/%m/%d %H:%M:%S').timetuple()) if matched_format.group(2): self._time += float(matched_format.group(2)[1:]) / 1000.0 elif matched_seconds: self._time = float(matched_seconds.group(1)) elif 
self._lines[ln].startswith('Reason:'): pass # Nothing to do for 'Reason:' elif self._lines[ln].startswith('PageSize: '): self._pagesize = int(self._lines[ln][10:]) elif self._lines[ln].startswith('CommandLine:'): pass elif (self._lines[ln].startswith('PageFrame: ') or self._lines[ln].startswith('PFN: ')): if self._lines[ln].startswith('PageFrame: '): words = self._lines[ln][11:].split(',') else: words = self._lines[ln][5:].split(',') for word in words: if word == '24': self._pageframe_length = 24 elif word == 'Base64': self._pageframe_encoding = 'base64' elif word == 'PageCount': self._has_pagecount = True elif self._lines[ln].startswith('RunID: '): self._run_id = self._lines[ln][7:].strip() elif (self._lines[ln].startswith('MMAP_LIST:') or self._lines[ln].startswith('GLOBAL_STATS:')): # Skip until "MMAP_LIST:" or "GLOBAL_STATS" is found. break else: pass ln += 1 def _parse_mmap_list(self): """Parses lines in self._lines as a mmap list.""" (ln, found) = skip_while( 0, len(self._lines), lambda n: self._lines[n] != 'MMAP_LIST:\n') if not found: return {} ln += 1 self._map = {} current_vma = {} pageframe_list = [] while True: entry = proc_maps.ProcMaps.parse_line(self._lines[ln]) if entry: current_vma = {} for _, _, attr in self._procmaps.iter_range(entry.begin, entry.end): for key, value in entry.as_dict().iteritems(): attr[key] = value current_vma[key] = value ln += 1 continue if self._lines[ln].startswith(' PF: '): for pageframe in self._lines[ln][5:].split(): pageframe_list.append(PageFrame.parse(pageframe, self._pagesize)) ln += 1 continue matched = self._HOOK_PATTERN.match(self._lines[ln]) if not matched: break # 2: starting address # 5: end address # 7: hooked or unhooked # 8: additional information if matched.group(7) == 'hooked': submatched = self._HOOKED_PATTERN.match(matched.group(8)) if not submatched: submatched = self._OLD_HOOKED_PATTERN.match(matched.group(8)) elif matched.group(7) == 'unhooked': submatched = 
self._UNHOOKED_PATTERN.match(matched.group(8)) if not submatched: submatched = self._OLD_UNHOOKED_PATTERN.match(matched.group(8)) else: assert matched.group(7) in ['hooked', 'unhooked'] submatched_dict = submatched.groupdict() region_info = { 'vma': current_vma } if submatched_dict.get('TYPE'): region_info['type'] = submatched_dict['TYPE'].strip() if submatched_dict.get('COMMITTED'): region_info['committed'] = int(submatched_dict['COMMITTED']) if submatched_dict.get('RESERVED'): region_info['reserved'] = int(submatched_dict['RESERVED']) if submatched_dict.get('BUCKETID'): region_info['bucket_id'] = int(submatched_dict['BUCKETID']) if matched.group(1) == '(': start = current_vma['begin'] else: start = int(matched.group(2), 16) if matched.group(4) == '(': end = current_vma['end'] else: end = int(matched.group(5), 16) if pageframe_list and pageframe_list[0].start_truncated: pageframe_list[0].set_size( pageframe_list[0].size - start % self._pagesize) if pageframe_list and pageframe_list[-1].end_truncated: pageframe_list[-1].set_size( pageframe_list[-1].size - (self._pagesize - end % self._pagesize)) region_info['pageframe'] = pageframe_list pageframe_list = [] self._map[(start, end)] = (matched.group(7), region_info) ln += 1 def _extract_stacktrace_lines(self, line_number): """Extracts the position of stacktrace lines. Valid stacktrace lines are stored into self._stacktrace_lines. Args: line_number: A line number to start parsing in lines. Raises: ParsingException for invalid dump versions. 
""" if self._version in (DUMP_DEEP_5, DUMP_DEEP_6): (line_number, _) = skip_while( line_number, len(self._lines), lambda n: not self._lines[n].split()[0].isdigit()) stacktrace_start = line_number (line_number, _) = skip_while( line_number, len(self._lines), lambda n: self._check_stacktrace_line(self._lines[n])) self._stacktrace_lines = self._lines[stacktrace_start:line_number] elif self._version in DUMP_DEEP_OBSOLETE: raise ObsoleteDumpVersionException(self._version) else: raise InvalidDumpException('Invalid version: %s' % self._version) @staticmethod def _check_stacktrace_line(stacktrace_line): """Checks if a given stacktrace_line is valid as stacktrace. Args: stacktrace_line: A string to be checked. Returns: True if the given stacktrace_line is valid. """ words = stacktrace_line.split() if len(words) < BUCKET_ID + 1: return False if words[BUCKET_ID - 1] != '@': return False return True class DumpList(object): """Represents a sequence of heap profile dumps.""" def __init__(self, dump_list): self._dump_list = dump_list @staticmethod def load(path_list): LOGGER.info('Loading heap dump profiles.') dump_list = [] for path in path_list: dump_list.append(Dump.load(path, ' ')) return DumpList(dump_list) def __len__(self): return len(self._dump_list) def __iter__(self): for dump in self._dump_list: yield dump def __getitem__(self, index): return self._dump_list[index] class ProcMapsEntryAttribute(ExclusiveRangeDict.RangeAttribute): """Represents an entry of /proc/maps in range_dict.ExclusiveRangeDict.""" _DUMMY_ENTRY = proc_maps.ProcMapsEntry( 0, # begin 0, # end '-', # readable '-', # writable '-', # executable '-', # private 0, # offset '00', # major '00', # minor 0, # inode '' # name ) def __init__(self): super(ProcMapsEntryAttribute, self).__init__() self._entry = self._DUMMY_ENTRY.as_dict() def __str__(self): return str(self._entry) def __repr__(self): return 'ProcMapsEntryAttribute' + str(self._entry) def __getitem__(self, key): return self._entry[key] def 
__setitem__(self, key, value): if key not in self._entry: raise KeyError(key) self._entry[key] = value def copy(self): new_entry = ProcMapsEntryAttribute() for key, value in self._entry.iteritems(): new_entry[key] = copy.deepcopy(value) return new_entry def skip_while(index, max_index, skipping_condition): """Increments |index| until |skipping_condition|(|index|) is False. Returns: A pair of an integer indicating a line number after skipped, and a boolean value which is True if found a line which skipping_condition is False for. """ while skipping_condition(index): index += 1 if index >= max_index: return index, False return index, True
bsd-3-clause
iansealy/projecteuler
30.py
1
1040
#!/usr/bin/env python """This script solves the Project Euler problem "Digit fifth powers". The problem is: Find the sum of all the numbers that can be written as the sum of fifth powers of their digits. """ import argparse def main(args): """Digit fifth powers""" total_sum = 0 # Work out maximum number of digits max_per_digit = pow(9, args.power) max_digits = 1 while max_digits * max_per_digit > int('9' * max_digits): max_digits += 1 number = 2 max_number = pow(10, max_digits) while number < max_number: sum = 0 for digit in str(number): sum += pow(int(digit), args.power) if sum == number: total_sum += number number += 1 print(total_sum) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Digit fifth powers') parser.add_argument( 'power', metavar='POWER', type=int, default=5, nargs='?', help='The power to raise digits by') args = parser.parse_args() main(args)
gpl-3.0
spirosmastorakis/ns-3-dev-ndnSIM
src/mpi/bindings/modulegen__gcc_LP64.py
10
233800
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers import pybindgen.settings import warnings class ErrorHandler(pybindgen.settings.ErrorHandler): def handle_error(self, wrapper, exception, traceback_): warnings.warn("exception %r in wrapper %s" % (exception, wrapper)) return True pybindgen.settings.error_handler = ErrorHandler() import sys def module_init(): root_module = Module('ns.mpi', cpp_namespace='::ns3') return root_module def register_types(module): root_module = module.get_root() ## address.h (module 'network'): ns3::Address [class] module.add_class('Address', import_from_module='ns.network') ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration] module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network') ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class] module.add_class('AttributeConstructionList', import_from_module='ns.core') ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct] module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList']) ## buffer.h (module 'network'): ns3::Buffer [class] module.add_class('Buffer', import_from_module='ns.network') ## buffer.h (module 'network'): ns3::Buffer::Iterator [class] module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer']) ## packet.h (module 'network'): ns3::ByteTagIterator [class] module.add_class('ByteTagIterator', import_from_module='ns.network') ## packet.h (module 'network'): ns3::ByteTagIterator::Item [class] module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator']) ## byte-tag-list.h (module 'network'): ns3::ByteTagList [class] module.add_class('ByteTagList', import_from_module='ns.network') ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class] 
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList']) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct] module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator']) ## callback.h (module 'core'): ns3::CallbackBase [class] module.add_class('CallbackBase', import_from_module='ns.core') ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor> [struct] module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor']) ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker> [struct] module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker']) ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue> [struct] module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue']) ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase> [struct] module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase']) ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation> [struct] module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation']) ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::NixVector> [struct] module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector']) ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Packet> [struct] module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet']) ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor> [struct] module.add_class('DefaultDeleter', import_from_module='ns.core', 
template_parameters=['ns3::TraceSourceAccessor']) ## hash.h (module 'core'): ns3::Hasher [class] module.add_class('Hasher', import_from_module='ns.core') ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class] module.add_class('Ipv4Address', import_from_module='ns.network') ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class] root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address']) ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class] module.add_class('Ipv4Mask', import_from_module='ns.network') ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class] module.add_class('Ipv6Address', import_from_module='ns.network') ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class] root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address']) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class] module.add_class('Ipv6Prefix', import_from_module='ns.network') ## mac48-address.h (module 'network'): ns3::Mac48Address [class] module.add_class('Mac48Address', import_from_module='ns.network') ## mac48-address.h (module 'network'): ns3::Mac48Address [class] root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address']) ## mpi-interface.h (module 'mpi'): ns3::MpiInterface [class] module.add_class('MpiInterface') ## object-base.h (module 'core'): ns3::ObjectBase [class] module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core') ## object.h (module 'core'): ns3::ObjectDeleter [struct] module.add_class('ObjectDeleter', import_from_module='ns.core') ## packet-metadata.h (module 'network'): ns3::PacketMetadata [class] module.add_class('PacketMetadata', import_from_module='ns.network') ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct] module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata']) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::ItemType 
[enumeration] module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network') ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class] module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata']) ## packet.h (module 'network'): ns3::PacketTagIterator [class] module.add_class('PacketTagIterator', import_from_module='ns.network') ## packet.h (module 'network'): ns3::PacketTagIterator::Item [class] module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator']) ## packet-tag-list.h (module 'network'): ns3::PacketTagList [class] module.add_class('PacketTagList', import_from_module='ns.network') ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct] module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList']) ## parallel-communication-interface.h (module 'mpi'): ns3::ParallelCommunicationInterface [class] module.add_class('ParallelCommunicationInterface', allow_subclassing=True) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## tag.h (module 'network'): ns3::Tag [class] module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase']) ## tag-buffer.h (module 'network'): ns3::TagBuffer [class] module.add_class('TagBuffer', import_from_module='ns.network') ## nstime.h (module 'core'): ns3::TimeWithUnit [class] module.add_class('TimeWithUnit', 
import_from_module='ns.core') ## type-id.h (module 'core'): ns3::TypeId [class] module.add_class('TypeId', import_from_module='ns.core') ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration] module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core') ## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration] module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core') ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct] module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct] module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) ## empty.h (module 'core'): ns3::empty [class] module.add_class('empty', import_from_module='ns.core') ## int64x64-128.h (module 'core'): ns3::int64x64_t [class] module.add_class('int64x64_t', import_from_module='ns.core') ## int64x64-128.h (module 'core'): ns3::int64x64_t::impl_type [enumeration] module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core') ## chunk.h (module 'network'): ns3::Chunk [class] module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase']) ## header.h (module 'network'): ns3::Header [class] module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk']) ## object.h (module 'core'): ns3::Object [class] module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) ## object.h (module 'core'): ns3::Object::AggregateIterator [class] module.add_class('AggregateIterator', 
import_from_module='ns.core', outer_class=root_module['ns3::Object']) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], 
parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 
'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## nstime.h (module 'core'): ns3::Time [class] module.add_class('Time', import_from_module='ns.core') ## nstime.h (module 'core'): ns3::Time::Unit [enumeration] module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core') ## nstime.h (module 'core'): ns3::Time [class] root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t']) ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class] module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) ## trailer.h (module 'network'): ns3::Trailer [class] module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk']) ## attribute.h (module 'core'): ns3::AttributeAccessor [class] module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) ## attribute.h (module 'core'): ns3::AttributeChecker [class] module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) ## attribute.h (module 'core'): ns3::AttributeValue [class] module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) ## callback.h (module 'core'): 
ns3::CallbackChecker [class] module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## callback.h (module 'core'): ns3::CallbackImplBase [class] module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) ## callback.h (module 'core'): ns3::CallbackValue [class] module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class] module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor']) ## attribute.h (module 'core'): ns3::EmptyAttributeChecker [class] module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class] module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class] module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class] module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class] module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class] module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class] module.add_class('Ipv6AddressChecker', import_from_module='ns.network', 
parent=root_module['ns3::AttributeChecker']) ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class] module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class] module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class] module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class] module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class] module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## mpi-receiver.h (module 'mpi'): ns3::MpiReceiver [class] module.add_class('MpiReceiver', parent=root_module['ns3::Object']) ## nix-vector.h (module 'network'): ns3::NixVector [class] module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >']) ## packet.h (module 'network'): ns3::Packet [class] module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >']) ## nstime.h (module 'core'): ns3::TimeValue [class] module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## type-id.h (module 'core'): ns3::TypeIdChecker [class] module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## type-id.h (module 'core'): ns3::TypeIdValue [class] module.add_class('TypeIdValue', import_from_module='ns.core', 
parent=root_module['ns3::AttributeValue']) ## address.h (module 'network'): ns3::AddressChecker [class] module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## address.h (module 'network'): ns3::AddressValue [class] module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class] module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class] module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) ## Register a nested module for the namespace FatalImpl nested_module = module.add_cpp_namespace('FatalImpl') register_types_ns3_FatalImpl(nested_module) ## Register a nested module for the namespace Hash nested_module = module.add_cpp_namespace('Hash') register_types_ns3_Hash(nested_module) ## Register a nested module for the namespace TracedValueCallback nested_module = module.add_cpp_namespace('TracedValueCallback') register_types_ns3_TracedValueCallback(nested_module) def register_types_ns3_FatalImpl(module): root_module = module.get_root() def register_types_ns3_Hash(module): root_module = module.get_root() ## hash-function.h (module 'core'): ns3::Hash::Implementation [class] 
module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >']) typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t const )', u'ns3::Hash::Hash32Function_ptr') typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t const )*', u'ns3::Hash::Hash32Function_ptr*') typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t const )&', u'ns3::Hash::Hash32Function_ptr&') typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t const )', u'ns3::Hash::Hash64Function_ptr') typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t const )*', u'ns3::Hash::Hash64Function_ptr*') typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t const )&', u'ns3::Hash::Hash64Function_ptr&') ## Register a nested module for the namespace Function nested_module = module.add_cpp_namespace('Function') register_types_ns3_Hash_Function(nested_module) def register_types_ns3_Hash_Function(module): root_module = module.get_root() ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class] module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation']) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class] module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation']) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class] module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation']) ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class] module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation']) def register_types_ns3_TracedValueCallback(module): root_module = module.get_root() typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time )', 
u'ns3::TracedValueCallback::Time') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time )*', u'ns3::TracedValueCallback::Time*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time )&', u'ns3::TracedValueCallback::Time&') def register_methods(root_module): register_Ns3Address_methods(root_module, root_module['ns3::Address']) register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList']) register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item']) register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer']) register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator']) register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator']) register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item']) register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList']) register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator']) register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item']) register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase']) register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeAccessor >']) register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeChecker >']) register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeValue >']) register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, root_module['ns3::DefaultDeleter< ns3::CallbackImplBase >']) register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Hash::Implementation >']) register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, 
root_module['ns3::DefaultDeleter< ns3::NixVector >']) register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Packet >']) register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::TraceSourceAccessor >']) register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher']) register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address']) register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask']) register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address']) register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix']) register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address']) register_Ns3MpiInterface_methods(root_module, root_module['ns3::MpiInterface']) register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase']) register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter']) register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata']) register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item']) register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator']) register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator']) register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item']) register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList']) register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData']) register_Ns3ParallelCommunicationInterface_methods(root_module, root_module['ns3::ParallelCommunicationInterface']) register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) register_Ns3Tag_methods(root_module, 
root_module['ns3::Tag']) register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer']) register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit']) register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId']) register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation']) register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation']) register_Ns3Empty_methods(root_module, root_module['ns3::empty']) register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t']) register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk']) register_Ns3Header_methods(root_module, root_module['ns3::Header']) register_Ns3Object_methods(root_module, root_module['ns3::Object']) register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator']) register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) 
register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >']) register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >']) register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >']) register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) register_Ns3Time_methods(root_module, root_module['ns3::Time']) register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor']) register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer']) register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor']) register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker']) register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue']) register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker']) register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase']) register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue']) register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor']) register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker']) register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue']) 
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker']) register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue']) register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker']) register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue']) register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker']) register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue']) register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker']) register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue']) register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker']) register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue']) register_Ns3MpiReceiver_methods(root_module, root_module['ns3::MpiReceiver']) register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector']) register_Ns3Packet_methods(root_module, root_module['ns3::Packet']) register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue']) register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker']) register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue']) register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker']) register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue']) register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >']) register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Packet__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, 
root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return

# NOTE(review): this span is pybindgen auto-generated ns-3 binding-registration
# code.  The whitespace in this copy was mangled (newlines collapsed); it has
# been restored to one statement per line with the token stream unchanged.
# Do not hand-edit the registrations below -- regenerate them with the ns-3
# bindings scanner instead.

def register_Ns3Address_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::Address."""
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    cls.add_output_stream_operator()
    ## address.h (module 'network'): ns3::Address::Address() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [constructor]
    cls.add_constructor([param('ns3::Address const &', 'address')])
    ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
    cls.add_method('CheckCompatible', 'bool', [param('uint8_t', 'type'), param('uint8_t', 'len')], is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyAllFrom', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
    cls.add_method('CopyAllTo', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint8_t', 'len')], is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyFrom', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
    cls.add_method('CopyTo', 'uint32_t', [param('uint8_t *', 'buffer')], is_const=True)
    ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'buffer')])
    ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
    cls.add_method('GetLength', 'uint8_t', [], is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
    cls.add_method('IsInvalid', 'bool', [], is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
    cls.add_method('IsMatchingType', 'bool', [param('uint8_t', 'type')], is_const=True)
    ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
    cls.add_method('Register', 'uint8_t', [], is_static=True)
    ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'buffer')], is_const=True)
    return

def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::AttributeConstructionList."""
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<const ns3::AttributeChecker> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add', 'void', [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::CIterator ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin', 'ns3::AttributeConstructionList::CIterator', [], is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::CIterator ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End', 'ns3::AttributeConstructionList::CIterator', [], is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True)
    return

def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ struct ns3::AttributeConstructionList::Item."""
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
    return

def register_Ns3Buffer_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::Buffer."""
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [constructor]
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(uint32_t end) [member function]
    cls.add_method('AddAtEnd', 'void', [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
    cls.add_method('AddAtEnd', 'void', [param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtStart(uint32_t start) [member function]
    cls.add_method('AddAtStart', 'void', [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
    cls.add_method('Begin', 'ns3::Buffer::Iterator', [], is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData', 'void', [param('std::ostream *', 'os'), param('uint32_t', 'size')], is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], is_const=True)
    ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment', 'ns3::Buffer', [param('uint32_t', 'start'), param('uint32_t', 'length')], is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
    cls.add_method('End', 'ns3::Buffer::Iterator', [], is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
    cls.add_method('GetSize', 'uint32_t', [], is_const=True)
    ## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
    cls.add_method('PeekData', 'uint8_t const *', [], is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    return

def register_Ns3BufferIterator_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::Buffer::Iterator."""
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [constructor]
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
    cls.add_method('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
    cls.add_method('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
    cls.add_method('GetDistanceFrom', 'uint32_t', [param('ns3::Buffer::Iterator const &', 'o')], is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetRemainingSize() const [member function]
    cls.add_method('GetRemainingSize', 'uint32_t', [], is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
    cls.add_method('GetSize', 'uint32_t', [], is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
    cls.add_method('IsEnd', 'bool', [], is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
    cls.add_method('IsStart', 'bool', [], is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
    cls.add_method('Next', 'void', [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
    cls.add_method('Next', 'void', [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::PeekU8() [member function]
    cls.add_method('PeekU8', 'uint8_t', [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
    cls.add_method('Prev', 'void', [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
    cls.add_method('Prev', 'void', [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(ns3::Buffer::Iterator start, uint32_t size) [member function]
    cls.add_method('Read', 'void', [param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
    cls.add_method('ReadLsbtohU16', 'uint16_t', [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
    cls.add_method('ReadLsbtohU32', 'uint32_t', [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
    cls.add_method('ReadLsbtohU64', 'uint64_t', [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
    cls.add_method('ReadNtohU16', 'uint16_t', [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
    cls.add_method('ReadNtohU32', 'uint32_t', [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
    cls.add_method('ReadNtohU64', 'uint64_t', [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
    cls.add_method('ReadU16', 'uint16_t', [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
    cls.add_method('ReadU32', 'uint32_t', [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
    cls.add_method('ReadU64', 'uint64_t', [])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
    cls.add_method('ReadU8', 'uint8_t', [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Write', 'void', [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
    cls.add_method('WriteHtolsbU16', 'void', [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
    cls.add_method('WriteHtolsbU32', 'void', [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
    cls.add_method('WriteHtolsbU64', 'void', [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
    cls.add_method('WriteHtonU16', 'void', [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
    cls.add_method('WriteHtonU32', 'void', [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
    cls.add_method('WriteHtonU64', 'void', [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
    cls.add_method('WriteU16', 'void', [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
    cls.add_method('WriteU32', 'void', [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
    cls.add_method('WriteU64', 'void', [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'data'), param('uint32_t', 'len')])
    return

def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::ByteTagIterator."""
    ## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [constructor]
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    ## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function]
    cls.add_method('HasNext', 'bool', [], is_const=True)
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function]
    cls.add_method('Next', 'ns3::ByteTagIterator::Item', [])
    return

def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::ByteTagIterator::Item."""
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [constructor]
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
    cls.add_method('GetEnd', 'uint32_t', [], is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
    cls.add_method('GetStart', 'uint32_t', [], is_const=True)
    ## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    ## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return

def register_Ns3ByteTagList_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::ByteTagList."""
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
    cls.add_constructor([])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [constructor]
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    ## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
    cls.add_method('Add', 'ns3::TagBuffer', [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
    cls.add_method('Add', 'void', [param('ns3::ByteTagList const &', 'o')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t appendOffset) [member function]
    cls.add_method('AddAtEnd', 'void', [param('int32_t', 'appendOffset')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t prependOffset) [member function]
    cls.add_method('AddAtStart', 'void', [param('int32_t', 'prependOffset')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Adjust(int32_t adjustment) [member function]
    cls.add_method('Adjust', 'void', [param('int32_t', 'adjustment')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
    cls.add_method('Begin', 'ns3::ByteTagList::Iterator', [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')], is_const=True)
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
    cls.add_method('RemoveAll', 'void', [])
    return

def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::ByteTagList::Iterator."""
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [constructor]
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    ## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function]
    cls.add_method('GetOffsetStart', 'uint32_t', [], is_const=True)
    ## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function]
    cls.add_method('HasNext', 'bool', [], is_const=True)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function]
    cls.add_method('Next', 'ns3::ByteTagList::Iterator::Item', [])
    return

def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ struct ns3::ByteTagList::Iterator::Item."""
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [constructor]
    cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor]
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable]
    cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable]
    cls.add_instance_attribute('end', 'int32_t', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable]
    cls.add_instance_attribute('size', 'uint32_t', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable]
    cls.add_instance_attribute('start', 'int32_t', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable]
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return

def register_Ns3CallbackBase_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::CallbackBase."""
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True)
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')], visibility='protected')
    return

def register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::DefaultDeleter<ns3::AttributeAccessor>."""
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor>::DefaultDeleter() [constructor]
    cls.add_constructor([])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor>::DefaultDeleter(ns3::DefaultDeleter<ns3::AttributeAccessor> const & arg0) [constructor]
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::AttributeAccessor > const &', 'arg0')])
    ## default-deleter.h (module 'core'): static void ns3::DefaultDeleter<ns3::AttributeAccessor>::Delete(ns3::AttributeAccessor * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::AttributeAccessor *', 'object')], is_static=True)
    return

def register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::DefaultDeleter<ns3::AttributeChecker>."""
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker>::DefaultDeleter() [constructor]
    cls.add_constructor([])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker>::DefaultDeleter(ns3::DefaultDeleter<ns3::AttributeChecker> const & arg0) [constructor]
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::AttributeChecker > const &', 'arg0')])
    ## default-deleter.h (module 'core'): static void ns3::DefaultDeleter<ns3::AttributeChecker>::Delete(ns3::AttributeChecker * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::AttributeChecker *', 'object')], is_static=True)
    return

def register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::DefaultDeleter<ns3::AttributeValue>."""
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue>::DefaultDeleter() [constructor]
    cls.add_constructor([])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue>::DefaultDeleter(ns3::DefaultDeleter<ns3::AttributeValue> const & arg0) [constructor]
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::AttributeValue > const &', 'arg0')])
    ## default-deleter.h (module 'core'): static void ns3::DefaultDeleter<ns3::AttributeValue>::Delete(ns3::AttributeValue * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::AttributeValue *', 'object')], is_static=True)
    return

def register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::DefaultDeleter<ns3::CallbackImplBase>."""
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase>::DefaultDeleter() [constructor]
    cls.add_constructor([])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase>::DefaultDeleter(ns3::DefaultDeleter<ns3::CallbackImplBase> const & arg0) [constructor]
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::CallbackImplBase > const &', 'arg0')])
    ## default-deleter.h (module 'core'): static void ns3::DefaultDeleter<ns3::CallbackImplBase>::Delete(ns3::CallbackImplBase * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::CallbackImplBase *', 'object')], is_static=True)
    return

def register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::DefaultDeleter<ns3::Hash::Implementation>."""
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation>::DefaultDeleter() [constructor]
    cls.add_constructor([])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation>::DefaultDeleter(ns3::DefaultDeleter<ns3::Hash::Implementation> const & arg0) [constructor]
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::Hash::Implementation > const &', 'arg0')])
    ## default-deleter.h (module 'core'): static void ns3::DefaultDeleter<ns3::Hash::Implementation>::Delete(ns3::Hash::Implementation * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::Hash::Implementation *', 'object')], is_static=True)
    return

def register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::DefaultDeleter<ns3::NixVector>."""
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::NixVector>::DefaultDeleter() [constructor]
    cls.add_constructor([])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::NixVector>::DefaultDeleter(ns3::DefaultDeleter<ns3::NixVector> const & arg0) [constructor]
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::NixVector > const &', 'arg0')])
    ## default-deleter.h (module 'core'): static void ns3::DefaultDeleter<ns3::NixVector>::Delete(ns3::NixVector * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::NixVector *', 'object')], is_static=True)
    return

def register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::DefaultDeleter<ns3::Packet>."""
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Packet>::DefaultDeleter() [constructor]
    cls.add_constructor([])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Packet>::DefaultDeleter(ns3::DefaultDeleter<ns3::Packet> const & arg0) [constructor]
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::Packet > const &', 'arg0')])
    ## default-deleter.h (module 'core'): static void ns3::DefaultDeleter<ns3::Packet>::Delete(ns3::Packet * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::Packet *', 'object')], is_static=True)
    return

def register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register pybindgen wrappers for ns3::DefaultDeleter<ns3::TraceSourceAccessor>."""
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor>::DefaultDeleter() [constructor]
    cls.add_constructor([])
    ## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor>::DefaultDeleter(ns3::DefaultDeleter<ns3::TraceSourceAccessor> const & arg0) [constructor]
    cls.add_constructor([param('ns3::DefaultDeleter< ns3::TraceSourceAccessor > const &', 'arg0')])
    ## default-deleter.h (module 'core'): static void ns3::DefaultDeleter<ns3::TraceSourceAccessor>::Delete(ns3::TraceSourceAccessor * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::TraceSourceAccessor *', 'object')], is_static=True)
    return

def register_Ns3Hasher_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::Hasher."""
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    ## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
    cls.add_constructor([])
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
    cls.add_method('GetHash32', 'uint32_t', [param('std::string const', 's')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
    cls.add_method('GetHash64', 'uint64_t', [param('std::string const', 's')])
    ## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
    cls.add_method('clear', 'ns3::Hasher &', [])
    return

def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::Ipv4Address."""
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
    cls.add_constructor([param('uint32_t', 'address')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
    cls.add_constructor([param('char const *', 'address')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('CombineMask', 'ns3::Ipv4Address', [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
    cls.add_method('ConvertFrom', 'ns3::Ipv4Address', [param('ns3::Address const &', 'address')], is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
    cls.add_method('Deserialize', 'ns3::Ipv4Address', [param('uint8_t const *', 'buf')], is_static=True)
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
    cls.add_method('GetAny', 'ns3::Ipv4Address', [], is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
    cls.add_method('GetBroadcast', 'ns3::Ipv4Address', [], is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
    cls.add_method('GetLoopback', 'ns3::Ipv4Address', [], is_static=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address', [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
    cls.add_method('GetZero', 'ns3::Ipv4Address', [], is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsAny() const [member function]
    cls.add_method('IsAny', 'bool', [], is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Address const &', 'other')], is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
    cls.add_method('IsLocalMulticast', 'bool', [], is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalhost() const [member function]
    cls.add_method('IsLocalhost', 'bool', [], is_const=True)
    ## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('IsSubnetDirectedBroadcast', 'bool', [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
    cls.add_method('Set', 'void', [param('uint32_t', 'address')])
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    return

def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::Ipv4Mask."""
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
    cls.add_constructor([param('uint32_t', 'mask')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
    cls.add_constructor([param('char const *', 'mask')])
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
    cls.add_method('GetInverse', 'uint32_t', [], is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
    cls.add_method('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
    cls.add_method('GetOnes', 'ns3::Ipv4Mask', [], is_static=True)
    ## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
    cls.add_method('GetPrefixLength', 'uint16_t', [], is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
    cls.add_method('GetZero', 'ns3::Ipv4Mask', [], is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')], is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
    cls.add_method('IsMatch', 'bool', [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')], is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
    cls.add_method('Set', 'void', [param('uint32_t', 'mask')])
    return

def register_Ns3Ipv6Address_methods(root_module, cls):
    """Register pybindgen wrappers for the C++ class ns3::Ipv6Address.

    NOTE(review): this function continues past the end of the visible span;
    the remainder of its registrations follows below unchanged.
    """
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
    cls.add_constructor([param('char const *', 'address')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
    cls.add_constructor([param('uint8_t *', 'address')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
    cls.add_method('CombinePrefix', 'ns3::Ipv6Address', [param('ns3::Ipv6Prefix const &', 'prefix')])
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
    cls.add_method('ConvertFrom', 'ns3::Ipv6Address', [param('ns3::Address const &', 'address')], is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
    cls.add_method('Deserialize', 'ns3::Ipv6Address', [param('uint8_t const *', 'buf')], is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
    cls.add_method('GetAllHostsMulticast', 'ns3::Ipv6Address', [], is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
    cls.add_method('GetAllNodesMulticast', 'ns3::Ipv6Address', [], is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
    cls.add_method('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
    cls.add_method('GetAny', 'ns3::Ipv6Address', [], is_static=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
    cls.add_method('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], is_const=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
    cls.add_method('GetLoopback', 'ns3::Ipv6Address', [], is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
    cls.add_method('GetOnes', 'ns3::Ipv6Address', [], is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
    cls.add_method('GetZero', 'ns3::Ipv6Address', [], is_static=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
    cls.add_method('IsAllHostsMulticast', 'bool', [], is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
    cls.add_method('IsAllNodesMulticast', 'bool', [], is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
    cls.add_method('IsAllRoutersMulticast', 'bool', [], is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
    cls.add_method('IsAny', 'bool', [], is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsDocumentation() const [member function]
    cls.add_method('IsDocumentation', 'bool', [], is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Address const &', 'other')], is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() const [member function]
    cls.add_method('IsIpv4MappedAddress', 'bool', [], is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
    cls.add_method('IsLinkLocal', 'bool', [], is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
cls.add_method('IsLinkLocalMulticast', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function] cls.add_method('IsLocalhost', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function] cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function] cls.add_method('IsMulticast', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function] cls.add_method('IsSolicitedMulticast', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac16Address addr, ns3::Ipv6Address prefix) [member function] cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function] cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac64Address addr, ns3::Ipv6Address prefix) [member function] cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac16Address mac) [member function] cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', 
[param('ns3::Mac16Address', 'mac')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function] cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac48Address', 'mac')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac64Address mac) [member function] cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac64Address', 'mac')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function] cls.add_method('MakeIpv4MappedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv4Address', 'addr')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function] cls.add_method('MakeSolicitedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv6Address', 'addr')], is_static=True) ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True) ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function] cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True) ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function] cls.add_method('Set', 'void', [param('char const *', 'address')]) ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function] cls.add_method('Set', 'void', [param('uint8_t *', 'address')]) return def register_Ns3Ipv6Prefix_methods(root_module, cls): cls.add_output_stream_operator() cls.add_binary_comparison_operator('==') 
cls.add_binary_comparison_operator('!=') ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor] cls.add_constructor([]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor] cls.add_constructor([param('uint8_t *', 'prefix')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor] cls.add_constructor([param('char const *', 'prefix')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor] cls.add_constructor([param('uint8_t', 'prefix')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [constructor] cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor] cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')]) ## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function] cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function] cls.add_method('GetLoopback', 'ns3::Ipv6Prefix', [], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function] cls.add_method('GetOnes', 'ns3::Ipv6Prefix', [], is_static=True) ## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function] cls.add_method('GetPrefixLength', 'uint8_t', [], is_const=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function] cls.add_method('GetZero', 'ns3::Ipv6Prefix', [], is_static=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function] cls.add_method('IsEqual', 'bool', 
[param('ns3::Ipv6Prefix const &', 'other')], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function] cls.add_method('IsMatch', 'bool', [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')], is_const=True) ## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True) return def register_Ns3Mac48Address_methods(root_module, cls): cls.add_binary_comparison_operator('==') cls.add_binary_comparison_operator('!=') cls.add_binary_comparison_operator('<') cls.add_output_stream_operator() ## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(ns3::Mac48Address const & arg0) [constructor] cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')]) ## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address() [constructor] cls.add_constructor([]) ## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(char const * str) [constructor] cls.add_constructor([param('char const *', 'str')]) ## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::Allocate() [member function] cls.add_method('Allocate', 'ns3::Mac48Address', [], is_static=True) ## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::ConvertFrom(ns3::Address const & address) [member function] cls.add_method('ConvertFrom', 'ns3::Mac48Address', [param('ns3::Address const &', 'address')], is_static=True) ## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyFrom(uint8_t const * buffer) [member function] cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'buffer')]) ## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyTo(uint8_t * buffer) const [member function] cls.add_method('CopyTo', 'void', [param('uint8_t *', 'buffer')], is_const=True) ## mac48-address.h 
(module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetBroadcast() [member function] cls.add_method('GetBroadcast', 'ns3::Mac48Address', [], is_static=True) ## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv4Address address) [member function] cls.add_method('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv4Address', 'address')], is_static=True) ## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv6Address address) [member function] cls.add_method('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv6Address', 'address')], is_static=True) ## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast6Prefix() [member function] cls.add_method('GetMulticast6Prefix', 'ns3::Mac48Address', [], is_static=True) ## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticastPrefix() [member function] cls.add_method('GetMulticastPrefix', 'ns3::Mac48Address', [], is_static=True) ## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsBroadcast() const [member function] cls.add_method('IsBroadcast', 'bool', [], is_const=True) ## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsGroup() const [member function] cls.add_method('IsGroup', 'bool', [], is_const=True) ## mac48-address.h (module 'network'): static bool ns3::Mac48Address::IsMatchingType(ns3::Address const & address) [member function] cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True) return def register_Ns3MpiInterface_methods(root_module, cls): ## mpi-interface.h (module 'mpi'): ns3::MpiInterface::MpiInterface() [constructor] cls.add_constructor([]) ## mpi-interface.h (module 'mpi'): ns3::MpiInterface::MpiInterface(ns3::MpiInterface const & arg0) [constructor] cls.add_constructor([param('ns3::MpiInterface const &', 'arg0')]) ## mpi-interface.h (module 'mpi'): static 
void ns3::MpiInterface::Destroy() [member function] cls.add_method('Destroy', 'void', [], is_static=True) ## mpi-interface.h (module 'mpi'): static void ns3::MpiInterface::Disable() [member function] cls.add_method('Disable', 'void', [], is_static=True) ## mpi-interface.h (module 'mpi'): static void ns3::MpiInterface::Enable(int * pargc, char * * * pargv) [member function] cls.add_method('Enable', 'void', [param('int *', 'pargc'), param('char * * *', 'pargv')], is_static=True) ## mpi-interface.h (module 'mpi'): static uint32_t ns3::MpiInterface::GetSize() [member function] cls.add_method('GetSize', 'uint32_t', [], is_static=True) ## mpi-interface.h (module 'mpi'): static uint32_t ns3::MpiInterface::GetSystemId() [member function] cls.add_method('GetSystemId', 'uint32_t', [], is_static=True) ## mpi-interface.h (module 'mpi'): static bool ns3::MpiInterface::IsEnabled() [member function] cls.add_method('IsEnabled', 'bool', [], is_static=True) ## mpi-interface.h (module 'mpi'): static void ns3::MpiInterface::SendPacket(ns3::Ptr<ns3::Packet> p, ns3::Time const & rxTime, uint32_t node, uint32_t dev) [member function] cls.add_method('SendPacket', 'void', [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Time const &', 'rxTime'), param('uint32_t', 'node'), param('uint32_t', 'dev')], is_static=True) return def register_Ns3ObjectBase_methods(root_module, cls): ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor] cls.add_constructor([]) ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [constructor] cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')]) ## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function] cls.add_method('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True) ## object-base.h (module 'core'): bool 
ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function] cls.add_method('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True) ## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function] cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function] cls.add_method('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function] cls.add_method('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), 
param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function] cls.add_method('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected') ## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function] cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True) return def register_Ns3ObjectDeleter_methods(root_module, cls): ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor] cls.add_constructor([]) ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [constructor] cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')]) ## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function] cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True) return def register_Ns3PacketMetadata_methods(root_module, cls): ## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(uint64_t uid, uint32_t size) [constructor] cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(ns3::PacketMetadata const & o) [constructor] cls.add_constructor([param('ns3::PacketMetadata const &', 'o')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddAtEnd(ns3::PacketMetadata const & o) [member function] cls.add_method('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')]) ## 
packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddHeader(ns3::Header const & header, uint32_t size) [member function] cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddPaddingAtEnd(uint32_t end) [member function] cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddTrailer(ns3::Trailer const & trailer, uint32_t size) [member function] cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::PacketMetadata::BeginItem(ns3::Buffer buffer) const [member function] cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [param('ns3::Buffer', 'buffer')], is_const=True) ## packet-metadata.h (module 'network'): ns3::PacketMetadata ns3::PacketMetadata::CreateFragment(uint32_t start, uint32_t end) const [member function] cls.add_method('CreateFragment', 'ns3::PacketMetadata', [param('uint32_t', 'start'), param('uint32_t', 'end')], is_const=True) ## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Deserialize(uint8_t const * buffer, uint32_t size) [member function] cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::Enable() [member function] cls.add_method('Enable', 'void', [], is_static=True) ## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::EnableChecking() [member function] cls.add_method('EnableChecking', 'void', [], is_static=True) ## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True) ## packet-metadata.h (module 
'network'): uint64_t ns3::PacketMetadata::GetUid() const [member function] cls.add_method('GetUid', 'uint64_t', [], is_const=True) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtEnd(uint32_t end) [member function] cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtStart(uint32_t start) [member function] cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveHeader(ns3::Header const & header, uint32_t size) [member function] cls.add_method('RemoveHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveTrailer(ns3::Trailer const & trailer, uint32_t size) [member function] cls.add_method('RemoveTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function] cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True) return def register_Ns3PacketMetadataItem_methods(root_module, cls): ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item() [constructor] cls.add_constructor([]) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item(ns3::PacketMetadata::Item const & arg0) [constructor] cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')]) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::current [variable] cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentSize [variable] cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False) ## packet-metadata.h (module 
'network'): ns3::PacketMetadata::Item::currentTrimedFromEnd [variable] cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromStart [variable] cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::isFragment [variable] cls.add_instance_attribute('isFragment', 'bool', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::tid [variable] cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::type [variable] cls.add_instance_attribute('type', 'ns3::PacketMetadata::Item::ItemType', is_const=False) return def register_Ns3PacketMetadataItemIterator_methods(root_module, cls): ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata::ItemIterator const & arg0) [constructor] cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')]) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata const * metadata, ns3::Buffer buffer) [constructor] cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')]) ## packet-metadata.h (module 'network'): bool ns3::PacketMetadata::ItemIterator::HasNext() const [member function] cls.add_method('HasNext', 'bool', [], is_const=True) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item ns3::PacketMetadata::ItemIterator::Next() [member function] cls.add_method('Next', 'ns3::PacketMetadata::Item', []) return def register_Ns3PacketTagIterator_methods(root_module, cls): ## packet.h (module 'network'): ns3::PacketTagIterator::PacketTagIterator(ns3::PacketTagIterator const & arg0) [constructor] cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')]) 
## packet.h (module 'network'): bool ns3::PacketTagIterator::HasNext() const [member function] cls.add_method('HasNext', 'bool', [], is_const=True) ## packet.h (module 'network'): ns3::PacketTagIterator::Item ns3::PacketTagIterator::Next() [member function] cls.add_method('Next', 'ns3::PacketTagIterator::Item', []) return def register_Ns3PacketTagIteratorItem_methods(root_module, cls): ## packet.h (module 'network'): ns3::PacketTagIterator::Item::Item(ns3::PacketTagIterator::Item const & arg0) [constructor] cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')]) ## packet.h (module 'network'): void ns3::PacketTagIterator::Item::GetTag(ns3::Tag & tag) const [member function] cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True) ## packet.h (module 'network'): ns3::TypeId ns3::PacketTagIterator::Item::GetTypeId() const [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True) return def register_Ns3PacketTagList_methods(root_module, cls): ## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList() [constructor] cls.add_constructor([]) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList(ns3::PacketTagList const & o) [constructor] cls.add_constructor([param('ns3::PacketTagList const &', 'o')]) ## packet-tag-list.h (module 'network'): void ns3::PacketTagList::Add(ns3::Tag const & tag) const [member function] cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData const * ns3::PacketTagList::Head() const [member function] cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], is_const=True) ## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Peek(ns3::Tag & tag) const [member function] cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True) ## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Remove(ns3::Tag & tag) 
[member function] cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')]) ## packet-tag-list.h (module 'network'): void ns3::PacketTagList::RemoveAll() [member function] cls.add_method('RemoveAll', 'void', []) ## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Replace(ns3::Tag & tag) [member function] cls.add_method('Replace', 'bool', [param('ns3::Tag &', 'tag')]) return def register_Ns3PacketTagListTagData_methods(root_module, cls): ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData() [constructor] cls.add_constructor([]) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData(ns3::PacketTagList::TagData const & arg0) [constructor] cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')]) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::count [variable] cls.add_instance_attribute('count', 'uint32_t', is_const=False) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::data [variable] cls.add_instance_attribute('data', 'uint8_t [ 1 ]', is_const=False) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::next [variable] cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::size [variable] cls.add_instance_attribute('size', 'uint32_t', is_const=False) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::tid [variable] cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False) return def register_Ns3ParallelCommunicationInterface_methods(root_module, cls): ## parallel-communication-interface.h (module 'mpi'): ns3::ParallelCommunicationInterface::ParallelCommunicationInterface() [constructor] cls.add_constructor([]) ## parallel-communication-interface.h (module 'mpi'): ns3::ParallelCommunicationInterface::ParallelCommunicationInterface(ns3::ParallelCommunicationInterface const & arg0) [constructor] 
cls.add_constructor([param('ns3::ParallelCommunicationInterface const &', 'arg0')]) ## parallel-communication-interface.h (module 'mpi'): void ns3::ParallelCommunicationInterface::Destroy() [member function] cls.add_method('Destroy', 'void', [], is_pure_virtual=True, is_virtual=True) ## parallel-communication-interface.h (module 'mpi'): void ns3::ParallelCommunicationInterface::Disable() [member function] cls.add_method('Disable', 'void', [], is_pure_virtual=True, is_virtual=True) ## parallel-communication-interface.h (module 'mpi'): void ns3::ParallelCommunicationInterface::Enable(int * pargc, char * * * pargv) [member function] cls.add_method('Enable', 'void', [param('int *', 'pargc'), param('char * * *', 'pargv')], is_pure_virtual=True, is_virtual=True) ## parallel-communication-interface.h (module 'mpi'): uint32_t ns3::ParallelCommunicationInterface::GetSize() [member function] cls.add_method('GetSize', 'uint32_t', [], is_pure_virtual=True, is_virtual=True) ## parallel-communication-interface.h (module 'mpi'): uint32_t ns3::ParallelCommunicationInterface::GetSystemId() [member function] cls.add_method('GetSystemId', 'uint32_t', [], is_pure_virtual=True, is_virtual=True) ## parallel-communication-interface.h (module 'mpi'): bool ns3::ParallelCommunicationInterface::IsEnabled() [member function] cls.add_method('IsEnabled', 'bool', [], is_pure_virtual=True, is_virtual=True) ## parallel-communication-interface.h (module 'mpi'): void ns3::ParallelCommunicationInterface::SendPacket(ns3::Ptr<ns3::Packet> p, ns3::Time const & rxTime, uint32_t node, uint32_t dev) [member function] cls.add_method('SendPacket', 'void', [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Time const &', 'rxTime'), param('uint32_t', 'node'), param('uint32_t', 'dev')], is_pure_virtual=True, is_virtual=True) return def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, 
## NOTE(review): this region is auto-generated pybindgen registration code for the
## ns-3 Python bindings; newlines were lost in this copy and are restored below.
## The two lines that follow complete the SimpleRefCount<ns3::Object, ...>
## registration whose header comment begins on the previous (mangled) line:
## ...ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    return

def register_Ns3Tag_methods(root_module, cls):
    """Register the ns3::Tag wrapper (constructors and virtual serialization API)."""
    ## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
    cls.add_constructor([])
    ## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [constructor]
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    ## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_virtual=True)
    ## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3TagBuffer_methods(root_module, cls):
    """Register the ns3::TagBuffer wrapper (raw read/write accessors for tag data)."""
    ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [constructor]
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
    cls.add_method('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
    cls.add_method('ReadDouble', 'double', [])
    ## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
    cls.add_method('ReadU16', 'uint16_t', [])
    ## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
    cls.add_method('ReadU32', 'uint32_t', [])
    ## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
    cls.add_method('ReadU64', 'uint64_t', [])
    ## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
    cls.add_method('ReadU8', 'uint8_t', [])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
    cls.add_method('TrimAtEnd', 'void', [param('uint32_t', 'trim')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
    cls.add_method('WriteDouble', 'void', [param('double', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t v) [member function]
    cls.add_method('WriteU16', 'void', [param('uint16_t', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t v) [member function]
    cls.add_method('WriteU32', 'void', [param('uint32_t', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
    cls.add_method('WriteU64', 'void', [param('uint64_t', 'v')])
    ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'v')])
    return

def register_Ns3TimeWithUnit_methods(root_module, cls):
    """Register the ns3::TimeWithUnit wrapper (Time paired with a display unit)."""
    cls.add_output_stream_operator()
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [constructor]
    cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor]
    cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')])
    return

def register_Ns3TypeId_methods(root_module, cls):
    """Register the ns3::TypeId wrapper (run-time type/attribute/trace metadata)."""
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<')
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
    cls.add_constructor([param('char const *', 'name')])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<const ns3::AttributeAccessor> accessor, ns3::Ptr<const ns3::AttributeChecker> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SupportLevel::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<const ns3::AttributeAccessor> accessor, ns3::Ptr<const ns3::AttributeChecker> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SupportLevel::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<const ns3::TraceSourceAccessor> accessor) [member function]
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<const ns3::TraceSourceAccessor> accessor, std::string callback, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SupportLevel::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
    cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('uint32_t', 'i')], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
    cls.add_method('GetAttributeFullName', 'std::string', [param('uint32_t', 'i')], is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
    cls.add_method('GetAttributeN', 'uint32_t', [], is_const=True)
    ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ns3::TypeId::GetConstructor() const [member function]
    cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
    cls.add_method('GetGroupName', 'std::string', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::hash_t ns3::TypeId::GetHash() const [member function]
    cls.add_method('GetHash', 'ns3::TypeId::hash_t', [], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
    cls.add_method('GetName', 'std::string', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
    cls.add_method('GetParent', 'ns3::TypeId', [], is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
    cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint32_t', 'i')], is_static=True)
    ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
    cls.add_method('GetRegisteredN', 'uint32_t', [], is_static=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
    cls.add_method('GetSize', 'std::size_t', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
    cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('uint32_t', 'i')], is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
    cls.add_method('GetTraceSourceN', 'uint32_t', [], is_const=True)
    ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
    cls.add_method('GetUid', 'uint16_t', [], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
    cls.add_method('HasConstructor', 'bool', [], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
    cls.add_method('HasParent', 'bool', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
    cls.add_method('HideFromDocumentation', 'ns3::TypeId', [])
    ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
    cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
    cls.add_method('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(ns3::TypeId::hash_t hash) [member function]
    cls.add_method('LookupByHash', 'ns3::TypeId', [param('uint32_t', 'hash')], is_static=True)
    ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(ns3::TypeId::hash_t hash, ns3::TypeId * tid) [member function]
    cls.add_method('LookupByHashFailSafe', 'bool', [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], is_static=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
    cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True)
    ## type-id.h (module 'core'): ns3::Ptr<const ns3::TraceSourceAccessor> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True)
    ## type-id.h (module 'core'): ns3::Ptr<const ns3::TraceSourceAccessor> ns3::TypeId::LookupTraceSourceByName(std::string name, ns3::TypeId::TraceSourceInformation * info) const [member function]
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
    cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<const ns3::AttributeValue> initialValue) [member function]
    cls.add_method('SetAttributeInitialValue', 'bool', [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
    cls.add_method('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
    cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
    cls.add_method('SetSize', 'ns3::TypeId', [param('std::size_t', 'size')])
    ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t uid) [member function]
    cls.add_method('SetUid', 'void', [param('uint16_t', 'uid')])
    return

def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register the ns3::TypeId::AttributeInformation struct wrapper (plain data fields)."""
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [constructor]
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
    cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
    cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
    cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportLevel [variable]
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportMsg [variable]
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return

def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register the ns3::TypeId::TraceSourceInformation struct wrapper (plain data fields)."""
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [constructor]
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
    cls.add_instance_attribute('callback', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportLevel [variable]
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportMsg [variable]
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return

def register_Ns3Empty_methods(root_module, cls):
    """Register the ns3::empty placeholder type wrapper (constructors only)."""
    ## empty.h (module 'core'): ns3::empty::empty() [constructor]
    cls.add_constructor([])
    ## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [constructor]
    cls.add_constructor([param('ns3::empty const &', 'arg0')])
    return

def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register the ns3::int64x64_t fixed-point wrapper (arithmetic/comparison operators, constructors, accessors)."""
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('>=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
    cls.add_constructor([])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(double const value) [constructor]
    cls.add_constructor([param('double const', 'value')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long double const value) [constructor]
    cls.add_constructor([param('long double const', 'value')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(int const v) [constructor]
    cls.add_constructor([param('int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long int const v) [constructor]
    cls.add_constructor([param('long int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int const v) [constructor]
    cls.add_constructor([param('long long int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int const v) [constructor]
    cls.add_constructor([param('unsigned int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int const v) [constructor]
    cls.add_constructor([param('long unsigned int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int const v) [constructor]
    cls.add_constructor([param('long long unsigned int const', 'v')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t const hi, uint64_t const lo) [constructor]
    cls.add_constructor([param('int64_t const', 'hi'), param('uint64_t const', 'lo')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    ## int64x64-128.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
    cls.add_method('GetDouble', 'double', [], is_const=True)
    ## int64x64-128.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
    cls.add_method('GetHigh', 'int64_t', [], is_const=True)
    ## int64x64-128.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
    cls.add_method('GetLow', 'uint64_t', [], is_const=True)
    ## int64x64-128.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t const v) [member function]
    cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t const', 'v')], is_static=True)
    ## int64x64-128.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
    cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')])
    ## int64x64-128.h (module 'core'): ns3::int64x64_t::implementation [variable]
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
    return

def register_Ns3Chunk_methods(root_module, cls):
    """Register the ns3::Chunk wrapper (abstract base of Header/Trailer)."""
    ## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
    cls.add_constructor([])
    ## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [constructor]
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    ## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_virtual=True)
    ## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3Header_methods(root_module, cls):
    """Register the ns3::Header wrapper (abstract packet-header serialization API)."""
    cls.add_output_stream_operator()
    ## header.h (module 'network'): ns3::Header::Header() [constructor]
    cls.add_constructor([])
    ## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [constructor]
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    ## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_virtual=True)
    ## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3Object_methods(root_module, cls):
    """Register the ns3::Object wrapper (aggregation, lifecycle and TypeId support)."""
    ## object.h (module 'core'): ns3::Object::Object() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    ## object.h (module 'core'): void ns3::Object::Dispose() [member function]
    cls.add_method('Dispose', 'void', [])
    ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## object.h (module 'core'): void ns3::Object::Initialize() [member function]
    cls.add_method('Initialize', 'void', [])
    ## object.h (module 'core'): bool ns3::Object::IsInitialized() const [member function]
    cls.add_method('IsInitialized', 'bool', [], is_const=True)
    ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [constructor]
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    ## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    return

def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register the ns3::Object::AggregateIterator wrapper (iterates aggregated objects)."""
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [constructor]
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
    cls.add_method('HasNext', 'bool', [], is_const=True)
    ## object.h (module 'core'): ns3::Ptr<const ns3::Object> ns3::Object::AggregateIterator::Next() [member function]
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return

def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register the SimpleRefCount<AttributeAccessor> base wrapper (constructors only)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    return

def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register the SimpleRefCount<AttributeChecker> base wrapper (constructors only)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    return

def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register the SimpleRefCount<AttributeValue> base wrapper (constructors only)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    return

def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register the SimpleRefCount<CallbackImplBase> base wrapper (constructors only)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    return

def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register the SimpleRefCount<Hash::Implementation> base wrapper (constructors only)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
    return

def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register the SimpleRefCount<NixVector> base wrapper (constructors only)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
    return

def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register the SimpleRefCount<Packet> base wrapper (constructors only)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
    return

def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register the SimpleRefCount<TraceSourceAccessor> base wrapper (constructors only)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    return

def register_Ns3Time_methods(root_module, cls):
    """Register the ns3::Time wrapper (operators, unit constructors, accessors).

    NOTE(review): this registration is truncated at the end of the visible
    region; the final cls.add_method('SetResolution', ...) call is completed
    on the following (mangled) line.
    """
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('>=')
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    ## nstime.h (module 'core'): ns3::Time::Time() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [constructor]
    cls.add_constructor([param('ns3::Time const &', 'o')])
    ## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & v) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
    cls.add_constructor([param('std::string const &', 's')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit ns3::Time::As(ns3::Time::Unit const unit) const [member function]
    cls.add_method('As', 'ns3::TimeWithUnit', [param('ns3::Time::Unit const', 'unit')], is_const=True)
    ## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
    cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value, ns3::Time::Unit unit) [member function]
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromDouble', 'ns3::Time', [param('double', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromInteger', 'ns3::Time', [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function]
    cls.add_method('GetDays', 'double', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
    cls.add_method('GetDouble', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
    cls.add_method('GetFemtoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function]
    cls.add_method('GetHours', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
    cls.add_method('GetInteger', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
    cls.add_method('GetMicroSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
    cls.add_method('GetMilliSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function]
    cls.add_method('GetMinutes', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
    cls.add_method('GetNanoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
    cls.add_method('GetPicoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
    cls.add_method('GetTimeStep', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function]
    cls.add_method('GetYears', 'double', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
    cls.add_method('IsNegative', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
    cls.add_method('IsPositive', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
    cls.add_method('IsStrictlyNegative', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
    cls.add_method('IsStrictlyPositive', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
    cls.add_method('IsZero', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function]
    cls.add_method('Max', 'ns3::Time', [], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
    cls.add_method('SetResolution', 'void',
[param('ns3::Time::Unit', 'resolution')], is_static=True) ## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function] cls.add_method('StaticInit', 'bool', [], is_static=True) ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function] cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'unit')], is_const=True) ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function] cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'unit')], is_const=True) ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function] cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'unit')], is_const=True) return def register_Ns3TraceSourceAccessor_methods(root_module, cls): ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [constructor] cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')]) ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor] cls.add_constructor([]) ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function] cls.add_method('Connect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True) ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function] cls.add_method('ConnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True) ## 
trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function] cls.add_method('Disconnect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True) ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function] cls.add_method('DisconnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3Trailer_methods(root_module, cls): cls.add_output_stream_operator() ## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor] cls.add_constructor([]) ## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [constructor] cls.add_constructor([param('ns3::Trailer const &', 'arg0')]) ## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function] cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'end')], is_pure_virtual=True, is_virtual=True) ## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True) ## trailer.h (module 'network'): void 
ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function] cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3AttributeAccessor_methods(root_module, cls): ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [constructor] cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')]) ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function] cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function] cls.add_method('HasGetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function] cls.add_method('HasSetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function] cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3AttributeChecker_methods(root_module, cls): ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [constructor] cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')]) ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor] 
cls.add_constructor([]) ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function] cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function] cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function] cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function] cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::AttributeValue const &', 'value')], is_const=True) ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function] cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function] cls.add_method('GetValueTypeName', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function] cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3AttributeValue_methods(root_module, cls): ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue 
const & arg0) [constructor] cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')]) ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_virtual=True) ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3CallbackChecker_methods(root_module, cls): ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [constructor] cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')]) return def register_Ns3CallbackImplBase_methods(root_module, cls): ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [constructor] cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')]) ## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function] cls.add_method('GetTypeid', 'std::string', [], 
is_pure_virtual=True, is_const=True, is_virtual=True) ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<const ns3::CallbackImplBase> other) const [member function] cls.add_method('IsEqual', 'bool', [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')], is_pure_virtual=True, is_const=True, is_virtual=True) ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function] cls.add_method('Demangle', 'std::string', [param('std::string const &', 'mangled')], is_static=True, visibility='protected') ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function] cls.add_method('GetCppTypeid', 'std::string', [], is_static=True, visibility='protected', template_parameters=[u'ns3::ObjectBase*']) ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function] cls.add_method('GetCppTypeid', 'std::string', [], is_static=True, visibility='protected', template_parameters=[u'void']) ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function] cls.add_method('GetCppTypeid', 'std::string', [], is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::Packet> ']) return def register_Ns3CallbackValue_methods(root_module, cls): ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [constructor] cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')]) ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor] cls.add_constructor([param('ns3::CallbackBase const &', 'base')]) ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', 
[], is_const=True, is_virtual=True) ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function] cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')]) return def register_Ns3EmptyAttributeAccessor_methods(root_module, cls): ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor(ns3::EmptyAttributeAccessor const & arg0) [constructor] cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')]) ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function] cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasGetter() const [member function] cls.add_method('HasGetter', 'bool', [], is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasSetter() const [member function] cls.add_method('HasSetter', 'bool', [], is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Set(ns3::ObjectBase * object, 
ns3::AttributeValue const & value) const [member function] cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True) return def register_Ns3EmptyAttributeChecker_methods(root_module, cls): ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker(ns3::EmptyAttributeChecker const & arg0) [constructor] cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')]) ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Check(ns3::AttributeValue const & value) const [member function] cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function] cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_const=True, is_virtual=True) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeChecker::Create() const [member function] cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetUnderlyingTypeInformation() const [member function] cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_const=True, is_virtual=True) ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetValueTypeName() const [member function] cls.add_method('GetValueTypeName', 'std::string', [], is_const=True, is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::HasUnderlyingTypeInformation() const [member function] cls.add_method('HasUnderlyingTypeInformation', 
'bool', [], is_const=True, is_virtual=True) return def register_Ns3EmptyAttributeValue_methods(root_module, cls): ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [constructor] cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')]) ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor] cls.add_constructor([]) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, visibility='private', is_virtual=True) ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], visibility='private', is_virtual=True) ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, visibility='private', is_virtual=True) return def register_Ns3Ipv4AddressChecker_methods(root_module, cls): ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor] cls.add_constructor([]) ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [constructor] cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')]) return def register_Ns3Ipv4AddressValue_methods(root_module, cls): ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor] cls.add_constructor([]) ## ipv4-address.h (module 'network'): 
ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor] cls.add_constructor([param('ns3::Ipv4Address const &', 'value')]) ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [constructor] cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')]) ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function] cls.add_method('Get', 'ns3::Ipv4Address', [], is_const=True) ## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Ipv4Address const &', 'value')]) return def register_Ns3Ipv4MaskChecker_methods(root_module, cls): ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor] cls.add_constructor([]) ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [constructor] cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')]) return def 
register_Ns3Ipv4MaskValue_methods(root_module, cls): ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor] cls.add_constructor([]) ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor] cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')]) ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [constructor] cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')]) ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function] cls.add_method('Get', 'ns3::Ipv4Mask', [], is_const=True) ## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')]) return def register_Ns3Ipv6AddressChecker_methods(root_module, cls): ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor] cls.add_constructor([]) ## ipv6-address.h (module 'network'): 
ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [constructor] cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')]) return def register_Ns3Ipv6AddressValue_methods(root_module, cls): ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor] cls.add_constructor([]) ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor] cls.add_constructor([param('ns3::Ipv6Address const &', 'value')]) ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [constructor] cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')]) ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function] cls.add_method('Get', 'ns3::Ipv6Address', [], is_const=True) ## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Ipv6Address const &', 'value')]) return def 
register_Ns3Ipv6PrefixChecker_methods(root_module, cls): ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor] cls.add_constructor([]) ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [constructor] cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')]) return def register_Ns3Ipv6PrefixValue_methods(root_module, cls): ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor] cls.add_constructor([]) ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor] cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')]) ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [constructor] cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')]) ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function] cls.add_method('Get', 'ns3::Ipv6Prefix', [], is_const=True) ## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## ipv6-address.h (module 'network'): 
void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')]) return def register_Ns3Mac48AddressChecker_methods(root_module, cls): ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker() [constructor] cls.add_constructor([]) ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker(ns3::Mac48AddressChecker const & arg0) [constructor] cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')]) return def register_Ns3Mac48AddressValue_methods(root_module, cls): ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue() [constructor] cls.add_constructor([]) ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48Address const & value) [constructor] cls.add_constructor([param('ns3::Mac48Address const &', 'value')]) ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48AddressValue const & arg0) [constructor] cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')]) ## mac48-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac48AddressValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## mac48-address.h (module 'network'): bool ns3::Mac48AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## mac48-address.h (module 'network'): ns3::Mac48Address ns3::Mac48AddressValue::Get() const [member function] cls.add_method('Get', 'ns3::Mac48Address', [], is_const=True) ## mac48-address.h (module 'network'): std::string ns3::Mac48AddressValue::SerializeToString(ns3::Ptr<const 
ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## mac48-address.h (module 'network'): void ns3::Mac48AddressValue::Set(ns3::Mac48Address const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Mac48Address const &', 'value')]) return def register_Ns3MpiReceiver_methods(root_module, cls): ## mpi-receiver.h (module 'mpi'): ns3::MpiReceiver::MpiReceiver() [constructor] cls.add_constructor([]) ## mpi-receiver.h (module 'mpi'): ns3::MpiReceiver::MpiReceiver(ns3::MpiReceiver const & arg0) [constructor] cls.add_constructor([param('ns3::MpiReceiver const &', 'arg0')]) ## mpi-receiver.h (module 'mpi'): static ns3::TypeId ns3::MpiReceiver::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## mpi-receiver.h (module 'mpi'): void ns3::MpiReceiver::Receive(ns3::Ptr<ns3::Packet> p) [member function] cls.add_method('Receive', 'void', [param('ns3::Ptr< ns3::Packet >', 'p')]) ## mpi-receiver.h (module 'mpi'): void ns3::MpiReceiver::SetReceiveCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function] cls.add_method('SetReceiveCallback', 'void', [param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')]) ## mpi-receiver.h (module 'mpi'): void ns3::MpiReceiver::DoDispose() [member function] cls.add_method('DoDispose', 'void', [], visibility='private', is_virtual=True) return def register_Ns3NixVector_methods(root_module, cls): cls.add_output_stream_operator() ## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor] cls.add_constructor([]) ## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) 
[constructor] cls.add_constructor([param('ns3::NixVector const &', 'o')]) ## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function] cls.add_method('AddNeighborIndex', 'void', [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')]) ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function] cls.add_method('BitCount', 'uint32_t', [param('uint32_t', 'numberOfNeighbors')], is_const=True) ## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::NixVector >', [], is_const=True) ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function] cls.add_method('Deserialize', 'uint32_t', [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')]) ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function] cls.add_method('ExtractNeighborIndex', 'uint32_t', [param('uint32_t', 'numberOfBits')]) ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function] cls.add_method('GetRemainingBits', 'uint32_t', []) ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True) ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function] cls.add_method('Serialize', 'uint32_t', [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True) return def register_Ns3Packet_methods(root_module, cls): cls.add_output_stream_operator() ## packet.h (module 'network'): ns3::Packet::Packet() [constructor] cls.add_constructor([]) ## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) 
[constructor] cls.add_constructor([param('ns3::Packet const &', 'o')]) ## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor] cls.add_constructor([param('uint32_t', 'size')]) ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor] cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')]) ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor] cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')]) ## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function] cls.add_method('AddAtEnd', 'void', [param('ns3::Ptr< ns3::Packet const >', 'packet')]) ## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function] cls.add_method('AddByteTag', 'void', [param('ns3::Tag const &', 'tag')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function] cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header')]) ## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function] cls.add_method('AddPacketTag', 'void', [param('ns3::Tag const &', 'tag')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function] cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'size')]) ## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function] cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer')]) ## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function] cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [], is_const=True) ## packet.h (module 'network'): 
ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::Packet >', [], is_const=True) ## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function] cls.add_method('CopyData', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function] cls.add_method('CopyData', 'void', [param('std::ostream *', 'os'), param('uint32_t', 'size')], is_const=True) ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function] cls.add_method('CreateFragment', 'ns3::Ptr< ns3::Packet >', [param('uint32_t', 'start'), param('uint32_t', 'length')], is_const=True) ## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function] cls.add_method('EnableChecking', 'void', [], is_static=True) ## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function] cls.add_method('EnablePrinting', 'void', [], is_static=True) ## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function] cls.add_method('FindFirstMatchingByteTag', 'bool', [param('ns3::Tag &', 'tag')], is_const=True) ## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function] cls.add_method('GetByteTagIterator', 'ns3::ByteTagIterator', [], is_const=True) ## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function] cls.add_method('GetNixVector', 'ns3::Ptr< ns3::NixVector >', [], is_const=True) ## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function] cls.add_method('GetPacketTagIterator', 'ns3::PacketTagIterator', [], is_const=True) ## packet.h (module 'network'): 
uint32_t ns3::Packet::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True) ## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function] cls.add_method('GetSize', 'uint32_t', [], is_const=True) ## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function] cls.add_method('GetUid', 'uint64_t', [], is_const=True) ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function] cls.add_method('PeekHeader', 'uint32_t', [param('ns3::Header &', 'header')], is_const=True) ## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function] cls.add_method('PeekPacketTag', 'bool', [param('ns3::Tag &', 'tag')], is_const=True) ## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function] cls.add_method('PeekTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')]) ## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function] cls.add_method('PrintByteTags', 'void', [param('std::ostream &', 'os')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function] cls.add_method('PrintPacketTags', 'void', [param('std::ostream &', 'os')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function] cls.add_method('RemoveAllByteTags', 'void', []) ## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function] cls.add_method('RemoveAllPacketTags', 'void', []) ## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function] cls.add_method('RemoveAtEnd', 'void', 
[param('uint32_t', 'size')]) ## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function] cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'size')]) ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function] cls.add_method('RemoveHeader', 'uint32_t', [param('ns3::Header &', 'header')]) ## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function] cls.add_method('RemovePacketTag', 'bool', [param('ns3::Tag &', 'tag')]) ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function] cls.add_method('RemoveTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')]) ## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function] cls.add_method('ReplacePacketTag', 'bool', [param('ns3::Tag &', 'tag')]) ## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function] cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> nixVector) [member function] cls.add_method('SetNixVector', 'void', [param('ns3::Ptr< ns3::NixVector >', 'nixVector')]) ## packet.h (module 'network'): std::string ns3::Packet::ToString() const [member function] cls.add_method('ToString', 'std::string', [], is_const=True) return def register_Ns3TimeValue_methods(root_module, cls): ## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor] cls.add_constructor([]) ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor] cls.add_constructor([param('ns3::Time const &', 'value')]) ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [constructor] cls.add_constructor([param('ns3::TimeValue const &', 'arg0')]) ## 
nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function] cls.add_method('Get', 'ns3::Time', [], is_const=True) ## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')]) return def register_Ns3TypeIdChecker_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [constructor] cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')]) return def register_Ns3TypeIdValue_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor] cls.add_constructor([param('ns3::TypeId const &', 'value')]) ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [constructor] cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')]) ## type-id.h (module 'core'): 
ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function] cls.add_method('Get', 'ns3::TypeId', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function] cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')]) return def register_Ns3AddressChecker_methods(root_module, cls): ## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor] cls.add_constructor([]) ## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [constructor] cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')]) return def register_Ns3AddressValue_methods(root_module, cls): ## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor] cls.add_constructor([]) ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor] cls.add_constructor([param('ns3::Address const &', 'value')]) ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [constructor] cls.add_constructor([param('ns3::AddressValue const &', 'arg0')]) ## address.h (module 
'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function] cls.add_method('Get', 'ns3::Address', [], is_const=True) ## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Address const &', 'value')]) return def register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls): ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor] cls.add_constructor([param('ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, 
ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')]) ## callback.h (module 'core'): static std::string ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function] cls.add_method('DoGetTypeid', 'std::string', [], is_static=True) ## callback.h (module 'core'): std::string ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function] cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True) ## callback.h (module 'core'): ns3::ObjectBase * ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()() [member operator] cls.add_method('operator()', 'ns3::ObjectBase *', [], is_pure_virtual=True, is_virtual=True, custom_name=u'__call__') return def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3Packet__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls): ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor] cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')]) ## 
callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function] cls.add_method('DoGetTypeid', 'std::string', [], is_static=True) ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function] cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True) ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::Packet> arg0) [member operator] cls.add_method('operator()', 'void', [param('ns3::Ptr< ns3::Packet >', 'arg0')], is_pure_virtual=True, is_virtual=True, custom_name=u'__call__') return def register_Ns3HashImplementation_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [constructor] cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor] cls.add_constructor([]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_pure_virtual=True, is_virtual=True) ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member 
function] cls.add_method('clear', 'void', [], is_pure_virtual=True, is_virtual=True) return def register_Ns3HashFunctionFnv1a_methods(root_module, cls): ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [constructor] cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')]) ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor] cls.add_constructor([]) ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionHash32_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [constructor] cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor] cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def 
register_Ns3HashFunctionHash64_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [constructor] cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor] cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionMurmur3_methods(root_module, cls): ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [constructor] cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')]) ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor] cls.add_constructor([]) ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), 
param('size_t const', 'size')], is_virtual=True) ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_functions(root_module): module = root_module register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module) register_functions_ns3_Hash(module.get_submodule('Hash'), root_module) register_functions_ns3_TracedValueCallback(module.get_submodule('TracedValueCallback'), root_module) return def register_functions_ns3_FatalImpl(module, root_module): return def register_functions_ns3_Hash(module, root_module): register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module) return def register_functions_ns3_Hash_Function(module, root_module): return def register_functions_ns3_TracedValueCallback(module, root_module): return def main(): out = FileCodeSink(sys.stdout) root_module = module_init() register_types(root_module) register_methods(root_module) register_functions(root_module) root_module.generate(out) if __name__ == '__main__': main()
gpl-2.0
zhenv5/scikit-learn
sklearn/externals/odict.py
63
9149
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. # Passes Python2.7's test suite and incorporates all the latest updates. # Copyright 2009 Raymond Hettinger # http://code.activestate.com/recipes/576693/ "Ordered dictionary" try: from thread import get_ident as _get_ident except ImportError: try: from dummy_thread import get_ident as _get_ident except ImportError: # Ensure that this module is still importable under Pythhon3 to avoid # crashing code-inspecting tools like nose. from _dummy_thread import get_ident as _get_ident try: from _abcoll import KeysView, ValuesView, ItemsView except ImportError: pass class OrderedDict(dict): 'Dictionary that remembers insertion order' # An inherited dict maps keys to values. # The inherited dict provides __getitem__, __len__, __contains__, and get. # The remaining methods are order-aware. # Big-O running times for all methods are the same as for regular dictionaries. # The internal self.__map dictionary maps keys to links in a doubly linked list. # The circular doubly linked list starts and ends with a sentinel element. # The sentinel element never gets deleted (this simplifies the algorithm). # Each link is stored as a list of length three: [PREV, NEXT, KEY]. def __init__(self, *args, **kwds): '''Initialize an ordered dictionary. Signature is the same as for regular dictionaries, but keyword arguments are not recommended because their insertion order is arbitrary. ''' if len(args) > 1: raise TypeError('expected at most 1 arguments, got %d' % len(args)) try: self.__root except AttributeError: self.__root = root = [] # sentinel node root[:] = [root, root, None] self.__map = {} self.__update(*args, **kwds) def __setitem__(self, key, value, dict_setitem=dict.__setitem__): 'od.__setitem__(i, y) <==> od[i]=y' # Setting a new item creates a new link which goes at the end of the linked # list, and the inherited dictionary is updated with the new key/value pair. 
if key not in self: root = self.__root last = root[0] last[1] = root[0] = self.__map[key] = [last, root, key] dict_setitem(self, key, value) def __delitem__(self, key, dict_delitem=dict.__delitem__): 'od.__delitem__(y) <==> del od[y]' # Deleting an existing item uses self.__map to find the link which is # then removed by updating the links in the predecessor and successor nodes. dict_delitem(self, key) link_prev, link_next, key = self.__map.pop(key) link_prev[1] = link_next link_next[0] = link_prev def __iter__(self): 'od.__iter__() <==> iter(od)' root = self.__root curr = root[1] while curr is not root: yield curr[2] curr = curr[1] def __reversed__(self): 'od.__reversed__() <==> reversed(od)' root = self.__root curr = root[0] while curr is not root: yield curr[2] curr = curr[0] def clear(self): 'od.clear() -> None. Remove all items from od.' try: for node in self.__map.itervalues(): del node[:] root = self.__root root[:] = [root, root, None] self.__map.clear() except AttributeError: pass dict.clear(self) def popitem(self, last=True): '''od.popitem() -> (k, v), return and remove a (key, value) pair. Pairs are returned in LIFO order if last is true or FIFO order if false. 
''' if not self: raise KeyError('dictionary is empty') root = self.__root if last: link = root[0] link_prev = link[0] link_prev[1] = root root[0] = link_prev else: link = root[1] link_next = link[1] root[1] = link_next link_next[0] = root key = link[2] del self.__map[key] value = dict.pop(self, key) return key, value # -- the following methods do not depend on the internal structure -- def keys(self): 'od.keys() -> list of keys in od' return list(self) def values(self): 'od.values() -> list of values in od' return [self[key] for key in self] def items(self): 'od.items() -> list of (key, value) pairs in od' return [(key, self[key]) for key in self] def iterkeys(self): 'od.iterkeys() -> an iterator over the keys in od' return iter(self) def itervalues(self): 'od.itervalues -> an iterator over the values in od' for k in self: yield self[k] def iteritems(self): 'od.iteritems -> an iterator over the (key, value) items in od' for k in self: yield (k, self[k]) def update(*args, **kwds): '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
If E is a dict instance, does: for k in E: od[k] = E[k] If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] Or if E is an iterable of items, does: for k, v in E: od[k] = v In either case, this is followed by: for k, v in F.items(): od[k] = v ''' if len(args) > 2: raise TypeError('update() takes at most 2 positional ' 'arguments (%d given)' % (len(args),)) elif not args: raise TypeError('update() takes at least 1 argument (0 given)') self = args[0] # Make progressively weaker assumptions about "other" other = () if len(args) == 2: other = args[1] if isinstance(other, dict): for key in other: self[key] = other[key] elif hasattr(other, 'keys'): for key in other.keys(): self[key] = other[key] else: for key, value in other: self[key] = value for key, value in kwds.items(): self[key] = value __update = update # let subclasses override update without breaking __init__ __marker = object() def pop(self, key, default=__marker): '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. ''' if key in self: result = self[key] del self[key] return result if default is self.__marker: raise KeyError(key) return default def setdefault(self, key, default=None): 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' if key in self: return self[key] self[key] = default return default def __repr__(self, _repr_running={}): 'od.__repr__() <==> repr(od)' call_key = id(self), _get_ident() if call_key in _repr_running: return '...' 
_repr_running[call_key] = 1 try: if not self: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, self.items()) finally: del _repr_running[call_key] def __reduce__(self): 'Return state information for pickling' items = [[k, self[k]] for k in self] inst_dict = vars(self).copy() for k in vars(OrderedDict()): inst_dict.pop(k, None) if inst_dict: return (self.__class__, (items,), inst_dict) return self.__class__, (items,) def copy(self): 'od.copy() -> a shallow copy of od' return self.__class__(self) @classmethod def fromkeys(cls, iterable, value=None): '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S and values equal to v (which defaults to None). ''' d = cls() for key in iterable: d[key] = value return d def __eq__(self, other): '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive while comparison to a regular mapping is order-insensitive. ''' if isinstance(other, OrderedDict): return len(self)==len(other) and self.items() == other.items() return dict.__eq__(self, other) def __ne__(self, other): return not self == other # -- the following methods are only used in Python 2.7 -- def viewkeys(self): "od.viewkeys() -> a set-like object providing a view on od's keys" return KeysView(self) def viewvalues(self): "od.viewvalues() -> an object providing a view on od's values" return ValuesView(self) def viewitems(self): "od.viewitems() -> a set-like object providing a view on od's items" return ItemsView(self)
bsd-3-clause
quarkonics/zstack-woodpecker
integrationtest/vm/virtualrouter/poc/test_poc_1_vm.py
4
1906
''' For POC Testing: create 1 VM and destroy it after 3s. It is for testing create/destroy 10000 VM. @author: Youyk ''' import zstackwoodpecker.test_util as test_util import zstackwoodpecker.test_lib as test_lib import zstackwoodpecker.test_state as test_state import os import time import zstackwoodpecker.operations.resource_operations as res_ops import zstackwoodpecker.zstack_test.zstack_test_vm as zstack_vm_header test_stub = test_lib.lib_get_test_stub() test_obj_dict = test_state.TestStateDict() parallel_num = 10 def test(): image_name = os.environ.get('imageName_s') image_uuid = test_lib.lib_get_image_by_name(image_name).uuid l3_name = os.environ.get('l3VlanNetworkName1') l3_net_uuid = test_lib.lib_get_l3_by_name(l3_name).uuid l3_uuid_list = [l3_net_uuid] vm_creation_option = test_util.VmOption() conditions = res_ops.gen_query_conditions('type', '=', 'UserVm') instance_offering_uuid = res_ops.query_resource(res_ops.INSTANCE_OFFERING, conditions)[0].uuid vm_name = '10k_vm-' + str(time.time()) vm_creation_option.set_instance_offering_uuid(instance_offering_uuid) vm_creation_option.set_l3_uuids(l3_uuid_list) vm_creation_option.set_image_uuid(image_uuid) #disk_offering = test_lib.lib_get_disk_offering_by_name(os.environ.get('smallDiskOfferingName')) #disk_offering_uuids = [disk_offering.uuid] #vm_creation_option.set_data_disk_uuids(disk_offering_uuids) vm_creation_option.set_name(vm_name) vm = zstack_vm_header.ZstackTestVm() vm.set_creation_option(vm_creation_option) vm.create() test_obj_dict.add_vm(vm) time.sleep(1) vm.destroy() test_util.test_pass('Create/Destroy VM successfully') #Will be called only if exception happens in test(). def error_cleanup(): global test_obj_dict test_lib.lib_error_cleanup(test_obj_dict)
apache-2.0
ufal/neuralmonkey
neuralmonkey/runners/xent_runner.py
1
1344
from typing import Dict, List, Union from typeguard import check_argument_types import tensorflow as tf import numpy as np from neuralmonkey.decoders.autoregressive import AutoregressiveDecoder from neuralmonkey.decoders.sequence_labeler import SequenceLabeler from neuralmonkey.decorators import tensor from neuralmonkey.runners.base_runner import BaseRunner SupportedDecoders = Union[AutoregressiveDecoder, SequenceLabeler] class XentRunner(BaseRunner[SupportedDecoders]): # pylint: disable=too-few-public-methods # Pylint issue here: https://github.com/PyCQA/pylint/issues/2607 class Executable(BaseRunner.Executable["XentRunner"]): def collect_results(self, results: List[Dict]) -> None: xents = np.mean([res["xents"] for res in results], axis=0) self.set_runner_result(outputs=xents.tolist(), losses=[float(np.mean(xents))]) # pylint: enable=too-few-public-methods def __init__(self, output_series: str, decoder: SupportedDecoders) -> None: check_argument_types() super().__init__(output_series, decoder) @tensor def fetches(self) -> Dict[str, tf.Tensor]: return {"xents": self.decoder.train_xents} @property def loss_names(self) -> List[str]: return ["xent"]
bsd-3-clause
ecoal95/servo
components/net/tests/cookie_http_state_utils.py
15
5966
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at https://mozilla.org/MPL/2.0/. import os import subprocess import tempfile REPO = "https://github.com/abarth/http-state.git" TEST_FILE = "cookie_http_state.rs" DOMAIN = "http://home.example.org:8888" RUST_FN = """ #[test]{should_panic} fn test_{name}() {{ let r = run("{set_location}", {set_cookies}, "{location}"); assert_eq!(&r, "{expect}"); }} """ SET_COOKIES_INDENT = 18 SHOULD_PANIC = "\n#[should_panic] // Look at cookie_http_state_utils.py if this test fails" # Those tests should PASS. But until fixes land in servo, keep them failing FAILING_TESTS = [ "0003", # Waiting for a way to clean expired cookies "0006", # Waiting for a way to clean expired cookies "mozilla0001", # Waiting for a way to clean expired cookies "mozilla0002", # Waiting for a way to clean expired cookies "mozilla0003", # Waiting for a way to clean expired cookies "mozilla0005", # Waiting for a way to clean expired cookies "mozilla0007", # Waiting for a way to clean expired cookies "mozilla0009", # Waiting for a way to clean expired cookies "mozilla0010", # Waiting for a way to clean expired cookies "mozilla0013", # Waiting for a way to clean expired cookies ] def list_tests(dir): suffix = "-test" def keep(name): return name.endswith(suffix) and not name.startswith("disabled") tests = [name[:-len(suffix)] for name in os.listdir(dir) if keep(name)] tests.sort() return tests def escape(s): """ Escape the string `s` so that it can be parsed by rust as a valid UTF-8 string. We can't use only `encode("unicode_escape")` as it produces things that rust does not accept ("\\xbf", "\\u6265" for example). So we manually convert all character whose code point is greater than 128 to \\u{code_point}. 
All other characters are encoded with "unicode_escape" to get escape sequences ("\\r" for example) except for `"` that we specifically escape because our string will be quoted by double-quotes. Lines are also limited in size, so split the string every 70 characters (gives room for indentation). """ res = "" last_split = 0 for c in s: if len(res) - last_split > 70: res += "\\\n" last_split = len(res) o = ord(c) if o == 34: res += "\\\"" continue if o >= 128: res += "\\u{" + hex(o)[2:] + "}" else: res += c.encode("unicode_escape") return res def format_slice_cookies(cookies): esc_cookies = ['"%s"' % escape(c) for c in cookies] if sum(len(s) for s in esc_cookies) < 80: sep = ", " else: sep = ",\n" + " " * SET_COOKIES_INDENT return "&[" + sep.join(esc_cookies) + "]" def generate_code_for_test(test_dir, name): if name in FAILING_TESTS: should_panic = SHOULD_PANIC else: should_panic = "" test_file = os.path.join(test_dir, name + "-test") expect_file = os.path.join(test_dir, name + "-expected") set_cookies = [] set_location = DOMAIN + "/cookie-parser?" + name expect = "" location = DOMAIN + "/cookie-parser-result?" 
+ name with open(test_file) as fo: for line in fo: line = line.decode("utf-8").rstrip() prefix = "Set-Cookie: " if line.startswith(prefix): set_cookies.append(line[len(prefix):]) prefix = "Location: " if line.startswith(prefix): location = line[len(prefix):] if location.startswith("/"): location = DOMAIN + location with open(expect_file) as fo: for line in fo: line = line.decode("utf-8").rstrip() prefix = "Cookie: " if line.startswith(prefix): expect = line[len(prefix):] return RUST_FN.format(name=name.replace('-', '_'), set_location=escape(set_location), set_cookies=format_slice_cookies(set_cookies), should_panic=should_panic, location=escape(location), expect=escape(expect)) def update_test_file(cachedir): workdir = os.path.dirname(os.path.realpath(__file__)) test_file = os.path.join(workdir, TEST_FILE) # Create the cache dir if not os.path.isdir(cachedir): os.makedirs(cachedir) # Clone or update the repo repo_dir = os.path.join(cachedir, "http-state") if os.path.isdir(repo_dir): args = ["git", "pull", "-f"] process = subprocess.Popen(args, cwd=repo_dir) if process.wait() != 0: print("failed to update the http-state git repo") return 1 else: args = ["git", "clone", REPO, repo_dir] process = subprocess.Popen(args) if process.wait() != 0: print("failed to clone the http-state git repo") return 1 # Truncate the unit test file to remove all existing tests with open(test_file, "r+") as fo: while True: line = fo.readline() if line.strip() == "// Test listing": fo.truncate() fo.flush() break if line == "": print("Failed to find listing delimiter on unit test file") return 1 # Append all tests to unit test file tests_dir = os.path.join(repo_dir, "tests", "data", "parser") with open(test_file, "a") as fo: for test in list_tests(tests_dir): fo.write(generate_code_for_test(tests_dir, test).encode("utf-8")) return 0 if __name__ == "__main__": update_test_file(tempfile.gettempdir())
mpl-2.0
fangxingli/hue
desktop/core/ext-py/simplejson/simplejson/__init__.py
448
12463
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data interchange format. :mod:`simplejson` exposes an API familiar to users of the standard library :mod:`marshal` and :mod:`pickle` modules. It is the externally maintained version of the :mod:`json` library contained in Python 2.6, but maintains compatibility with Python 2.4 and Python 2.5 and (currently) has significant performance advantages, even without using the optional C extension for speedups. Encoding basic Python object hierarchies:: >>> import simplejson as json >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}]) '["foo", {"bar": ["baz", null, 1.0, 2]}]' >>> print json.dumps("\"foo\bar") "\"foo\bar" >>> print json.dumps(u'\u1234') "\u1234" >>> print json.dumps('\\') "\\" >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True) {"a": 0, "b": 0, "c": 0} >>> from StringIO import StringIO >>> io = StringIO() >>> json.dump(['streaming API'], io) >>> io.getvalue() '["streaming API"]' Compact encoding:: >>> import simplejson as json >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':')) '[1,2,3,{"4":5,"6":7}]' Pretty printing:: >>> import simplejson as json >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4) >>> print '\n'.join([l.rstrip() for l in s.splitlines()]) { "4": 5, "6": 7 } Decoding JSON:: >>> import simplejson as json >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}] >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj True >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar' True >>> from StringIO import StringIO >>> io = StringIO('["streaming API"]') >>> json.load(io)[0] == 'streaming API' True Specializing JSON object decoding:: >>> import simplejson as json >>> def as_complex(dct): ... if '__complex__' in dct: ... return complex(dct['real'], dct['imag']) ... return dct ... >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}', ... 
object_hook=as_complex) (1+2j) >>> import decimal >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1') True Specializing JSON object encoding:: >>> import simplejson as json >>> def encode_complex(obj): ... if isinstance(obj, complex): ... return [obj.real, obj.imag] ... raise TypeError(repr(o) + " is not JSON serializable") ... >>> json.dumps(2 + 1j, default=encode_complex) '[2.0, 1.0]' >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j) '[2.0, 1.0]' >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j)) '[2.0, 1.0]' Using simplejson.tool from the shell to validate and pretty-print:: $ echo '{"json":"obj"}' | python -m simplejson.tool { "json": "obj" } $ echo '{ 1.2:3.4}' | python -m simplejson.tool Expecting property name: line 1 column 2 (char 2) """ __version__ = '2.0.9' __all__ = [ 'dump', 'dumps', 'load', 'loads', 'JSONDecoder', 'JSONEncoder', ] __author__ = 'Bob Ippolito <bob@redivi.com>' from decoder import JSONDecoder from encoder import JSONEncoder _default_encoder = JSONEncoder( skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, indent=None, separators=None, encoding='utf-8', default=None, ) def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', default=None, **kw): """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a ``.write()``-supporting file-like object). If ``skipkeys`` is true then ``dict`` keys that are not basic types (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) will be skipped instead of raising a ``TypeError``. If ``ensure_ascii`` is false, then the some chunks written to ``fp`` may be ``unicode`` instances, subject to normal Python ``str`` to ``unicode`` coercion rules. Unless ``fp.write()`` explicitly understands ``unicode`` (as in ``codecs.getwriter()``) this is likely to cause an error. 
If ``check_circular`` is false, then the circular reference check for container types will be skipped and a circular reference will result in an ``OverflowError`` (or worse). If ``allow_nan`` is false, then it will be a ``ValueError`` to serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in strict compliance of the JSON specification, instead of using the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). If ``indent`` is a non-negative integer, then JSON array elements and object members will be pretty-printed with that indent level. An indent level of 0 will only insert newlines. ``None`` is the most compact representation. If ``separators`` is an ``(item_separator, dict_separator)`` tuple then it will be used instead of the default ``(', ', ': ')`` separators. ``(',', ':')`` is the most compact JSON representation. ``encoding`` is the character encoding for str instances, default is UTF-8. ``default(obj)`` is a function that should return a serializable version of obj or raise TypeError. The default simply raises TypeError. To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the ``.default()`` method to serialize additional types), specify it with the ``cls`` kwarg. 
""" # cached encoder if (not skipkeys and ensure_ascii and check_circular and allow_nan and cls is None and indent is None and separators is None and encoding == 'utf-8' and default is None and not kw): iterable = _default_encoder.iterencode(obj) else: if cls is None: cls = JSONEncoder iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular, allow_nan=allow_nan, indent=indent, separators=separators, encoding=encoding, default=default, **kw).iterencode(obj) # could accelerate with writelines in some versions of Python, at # a debuggability cost for chunk in iterable: fp.write(chunk) def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', default=None, **kw): """Serialize ``obj`` to a JSON formatted ``str``. If ``skipkeys`` is false then ``dict`` keys that are not basic types (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) will be skipped instead of raising a ``TypeError``. If ``ensure_ascii`` is false, then the return value will be a ``unicode`` instance subject to normal Python ``str`` to ``unicode`` coercion rules instead of being escaped to an ASCII ``str``. If ``check_circular`` is false, then the circular reference check for container types will be skipped and a circular reference will result in an ``OverflowError`` (or worse). If ``allow_nan`` is false, then it will be a ``ValueError`` to serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in strict compliance of the JSON specification, instead of using the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). If ``indent`` is a non-negative integer, then JSON array elements and object members will be pretty-printed with that indent level. An indent level of 0 will only insert newlines. ``None`` is the most compact representation. 
If ``separators`` is an ``(item_separator, dict_separator)`` tuple then it will be used instead of the default ``(', ', ': ')`` separators. ``(',', ':')`` is the most compact JSON representation. ``encoding`` is the character encoding for str instances, default is UTF-8. ``default(obj)`` is a function that should return a serializable version of obj or raise TypeError. The default simply raises TypeError. To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the ``.default()`` method to serialize additional types), specify it with the ``cls`` kwarg. """ # cached encoder if (not skipkeys and ensure_ascii and check_circular and allow_nan and cls is None and indent is None and separators is None and encoding == 'utf-8' and default is None and not kw): return _default_encoder.encode(obj) if cls is None: cls = JSONEncoder return cls( skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular, allow_nan=allow_nan, indent=indent, separators=separators, encoding=encoding, default=default, **kw).encode(obj) _default_decoder = JSONDecoder(encoding=None, object_hook=None) def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, **kw): """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a JSON document) to a Python object. If the contents of ``fp`` is encoded with an ASCII based encoding other than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must be specified. Encodings that are not ASCII based (such as UCS-2) are not allowed, and should be wrapped with ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode`` object and passed to ``loads()`` ``object_hook`` is an optional function that will be called with the result of any object literal decode (a ``dict``). The return value of ``object_hook`` will be used instead of the ``dict``. This feature can be used to implement custom decoders (e.g. JSON-RPC class hinting). 
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. """ return loads(fp.read(), encoding=encoding, cls=cls, object_hook=object_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, **kw) def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, **kw): """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON document) to a Python object. If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name must be specified. Encodings that are not ASCII based (such as UCS-2) are not allowed and should be decoded to ``unicode`` first. ``object_hook`` is an optional function that will be called with the result of any object literal decode (a ``dict``). The return value of ``object_hook`` will be used instead of the ``dict``. This feature can be used to implement custom decoders (e.g. JSON-RPC class hinting). ``parse_float``, if specified, will be called with the string of every JSON float to be decoded. By default this is equivalent to float(num_str). This can be used to use another datatype or parser for JSON floats (e.g. decimal.Decimal). ``parse_int``, if specified, will be called with the string of every JSON int to be decoded. By default this is equivalent to int(num_str). This can be used to use another datatype or parser for JSON integers (e.g. float). ``parse_constant``, if specified, will be called with one of the following strings: -Infinity, Infinity, NaN, null, true, false. This can be used to raise an exception if invalid JSON numbers are encountered. To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. 
""" if (cls is None and encoding is None and object_hook is None and parse_int is None and parse_float is None and parse_constant is None and not kw): return _default_decoder.decode(s) if cls is None: cls = JSONDecoder if object_hook is not None: kw['object_hook'] = object_hook if parse_float is not None: kw['parse_float'] = parse_float if parse_int is not None: kw['parse_int'] = parse_int if parse_constant is not None: kw['parse_constant'] = parse_constant return cls(encoding=encoding, **kw).decode(s)
apache-2.0
vipul-sharma20/oh-mainline
vendor/packages/gdata/tests/gdata_tests/books/service_test.py
127
2096
#!/usr/bin/python __author__ = "James Sams <sams.james@gmail.com>" import unittest import getpass import atom import gdata.books import gdata.books.service from gdata import test_data username = "" password = "" class BookCRUDTests(unittest.TestCase): def setUp(self): self.service = gdata.books.service.BookService(email=username, password=password, source="Google-PythonGdataTest-1") if username and password: self.authenticated = True self.service.ProgrammaticLogin() else: self.authenticated = False def testPublicSearch(self): entry = self.service.get_by_google_id("b7GZr5Btp30C") self.assertEquals((entry.creator[0].text, entry.dc_title[0].text), ('John Rawls', 'A theory of justice')) feed = self.service.search_by_keyword(isbn="9780198250548") feed1 = self.service.search("9780198250548") self.assertEquals(len(feed.entry), 1) self.assertEquals(len(feed1.entry), 1) def testLibraryCrd(self): """ the success of the create operations assumes the book was not already in the library. if it was, there will not be a failure, but a successful add will not actually be tested. """ if not self.authenticated: return entry = self.service.get_by_google_id("b7GZr5Btp30C") entry = self.service.add_item_to_library(entry) lib = list(self.service.get_library()) self.assert_(entry.to_dict()['title'] in [x.to_dict()['title'] for x in lib]) self.service.remove_item_from_library(entry) lib = list(self.service.get_library()) self.assert_(entry.to_dict()['title'] not in [x.to_dict()['title'] for x in lib]) def testAnnotations(self): "annotations do not behave as expected" pass if __name__ == "__main__": print "Please use a test account. May cause data loss." username = raw_input("Google Username: ").strip() password = getpass.getpass() unittest.main()
agpl-3.0
pmisik/buildbot
master/buildbot/db/migrate/versions/041_add_N_N_tagsbuilders.py
6
2147
# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine builders = sautils.Table('builders', metadata, autoload=True) # drop the tags column builders.c.tags.drop() tags = sautils.Table( 'tags', metadata, sa.Column('id', sa.Integer, primary_key=True), # tag's name sa.Column('name', sa.Text, nullable=False), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), ) # a many-to-may relationship between builders and tags builders_tags = sautils.Table( 'builders_tags', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id'), nullable=False), sa.Column('tagid', sa.Integer, sa.ForeignKey('tags.id'), nullable=False), ) # create the new tables tags.create() builders_tags.create() # and the indices idx = sa.Index('builders_tags_builderid', builders_tags.c.builderid) idx.create() idx = sa.Index('builders_tags_unique', builders_tags.c.builderid, builders_tags.c.tagid, unique=True) idx.create() idx = sa.Index('tag_name_hash', tags.c.name_hash, unique=True) idx.create()
gpl-2.0
SeanMcGrath/ScopeOut
ScopeOut.py
1
1297
""" ScopeOut """ import sys import signal import logging import os from scopeout.client import ThreadedClient from scopeout.config import ScopeOutConfig as Config def main(): print("Initializing ScopeOut...") logger = logging.getLogger('ScopeOut') logger.setLevel(logging.DEBUG) # create file handler which logs even debug messages log_dir = Config.get('Logging', 'log_dir') if not os.path.exists(log_dir): os.makedirs(log_dir) log_path = os.path.join(log_dir, Config.get('Logging', 'log_file')) fh = logging.FileHandler(log_path) fh.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.ERROR) # create formatter and add it to the handlers formatter = logging.Formatter('%(asctime)s - %(name)s:%(lineno)d - %(levelname)s - %(message)s') fh.setFormatter(formatter) ch.setFormatter(formatter) # add the handlers to the logger logger.addHandler(fh) logger.addHandler(ch) logger.info("Initializing ScopeOut...") app = ThreadedClient(sys.argv) logger.info("ScopeOut initialization completed") # Enable keyboard shortcuts to kill from command line signal.signal(signal.SIGINT, signal.SIG_DFL) return app.exec_() if __name__ == "__main__": sys.exit(main())
mit
Affix/CouchPotatoServer
libs/guessit/transfo/guess_properties.py
150
1273
#!/usr/bin/env python # -*- coding: utf-8 -*- # # GuessIt - A library for guessing information from filenames # Copyright (c) 2012 Nicolas Wack <wackou@gmail.com> # # GuessIt is free software; you can redistribute it and/or modify it under # the terms of the Lesser GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # GuessIt is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Lesser GNU General Public License for more details. # # You should have received a copy of the Lesser GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from __future__ import unicode_literals from guessit.transfo import SingleNodeGuesser from guessit.patterns import find_properties import logging log = logging.getLogger(__name__) def guess_properties(string): try: prop, value, pos, end = find_properties(string)[0] return { prop: value }, (pos, end) except IndexError: return None, None def process(mtree): SingleNodeGuesser(guess_properties, 1.0, log).process(mtree)
gpl-3.0
gauribhoite/personfinder
tools/remote_api.py
4
5974
#!/usr/bin/python2.7 # Copyright 2009-2010 by Ka-Ping Yee # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """An interactive Python console connected to an app's datastore. Instead of running this script directly, use the 'console' shell script, which sets up the PYTHONPATH and other necessary environment variables.""" import code import getpass import logging import optparse import os import sys import urllib2 import urlparse import yaml from google.appengine.ext.remote_api import remote_api_stub from google.appengine.ext import db def key_repr(key): """A more convenient replacement for db.Key.__repr__.""" levels = [] while key: levels.insert(0, '%s %s' % (key.kind(), key.id() or repr(key.name()))) key = key.parent() return '<Key: %s>' % '/'.join(levels) def model_repr(model): """A more convenient replacement for db.Model.__repr__.""" if model.is_saved(): key = model.key() return '<%s: %s>' % (key.kind(), key.id() or repr(key.name())) else: return '<%s: unsaved>' % model.kind() # Use a dummy password when connecting to a development app server. password = (address == 'localhost' and 'foo') or None def parse_url(url): # Determine the protocol, host, port, and path from the URL argument. 
if '//' not in url: url = '//' + url scheme, netloc, path, query, fragment = urlparse.urlsplit(url) host, port = urllib2.splitport(netloc) port = int(port or (scheme == 'http' and 80 or 443)) # default to https secure = (port == 443) host = host or 'localhost' path = path or '/_ah/remote_api' return secure, host, port, path def connect(url, email=None, password=None, exit_on_failure=False): """Sets up a connection to an app that has the remote_api handler.""" secure, host, port, path = parse_url(url) hostport = '%s:%d' % (host, port) url = (secure and 'https' or 'http') + '://' + hostport + path def get_login(): # Get the e-mail and password from args, os.environ, or the user. e = email or os.environ.get('USER_EMAIL') if not e: sys.stderr.write('User e-mail: ') sys.stderr.flush() e = raw_input() # don't use raw_input's prompt (goes to stdout) else: print >>sys.stderr, 'User e-mail: %s' % e os.environ['USER_EMAIL'] = e # used by users.get_current_user() p = password if not p and host != 'localhost': p = getpass.getpass('Password: ', sys.stderr) return e, p # Connect to the appserver. 
try: logging.basicConfig(file=sys.stderr, level=logging.ERROR) try: remote_api_stub.ConfigureRemoteApi( None, path, get_login, hostport, secure=secure) except Exception, e: if not path.endswith('/remote_api'): path = path.rstrip('/') + '/remote_api' remote_api_stub.ConfigureRemoteApi( None, path, get_login, hostport, secure=secure) else: raise except Exception, e: if isinstance(e, urllib2.URLError): reason = hasattr(e.reason, 'args') and e.reason.args[-1] or e.reason print >>sys.stderr, 'Cannot connect to %s: %s' % (hostport, reason) if exit_on_failure: sys.exit(1) return None # ConfigureRemoteApi sets os.environ['APPLICATION_ID'] app_id = os.environ['APPLICATION_ID'] sys.ps1 = app_id + '> ' # for the interactive console return url, app_id def main(): parser = optparse.OptionParser(usage='''%prog [options] <appserver_url> Starts an interactive Python console connected to an App Engine datastore. Use the <appserver_url> argument to set the protocol, hostname, port number, and path to the remote_api handler. If <appserver_url> does not include a protocol or port number, the default protocol is HTTPS. The default path is /_ah/remote_api (the default for "remote_api: on" in app.yaml). Examples: # Start Python but don't connect % %prog # Connect to xyz.appspot.com, port 443, path /_ah/remote_api % %prog xyz.appspot.com # Connect to foo.org, port 80, try /bar/baz, then try /bar/baz/remote_api % %prog http://foo.org/bar/baz # Connect to localhost, port 6789, path /_ah/remote_api % %prog :6789''') parser.add_option('-e', dest='email', help='user e-mail (default: $USER_EMAIL)') parser.add_option('-c', dest='command', help='Python command to execute') options, args = parser.parse_args() # Connect to the app server. if args: url, app_id = connect(args[0], options.email, exit_on_failure=True) banner = 'Connected to: ' + url else: banner = 'Not connected. Use connect(appserver_url) to connect.' 
# Set up more useful representations for interactive data manipulation # and debugging. Alas, the App Engine runtime relies on the specific # output of repr(), so this isn't safe in production, only debugging. db.Key.__repr__ = key_repr db.Model.__repr__ = model_repr locals()['connect'] = connect # Run startup commands. rc = os.environ.get('REMOTE_API_RC', '') if rc: banner = (banner + '\n' + rc).strip() exec rc in globals(), locals() if options.command: exec options.command else: code.interact(banner, None, locals()) if __name__ == '__main__': main()
apache-2.0
40223125/40223125-2
static/Brython3.1.1-20150328-091302/Lib/test/re_tests.py
879
31796
#!/usr/bin/env python3 # -*- mode: python -*- # Re test suite and benchmark suite v1.5 # The 3 possible outcomes for each pattern [SUCCEED, FAIL, SYNTAX_ERROR] = range(3) # Benchmark suite (needs expansion) # # The benchmark suite does not test correctness, just speed. The # first element of each tuple is the regex pattern; the second is a # string to match it against. The benchmarking code will embed the # second string inside several sizes of padding, to test how regex # matching performs on large strings. benchmarks = [ # test common prefix ('Python|Perl', 'Perl'), # Alternation ('(Python|Perl)', 'Perl'), # Grouped alternation ('Python|Perl|Tcl', 'Perl'), # Alternation ('(Python|Perl|Tcl)', 'Perl'), # Grouped alternation ('(Python)\\1', 'PythonPython'), # Backreference ('([0a-z][a-z0-9]*,)+', 'a5,b7,c9,'), # Disable the fastmap optimization ('([a-z][a-z0-9]*,)+', 'a5,b7,c9,'), # A few sets ('Python', 'Python'), # Simple text literal ('.*Python', 'Python'), # Bad text literal ('.*Python.*', 'Python'), # Worse text literal ('.*(Python)', 'Python'), # Bad text literal with grouping ] # Test suite (for verifying correctness) # # The test suite is a list of 5- or 3-tuples. The 5 parts of a # complete tuple are: # element 0: a string containing the pattern # 1: the string to match against the pattern # 2: the expected result (SUCCEED, FAIL, SYNTAX_ERROR) # 3: a string that will be eval()'ed to produce a test string. # This is an arbitrary Python expression; the available # variables are "found" (the whole match), and "g1", "g2", ... # up to "g99" contain the contents of each group, or the # string 'None' if the group wasn't given a value, or the # string 'Error' if the group index was out of range; # also "groups", the return value of m.group() (a tuple). # 4: The expected result of evaluating the expression. # If the two don't match, an error is reported. # # If the regex isn't expected to work, the latter two elements can be omitted. 
tests = [ # Test ?P< and ?P= extensions ('(?P<foo_123', '', SYNTAX_ERROR), # Unterminated group identifier ('(?P<1>a)', '', SYNTAX_ERROR), # Begins with a digit ('(?P<!>a)', '', SYNTAX_ERROR), # Begins with an illegal char ('(?P<foo!>a)', '', SYNTAX_ERROR), # Begins with an illegal char # Same tests, for the ?P= form ('(?P<foo_123>a)(?P=foo_123', 'aa', SYNTAX_ERROR), ('(?P<foo_123>a)(?P=1)', 'aa', SYNTAX_ERROR), ('(?P<foo_123>a)(?P=!)', 'aa', SYNTAX_ERROR), ('(?P<foo_123>a)(?P=foo_124', 'aa', SYNTAX_ERROR), # Backref to undefined group ('(?P<foo_123>a)', 'a', SUCCEED, 'g1', 'a'), ('(?P<foo_123>a)(?P=foo_123)', 'aa', SUCCEED, 'g1', 'a'), # Test octal escapes ('\\1', 'a', SYNTAX_ERROR), # Backreference ('[\\1]', '\1', SUCCEED, 'found', '\1'), # Character ('\\09', chr(0) + '9', SUCCEED, 'found', chr(0) + '9'), ('\\141', 'a', SUCCEED, 'found', 'a'), ('(a)(b)(c)(d)(e)(f)(g)(h)(i)(j)(k)(l)\\119', 'abcdefghijklk9', SUCCEED, 'found+"-"+g11', 'abcdefghijklk9-k'), # Test \0 is handled everywhere (r'\0', '\0', SUCCEED, 'found', '\0'), (r'[\0a]', '\0', SUCCEED, 'found', '\0'), (r'[a\0]', '\0', SUCCEED, 'found', '\0'), (r'[^a\0]', '\0', FAIL), # Test various letter escapes (r'\a[\b]\f\n\r\t\v', '\a\b\f\n\r\t\v', SUCCEED, 'found', '\a\b\f\n\r\t\v'), (r'[\a][\b][\f][\n][\r][\t][\v]', '\a\b\f\n\r\t\v', SUCCEED, 'found', '\a\b\f\n\r\t\v'), # NOTE: not an error under PCRE/PRE: # (r'\u', '', SYNTAX_ERROR), # A Perl escape (r'\c\e\g\h\i\j\k\m\o\p\q\y\z', 'ceghijkmopqyz', SUCCEED, 'found', 'ceghijkmopqyz'), (r'\xff', '\377', SUCCEED, 'found', chr(255)), # new \x semantics (r'\x00ffffffffffffff', '\377', FAIL, 'found', chr(255)), (r'\x00f', '\017', FAIL, 'found', chr(15)), (r'\x00fe', '\376', FAIL, 'found', chr(254)), # (r'\x00ffffffffffffff', '\377', SUCCEED, 'found', chr(255)), # (r'\x00f', '\017', SUCCEED, 'found', chr(15)), # (r'\x00fe', '\376', SUCCEED, 'found', chr(254)), (r"^\w+=(\\[\000-\277]|[^\n\\])*", "SRC=eval.c g.c blah blah blah \\\\\n\tapes.c", SUCCEED, 'found', 
"SRC=eval.c g.c blah blah blah \\\\"), # Test that . only matches \n in DOTALL mode ('a.b', 'acb', SUCCEED, 'found', 'acb'), ('a.b', 'a\nb', FAIL), ('a.*b', 'acc\nccb', FAIL), ('a.{4,5}b', 'acc\nccb', FAIL), ('a.b', 'a\rb', SUCCEED, 'found', 'a\rb'), ('a.b(?s)', 'a\nb', SUCCEED, 'found', 'a\nb'), ('a.*(?s)b', 'acc\nccb', SUCCEED, 'found', 'acc\nccb'), ('(?s)a.{4,5}b', 'acc\nccb', SUCCEED, 'found', 'acc\nccb'), ('(?s)a.b', 'a\nb', SUCCEED, 'found', 'a\nb'), (')', '', SYNTAX_ERROR), # Unmatched right bracket ('', '', SUCCEED, 'found', ''), # Empty pattern ('abc', 'abc', SUCCEED, 'found', 'abc'), ('abc', 'xbc', FAIL), ('abc', 'axc', FAIL), ('abc', 'abx', FAIL), ('abc', 'xabcy', SUCCEED, 'found', 'abc'), ('abc', 'ababc', SUCCEED, 'found', 'abc'), ('ab*c', 'abc', SUCCEED, 'found', 'abc'), ('ab*bc', 'abc', SUCCEED, 'found', 'abc'), ('ab*bc', 'abbc', SUCCEED, 'found', 'abbc'), ('ab*bc', 'abbbbc', SUCCEED, 'found', 'abbbbc'), ('ab+bc', 'abbc', SUCCEED, 'found', 'abbc'), ('ab+bc', 'abc', FAIL), ('ab+bc', 'abq', FAIL), ('ab+bc', 'abbbbc', SUCCEED, 'found', 'abbbbc'), ('ab?bc', 'abbc', SUCCEED, 'found', 'abbc'), ('ab?bc', 'abc', SUCCEED, 'found', 'abc'), ('ab?bc', 'abbbbc', FAIL), ('ab?c', 'abc', SUCCEED, 'found', 'abc'), ('^abc$', 'abc', SUCCEED, 'found', 'abc'), ('^abc$', 'abcc', FAIL), ('^abc', 'abcc', SUCCEED, 'found', 'abc'), ('^abc$', 'aabc', FAIL), ('abc$', 'aabc', SUCCEED, 'found', 'abc'), ('^', 'abc', SUCCEED, 'found+"-"', '-'), ('$', 'abc', SUCCEED, 'found+"-"', '-'), ('a.c', 'abc', SUCCEED, 'found', 'abc'), ('a.c', 'axc', SUCCEED, 'found', 'axc'), ('a.*c', 'axyzc', SUCCEED, 'found', 'axyzc'), ('a.*c', 'axyzd', FAIL), ('a[bc]d', 'abc', FAIL), ('a[bc]d', 'abd', SUCCEED, 'found', 'abd'), ('a[b-d]e', 'abd', FAIL), ('a[b-d]e', 'ace', SUCCEED, 'found', 'ace'), ('a[b-d]', 'aac', SUCCEED, 'found', 'ac'), ('a[-b]', 'a-', SUCCEED, 'found', 'a-'), ('a[\\-b]', 'a-', SUCCEED, 'found', 'a-'), # NOTE: not an error under PCRE/PRE: # ('a[b-]', 'a-', SYNTAX_ERROR), ('a[]b', '-', 
SYNTAX_ERROR), ('a[', '-', SYNTAX_ERROR), ('a\\', '-', SYNTAX_ERROR), ('abc)', '-', SYNTAX_ERROR), ('(abc', '-', SYNTAX_ERROR), ('a]', 'a]', SUCCEED, 'found', 'a]'), ('a[]]b', 'a]b', SUCCEED, 'found', 'a]b'), ('a[\]]b', 'a]b', SUCCEED, 'found', 'a]b'), ('a[^bc]d', 'aed', SUCCEED, 'found', 'aed'), ('a[^bc]d', 'abd', FAIL), ('a[^-b]c', 'adc', SUCCEED, 'found', 'adc'), ('a[^-b]c', 'a-c', FAIL), ('a[^]b]c', 'a]c', FAIL), ('a[^]b]c', 'adc', SUCCEED, 'found', 'adc'), ('\\ba\\b', 'a-', SUCCEED, '"-"', '-'), ('\\ba\\b', '-a', SUCCEED, '"-"', '-'), ('\\ba\\b', '-a-', SUCCEED, '"-"', '-'), ('\\by\\b', 'xy', FAIL), ('\\by\\b', 'yz', FAIL), ('\\by\\b', 'xyz', FAIL), ('x\\b', 'xyz', FAIL), ('x\\B', 'xyz', SUCCEED, '"-"', '-'), ('\\Bz', 'xyz', SUCCEED, '"-"', '-'), ('z\\B', 'xyz', FAIL), ('\\Bx', 'xyz', FAIL), ('\\Ba\\B', 'a-', FAIL, '"-"', '-'), ('\\Ba\\B', '-a', FAIL, '"-"', '-'), ('\\Ba\\B', '-a-', FAIL, '"-"', '-'), ('\\By\\B', 'xy', FAIL), ('\\By\\B', 'yz', FAIL), ('\\By\\b', 'xy', SUCCEED, '"-"', '-'), ('\\by\\B', 'yz', SUCCEED, '"-"', '-'), ('\\By\\B', 'xyz', SUCCEED, '"-"', '-'), ('ab|cd', 'abc', SUCCEED, 'found', 'ab'), ('ab|cd', 'abcd', SUCCEED, 'found', 'ab'), ('()ef', 'def', SUCCEED, 'found+"-"+g1', 'ef-'), ('$b', 'b', FAIL), ('a\\(b', 'a(b', SUCCEED, 'found+"-"+g1', 'a(b-Error'), ('a\\(*b', 'ab', SUCCEED, 'found', 'ab'), ('a\\(*b', 'a((b', SUCCEED, 'found', 'a((b'), ('a\\\\b', 'a\\b', SUCCEED, 'found', 'a\\b'), ('((a))', 'abc', SUCCEED, 'found+"-"+g1+"-"+g2', 'a-a-a'), ('(a)b(c)', 'abc', SUCCEED, 'found+"-"+g1+"-"+g2', 'abc-a-c'), ('a+b+c', 'aabbabc', SUCCEED, 'found', 'abc'), ('(a+|b)*', 'ab', SUCCEED, 'found+"-"+g1', 'ab-b'), ('(a+|b)+', 'ab', SUCCEED, 'found+"-"+g1', 'ab-b'), ('(a+|b)?', 'ab', SUCCEED, 'found+"-"+g1', 'a-a'), (')(', '-', SYNTAX_ERROR), ('[^ab]*', 'cde', SUCCEED, 'found', 'cde'), ('abc', '', FAIL), ('a*', '', SUCCEED, 'found', ''), ('a|b|c|d|e', 'e', SUCCEED, 'found', 'e'), ('(a|b|c|d|e)f', 'ef', SUCCEED, 'found+"-"+g1', 'ef-e'), ('abcd*efg', 
'abcdefg', SUCCEED, 'found', 'abcdefg'), ('ab*', 'xabyabbbz', SUCCEED, 'found', 'ab'), ('ab*', 'xayabbbz', SUCCEED, 'found', 'a'), ('(ab|cd)e', 'abcde', SUCCEED, 'found+"-"+g1', 'cde-cd'), ('[abhgefdc]ij', 'hij', SUCCEED, 'found', 'hij'), ('^(ab|cd)e', 'abcde', FAIL, 'xg1y', 'xy'), ('(abc|)ef', 'abcdef', SUCCEED, 'found+"-"+g1', 'ef-'), ('(a|b)c*d', 'abcd', SUCCEED, 'found+"-"+g1', 'bcd-b'), ('(ab|ab*)bc', 'abc', SUCCEED, 'found+"-"+g1', 'abc-a'), ('a([bc]*)c*', 'abc', SUCCEED, 'found+"-"+g1', 'abc-bc'), ('a([bc]*)(c*d)', 'abcd', SUCCEED, 'found+"-"+g1+"-"+g2', 'abcd-bc-d'), ('a([bc]+)(c*d)', 'abcd', SUCCEED, 'found+"-"+g1+"-"+g2', 'abcd-bc-d'), ('a([bc]*)(c+d)', 'abcd', SUCCEED, 'found+"-"+g1+"-"+g2', 'abcd-b-cd'), ('a[bcd]*dcdcde', 'adcdcde', SUCCEED, 'found', 'adcdcde'), ('a[bcd]+dcdcde', 'adcdcde', FAIL), ('(ab|a)b*c', 'abc', SUCCEED, 'found+"-"+g1', 'abc-ab'), ('((a)(b)c)(d)', 'abcd', SUCCEED, 'g1+"-"+g2+"-"+g3+"-"+g4', 'abc-a-b-d'), ('[a-zA-Z_][a-zA-Z0-9_]*', 'alpha', SUCCEED, 'found', 'alpha'), ('^a(bc+|b[eh])g|.h$', 'abh', SUCCEED, 'found+"-"+g1', 'bh-None'), ('(bc+d$|ef*g.|h?i(j|k))', 'effgz', SUCCEED, 'found+"-"+g1+"-"+g2', 'effgz-effgz-None'), ('(bc+d$|ef*g.|h?i(j|k))', 'ij', SUCCEED, 'found+"-"+g1+"-"+g2', 'ij-ij-j'), ('(bc+d$|ef*g.|h?i(j|k))', 'effg', FAIL), ('(bc+d$|ef*g.|h?i(j|k))', 'bcdd', FAIL), ('(bc+d$|ef*g.|h?i(j|k))', 'reffgz', SUCCEED, 'found+"-"+g1+"-"+g2', 'effgz-effgz-None'), ('(((((((((a)))))))))', 'a', SUCCEED, 'found', 'a'), ('multiple words of text', 'uh-uh', FAIL), ('multiple words', 'multiple words, yeah', SUCCEED, 'found', 'multiple words'), ('(.*)c(.*)', 'abcde', SUCCEED, 'found+"-"+g1+"-"+g2', 'abcde-ab-de'), ('\\((.*), (.*)\\)', '(a, b)', SUCCEED, 'g2+"-"+g1', 'b-a'), ('[k]', 'ab', FAIL), ('a[-]?c', 'ac', SUCCEED, 'found', 'ac'), ('(abc)\\1', 'abcabc', SUCCEED, 'g1', 'abc'), ('([a-c]*)\\1', 'abcabc', SUCCEED, 'g1', 'abc'), ('^(.+)?B', 'AB', SUCCEED, 'g1', 'A'), ('(a+).\\1$', 'aaaaa', SUCCEED, 'found+"-"+g1', 'aaaaa-aa'), 
('^(a+).\\1$', 'aaaa', FAIL), ('(abc)\\1', 'abcabc', SUCCEED, 'found+"-"+g1', 'abcabc-abc'), ('([a-c]+)\\1', 'abcabc', SUCCEED, 'found+"-"+g1', 'abcabc-abc'), ('(a)\\1', 'aa', SUCCEED, 'found+"-"+g1', 'aa-a'), ('(a+)\\1', 'aa', SUCCEED, 'found+"-"+g1', 'aa-a'), ('(a+)+\\1', 'aa', SUCCEED, 'found+"-"+g1', 'aa-a'), ('(a).+\\1', 'aba', SUCCEED, 'found+"-"+g1', 'aba-a'), ('(a)ba*\\1', 'aba', SUCCEED, 'found+"-"+g1', 'aba-a'), ('(aa|a)a\\1$', 'aaa', SUCCEED, 'found+"-"+g1', 'aaa-a'), ('(a|aa)a\\1$', 'aaa', SUCCEED, 'found+"-"+g1', 'aaa-a'), ('(a+)a\\1$', 'aaa', SUCCEED, 'found+"-"+g1', 'aaa-a'), ('([abc]*)\\1', 'abcabc', SUCCEED, 'found+"-"+g1', 'abcabc-abc'), ('(a)(b)c|ab', 'ab', SUCCEED, 'found+"-"+g1+"-"+g2', 'ab-None-None'), ('(a)+x', 'aaax', SUCCEED, 'found+"-"+g1', 'aaax-a'), ('([ac])+x', 'aacx', SUCCEED, 'found+"-"+g1', 'aacx-c'), ('([^/]*/)*sub1/', 'd:msgs/tdir/sub1/trial/away.cpp', SUCCEED, 'found+"-"+g1', 'd:msgs/tdir/sub1/-tdir/'), ('([^.]*)\\.([^:]*):[T ]+(.*)', 'track1.title:TBlah blah blah', SUCCEED, 'found+"-"+g1+"-"+g2+"-"+g3', 'track1.title:TBlah blah blah-track1-title-Blah blah blah'), ('([^N]*N)+', 'abNNxyzN', SUCCEED, 'found+"-"+g1', 'abNNxyzN-xyzN'), ('([^N]*N)+', 'abNNxyz', SUCCEED, 'found+"-"+g1', 'abNN-N'), ('([abc]*)x', 'abcx', SUCCEED, 'found+"-"+g1', 'abcx-abc'), ('([abc]*)x', 'abc', FAIL), ('([xyz]*)x', 'abcx', SUCCEED, 'found+"-"+g1', 'x-'), ('(a)+b|aac', 'aac', SUCCEED, 'found+"-"+g1', 'aac-None'), # Test symbolic groups ('(?P<i d>aaa)a', 'aaaa', SYNTAX_ERROR), ('(?P<id>aaa)a', 'aaaa', SUCCEED, 'found+"-"+id', 'aaaa-aaa'), ('(?P<id>aa)(?P=id)', 'aaaa', SUCCEED, 'found+"-"+id', 'aaaa-aa'), ('(?P<id>aa)(?P=xd)', 'aaaa', SYNTAX_ERROR), # Test octal escapes/memory references ('\\1', 'a', SYNTAX_ERROR), ('\\09', chr(0) + '9', SUCCEED, 'found', chr(0) + '9'), ('\\141', 'a', SUCCEED, 'found', 'a'), ('(a)(b)(c)(d)(e)(f)(g)(h)(i)(j)(k)(l)\\119', 'abcdefghijklk9', SUCCEED, 'found+"-"+g11', 'abcdefghijklk9-k'), # All tests from Perl ('abc', 'abc', 
SUCCEED, 'found', 'abc'), ('abc', 'xbc', FAIL), ('abc', 'axc', FAIL), ('abc', 'abx', FAIL), ('abc', 'xabcy', SUCCEED, 'found', 'abc'), ('abc', 'ababc', SUCCEED, 'found', 'abc'), ('ab*c', 'abc', SUCCEED, 'found', 'abc'), ('ab*bc', 'abc', SUCCEED, 'found', 'abc'), ('ab*bc', 'abbc', SUCCEED, 'found', 'abbc'), ('ab*bc', 'abbbbc', SUCCEED, 'found', 'abbbbc'), ('ab{0,}bc', 'abbbbc', SUCCEED, 'found', 'abbbbc'), ('ab+bc', 'abbc', SUCCEED, 'found', 'abbc'), ('ab+bc', 'abc', FAIL), ('ab+bc', 'abq', FAIL), ('ab{1,}bc', 'abq', FAIL), ('ab+bc', 'abbbbc', SUCCEED, 'found', 'abbbbc'), ('ab{1,}bc', 'abbbbc', SUCCEED, 'found', 'abbbbc'), ('ab{1,3}bc', 'abbbbc', SUCCEED, 'found', 'abbbbc'), ('ab{3,4}bc', 'abbbbc', SUCCEED, 'found', 'abbbbc'), ('ab{4,5}bc', 'abbbbc', FAIL), ('ab?bc', 'abbc', SUCCEED, 'found', 'abbc'), ('ab?bc', 'abc', SUCCEED, 'found', 'abc'), ('ab{0,1}bc', 'abc', SUCCEED, 'found', 'abc'), ('ab?bc', 'abbbbc', FAIL), ('ab?c', 'abc', SUCCEED, 'found', 'abc'), ('ab{0,1}c', 'abc', SUCCEED, 'found', 'abc'), ('^abc$', 'abc', SUCCEED, 'found', 'abc'), ('^abc$', 'abcc', FAIL), ('^abc', 'abcc', SUCCEED, 'found', 'abc'), ('^abc$', 'aabc', FAIL), ('abc$', 'aabc', SUCCEED, 'found', 'abc'), ('^', 'abc', SUCCEED, 'found', ''), ('$', 'abc', SUCCEED, 'found', ''), ('a.c', 'abc', SUCCEED, 'found', 'abc'), ('a.c', 'axc', SUCCEED, 'found', 'axc'), ('a.*c', 'axyzc', SUCCEED, 'found', 'axyzc'), ('a.*c', 'axyzd', FAIL), ('a[bc]d', 'abc', FAIL), ('a[bc]d', 'abd', SUCCEED, 'found', 'abd'), ('a[b-d]e', 'abd', FAIL), ('a[b-d]e', 'ace', SUCCEED, 'found', 'ace'), ('a[b-d]', 'aac', SUCCEED, 'found', 'ac'), ('a[-b]', 'a-', SUCCEED, 'found', 'a-'), ('a[b-]', 'a-', SUCCEED, 'found', 'a-'), ('a[b-a]', '-', SYNTAX_ERROR), ('a[]b', '-', SYNTAX_ERROR), ('a[', '-', SYNTAX_ERROR), ('a]', 'a]', SUCCEED, 'found', 'a]'), ('a[]]b', 'a]b', SUCCEED, 'found', 'a]b'), ('a[^bc]d', 'aed', SUCCEED, 'found', 'aed'), ('a[^bc]d', 'abd', FAIL), ('a[^-b]c', 'adc', SUCCEED, 'found', 'adc'), ('a[^-b]c', 'a-c', FAIL), 
('a[^]b]c', 'a]c', FAIL), ('a[^]b]c', 'adc', SUCCEED, 'found', 'adc'), ('ab|cd', 'abc', SUCCEED, 'found', 'ab'), ('ab|cd', 'abcd', SUCCEED, 'found', 'ab'), ('()ef', 'def', SUCCEED, 'found+"-"+g1', 'ef-'), ('*a', '-', SYNTAX_ERROR), ('(*)b', '-', SYNTAX_ERROR), ('$b', 'b', FAIL), ('a\\', '-', SYNTAX_ERROR), ('a\\(b', 'a(b', SUCCEED, 'found+"-"+g1', 'a(b-Error'), ('a\\(*b', 'ab', SUCCEED, 'found', 'ab'), ('a\\(*b', 'a((b', SUCCEED, 'found', 'a((b'), ('a\\\\b', 'a\\b', SUCCEED, 'found', 'a\\b'), ('abc)', '-', SYNTAX_ERROR), ('(abc', '-', SYNTAX_ERROR), ('((a))', 'abc', SUCCEED, 'found+"-"+g1+"-"+g2', 'a-a-a'), ('(a)b(c)', 'abc', SUCCEED, 'found+"-"+g1+"-"+g2', 'abc-a-c'), ('a+b+c', 'aabbabc', SUCCEED, 'found', 'abc'), ('a{1,}b{1,}c', 'aabbabc', SUCCEED, 'found', 'abc'), ('a**', '-', SYNTAX_ERROR), ('a.+?c', 'abcabc', SUCCEED, 'found', 'abc'), ('(a+|b)*', 'ab', SUCCEED, 'found+"-"+g1', 'ab-b'), ('(a+|b){0,}', 'ab', SUCCEED, 'found+"-"+g1', 'ab-b'), ('(a+|b)+', 'ab', SUCCEED, 'found+"-"+g1', 'ab-b'), ('(a+|b){1,}', 'ab', SUCCEED, 'found+"-"+g1', 'ab-b'), ('(a+|b)?', 'ab', SUCCEED, 'found+"-"+g1', 'a-a'), ('(a+|b){0,1}', 'ab', SUCCEED, 'found+"-"+g1', 'a-a'), (')(', '-', SYNTAX_ERROR), ('[^ab]*', 'cde', SUCCEED, 'found', 'cde'), ('abc', '', FAIL), ('a*', '', SUCCEED, 'found', ''), ('([abc])*d', 'abbbcd', SUCCEED, 'found+"-"+g1', 'abbbcd-c'), ('([abc])*bcd', 'abcd', SUCCEED, 'found+"-"+g1', 'abcd-a'), ('a|b|c|d|e', 'e', SUCCEED, 'found', 'e'), ('(a|b|c|d|e)f', 'ef', SUCCEED, 'found+"-"+g1', 'ef-e'), ('abcd*efg', 'abcdefg', SUCCEED, 'found', 'abcdefg'), ('ab*', 'xabyabbbz', SUCCEED, 'found', 'ab'), ('ab*', 'xayabbbz', SUCCEED, 'found', 'a'), ('(ab|cd)e', 'abcde', SUCCEED, 'found+"-"+g1', 'cde-cd'), ('[abhgefdc]ij', 'hij', SUCCEED, 'found', 'hij'), ('^(ab|cd)e', 'abcde', FAIL), ('(abc|)ef', 'abcdef', SUCCEED, 'found+"-"+g1', 'ef-'), ('(a|b)c*d', 'abcd', SUCCEED, 'found+"-"+g1', 'bcd-b'), ('(ab|ab*)bc', 'abc', SUCCEED, 'found+"-"+g1', 'abc-a'), ('a([bc]*)c*', 'abc', SUCCEED, 
'found+"-"+g1', 'abc-bc'), ('a([bc]*)(c*d)', 'abcd', SUCCEED, 'found+"-"+g1+"-"+g2', 'abcd-bc-d'), ('a([bc]+)(c*d)', 'abcd', SUCCEED, 'found+"-"+g1+"-"+g2', 'abcd-bc-d'), ('a([bc]*)(c+d)', 'abcd', SUCCEED, 'found+"-"+g1+"-"+g2', 'abcd-b-cd'), ('a[bcd]*dcdcde', 'adcdcde', SUCCEED, 'found', 'adcdcde'), ('a[bcd]+dcdcde', 'adcdcde', FAIL), ('(ab|a)b*c', 'abc', SUCCEED, 'found+"-"+g1', 'abc-ab'), ('((a)(b)c)(d)', 'abcd', SUCCEED, 'g1+"-"+g2+"-"+g3+"-"+g4', 'abc-a-b-d'), ('[a-zA-Z_][a-zA-Z0-9_]*', 'alpha', SUCCEED, 'found', 'alpha'), ('^a(bc+|b[eh])g|.h$', 'abh', SUCCEED, 'found+"-"+g1', 'bh-None'), ('(bc+d$|ef*g.|h?i(j|k))', 'effgz', SUCCEED, 'found+"-"+g1+"-"+g2', 'effgz-effgz-None'), ('(bc+d$|ef*g.|h?i(j|k))', 'ij', SUCCEED, 'found+"-"+g1+"-"+g2', 'ij-ij-j'), ('(bc+d$|ef*g.|h?i(j|k))', 'effg', FAIL), ('(bc+d$|ef*g.|h?i(j|k))', 'bcdd', FAIL), ('(bc+d$|ef*g.|h?i(j|k))', 'reffgz', SUCCEED, 'found+"-"+g1+"-"+g2', 'effgz-effgz-None'), ('((((((((((a))))))))))', 'a', SUCCEED, 'g10', 'a'), ('((((((((((a))))))))))\\10', 'aa', SUCCEED, 'found', 'aa'), # Python does not have the same rules for \\41 so this is a syntax error # ('((((((((((a))))))))))\\41', 'aa', FAIL), # ('((((((((((a))))))))))\\41', 'a!', SUCCEED, 'found', 'a!'), ('((((((((((a))))))))))\\41', '', SYNTAX_ERROR), ('(?i)((((((((((a))))))))))\\41', '', SYNTAX_ERROR), ('(((((((((a)))))))))', 'a', SUCCEED, 'found', 'a'), ('multiple words of text', 'uh-uh', FAIL), ('multiple words', 'multiple words, yeah', SUCCEED, 'found', 'multiple words'), ('(.*)c(.*)', 'abcde', SUCCEED, 'found+"-"+g1+"-"+g2', 'abcde-ab-de'), ('\\((.*), (.*)\\)', '(a, b)', SUCCEED, 'g2+"-"+g1', 'b-a'), ('[k]', 'ab', FAIL), ('a[-]?c', 'ac', SUCCEED, 'found', 'ac'), ('(abc)\\1', 'abcabc', SUCCEED, 'g1', 'abc'), ('([a-c]*)\\1', 'abcabc', SUCCEED, 'g1', 'abc'), ('(?i)abc', 'ABC', SUCCEED, 'found', 'ABC'), ('(?i)abc', 'XBC', FAIL), ('(?i)abc', 'AXC', FAIL), ('(?i)abc', 'ABX', FAIL), ('(?i)abc', 'XABCY', SUCCEED, 'found', 'ABC'), ('(?i)abc', 'ABABC', 
SUCCEED, 'found', 'ABC'), ('(?i)ab*c', 'ABC', SUCCEED, 'found', 'ABC'), ('(?i)ab*bc', 'ABC', SUCCEED, 'found', 'ABC'), ('(?i)ab*bc', 'ABBC', SUCCEED, 'found', 'ABBC'), ('(?i)ab*?bc', 'ABBBBC', SUCCEED, 'found', 'ABBBBC'), ('(?i)ab{0,}?bc', 'ABBBBC', SUCCEED, 'found', 'ABBBBC'), ('(?i)ab+?bc', 'ABBC', SUCCEED, 'found', 'ABBC'), ('(?i)ab+bc', 'ABC', FAIL), ('(?i)ab+bc', 'ABQ', FAIL), ('(?i)ab{1,}bc', 'ABQ', FAIL), ('(?i)ab+bc', 'ABBBBC', SUCCEED, 'found', 'ABBBBC'), ('(?i)ab{1,}?bc', 'ABBBBC', SUCCEED, 'found', 'ABBBBC'), ('(?i)ab{1,3}?bc', 'ABBBBC', SUCCEED, 'found', 'ABBBBC'), ('(?i)ab{3,4}?bc', 'ABBBBC', SUCCEED, 'found', 'ABBBBC'), ('(?i)ab{4,5}?bc', 'ABBBBC', FAIL), ('(?i)ab??bc', 'ABBC', SUCCEED, 'found', 'ABBC'), ('(?i)ab??bc', 'ABC', SUCCEED, 'found', 'ABC'), ('(?i)ab{0,1}?bc', 'ABC', SUCCEED, 'found', 'ABC'), ('(?i)ab??bc', 'ABBBBC', FAIL), ('(?i)ab??c', 'ABC', SUCCEED, 'found', 'ABC'), ('(?i)ab{0,1}?c', 'ABC', SUCCEED, 'found', 'ABC'), ('(?i)^abc$', 'ABC', SUCCEED, 'found', 'ABC'), ('(?i)^abc$', 'ABCC', FAIL), ('(?i)^abc', 'ABCC', SUCCEED, 'found', 'ABC'), ('(?i)^abc$', 'AABC', FAIL), ('(?i)abc$', 'AABC', SUCCEED, 'found', 'ABC'), ('(?i)^', 'ABC', SUCCEED, 'found', ''), ('(?i)$', 'ABC', SUCCEED, 'found', ''), ('(?i)a.c', 'ABC', SUCCEED, 'found', 'ABC'), ('(?i)a.c', 'AXC', SUCCEED, 'found', 'AXC'), ('(?i)a.*?c', 'AXYZC', SUCCEED, 'found', 'AXYZC'), ('(?i)a.*c', 'AXYZD', FAIL), ('(?i)a[bc]d', 'ABC', FAIL), ('(?i)a[bc]d', 'ABD', SUCCEED, 'found', 'ABD'), ('(?i)a[b-d]e', 'ABD', FAIL), ('(?i)a[b-d]e', 'ACE', SUCCEED, 'found', 'ACE'), ('(?i)a[b-d]', 'AAC', SUCCEED, 'found', 'AC'), ('(?i)a[-b]', 'A-', SUCCEED, 'found', 'A-'), ('(?i)a[b-]', 'A-', SUCCEED, 'found', 'A-'), ('(?i)a[b-a]', '-', SYNTAX_ERROR), ('(?i)a[]b', '-', SYNTAX_ERROR), ('(?i)a[', '-', SYNTAX_ERROR), ('(?i)a]', 'A]', SUCCEED, 'found', 'A]'), ('(?i)a[]]b', 'A]B', SUCCEED, 'found', 'A]B'), ('(?i)a[^bc]d', 'AED', SUCCEED, 'found', 'AED'), ('(?i)a[^bc]d', 'ABD', FAIL), ('(?i)a[^-b]c', 'ADC', SUCCEED, 
'found', 'ADC'), ('(?i)a[^-b]c', 'A-C', FAIL), ('(?i)a[^]b]c', 'A]C', FAIL), ('(?i)a[^]b]c', 'ADC', SUCCEED, 'found', 'ADC'), ('(?i)ab|cd', 'ABC', SUCCEED, 'found', 'AB'), ('(?i)ab|cd', 'ABCD', SUCCEED, 'found', 'AB'), ('(?i)()ef', 'DEF', SUCCEED, 'found+"-"+g1', 'EF-'), ('(?i)*a', '-', SYNTAX_ERROR), ('(?i)(*)b', '-', SYNTAX_ERROR), ('(?i)$b', 'B', FAIL), ('(?i)a\\', '-', SYNTAX_ERROR), ('(?i)a\\(b', 'A(B', SUCCEED, 'found+"-"+g1', 'A(B-Error'), ('(?i)a\\(*b', 'AB', SUCCEED, 'found', 'AB'), ('(?i)a\\(*b', 'A((B', SUCCEED, 'found', 'A((B'), ('(?i)a\\\\b', 'A\\B', SUCCEED, 'found', 'A\\B'), ('(?i)abc)', '-', SYNTAX_ERROR), ('(?i)(abc', '-', SYNTAX_ERROR), ('(?i)((a))', 'ABC', SUCCEED, 'found+"-"+g1+"-"+g2', 'A-A-A'), ('(?i)(a)b(c)', 'ABC', SUCCEED, 'found+"-"+g1+"-"+g2', 'ABC-A-C'), ('(?i)a+b+c', 'AABBABC', SUCCEED, 'found', 'ABC'), ('(?i)a{1,}b{1,}c', 'AABBABC', SUCCEED, 'found', 'ABC'), ('(?i)a**', '-', SYNTAX_ERROR), ('(?i)a.+?c', 'ABCABC', SUCCEED, 'found', 'ABC'), ('(?i)a.*?c', 'ABCABC', SUCCEED, 'found', 'ABC'), ('(?i)a.{0,5}?c', 'ABCABC', SUCCEED, 'found', 'ABC'), ('(?i)(a+|b)*', 'AB', SUCCEED, 'found+"-"+g1', 'AB-B'), ('(?i)(a+|b){0,}', 'AB', SUCCEED, 'found+"-"+g1', 'AB-B'), ('(?i)(a+|b)+', 'AB', SUCCEED, 'found+"-"+g1', 'AB-B'), ('(?i)(a+|b){1,}', 'AB', SUCCEED, 'found+"-"+g1', 'AB-B'), ('(?i)(a+|b)?', 'AB', SUCCEED, 'found+"-"+g1', 'A-A'), ('(?i)(a+|b){0,1}', 'AB', SUCCEED, 'found+"-"+g1', 'A-A'), ('(?i)(a+|b){0,1}?', 'AB', SUCCEED, 'found+"-"+g1', '-None'), ('(?i))(', '-', SYNTAX_ERROR), ('(?i)[^ab]*', 'CDE', SUCCEED, 'found', 'CDE'), ('(?i)abc', '', FAIL), ('(?i)a*', '', SUCCEED, 'found', ''), ('(?i)([abc])*d', 'ABBBCD', SUCCEED, 'found+"-"+g1', 'ABBBCD-C'), ('(?i)([abc])*bcd', 'ABCD', SUCCEED, 'found+"-"+g1', 'ABCD-A'), ('(?i)a|b|c|d|e', 'E', SUCCEED, 'found', 'E'), ('(?i)(a|b|c|d|e)f', 'EF', SUCCEED, 'found+"-"+g1', 'EF-E'), ('(?i)abcd*efg', 'ABCDEFG', SUCCEED, 'found', 'ABCDEFG'), ('(?i)ab*', 'XABYABBBZ', SUCCEED, 'found', 'AB'), ('(?i)ab*', 
'XAYABBBZ', SUCCEED, 'found', 'A'), ('(?i)(ab|cd)e', 'ABCDE', SUCCEED, 'found+"-"+g1', 'CDE-CD'), ('(?i)[abhgefdc]ij', 'HIJ', SUCCEED, 'found', 'HIJ'), ('(?i)^(ab|cd)e', 'ABCDE', FAIL), ('(?i)(abc|)ef', 'ABCDEF', SUCCEED, 'found+"-"+g1', 'EF-'), ('(?i)(a|b)c*d', 'ABCD', SUCCEED, 'found+"-"+g1', 'BCD-B'), ('(?i)(ab|ab*)bc', 'ABC', SUCCEED, 'found+"-"+g1', 'ABC-A'), ('(?i)a([bc]*)c*', 'ABC', SUCCEED, 'found+"-"+g1', 'ABC-BC'), ('(?i)a([bc]*)(c*d)', 'ABCD', SUCCEED, 'found+"-"+g1+"-"+g2', 'ABCD-BC-D'), ('(?i)a([bc]+)(c*d)', 'ABCD', SUCCEED, 'found+"-"+g1+"-"+g2', 'ABCD-BC-D'), ('(?i)a([bc]*)(c+d)', 'ABCD', SUCCEED, 'found+"-"+g1+"-"+g2', 'ABCD-B-CD'), ('(?i)a[bcd]*dcdcde', 'ADCDCDE', SUCCEED, 'found', 'ADCDCDE'), ('(?i)a[bcd]+dcdcde', 'ADCDCDE', FAIL), ('(?i)(ab|a)b*c', 'ABC', SUCCEED, 'found+"-"+g1', 'ABC-AB'), ('(?i)((a)(b)c)(d)', 'ABCD', SUCCEED, 'g1+"-"+g2+"-"+g3+"-"+g4', 'ABC-A-B-D'), ('(?i)[a-zA-Z_][a-zA-Z0-9_]*', 'ALPHA', SUCCEED, 'found', 'ALPHA'), ('(?i)^a(bc+|b[eh])g|.h$', 'ABH', SUCCEED, 'found+"-"+g1', 'BH-None'), ('(?i)(bc+d$|ef*g.|h?i(j|k))', 'EFFGZ', SUCCEED, 'found+"-"+g1+"-"+g2', 'EFFGZ-EFFGZ-None'), ('(?i)(bc+d$|ef*g.|h?i(j|k))', 'IJ', SUCCEED, 'found+"-"+g1+"-"+g2', 'IJ-IJ-J'), ('(?i)(bc+d$|ef*g.|h?i(j|k))', 'EFFG', FAIL), ('(?i)(bc+d$|ef*g.|h?i(j|k))', 'BCDD', FAIL), ('(?i)(bc+d$|ef*g.|h?i(j|k))', 'REFFGZ', SUCCEED, 'found+"-"+g1+"-"+g2', 'EFFGZ-EFFGZ-None'), ('(?i)((((((((((a))))))))))', 'A', SUCCEED, 'g10', 'A'), ('(?i)((((((((((a))))))))))\\10', 'AA', SUCCEED, 'found', 'AA'), #('(?i)((((((((((a))))))))))\\41', 'AA', FAIL), #('(?i)((((((((((a))))))))))\\41', 'A!', SUCCEED, 'found', 'A!'), ('(?i)(((((((((a)))))))))', 'A', SUCCEED, 'found', 'A'), ('(?i)(?:(?:(?:(?:(?:(?:(?:(?:(?:(a))))))))))', 'A', SUCCEED, 'g1', 'A'), ('(?i)(?:(?:(?:(?:(?:(?:(?:(?:(?:(a|b|c))))))))))', 'C', SUCCEED, 'g1', 'C'), ('(?i)multiple words of text', 'UH-UH', FAIL), ('(?i)multiple words', 'MULTIPLE WORDS, YEAH', SUCCEED, 'found', 'MULTIPLE WORDS'), ('(?i)(.*)c(.*)', 
'ABCDE', SUCCEED, 'found+"-"+g1+"-"+g2', 'ABCDE-AB-DE'), ('(?i)\\((.*), (.*)\\)', '(A, B)', SUCCEED, 'g2+"-"+g1', 'B-A'), ('(?i)[k]', 'AB', FAIL), # ('(?i)abcd', 'ABCD', SUCCEED, 'found+"-"+\\found+"-"+\\\\found', 'ABCD-$&-\\ABCD'), # ('(?i)a(bc)d', 'ABCD', SUCCEED, 'g1+"-"+\\g1+"-"+\\\\g1', 'BC-$1-\\BC'), ('(?i)a[-]?c', 'AC', SUCCEED, 'found', 'AC'), ('(?i)(abc)\\1', 'ABCABC', SUCCEED, 'g1', 'ABC'), ('(?i)([a-c]*)\\1', 'ABCABC', SUCCEED, 'g1', 'ABC'), ('a(?!b).', 'abad', SUCCEED, 'found', 'ad'), ('a(?=d).', 'abad', SUCCEED, 'found', 'ad'), ('a(?=c|d).', 'abad', SUCCEED, 'found', 'ad'), ('a(?:b|c|d)(.)', 'ace', SUCCEED, 'g1', 'e'), ('a(?:b|c|d)*(.)', 'ace', SUCCEED, 'g1', 'e'), ('a(?:b|c|d)+?(.)', 'ace', SUCCEED, 'g1', 'e'), ('a(?:b|(c|e){1,2}?|d)+?(.)', 'ace', SUCCEED, 'g1 + g2', 'ce'), ('^(.+)?B', 'AB', SUCCEED, 'g1', 'A'), # lookbehind: split by : but not if it is escaped by -. ('(?<!-):(.*?)(?<!-):', 'a:bc-:de:f', SUCCEED, 'g1', 'bc-:de' ), # escaping with \ as we know it ('(?<!\\\):(.*?)(?<!\\\):', 'a:bc\\:de:f', SUCCEED, 'g1', 'bc\\:de' ), # terminating with ' and escaping with ? as in edifact ("(?<!\\?)'(.*?)(?<!\\?)'", "a'bc?'de'f", SUCCEED, 'g1', "bc?'de" ), # Comments using the (?#...) syntax ('w(?# comment', 'w', SYNTAX_ERROR), ('w(?# comment 1)xy(?# comment 2)z', 'wxyz', SUCCEED, 'found', 'wxyz'), # Check odd placement of embedded pattern modifiers # not an error under PCRE/PRE: ('w(?i)', 'W', SUCCEED, 'found', 'W'), # ('w(?i)', 'W', SYNTAX_ERROR), # Comments using the x embedded pattern modifier ("""(?x)w# comment 1 x y # comment 2 z""", 'wxyz', SUCCEED, 'found', 'wxyz'), # using the m embedded pattern modifier ('^abc', """jkl abc xyz""", FAIL), ('(?m)^abc', """jkl abc xyz""", SUCCEED, 'found', 'abc'), ('(?m)abc$', """jkl xyzabc 123""", SUCCEED, 'found', 'abc'), # using the s embedded pattern modifier ('a.b', 'a\nb', FAIL), ('(?s)a.b', 'a\nb', SUCCEED, 'found', 'a\nb'), # test \w, etc. 
both inside and outside character classes ('\\w+', '--ab_cd0123--', SUCCEED, 'found', 'ab_cd0123'), ('[\\w]+', '--ab_cd0123--', SUCCEED, 'found', 'ab_cd0123'), ('\\D+', '1234abc5678', SUCCEED, 'found', 'abc'), ('[\\D]+', '1234abc5678', SUCCEED, 'found', 'abc'), ('[\\da-fA-F]+', '123abc', SUCCEED, 'found', '123abc'), # not an error under PCRE/PRE: # ('[\\d-x]', '-', SYNTAX_ERROR), (r'([\s]*)([\S]*)([\s]*)', ' testing!1972', SUCCEED, 'g3+g2+g1', 'testing!1972 '), (r'(\s*)(\S*)(\s*)', ' testing!1972', SUCCEED, 'g3+g2+g1', 'testing!1972 '), (r'\xff', '\377', SUCCEED, 'found', chr(255)), # new \x semantics (r'\x00ff', '\377', FAIL), # (r'\x00ff', '\377', SUCCEED, 'found', chr(255)), (r'\t\n\v\r\f\a\g', '\t\n\v\r\f\ag', SUCCEED, 'found', '\t\n\v\r\f\ag'), ('\t\n\v\r\f\a\g', '\t\n\v\r\f\ag', SUCCEED, 'found', '\t\n\v\r\f\ag'), (r'\t\n\v\r\f\a', '\t\n\v\r\f\a', SUCCEED, 'found', chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)), (r'[\t][\n][\v][\r][\f][\b]', '\t\n\v\r\f\b', SUCCEED, 'found', '\t\n\v\r\f\b'), # # post-1.5.2 additions # xmllib problem (r'(([a-z]+):)?([a-z]+)$', 'smil', SUCCEED, 'g1+"-"+g2+"-"+g3', 'None-None-smil'), # bug 110866: reference to undefined group (r'((.)\1+)', '', SYNTAX_ERROR), # bug 111869: search (PRE/PCRE fails on this one, SRE doesn't) (r'.*d', 'abc\nabd', SUCCEED, 'found', 'abd'), # bug 112468: various expected syntax errors (r'(', '', SYNTAX_ERROR), (r'[\41]', '!', SUCCEED, 'found', '!'), # bug 114033: nothing to repeat (r'(x?)?', 'x', SUCCEED, 'found', 'x'), # bug 115040: rescan if flags are modified inside pattern (r' (?x)foo ', 'foo', SUCCEED, 'found', 'foo'), # bug 115618: negative lookahead (r'(?<!abc)(d.f)', 'abcdefdof', SUCCEED, 'found', 'dof'), # bug 116251: character class bug (r'[\w-]+', 'laser_beam', SUCCEED, 'found', 'laser_beam'), # bug 123769+127259: non-greedy backtracking bug (r'.*?\S *:', 'xx:', SUCCEED, 'found', 'xx:'), (r'a[ ]*?\ (\d+).*', 'a 10', SUCCEED, 'found', 'a 10'), (r'a[ ]*?\ (\d+).*', 'a 10', SUCCEED, 'found', 'a 
10'), # bug 127259: \Z shouldn't depend on multiline mode (r'(?ms).*?x\s*\Z(.*)','xx\nx\n', SUCCEED, 'g1', ''), # bug 128899: uppercase literals under the ignorecase flag (r'(?i)M+', 'MMM', SUCCEED, 'found', 'MMM'), (r'(?i)m+', 'MMM', SUCCEED, 'found', 'MMM'), (r'(?i)[M]+', 'MMM', SUCCEED, 'found', 'MMM'), (r'(?i)[m]+', 'MMM', SUCCEED, 'found', 'MMM'), # bug 130748: ^* should be an error (nothing to repeat) (r'^*', '', SYNTAX_ERROR), # bug 133283: minimizing repeat problem (r'"(?:\\"|[^"])*?"', r'"\""', SUCCEED, 'found', r'"\""'), # bug 477728: minimizing repeat problem (r'^.*?$', 'one\ntwo\nthree\n', FAIL), # bug 483789: minimizing repeat problem (r'a[^>]*?b', 'a>b', FAIL), # bug 490573: minimizing repeat problem (r'^a*?$', 'foo', FAIL), # bug 470582: nested groups problem (r'^((a)c)?(ab)$', 'ab', SUCCEED, 'g1+"-"+g2+"-"+g3', 'None-None-ab'), # another minimizing repeat problem (capturing groups in assertions) ('^([ab]*?)(?=(b)?)c', 'abc', SUCCEED, 'g1+"-"+g2', 'ab-None'), ('^([ab]*?)(?!(b))c', 'abc', SUCCEED, 'g1+"-"+g2', 'ab-None'), ('^([ab]*?)(?<!(a))c', 'abc', SUCCEED, 'g1+"-"+g2', 'ab-None'), ] u = '\N{LATIN CAPITAL LETTER A WITH DIAERESIS}' tests.extend([ # bug 410271: \b broken under locales (r'\b.\b', 'a', SUCCEED, 'found', 'a'), (r'(?u)\b.\b', u, SUCCEED, 'found', u), (r'(?u)\w', u, SUCCEED, 'found', u), ])
gpl-3.0
75651/kbengine_cloud
kbe/res/scripts/common/Lib/test/test_asyncio/test_unix_events.py
60
53084
"""Tests for unix_events.py.""" import collections import gc import errno import io import os import pprint import signal import socket import stat import sys import tempfile import threading import unittest from unittest import mock if sys.platform == 'win32': raise unittest.SkipTest('UNIX only') import asyncio from asyncio import log from asyncio import test_utils from asyncio import unix_events MOCK_ANY = mock.ANY @unittest.skipUnless(signal, 'Signals are not supported') class SelectorEventLoopSignalTests(test_utils.TestCase): def setUp(self): self.loop = asyncio.SelectorEventLoop() self.set_event_loop(self.loop) def test_check_signal(self): self.assertRaises( TypeError, self.loop._check_signal, '1') self.assertRaises( ValueError, self.loop._check_signal, signal.NSIG + 1) def test_handle_signal_no_handler(self): self.loop._handle_signal(signal.NSIG + 1) def test_handle_signal_cancelled_handler(self): h = asyncio.Handle(mock.Mock(), (), loop=mock.Mock()) h.cancel() self.loop._signal_handlers[signal.NSIG + 1] = h self.loop.remove_signal_handler = mock.Mock() self.loop._handle_signal(signal.NSIG + 1) self.loop.remove_signal_handler.assert_called_with(signal.NSIG + 1) @mock.patch('asyncio.unix_events.signal') def test_add_signal_handler_setup_error(self, m_signal): m_signal.NSIG = signal.NSIG m_signal.set_wakeup_fd.side_effect = ValueError self.assertRaises( RuntimeError, self.loop.add_signal_handler, signal.SIGINT, lambda: True) @mock.patch('asyncio.unix_events.signal') def test_add_signal_handler(self, m_signal): m_signal.NSIG = signal.NSIG cb = lambda: True self.loop.add_signal_handler(signal.SIGHUP, cb) h = self.loop._signal_handlers.get(signal.SIGHUP) self.assertIsInstance(h, asyncio.Handle) self.assertEqual(h._callback, cb) @mock.patch('asyncio.unix_events.signal') def test_add_signal_handler_install_error(self, m_signal): m_signal.NSIG = signal.NSIG def set_wakeup_fd(fd): if fd == -1: raise ValueError() m_signal.set_wakeup_fd = set_wakeup_fd class 
Err(OSError): errno = errno.EFAULT m_signal.signal.side_effect = Err self.assertRaises( Err, self.loop.add_signal_handler, signal.SIGINT, lambda: True) @mock.patch('asyncio.unix_events.signal') @mock.patch('asyncio.base_events.logger') def test_add_signal_handler_install_error2(self, m_logging, m_signal): m_signal.NSIG = signal.NSIG class Err(OSError): errno = errno.EINVAL m_signal.signal.side_effect = Err self.loop._signal_handlers[signal.SIGHUP] = lambda: True self.assertRaises( RuntimeError, self.loop.add_signal_handler, signal.SIGINT, lambda: True) self.assertFalse(m_logging.info.called) self.assertEqual(1, m_signal.set_wakeup_fd.call_count) @mock.patch('asyncio.unix_events.signal') @mock.patch('asyncio.base_events.logger') def test_add_signal_handler_install_error3(self, m_logging, m_signal): class Err(OSError): errno = errno.EINVAL m_signal.signal.side_effect = Err m_signal.NSIG = signal.NSIG self.assertRaises( RuntimeError, self.loop.add_signal_handler, signal.SIGINT, lambda: True) self.assertFalse(m_logging.info.called) self.assertEqual(2, m_signal.set_wakeup_fd.call_count) @mock.patch('asyncio.unix_events.signal') def test_remove_signal_handler(self, m_signal): m_signal.NSIG = signal.NSIG self.loop.add_signal_handler(signal.SIGHUP, lambda: True) self.assertTrue( self.loop.remove_signal_handler(signal.SIGHUP)) self.assertTrue(m_signal.set_wakeup_fd.called) self.assertTrue(m_signal.signal.called) self.assertEqual( (signal.SIGHUP, m_signal.SIG_DFL), m_signal.signal.call_args[0]) @mock.patch('asyncio.unix_events.signal') def test_remove_signal_handler_2(self, m_signal): m_signal.NSIG = signal.NSIG m_signal.SIGINT = signal.SIGINT self.loop.add_signal_handler(signal.SIGINT, lambda: True) self.loop._signal_handlers[signal.SIGHUP] = object() m_signal.set_wakeup_fd.reset_mock() self.assertTrue( self.loop.remove_signal_handler(signal.SIGINT)) self.assertFalse(m_signal.set_wakeup_fd.called) self.assertTrue(m_signal.signal.called) self.assertEqual( (signal.SIGINT, 
m_signal.default_int_handler), m_signal.signal.call_args[0]) @mock.patch('asyncio.unix_events.signal') @mock.patch('asyncio.base_events.logger') def test_remove_signal_handler_cleanup_error(self, m_logging, m_signal): m_signal.NSIG = signal.NSIG self.loop.add_signal_handler(signal.SIGHUP, lambda: True) m_signal.set_wakeup_fd.side_effect = ValueError self.loop.remove_signal_handler(signal.SIGHUP) self.assertTrue(m_logging.info) @mock.patch('asyncio.unix_events.signal') def test_remove_signal_handler_error(self, m_signal): m_signal.NSIG = signal.NSIG self.loop.add_signal_handler(signal.SIGHUP, lambda: True) m_signal.signal.side_effect = OSError self.assertRaises( OSError, self.loop.remove_signal_handler, signal.SIGHUP) @mock.patch('asyncio.unix_events.signal') def test_remove_signal_handler_error2(self, m_signal): m_signal.NSIG = signal.NSIG self.loop.add_signal_handler(signal.SIGHUP, lambda: True) class Err(OSError): errno = errno.EINVAL m_signal.signal.side_effect = Err self.assertRaises( RuntimeError, self.loop.remove_signal_handler, signal.SIGHUP) @mock.patch('asyncio.unix_events.signal') def test_close(self, m_signal): m_signal.NSIG = signal.NSIG self.loop.add_signal_handler(signal.SIGHUP, lambda: True) self.loop.add_signal_handler(signal.SIGCHLD, lambda: True) self.assertEqual(len(self.loop._signal_handlers), 2) m_signal.set_wakeup_fd.reset_mock() self.loop.close() self.assertEqual(len(self.loop._signal_handlers), 0) m_signal.set_wakeup_fd.assert_called_once_with(-1) @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'UNIX Sockets are not supported') class SelectorEventLoopUnixSocketTests(test_utils.TestCase): def setUp(self): self.loop = asyncio.SelectorEventLoop() self.set_event_loop(self.loop) def test_create_unix_server_existing_path_sock(self): with test_utils.unix_socket_path() as path: sock = socket.socket(socket.AF_UNIX) sock.bind(path) with sock: coro = self.loop.create_unix_server(lambda: None, path) with self.assertRaisesRegex(OSError, 'Address.*is 
already in use'): self.loop.run_until_complete(coro) def test_create_unix_server_existing_path_nonsock(self): with tempfile.NamedTemporaryFile() as file: coro = self.loop.create_unix_server(lambda: None, file.name) with self.assertRaisesRegex(OSError, 'Address.*is already in use'): self.loop.run_until_complete(coro) def test_create_unix_server_ssl_bool(self): coro = self.loop.create_unix_server(lambda: None, path='spam', ssl=True) with self.assertRaisesRegex(TypeError, 'ssl argument must be an SSLContext'): self.loop.run_until_complete(coro) def test_create_unix_server_nopath_nosock(self): coro = self.loop.create_unix_server(lambda: None, path=None) with self.assertRaisesRegex(ValueError, 'path was not specified, and no sock'): self.loop.run_until_complete(coro) def test_create_unix_server_path_inetsock(self): sock = socket.socket() with sock: coro = self.loop.create_unix_server(lambda: None, path=None, sock=sock) with self.assertRaisesRegex(ValueError, 'A UNIX Domain Socket was expected'): self.loop.run_until_complete(coro) @mock.patch('asyncio.unix_events.socket') def test_create_unix_server_bind_error(self, m_socket): # Ensure that the socket is closed on any bind error sock = mock.Mock() m_socket.socket.return_value = sock sock.bind.side_effect = OSError coro = self.loop.create_unix_server(lambda: None, path="/test") with self.assertRaises(OSError): self.loop.run_until_complete(coro) self.assertTrue(sock.close.called) sock.bind.side_effect = MemoryError coro = self.loop.create_unix_server(lambda: None, path="/test") with self.assertRaises(MemoryError): self.loop.run_until_complete(coro) self.assertTrue(sock.close.called) def test_create_unix_connection_path_sock(self): coro = self.loop.create_unix_connection( lambda: None, '/dev/null', sock=object()) with self.assertRaisesRegex(ValueError, 'path and sock can not be'): self.loop.run_until_complete(coro) def test_create_unix_connection_nopath_nosock(self): coro = self.loop.create_unix_connection( lambda: None, 
None) with self.assertRaisesRegex(ValueError, 'no path and sock were specified'): self.loop.run_until_complete(coro) def test_create_unix_connection_nossl_serverhost(self): coro = self.loop.create_unix_connection( lambda: None, '/dev/null', server_hostname='spam') with self.assertRaisesRegex(ValueError, 'server_hostname is only meaningful'): self.loop.run_until_complete(coro) def test_create_unix_connection_ssl_noserverhost(self): coro = self.loop.create_unix_connection( lambda: None, '/dev/null', ssl=True) with self.assertRaisesRegex( ValueError, 'you have to pass server_hostname when using ssl'): self.loop.run_until_complete(coro) class UnixReadPipeTransportTests(test_utils.TestCase): def setUp(self): self.loop = self.new_test_loop() self.protocol = test_utils.make_test_protocol(asyncio.Protocol) self.pipe = mock.Mock(spec_set=io.RawIOBase) self.pipe.fileno.return_value = 5 blocking_patcher = mock.patch('asyncio.unix_events._set_nonblocking') blocking_patcher.start() self.addCleanup(blocking_patcher.stop) fstat_patcher = mock.patch('os.fstat') m_fstat = fstat_patcher.start() st = mock.Mock() st.st_mode = stat.S_IFIFO m_fstat.return_value = st self.addCleanup(fstat_patcher.stop) def test_ctor(self): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) self.loop.assert_reader(5, tr._read_ready) test_utils.run_briefly(self.loop) self.protocol.connection_made.assert_called_with(tr) def test_ctor_with_waiter(self): fut = asyncio.Future(loop=self.loop) unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol, fut) test_utils.run_briefly(self.loop) self.assertIsNone(fut.result()) @mock.patch('os.read') def test__read_ready(self, m_read): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) m_read.return_value = b'data' tr._read_ready() m_read.assert_called_with(5, tr.max_size) self.protocol.data_received.assert_called_with(b'data') @mock.patch('os.read') def test__read_ready_eof(self, m_read): tr = 
unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) m_read.return_value = b'' tr._read_ready() m_read.assert_called_with(5, tr.max_size) self.assertFalse(self.loop.readers) test_utils.run_briefly(self.loop) self.protocol.eof_received.assert_called_with() self.protocol.connection_lost.assert_called_with(None) @mock.patch('os.read') def test__read_ready_blocked(self, m_read): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) m_read.side_effect = BlockingIOError tr._read_ready() m_read.assert_called_with(5, tr.max_size) test_utils.run_briefly(self.loop) self.assertFalse(self.protocol.data_received.called) @mock.patch('asyncio.log.logger.error') @mock.patch('os.read') def test__read_ready_error(self, m_read, m_logexc): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) err = OSError() m_read.side_effect = err tr._close = mock.Mock() tr._read_ready() m_read.assert_called_with(5, tr.max_size) tr._close.assert_called_with(err) m_logexc.assert_called_with( test_utils.MockPattern( 'Fatal read error on pipe transport' '\nprotocol:.*\ntransport:.*'), exc_info=(OSError, MOCK_ANY, MOCK_ANY)) @mock.patch('os.read') def test_pause_reading(self, m_read): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) m = mock.Mock() self.loop.add_reader(5, m) tr.pause_reading() self.assertFalse(self.loop.readers) @mock.patch('os.read') def test_resume_reading(self, m_read): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) tr.resume_reading() self.loop.assert_reader(5, tr._read_ready) @mock.patch('os.read') def test_close(self, m_read): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) tr._close = mock.Mock() tr.close() tr._close.assert_called_with(None) @mock.patch('os.read') def test_close_already_closing(self, m_read): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) tr._closing = True tr._close = 
mock.Mock() tr.close() self.assertFalse(tr._close.called) @mock.patch('os.read') def test__close(self, m_read): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) err = object() tr._close(err) self.assertTrue(tr._closing) self.assertFalse(self.loop.readers) test_utils.run_briefly(self.loop) self.protocol.connection_lost.assert_called_with(err) def test__call_connection_lost(self): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) self.assertIsNotNone(tr._protocol) self.assertIsNotNone(tr._loop) err = None tr._call_connection_lost(err) self.protocol.connection_lost.assert_called_with(err) self.pipe.close.assert_called_with() self.assertIsNone(tr._protocol) self.assertIsNone(tr._loop) def test__call_connection_lost_with_err(self): tr = unix_events._UnixReadPipeTransport( self.loop, self.pipe, self.protocol) self.assertIsNotNone(tr._protocol) self.assertIsNotNone(tr._loop) err = OSError() tr._call_connection_lost(err) self.protocol.connection_lost.assert_called_with(err) self.pipe.close.assert_called_with() self.assertIsNone(tr._protocol) self.assertIsNone(tr._loop) class UnixWritePipeTransportTests(test_utils.TestCase): def setUp(self): self.loop = self.new_test_loop() self.protocol = test_utils.make_test_protocol(asyncio.BaseProtocol) self.pipe = mock.Mock(spec_set=io.RawIOBase) self.pipe.fileno.return_value = 5 blocking_patcher = mock.patch('asyncio.unix_events._set_nonblocking') blocking_patcher.start() self.addCleanup(blocking_patcher.stop) fstat_patcher = mock.patch('os.fstat') m_fstat = fstat_patcher.start() st = mock.Mock() st.st_mode = stat.S_IFSOCK m_fstat.return_value = st self.addCleanup(fstat_patcher.stop) def test_ctor(self): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.loop.assert_reader(5, tr._read_ready) test_utils.run_briefly(self.loop) self.protocol.connection_made.assert_called_with(tr) def test_ctor_with_waiter(self): fut = asyncio.Future(loop=self.loop) 
tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol, fut) self.loop.assert_reader(5, tr._read_ready) test_utils.run_briefly(self.loop) self.assertEqual(None, fut.result()) def test_can_write_eof(self): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.assertTrue(tr.can_write_eof()) @mock.patch('os.write') def test_write(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) m_write.return_value = 4 tr.write(b'data') m_write.assert_called_with(5, b'data') self.assertFalse(self.loop.writers) self.assertEqual([], tr._buffer) @mock.patch('os.write') def test_write_no_data(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) tr.write(b'') self.assertFalse(m_write.called) self.assertFalse(self.loop.writers) self.assertEqual([], tr._buffer) @mock.patch('os.write') def test_write_partial(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) m_write.return_value = 2 tr.write(b'data') m_write.assert_called_with(5, b'data') self.loop.assert_writer(5, tr._write_ready) self.assertEqual([b'ta'], tr._buffer) @mock.patch('os.write') def test_write_buffer(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.loop.add_writer(5, tr._write_ready) tr._buffer = [b'previous'] tr.write(b'data') self.assertFalse(m_write.called) self.loop.assert_writer(5, tr._write_ready) self.assertEqual([b'previous', b'data'], tr._buffer) @mock.patch('os.write') def test_write_again(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) m_write.side_effect = BlockingIOError() tr.write(b'data') m_write.assert_called_with(5, b'data') self.loop.assert_writer(5, tr._write_ready) self.assertEqual([b'data'], tr._buffer) @mock.patch('asyncio.unix_events.logger') @mock.patch('os.write') def test_write_err(self, m_write, m_log): tr = 
unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) err = OSError() m_write.side_effect = err tr._fatal_error = mock.Mock() tr.write(b'data') m_write.assert_called_with(5, b'data') self.assertFalse(self.loop.writers) self.assertEqual([], tr._buffer) tr._fatal_error.assert_called_with( err, 'Fatal write error on pipe transport') self.assertEqual(1, tr._conn_lost) tr.write(b'data') self.assertEqual(2, tr._conn_lost) tr.write(b'data') tr.write(b'data') tr.write(b'data') tr.write(b'data') # This is a bit overspecified. :-( m_log.warning.assert_called_with( 'pipe closed by peer or os.write(pipe, data) raised exception.') @mock.patch('os.write') def test_write_close(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) tr._read_ready() # pipe was closed by peer tr.write(b'data') self.assertEqual(tr._conn_lost, 1) tr.write(b'data') self.assertEqual(tr._conn_lost, 2) def test__read_ready(self): tr = unix_events._UnixWritePipeTransport(self.loop, self.pipe, self.protocol) tr._read_ready() self.assertFalse(self.loop.readers) self.assertFalse(self.loop.writers) self.assertTrue(tr._closing) test_utils.run_briefly(self.loop) self.protocol.connection_lost.assert_called_with(None) @mock.patch('os.write') def test__write_ready(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.loop.add_writer(5, tr._write_ready) tr._buffer = [b'da', b'ta'] m_write.return_value = 4 tr._write_ready() m_write.assert_called_with(5, b'data') self.assertFalse(self.loop.writers) self.assertEqual([], tr._buffer) @mock.patch('os.write') def test__write_ready_partial(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.loop.add_writer(5, tr._write_ready) tr._buffer = [b'da', b'ta'] m_write.return_value = 3 tr._write_ready() m_write.assert_called_with(5, b'data') self.loop.assert_writer(5, tr._write_ready) self.assertEqual([b'a'], tr._buffer) 
@mock.patch('os.write') def test__write_ready_again(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.loop.add_writer(5, tr._write_ready) tr._buffer = [b'da', b'ta'] m_write.side_effect = BlockingIOError() tr._write_ready() m_write.assert_called_with(5, b'data') self.loop.assert_writer(5, tr._write_ready) self.assertEqual([b'data'], tr._buffer) @mock.patch('os.write') def test__write_ready_empty(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.loop.add_writer(5, tr._write_ready) tr._buffer = [b'da', b'ta'] m_write.return_value = 0 tr._write_ready() m_write.assert_called_with(5, b'data') self.loop.assert_writer(5, tr._write_ready) self.assertEqual([b'data'], tr._buffer) @mock.patch('asyncio.log.logger.error') @mock.patch('os.write') def test__write_ready_err(self, m_write, m_logexc): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.loop.add_writer(5, tr._write_ready) tr._buffer = [b'da', b'ta'] m_write.side_effect = err = OSError() tr._write_ready() m_write.assert_called_with(5, b'data') self.assertFalse(self.loop.writers) self.assertFalse(self.loop.readers) self.assertEqual([], tr._buffer) self.assertTrue(tr._closing) m_logexc.assert_called_with( test_utils.MockPattern( 'Fatal write error on pipe transport' '\nprotocol:.*\ntransport:.*'), exc_info=(OSError, MOCK_ANY, MOCK_ANY)) self.assertEqual(1, tr._conn_lost) test_utils.run_briefly(self.loop) self.protocol.connection_lost.assert_called_with(err) @mock.patch('os.write') def test__write_ready_closing(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.loop.add_writer(5, tr._write_ready) tr._closing = True tr._buffer = [b'da', b'ta'] m_write.return_value = 4 tr._write_ready() m_write.assert_called_with(5, b'data') self.assertFalse(self.loop.writers) self.assertFalse(self.loop.readers) self.assertEqual([], tr._buffer) 
self.protocol.connection_lost.assert_called_with(None) self.pipe.close.assert_called_with() @mock.patch('os.write') def test_abort(self, m_write): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.loop.add_writer(5, tr._write_ready) self.loop.add_reader(5, tr._read_ready) tr._buffer = [b'da', b'ta'] tr.abort() self.assertFalse(m_write.called) self.assertFalse(self.loop.readers) self.assertFalse(self.loop.writers) self.assertEqual([], tr._buffer) self.assertTrue(tr._closing) test_utils.run_briefly(self.loop) self.protocol.connection_lost.assert_called_with(None) def test__call_connection_lost(self): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.assertIsNotNone(tr._protocol) self.assertIsNotNone(tr._loop) err = None tr._call_connection_lost(err) self.protocol.connection_lost.assert_called_with(err) self.pipe.close.assert_called_with() self.assertIsNone(tr._protocol) self.assertIsNone(tr._loop) def test__call_connection_lost_with_err(self): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) self.assertIsNotNone(tr._protocol) self.assertIsNotNone(tr._loop) err = OSError() tr._call_connection_lost(err) self.protocol.connection_lost.assert_called_with(err) self.pipe.close.assert_called_with() self.assertIsNone(tr._protocol) self.assertIsNone(tr._loop) def test_close(self): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) tr.write_eof = mock.Mock() tr.close() tr.write_eof.assert_called_with() def test_close_closing(self): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) tr.write_eof = mock.Mock() tr._closing = True tr.close() self.assertFalse(tr.write_eof.called) def test_write_eof(self): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) tr.write_eof() self.assertTrue(tr._closing) self.assertFalse(self.loop.readers) test_utils.run_briefly(self.loop) 
self.protocol.connection_lost.assert_called_with(None) def test_write_eof_pending(self): tr = unix_events._UnixWritePipeTransport( self.loop, self.pipe, self.protocol) tr._buffer = [b'data'] tr.write_eof() self.assertTrue(tr._closing) self.assertFalse(self.protocol.connection_lost.called) class AbstractChildWatcherTests(unittest.TestCase): def test_not_implemented(self): f = mock.Mock() watcher = asyncio.AbstractChildWatcher() self.assertRaises( NotImplementedError, watcher.add_child_handler, f, f) self.assertRaises( NotImplementedError, watcher.remove_child_handler, f) self.assertRaises( NotImplementedError, watcher.attach_loop, f) self.assertRaises( NotImplementedError, watcher.close) self.assertRaises( NotImplementedError, watcher.__enter__) self.assertRaises( NotImplementedError, watcher.__exit__, f, f, f) class BaseChildWatcherTests(unittest.TestCase): def test_not_implemented(self): f = mock.Mock() watcher = unix_events.BaseChildWatcher() self.assertRaises( NotImplementedError, watcher._do_waitpid, f) WaitPidMocks = collections.namedtuple("WaitPidMocks", ("waitpid", "WIFEXITED", "WIFSIGNALED", "WEXITSTATUS", "WTERMSIG", )) class ChildWatcherTestsMixin: ignore_warnings = mock.patch.object(log.logger, "warning") def setUp(self): self.loop = self.new_test_loop() self.running = False self.zombies = {} with mock.patch.object( self.loop, "add_signal_handler") as self.m_add_signal_handler: self.watcher = self.create_watcher() self.watcher.attach_loop(self.loop) def waitpid(self, pid, flags): if isinstance(self.watcher, asyncio.SafeChildWatcher) or pid != -1: self.assertGreater(pid, 0) try: if pid < 0: return self.zombies.popitem() else: return pid, self.zombies.pop(pid) except KeyError: pass if self.running: return 0, 0 else: raise ChildProcessError() def add_zombie(self, pid, returncode): self.zombies[pid] = returncode + 32768 def WIFEXITED(self, status): return status >= 32768 def WIFSIGNALED(self, status): return 32700 < status < 32768 def WEXITSTATUS(self, 
status): self.assertTrue(self.WIFEXITED(status)) return status - 32768 def WTERMSIG(self, status): self.assertTrue(self.WIFSIGNALED(status)) return 32768 - status def test_create_watcher(self): self.m_add_signal_handler.assert_called_once_with( signal.SIGCHLD, self.watcher._sig_chld) def waitpid_mocks(func): def wrapped_func(self): def patch(target, wrapper): return mock.patch(target, wraps=wrapper, new_callable=mock.Mock) with patch('os.WTERMSIG', self.WTERMSIG) as m_WTERMSIG, \ patch('os.WEXITSTATUS', self.WEXITSTATUS) as m_WEXITSTATUS, \ patch('os.WIFSIGNALED', self.WIFSIGNALED) as m_WIFSIGNALED, \ patch('os.WIFEXITED', self.WIFEXITED) as m_WIFEXITED, \ patch('os.waitpid', self.waitpid) as m_waitpid: func(self, WaitPidMocks(m_waitpid, m_WIFEXITED, m_WIFSIGNALED, m_WEXITSTATUS, m_WTERMSIG, )) return wrapped_func @waitpid_mocks def test_sigchld(self, m): # register a child callback = mock.Mock() with self.watcher: self.running = True self.watcher.add_child_handler(42, callback, 9, 10, 14) self.assertFalse(callback.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # child is running self.watcher._sig_chld() self.assertFalse(callback.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # child terminates (returncode 12) self.running = False self.add_zombie(42, 12) self.watcher._sig_chld() self.assertTrue(m.WIFEXITED.called) self.assertTrue(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) callback.assert_called_once_with(42, 12, 9, 10, 14) m.WIFSIGNALED.reset_mock() m.WIFEXITED.reset_mock() m.WEXITSTATUS.reset_mock() callback.reset_mock() # ensure that the child is effectively reaped self.add_zombie(42, 13) with self.ignore_warnings: self.watcher._sig_chld() self.assertFalse(callback.called) self.assertFalse(m.WTERMSIG.called) 
m.WIFSIGNALED.reset_mock() m.WIFEXITED.reset_mock() m.WEXITSTATUS.reset_mock() # sigchld called again self.zombies.clear() self.watcher._sig_chld() self.assertFalse(callback.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) @waitpid_mocks def test_sigchld_two_children(self, m): callback1 = mock.Mock() callback2 = mock.Mock() # register child 1 with self.watcher: self.running = True self.watcher.add_child_handler(43, callback1, 7, 8) self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # register child 2 with self.watcher: self.watcher.add_child_handler(44, callback2, 147, 18) self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # children are running self.watcher._sig_chld() self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # child 1 terminates (signal 3) self.add_zombie(43, -3) self.watcher._sig_chld() callback1.assert_called_once_with(43, -3, 7, 8) self.assertFalse(callback2.called) self.assertTrue(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertTrue(m.WTERMSIG.called) m.WIFSIGNALED.reset_mock() m.WIFEXITED.reset_mock() m.WTERMSIG.reset_mock() callback1.reset_mock() # child 2 still running self.watcher._sig_chld() self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # 
child 2 terminates (code 108) self.add_zombie(44, 108) self.running = False self.watcher._sig_chld() callback2.assert_called_once_with(44, 108, 147, 18) self.assertFalse(callback1.called) self.assertTrue(m.WIFEXITED.called) self.assertTrue(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) m.WIFSIGNALED.reset_mock() m.WIFEXITED.reset_mock() m.WEXITSTATUS.reset_mock() callback2.reset_mock() # ensure that the children are effectively reaped self.add_zombie(43, 14) self.add_zombie(44, 15) with self.ignore_warnings: self.watcher._sig_chld() self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WTERMSIG.called) m.WIFSIGNALED.reset_mock() m.WIFEXITED.reset_mock() m.WEXITSTATUS.reset_mock() # sigchld called again self.zombies.clear() self.watcher._sig_chld() self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) @waitpid_mocks def test_sigchld_two_children_terminating_together(self, m): callback1 = mock.Mock() callback2 = mock.Mock() # register child 1 with self.watcher: self.running = True self.watcher.add_child_handler(45, callback1, 17, 8) self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # register child 2 with self.watcher: self.watcher.add_child_handler(46, callback2, 1147, 18) self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # children are running self.watcher._sig_chld() self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) 
self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # child 1 terminates (code 78) # child 2 terminates (signal 5) self.add_zombie(45, 78) self.add_zombie(46, -5) self.running = False self.watcher._sig_chld() callback1.assert_called_once_with(45, 78, 17, 8) callback2.assert_called_once_with(46, -5, 1147, 18) self.assertTrue(m.WIFSIGNALED.called) self.assertTrue(m.WIFEXITED.called) self.assertTrue(m.WEXITSTATUS.called) self.assertTrue(m.WTERMSIG.called) m.WIFSIGNALED.reset_mock() m.WIFEXITED.reset_mock() m.WTERMSIG.reset_mock() m.WEXITSTATUS.reset_mock() callback1.reset_mock() callback2.reset_mock() # ensure that the children are effectively reaped self.add_zombie(45, 14) self.add_zombie(46, 15) with self.ignore_warnings: self.watcher._sig_chld() self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WTERMSIG.called) @waitpid_mocks def test_sigchld_race_condition(self, m): # register a child callback = mock.Mock() with self.watcher: # child terminates before being registered self.add_zombie(50, 4) self.watcher._sig_chld() self.watcher.add_child_handler(50, callback, 1, 12) callback.assert_called_once_with(50, 4, 1, 12) callback.reset_mock() # ensure that the child is effectively reaped self.add_zombie(50, -1) with self.ignore_warnings: self.watcher._sig_chld() self.assertFalse(callback.called) @waitpid_mocks def test_sigchld_replace_handler(self, m): callback1 = mock.Mock() callback2 = mock.Mock() # register a child with self.watcher: self.running = True self.watcher.add_child_handler(51, callback1, 19) self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # register the same child again with self.watcher: self.watcher.add_child_handler(51, callback2, 21) self.assertFalse(callback1.called) self.assertFalse(callback2.called) 
self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # child terminates (signal 8) self.running = False self.add_zombie(51, -8) self.watcher._sig_chld() callback2.assert_called_once_with(51, -8, 21) self.assertFalse(callback1.called) self.assertTrue(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertTrue(m.WTERMSIG.called) m.WIFSIGNALED.reset_mock() m.WIFEXITED.reset_mock() m.WTERMSIG.reset_mock() callback2.reset_mock() # ensure that the child is effectively reaped self.add_zombie(51, 13) with self.ignore_warnings: self.watcher._sig_chld() self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(m.WTERMSIG.called) @waitpid_mocks def test_sigchld_remove_handler(self, m): callback = mock.Mock() # register a child with self.watcher: self.running = True self.watcher.add_child_handler(52, callback, 1984) self.assertFalse(callback.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # unregister the child self.watcher.remove_child_handler(52) self.assertFalse(callback.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # child terminates (code 99) self.running = False self.add_zombie(52, 99) with self.ignore_warnings: self.watcher._sig_chld() self.assertFalse(callback.called) @waitpid_mocks def test_sigchld_unknown_status(self, m): callback = mock.Mock() # register a child with self.watcher: self.running = True self.watcher.add_child_handler(53, callback, -19) self.assertFalse(callback.called) self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # terminate with unknown status self.zombies[53] = 1178 self.running = 
False self.watcher._sig_chld() callback.assert_called_once_with(53, 1178, -19) self.assertTrue(m.WIFEXITED.called) self.assertTrue(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) callback.reset_mock() m.WIFEXITED.reset_mock() m.WIFSIGNALED.reset_mock() # ensure that the child is effectively reaped self.add_zombie(53, 101) with self.ignore_warnings: self.watcher._sig_chld() self.assertFalse(callback.called) @waitpid_mocks def test_remove_child_handler(self, m): callback1 = mock.Mock() callback2 = mock.Mock() callback3 = mock.Mock() # register children with self.watcher: self.running = True self.watcher.add_child_handler(54, callback1, 1) self.watcher.add_child_handler(55, callback2, 2) self.watcher.add_child_handler(56, callback3, 3) # remove child handler 1 self.assertTrue(self.watcher.remove_child_handler(54)) # remove child handler 2 multiple times self.assertTrue(self.watcher.remove_child_handler(55)) self.assertFalse(self.watcher.remove_child_handler(55)) self.assertFalse(self.watcher.remove_child_handler(55)) # all children terminate self.add_zombie(54, 0) self.add_zombie(55, 1) self.add_zombie(56, 2) self.running = False with self.ignore_warnings: self.watcher._sig_chld() self.assertFalse(callback1.called) self.assertFalse(callback2.called) callback3.assert_called_once_with(56, 2, 3) @waitpid_mocks def test_sigchld_unhandled_exception(self, m): callback = mock.Mock() # register a child with self.watcher: self.running = True self.watcher.add_child_handler(57, callback) # raise an exception m.waitpid.side_effect = ValueError with mock.patch.object(log.logger, 'error') as m_error: self.assertEqual(self.watcher._sig_chld(), None) self.assertTrue(m_error.called) @waitpid_mocks def test_sigchld_child_reaped_elsewhere(self, m): # register a child callback = mock.Mock() with self.watcher: self.running = True self.watcher.add_child_handler(58, callback) self.assertFalse(callback.called) 
self.assertFalse(m.WIFEXITED.called) self.assertFalse(m.WIFSIGNALED.called) self.assertFalse(m.WEXITSTATUS.called) self.assertFalse(m.WTERMSIG.called) # child terminates self.running = False self.add_zombie(58, 4) # waitpid is called elsewhere os.waitpid(58, os.WNOHANG) m.waitpid.reset_mock() # sigchld with self.ignore_warnings: self.watcher._sig_chld() if isinstance(self.watcher, asyncio.FastChildWatcher): # here the FastChildWatche enters a deadlock # (there is no way to prevent it) self.assertFalse(callback.called) else: callback.assert_called_once_with(58, 255) @waitpid_mocks def test_sigchld_unknown_pid_during_registration(self, m): # register two children callback1 = mock.Mock() callback2 = mock.Mock() with self.ignore_warnings, self.watcher: self.running = True # child 1 terminates self.add_zombie(591, 7) # an unknown child terminates self.add_zombie(593, 17) self.watcher._sig_chld() self.watcher.add_child_handler(591, callback1) self.watcher.add_child_handler(592, callback2) callback1.assert_called_once_with(591, 7) self.assertFalse(callback2.called) @waitpid_mocks def test_set_loop(self, m): # register a child callback = mock.Mock() with self.watcher: self.running = True self.watcher.add_child_handler(60, callback) # attach a new loop old_loop = self.loop self.loop = self.new_test_loop() patch = mock.patch.object with patch(old_loop, "remove_signal_handler") as m_old_remove, \ patch(self.loop, "add_signal_handler") as m_new_add: self.watcher.attach_loop(self.loop) m_old_remove.assert_called_once_with( signal.SIGCHLD) m_new_add.assert_called_once_with( signal.SIGCHLD, self.watcher._sig_chld) # child terminates self.running = False self.add_zombie(60, 9) self.watcher._sig_chld() callback.assert_called_once_with(60, 9) @waitpid_mocks def test_set_loop_race_condition(self, m): # register 3 children callback1 = mock.Mock() callback2 = mock.Mock() callback3 = mock.Mock() with self.watcher: self.running = True self.watcher.add_child_handler(61, callback1) 
self.watcher.add_child_handler(62, callback2) self.watcher.add_child_handler(622, callback3) # detach the loop old_loop = self.loop self.loop = None with mock.patch.object( old_loop, "remove_signal_handler") as m_remove_signal_handler: self.watcher.attach_loop(None) m_remove_signal_handler.assert_called_once_with( signal.SIGCHLD) # child 1 & 2 terminate self.add_zombie(61, 11) self.add_zombie(62, -5) # SIGCHLD was not caught self.assertFalse(callback1.called) self.assertFalse(callback2.called) self.assertFalse(callback3.called) # attach a new loop self.loop = self.new_test_loop() with mock.patch.object( self.loop, "add_signal_handler") as m_add_signal_handler: self.watcher.attach_loop(self.loop) m_add_signal_handler.assert_called_once_with( signal.SIGCHLD, self.watcher._sig_chld) callback1.assert_called_once_with(61, 11) # race condition! callback2.assert_called_once_with(62, -5) # race condition! self.assertFalse(callback3.called) callback1.reset_mock() callback2.reset_mock() # child 3 terminates self.running = False self.add_zombie(622, 19) self.watcher._sig_chld() self.assertFalse(callback1.called) self.assertFalse(callback2.called) callback3.assert_called_once_with(622, 19) @waitpid_mocks def test_close(self, m): # register two children callback1 = mock.Mock() with self.watcher: self.running = True # child 1 terminates self.add_zombie(63, 9) # other child terminates self.add_zombie(65, 18) self.watcher._sig_chld() self.watcher.add_child_handler(63, callback1) self.watcher.add_child_handler(64, callback1) self.assertEqual(len(self.watcher._callbacks), 1) if isinstance(self.watcher, asyncio.FastChildWatcher): self.assertEqual(len(self.watcher._zombies), 1) with mock.patch.object( self.loop, "remove_signal_handler") as m_remove_signal_handler: self.watcher.close() m_remove_signal_handler.assert_called_once_with( signal.SIGCHLD) self.assertFalse(self.watcher._callbacks) if isinstance(self.watcher, asyncio.FastChildWatcher): self.assertFalse(self.watcher._zombies) 
class SafeChildWatcherTests (ChildWatcherTestsMixin, test_utils.TestCase):
    """Run the shared ChildWatcherTestsMixin suite against SafeChildWatcher."""

    def create_watcher(self):
        # Factory the mixin uses to pick the watcher implementation under test.
        return asyncio.SafeChildWatcher()


class FastChildWatcherTests (ChildWatcherTestsMixin, test_utils.TestCase):
    """Run the shared ChildWatcherTestsMixin suite against FastChildWatcher."""

    def create_watcher(self):
        return asyncio.FastChildWatcher()


class PolicyTests(unittest.TestCase):
    """Tests for child-watcher management on the default event loop policy."""

    def create_policy(self):
        # Fresh policy per test so watcher/loop state never leaks between tests.
        return asyncio.DefaultEventLoopPolicy()

    def test_get_child_watcher(self):
        # The policy starts without a watcher, lazily creates a
        # SafeChildWatcher on first request, and then keeps returning
        # that same instance.
        policy = self.create_policy()
        self.assertIsNone(policy._watcher)

        watcher = policy.get_child_watcher()
        self.assertIsInstance(watcher, asyncio.SafeChildWatcher)
        self.assertIs(policy._watcher, watcher)

        self.assertIs(watcher, policy.get_child_watcher())
        # No event loop was created yet, so the watcher is not attached.
        self.assertIsNone(watcher._loop)

    def test_get_child_watcher_after_set(self):
        # An explicitly set watcher is stored and returned as-is.
        policy = self.create_policy()
        watcher = asyncio.FastChildWatcher()

        policy.set_child_watcher(watcher)
        self.assertIs(policy._watcher, watcher)
        self.assertIs(watcher, policy.get_child_watcher())

    def test_get_child_watcher_with_mainloop_existing(self):
        # When a main-thread loop already exists, the lazily created
        # watcher is attached to it immediately.
        policy = self.create_policy()
        loop = policy.get_event_loop()

        self.assertIsNone(policy._watcher)
        watcher = policy.get_child_watcher()

        self.assertIsInstance(watcher, asyncio.SafeChildWatcher)
        self.assertIs(watcher._loop, loop)

        loop.close()

    def test_get_child_watcher_thread(self):
        # A watcher requested from a non-main thread is created but stays
        # detached (watcher._loop is None), even though that thread has
        # its own event loop.

        def f():
            policy.set_event_loop(policy.new_event_loop())

            self.assertIsInstance(policy.get_event_loop(),
                                  asyncio.AbstractEventLoop)
            watcher = policy.get_child_watcher()

            self.assertIsInstance(watcher, asyncio.SafeChildWatcher)
            self.assertIsNone(watcher._loop)

            policy.get_event_loop().close()

        policy = self.create_policy()
        th = threading.Thread(target=f)
        th.start()
        th.join()

    def test_child_watcher_replace_mainloop_existing(self):
        # Replacing the main-thread event loop re-attaches the existing
        # watcher to the new loop; setting it to None detaches the watcher.
        policy = self.create_policy()
        loop = policy.get_event_loop()

        watcher = policy.get_child_watcher()

        self.assertIs(watcher._loop, loop)

        new_loop = policy.new_event_loop()
        policy.set_event_loop(new_loop)

        self.assertIs(watcher._loop, new_loop)

        policy.set_event_loop(None)

        self.assertIs(watcher._loop, None)

        loop.close()
        new_loop.close()


if __name__ == '__main__':
    unittest.main()
lgpl-3.0
EricMinso/Scripts
ExemplesPython/NoseTests/test_decouverte.py
1
1802
#!/usr/bin/env python from configuration import ConfigurationDuLog import os, sys logger = None message_découverte = "Test de découverte exécuté : %s"%__file__ def setup_module(module): """ Méthode que Nose Test exécute avant le début des tests """ global logger logger = ConfigurationDuLog.getLogger( __name__ ) logger.debug( "# Démarrage du %s"%str( module )) def teardown_module(module): """ Méthode que Nose Test exécute après la fin des tests """ logger.debug( "# Fin du %s"%str( module )) logger.debug( " - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -" ) def test_decouverte_print_console(): # Ecriture sur la console print( "PRINT: %s"%message_découverte ) assert True def test_decouverte_print_log(): assert logger is not None # Ecriture dans le log logger.info( "LOG: %s"%message_découverte ) def test_decouverte_print_infos_debug(): logger.debug( "** INFORMATIONS DEBOGAGE **") logger.debug( "__name__ : '%s'"%__name__ ) logger.debug( "__file__ : '%s'"%__file__ ) logger.debug( "__package__ : '%s'"%__package__ ) # logger.debug( "__class__ : '%s'"%__class__ ) # logger.debug( "__path__ : '%s'"%__path__ ) logger.debug( "__loader__ : '%s'"%__loader__ ) # logger.debug( "Loader Name : '%s'"%__loader__.fullname ) logger.debug( "Sys.Args : '%s'"%( ",".join([ str(elt) for elt in sys.argv ]))) def test_decouverte_print_environnement(): workspace = os.getenv( 'WORKSPACE', False ) logger.info( "Variable d'environnement 'WORKSPACE' : %s"%str( workspace )) assert workspace ### En cas d'appel manuel ### if( __name__ == "__main__" ): print( "Ce fichier ne s'appelle pas directement." ) print( "Il est utilisé par les tests nose" )
gpl-3.0
manaris/jythonMusic
library/jython2.5.3/Lib/encodings/utf_16_be.py
860
1037
""" Python 'utf-16-be' Codec Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ import codecs ### Codec APIs encode = codecs.utf_16_be_encode def decode(input, errors='strict'): return codecs.utf_16_be_decode(input, errors, True) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.utf_16_be_encode(input, self.errors)[0] class IncrementalDecoder(codecs.BufferedIncrementalDecoder): _buffer_decode = codecs.utf_16_be_decode class StreamWriter(codecs.StreamWriter): encode = codecs.utf_16_be_encode class StreamReader(codecs.StreamReader): decode = codecs.utf_16_be_decode ### encodings module API def getregentry(): return codecs.CodecInfo( name='utf-16-be', encode=encode, decode=decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, )
gpl-3.0
gunan/tensorflow
tensorflow/lite/micro/examples/magic_wand/train/data_augmentation.py
19
2836
# Lint as: python3 # Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # pylint: disable=g-bad-import-order """Data augmentation that will be used in data_load.py.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import random import numpy as np def time_wrapping(molecule, denominator, data): """Generate (molecule/denominator)x speed data.""" tmp_data = [[0 for i in range(len(data[0]))] for j in range((int(len(data) / molecule) - 1) * denominator)] for i in range(int(len(data) / molecule) - 1): for j in range(len(data[i])): for k in range(denominator): tmp_data[denominator * i + k][j] = (data[molecule * i + k][j] * (denominator - k) + data[molecule * i + k + 1][j] * k) / denominator return tmp_data def augment_data(original_data, original_label): """Perform data augmentation.""" new_data = [] new_label = [] for idx, (data, label) in enumerate(zip(original_data, original_label)): # pylint: disable=unused-variable # Original data new_data.append(data) new_label.append(label) # Sequence shift for num in range(5): # pylint: disable=unused-variable new_data.append((np.array(data, dtype=np.float32) + (random.random() - 0.5) * 200).tolist()) new_label.append(label) # Random noise tmp_data = [[0 for i in range(len(data[0]))] for j in range(len(data))] for num in range(5): for i in 
range(len(tmp_data)): for j in range(len(tmp_data[i])): tmp_data[i][j] = data[i][j] + 5 * random.random() new_data.append(tmp_data) new_label.append(label) # Time warping fractions = [(3, 2), (5, 3), (2, 3), (3, 4), (9, 5), (6, 5), (4, 5)] for molecule, denominator in fractions: new_data.append(time_wrapping(molecule, denominator, data)) new_label.append(label) # Movement amplification for molecule, denominator in fractions: new_data.append( (np.array(data, dtype=np.float32) * molecule / denominator).tolist()) new_label.append(label) return new_data, new_label
apache-2.0
GbalsaC/bitnamiP
common/djangoapps/external_auth/tests/test_ssl.py
116
18120
""" Provides unit tests for SSL based authentication portions of the external_auth app. """ import copy import unittest from django.conf import settings from django.contrib.auth import SESSION_KEY from django.contrib.auth.models import AnonymousUser, User from django.contrib.sessions.middleware import SessionMiddleware from django.core.urlresolvers import reverse from django.test.client import Client from django.test.client import RequestFactory from django.test.utils import override_settings from edxmako.middleware import MakoMiddleware from external_auth.models import ExternalAuthMap import external_auth.views from mock import Mock from student.models import CourseEnrollment from student.roles import CourseStaffRole from student.tests.factories import UserFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory FEATURES_WITH_SSL_AUTH = settings.FEATURES.copy() FEATURES_WITH_SSL_AUTH['AUTH_USE_CERTIFICATES'] = True FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP = FEATURES_WITH_SSL_AUTH.copy() FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP['AUTH_USE_CERTIFICATES_IMMEDIATE_SIGNUP'] = True FEATURES_WITH_SSL_AUTH_AUTO_ACTIVATE = FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP.copy() FEATURES_WITH_SSL_AUTH_AUTO_ACTIVATE['BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'] = True FEATURES_WITHOUT_SSL_AUTH = settings.FEATURES.copy() FEATURES_WITHOUT_SSL_AUTH['AUTH_USE_CERTIFICATES'] = False CACHES_ENABLE_GENERAL = copy.deepcopy(settings.CACHES) CACHES_ENABLE_GENERAL['general']['BACKEND'] = 'django.core.cache.backends.locmem.LocMemCache' @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH) @override_settings(CACHES=CACHES_ENABLE_GENERAL) class SSLClientTest(ModuleStoreTestCase): """ Tests SSL Authentication code sections of external_auth """ AUTH_DN = '/C=US/ST=Massachusetts/O=Massachusetts Institute of Technology/OU=Client CA v1/CN={0}/emailAddress={1}' USER_NAME = 'test_user_ssl' USER_EMAIL = 'test_user_ssl@EDX.ORG' 
MOCK_URL = '/' def _create_ssl_request(self, url): """Creates a basic request for SSL use.""" request = self.factory.get(url) request.META['SSL_CLIENT_S_DN'] = self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL) request.user = AnonymousUser() middleware = SessionMiddleware() middleware.process_request(request) request.session.save() MakoMiddleware().process_request(request) return request def _create_normal_request(self, url): """Creates sessioned request without SSL headers""" request = self.factory.get(url) request.user = AnonymousUser() middleware = SessionMiddleware() middleware.process_request(request) request.session.save() MakoMiddleware().process_request(request) return request def setUp(self): """Setup test case by adding primary user.""" super(SSLClientTest, self).setUp() self.client = Client() self.factory = RequestFactory() self.mock = Mock() @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') def test_ssl_login_with_signup_lms(self): """ Validate that an SSL login creates an eamap user and redirects them to the signup page. """ response = external_auth.views.ssl_login(self._create_ssl_request('/')) # Response should contain template for signup form, eamap should have user, and internal # auth should not have a user self.assertIn('<form role="form" id="register-form" method="post"', response.content) try: ExternalAuthMap.objects.get(external_id=self.USER_EMAIL) except ExternalAuthMap.DoesNotExist, ex: self.fail('User did not get properly added to external auth map, exception was {0}'.format(str(ex))) with self.assertRaises(User.DoesNotExist): User.objects.get(email=self.USER_EMAIL) @unittest.skipUnless(settings.ROOT_URLCONF == 'cms.urls', 'Test only valid in cms') def test_ssl_login_with_signup_cms(self): """ Validate that an SSL login creates an eamap user and redirects them to the signup page on CMS. 
""" self.client.get( reverse('contentstore.views.login_page'), SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL) ) try: ExternalAuthMap.objects.get(external_id=self.USER_EMAIL) except ExternalAuthMap.DoesNotExist, ex: self.fail('User did not get properly added to external auth map, exception was {0}'.format(str(ex))) with self.assertRaises(User.DoesNotExist): User.objects.get(email=self.USER_EMAIL) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP) def test_ssl_login_without_signup_lms(self): """ Test IMMEDIATE_SIGNUP feature flag and ensure the user account is automatically created and the user is redirected to slash. """ external_auth.views.ssl_login(self._create_ssl_request('/')) # Assert our user exists in both eamap and Users, and that we are logged in try: ExternalAuthMap.objects.get(external_id=self.USER_EMAIL) except ExternalAuthMap.DoesNotExist, ex: self.fail('User did not get properly added to external auth map, exception was {0}'.format(str(ex))) try: User.objects.get(email=self.USER_EMAIL) except ExternalAuthMap.DoesNotExist, ex: self.fail('User did not get properly added to internal users, exception was {0}'.format(str(ex))) @unittest.skipUnless(settings.ROOT_URLCONF == 'cms.urls', 'Test only valid in cms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP) def test_ssl_login_without_signup_cms(self): """ Test IMMEDIATE_SIGNUP feature flag and ensure the user account is automatically created on CMS, and that we are redirected to courses. 
""" response = self.client.get( reverse('contentstore.views.login_page'), SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL) ) self.assertEqual(response.status_code, 302) self.assertIn('/course', response['location']) # Assert our user exists in both eamap and Users, and that we are logged in try: ExternalAuthMap.objects.get(external_id=self.USER_EMAIL) except ExternalAuthMap.DoesNotExist, ex: self.fail('User did not get properly added to external auth map, exception was {0}'.format(str(ex))) try: User.objects.get(email=self.USER_EMAIL) except ExternalAuthMap.DoesNotExist, ex: self.fail('User did not get properly added to internal users, exception was {0}'.format(str(ex))) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP) def test_default_login_decorator_ssl(self): """ Make sure that SSL login happens if it is enabled on protected views instead of showing the login form. """ response = self.client.get(reverse('dashboard'), follows=True) self.assertEqual(response.status_code, 302) self.assertIn(reverse('signin_user'), response['location']) response = self.client.get( reverse('dashboard'), follow=True, SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL)) self.assertEquals(('http://testserver/dashboard', 302), response.redirect_chain[-1]) self.assertIn(SESSION_KEY, self.client.session) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP) def test_registration_page_bypass(self): """ This tests to make sure when immediate signup is on that the user doesn't get presented with the registration page. 
""" response = self.client.get( reverse('register_user'), follow=True, SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL)) self.assertEquals(('http://testserver/dashboard', 302), response.redirect_chain[-1]) self.assertIn(SESSION_KEY, self.client.session) @unittest.skipUnless(settings.ROOT_URLCONF == 'cms.urls', 'Test only valid in cms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP) def test_cms_registration_page_bypass(self): """ This tests to make sure when immediate signup is on that the user doesn't get presented with the registration page. """ response = self.client.get( reverse('signup'), follow=True, SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL) ) self.assertEqual(response.status_code, 404) # assert that we are logged in self.assertIn(SESSION_KEY, self.client.session) # Now that we are logged in, make sure we don't see the registration page response = self.client.get(reverse('signup'), follow=True) self.assertEqual(response.status_code, 404) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP) def test_signin_page_bypass(self): """ This tests to make sure when ssl authentication is on that user doesn't get presented with the login page if they have a certificate. 
""" # Test that they do signin if they don't have a cert response = self.client.get(reverse('signin_user')) self.assertEqual(200, response.status_code) self.assertTrue('login-and-registration-container' in response.content) # And get directly logged in otherwise response = self.client.get( reverse('signin_user'), follow=True, SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL)) self.assertEquals(('http://testserver/dashboard', 302), response.redirect_chain[-1]) self.assertIn(SESSION_KEY, self.client.session) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP) def test_ssl_bad_eamap(self): """ This tests the response when a user exists but their eamap password doesn't match their internal password. The internal password use for certificates has been removed and this should not fail. """ # Create account, break internal password, and activate account external_auth.views.ssl_login(self._create_ssl_request('/')) user = User.objects.get(email=self.USER_EMAIL) user.set_password('not autogenerated') user.is_active = True user.save() # Make sure we can still login self.client.get( reverse('signin_user'), follow=True, SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL)) self.assertIn(SESSION_KEY, self.client.session) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @override_settings(FEATURES=FEATURES_WITHOUT_SSL_AUTH) def test_ssl_decorator_no_certs(self): """Make sure no external auth happens without SSL enabled""" dec_mock = external_auth.views.ssl_login_shortcut(self.mock) request = self._create_normal_request(self.MOCK_URL) request.user = AnonymousUser() # Call decorated mock function to make sure it passes # the call through without hitting the external_auth functions and # thereby creating an external auth map object. 
dec_mock(request) self.assertTrue(self.mock.called) self.assertEqual(0, len(ExternalAuthMap.objects.all())) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') def test_ssl_login_decorator(self): """Create mock function to test ssl login decorator""" dec_mock = external_auth.views.ssl_login_shortcut(self.mock) # Test that anonymous without cert doesn't create authmap request = self._create_normal_request(self.MOCK_URL) dec_mock(request) self.assertTrue(self.mock.called) self.assertEqual(0, len(ExternalAuthMap.objects.all())) # Test valid user self.mock.reset_mock() request = self._create_ssl_request(self.MOCK_URL) dec_mock(request) self.assertFalse(self.mock.called) self.assertEqual(1, len(ExternalAuthMap.objects.all())) # Test logged in user gets called self.mock.reset_mock() request = self._create_ssl_request(self.MOCK_URL) request.user = UserFactory() dec_mock(request) self.assertTrue(self.mock.called) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_IMMEDIATE_SIGNUP) def test_ssl_decorator_auto_signup(self): """ Test that with auto signup the decorator will bypass registration and call retfun. 
""" dec_mock = external_auth.views.ssl_login_shortcut(self.mock) request = self._create_ssl_request(self.MOCK_URL) dec_mock(request) # Assert our user exists in both eamap and Users try: ExternalAuthMap.objects.get(external_id=self.USER_EMAIL) except ExternalAuthMap.DoesNotExist, ex: self.fail('User did not get properly added to external auth map, exception was {0}'.format(str(ex))) try: User.objects.get(email=self.USER_EMAIL) except ExternalAuthMap.DoesNotExist, ex: self.fail('User did not get properly added to internal users, exception was {0}'.format(str(ex))) self.assertEqual(1, len(ExternalAuthMap.objects.all())) self.assertTrue(self.mock.called) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_AUTO_ACTIVATE) def test_ssl_lms_redirection(self): """ Auto signup auth user and ensure they return to the original url they visited after being logged in. """ course = CourseFactory.create( org='MITx', number='999', display_name='Robot Super Course' ) external_auth.views.ssl_login(self._create_ssl_request('/')) user = User.objects.get(email=self.USER_EMAIL) CourseEnrollment.enroll(user, course.id) course_private_url = '/courses/MITx/999/Robot_Super_Course/courseware' self.assertFalse(SESSION_KEY in self.client.session) response = self.client.get( course_private_url, follow=True, SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL), HTTP_ACCEPT='text/html' ) self.assertEqual(('http://testserver{0}'.format(course_private_url), 302), response.redirect_chain[-1]) self.assertIn(SESSION_KEY, self.client.session) @unittest.skipUnless(settings.ROOT_URLCONF == 'cms.urls', 'Test only valid in cms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_AUTO_ACTIVATE) def test_ssl_cms_redirection(self): """ Auto signup auth user and ensure they return to the original url they visited after being logged in. 
""" course = CourseFactory.create( org='MITx', number='999', display_name='Robot Super Course' ) external_auth.views.ssl_login(self._create_ssl_request('/')) user = User.objects.get(email=self.USER_EMAIL) CourseEnrollment.enroll(user, course.id) CourseStaffRole(course.id).add_users(user) course_private_url = reverse('course_handler', args=(unicode(course.id),)) self.assertFalse(SESSION_KEY in self.client.session) response = self.client.get( course_private_url, follow=True, SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL), HTTP_ACCEPT='text/html' ) self.assertEqual(('http://testserver{0}'.format(course_private_url), 302), response.redirect_chain[-1]) self.assertIn(SESSION_KEY, self.client.session) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH_AUTO_ACTIVATE) def test_ssl_logout(self): """ Because the branding view is cached for anonymous users and we use that to login users, the browser wasn't actually making the request to that view as the redirect was being cached. This caused a redirect loop, and this test confirms that that won't happen. Test is only in LMS because we don't use / in studio to login SSL users. """ response = self.client.get( reverse('dashboard'), follow=True, SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL)) self.assertEquals(('http://testserver/dashboard', 302), response.redirect_chain[-1]) self.assertIn(SESSION_KEY, self.client.session) response = self.client.get( reverse('logout'), follow=True, SSL_CLIENT_S_DN=self.AUTH_DN.format(self.USER_NAME, self.USER_EMAIL) ) # Make sure that even though we logged out, we have logged back in self.assertIn(SESSION_KEY, self.client.session)
agpl-3.0
jameswatt2008/jameswatt2008.github.io
python/Python核心编程/网络编程/截图和代码/概述、SOCKET/多进程copy文件/test-复件/sched.py
14
6216
"""A generally useful event scheduler class. Each instance of this class manages its own queue. No multi-threading is implied; you are supposed to hack that yourself, or use a single instance per application. Each instance is parametrized with two functions, one that is supposed to return the current time, one that is supposed to implement a delay. You can implement real-time scheduling by substituting time and sleep from built-in module time, or you can implement simulated time by writing your own functions. This can also be used to integrate scheduling with STDWIN events; the delay function is allowed to modify the queue. Time can be expressed as integers or floating point numbers, as long as it is consistent. Events are specified by tuples (time, priority, action, argument, kwargs). As in UNIX, lower priority numbers mean higher priority; in this way the queue can be maintained as a priority queue. Execution of the event means calling the action function, passing it the argument sequence in "argument" (remember that in Python, multiple function arguments are be packed in a sequence) and keyword parameters in "kwargs". The action function may be an instance method so it has another way to reference private data (besides global variables). """ # XXX The timefunc and delayfunc should have been defined as methods # XXX so you can define new kinds of schedulers using subclassing # XXX instead of having to define a module or class just to hold # XXX the global state of your particular time and delay functions. 
import time
import heapq
from collections import namedtuple
try:
    import threading
except ImportError:
    # Builds compiled without thread support fall back to the no-op
    # dummy API so the locking code below still runs unchanged.
    import dummy_threading as threading
from time import monotonic as _time

__all__ = ["scheduler"]


class Event(namedtuple('Event', 'time, priority, action, argument, kwargs')):
    # Events compare on (time, priority) ONLY, so the heap in scheduler
    # orders by due time first, then priority; action/argument/kwargs are
    # deliberately excluded from the comparisons.
    def __eq__(s, o): return (s.time, s.priority) == (o.time, o.priority)
    def __lt__(s, o): return (s.time, s.priority) < (o.time, o.priority)
    def __le__(s, o): return (s.time, s.priority) <= (o.time, o.priority)
    def __gt__(s, o): return (s.time, s.priority) > (o.time, o.priority)
    def __ge__(s, o): return (s.time, s.priority) >= (o.time, o.priority)

# Unique marker distinguishing "kwargs not supplied" from an explicit {}.
_sentinel = object()


class scheduler:
    """Event scheduler backed by a heap of Event tuples.

    All queue access is guarded by an RLock so enter/cancel may be
    called from other threads while run() is executing.
    """

    def __init__(self, timefunc=_time, delayfunc=time.sleep):
        """Initialize a new instance, passing the time and delay functions"""
        self._queue = []  # heap of Event tuples, ordered by (time, priority)
        self._lock = threading.RLock()  # guards every access to self._queue
        self.timefunc = timefunc
        self.delayfunc = delayfunc

    def enterabs(self, time, priority, action, argument=(), kwargs=_sentinel):
        """Enter a new event in the queue at an absolute time.

        Returns an ID for the event which can be used to remove it,
        if necessary.

        """
        if kwargs is _sentinel:
            # Fresh dict per event; a {} default would be shared.
            kwargs = {}
        event = Event(time, priority, action, argument, kwargs)
        with self._lock:
            heapq.heappush(self._queue, event)
        return event # The ID

    def enter(self, delay, priority, action, argument=(), kwargs=_sentinel):
        """A variant that specifies the time as a relative time.

        This is actually the more commonly used interface.

        """
        time = self.timefunc() + delay
        return self.enterabs(time, priority, action, argument, kwargs)

    def cancel(self, event):
        """Remove an event from the queue.

        This must be presented the ID as returned by enter().
        If the event is not in the queue, this raises ValueError.

        """
        with self._lock:
            self._queue.remove(event)
            # remove() breaks the heap invariant; rebuild it.
            heapq.heapify(self._queue)

    def empty(self):
        """Check whether the queue is empty."""
        with self._lock:
            return not self._queue

    def run(self, blocking=True):
        """Execute events until the queue is empty.
        If blocking is False executes the scheduled events due to
        expire soonest (if any) and then return the deadline of the
        next scheduled call in the scheduler.

        When there is a positive delay until the first event, the
        delay function is called and the event is left in the queue;
        otherwise, the event is removed from the queue and executed
        (its action function is called, passing it the argument).  If
        the delay function returns prematurely, it is simply
        restarted.

        It is legal for both the delay function and the action
        function to modify the queue or to raise an exception;
        exceptions are not caught but the scheduler's state remains
        well-defined so run() may be called again.

        A questionable hack is added to allow other threads to run:
        just after an event is executed, a delay of 0 is executed, to
        avoid monopolizing the CPU when other threads are also
        runnable.

        """
        # localize variable access to minimize overhead
        # and to improve thread safety
        lock = self._lock
        q = self._queue
        delayfunc = self.delayfunc
        timefunc = self.timefunc
        pop = heapq.heappop
        while True:
            with lock:
                if not q:
                    break
                # Peek at the earliest event; only pop it (under the lock)
                # once we know it is due, so a premature wakeup or a
                # concurrent cancel leaves the queue consistent.
                time, priority, action, argument, kwargs = q[0]
                now = timefunc()
                if time > now:
                    delay = True
                else:
                    delay = False
                    pop(q)
            # The lock is released before delaying/executing so other
            # threads may enter or cancel events meanwhile.
            if delay:
                if not blocking:
                    return time - now
                delayfunc(time - now)
            else:
                action(*argument, **kwargs)
                delayfunc(0)   # Let other threads run

    @property
    def queue(self):
        """An ordered list of upcoming events.

        Events are named tuples with fields for:
            time, priority, action, arguments, kwargs

        """
        # Use heapq to sort the queue rather than using 'sorted(self._queue)'.
        # With heapq, two events scheduled at the same time will show in
        # the actual order they would be retrieved.
        with self._lock:
            events = self._queue[:]
        return list(map(heapq.heappop, [events]*len(events)))
gpl-2.0
swinter2011/login-page-update
markdown/extensions/def_list.py
19
3713
""" Definition List Extension for Python-Markdown ============================================= Added parsing of Definition Lists to Python-Markdown. A simple example: Apple : Pomaceous fruit of plants of the genus Malus in the family Rosaceae. : An american computer company. Orange : The fruit of an evergreen tree of the genus Citrus. Copyright 2008 - [Waylan Limberg](http://achinghead.com) """ from __future__ import absolute_import from __future__ import unicode_literals from . import Extension from ..blockprocessors import BlockProcessor, ListIndentProcessor from ..util import etree import re class DefListProcessor(BlockProcessor): """ Process Definition Lists. """ RE = re.compile(r'(^|\n)[ ]{0,3}:[ ]{1,3}(.*?)(\n|$)') NO_INDENT_RE = re.compile(r'^[ ]{0,3}[^ :]') def test(self, parent, block): return bool(self.RE.search(block)) def run(self, parent, blocks): raw_block = blocks.pop(0) m = self.RE.search(raw_block) terms = [l.strip() for l in raw_block[:m.start()].split('\n') if l.strip()] block = raw_block[m.end():] no_indent = self.NO_INDENT_RE.match(block) if no_indent: d, theRest = (block, None) else: d, theRest = self.detab(block) if d: d = '%s\n%s' % (m.group(2), d) else: d = m.group(2) sibling = self.lastChild(parent) if not terms and sibling is None: # This is not a definition item. Most likely a paragraph that # starts with a colon at the begining of a document or list. 
blocks.insert(0, raw_block) return False if not terms and sibling.tag == 'p': # The previous paragraph contains the terms state = 'looselist' terms = sibling.text.split('\n') parent.remove(sibling) # Aquire new sibling sibling = self.lastChild(parent) else: state = 'list' if sibling and sibling.tag == 'dl': # This is another item on an existing list dl = sibling if not terms and len(dl) and dl[-1].tag == 'dd' and len(dl[-1]): state = 'looselist' else: # This is a new list dl = etree.SubElement(parent, 'dl') # Add terms for term in terms: dt = etree.SubElement(dl, 'dt') dt.text = term # Add definition self.parser.state.set(state) dd = etree.SubElement(dl, 'dd') self.parser.parseBlocks(dd, [d]) self.parser.state.reset() if theRest: blocks.insert(0, theRest) class DefListIndentProcessor(ListIndentProcessor): """ Process indented children of definition list items. """ ITEM_TYPES = ['dd'] LIST_TYPES = ['dl'] def create_item(self, parent, block): """ Create a new dd and parse the block with it as the parent. """ dd = etree.SubElement(parent, 'dd') self.parser.parseBlocks(dd, [block]) class DefListExtension(Extension): """ Add definition lists to Markdown. """ def extendMarkdown(self, md, md_globals): """ Add an instance of DefListProcessor to BlockParser. """ md.parser.blockprocessors.add('defindent', DefListIndentProcessor(md.parser), '>indent') md.parser.blockprocessors.add('deflist', DefListProcessor(md.parser), '>ulist') def makeExtension(configs={}): return DefListExtension(configs=configs)
apache-2.0
matchimmo/django-postman
postman/urls_for_tests.py
4
9564
""" URLconf for tests.py usage. """ from __future__ import unicode_literals from django.conf import settings try: from django.conf.urls import patterns, include, url # django 1.4 except ImportError: from django.conf.urls.defaults import * # "patterns, include, url" is enough for django 1.3, "*" for django 1.2 from django.forms import ValidationError from django.views.generic.base import RedirectView from . import OPTIONS from .views import (InboxView, SentView, ArchivesView, TrashView, WriteView, ReplyView, MessageView, ConversationView, ArchiveView, DeleteView, UndeleteView) # user_filter function set def user_filter_reason(user): if user.get_username() == 'bar': return 'some reason' return None def user_filter_no_reason(user): return '' def user_filter_false(user): return False def user_filter_exception(user): if user.get_username() == 'bar': raise ValidationError(['first good reason', "anyway, I don't like {0}".format(user.get_username())]) return None # exchange_filter function set def exch_filter_reason(sender, recipient, recipients_list): if recipient.get_username() == 'bar': return 'some reason' return None def exch_filter_no_reason(sender, recipient, recipients_list): return '' def exch_filter_false(sender, recipient, recipients_list): return False def exch_filter_exception(sender, recipient, recipients_list): if recipient.get_username() == 'bar': raise ValidationError(['first good reason', "anyway, I don't like {0}".format(recipient.get_username())]) return None # auto-moderation function set def moderate_as_51(message): return 51 def moderate_as_48(message): return (48, "some reason") moderate_as_48.default_reason = 'some default reason' # quote formatters def format_subject(subject): return "Re_ " + subject def format_body(sender, body): return "{0} _ {1}".format(sender, body) postman_patterns = patterns('', # Basic set url(r'^inbox/(?:(?P<option>'+OPTIONS+')/)?$', InboxView.as_view(), name='postman_inbox'), url(r'^sent/(?:(?P<option>'+OPTIONS+')/)?$', 
SentView.as_view(), name='postman_sent'), url(r'^archives/(?:(?P<option>'+OPTIONS+')/)?$', ArchivesView.as_view(), name='postman_archives'), url(r'^trash/(?:(?P<option>'+OPTIONS+')/)?$', TrashView.as_view(), name='postman_trash'), url(r'^write/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(), name='postman_write'), url(r'^reply/(?P<message_id>[\d]+)/$', ReplyView.as_view(), name='postman_reply'), url(r'^view/(?P<message_id>[\d]+)/$', MessageView.as_view(), name='postman_view'), url(r'^view/t/(?P<thread_id>[\d]+)/$', ConversationView.as_view(), name='postman_view_conversation'), url(r'^archive/$', ArchiveView.as_view(), name='postman_archive'), url(r'^delete/$', DeleteView.as_view(), name='postman_delete'), url(r'^undelete/$', UndeleteView.as_view(), name='postman_undelete'), (r'^$', RedirectView.as_view(url='inbox/')), # Customized set # 'success_url' url(r'^write_sent/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(success_url='postman_sent'), name='postman_write_with_success_url_to_sent'), url(r'^reply_sent/(?P<message_id>[\d]+)/$', ReplyView.as_view(success_url='postman_sent'), name='postman_reply_with_success_url_to_sent'), url(r'^archive_arch/$', ArchiveView.as_view(success_url='postman_archives'), name='postman_archive_with_success_url_to_archives'), url(r'^delete_arch/$', DeleteView.as_view(success_url='postman_archives'), name='postman_delete_with_success_url_to_archives'), url(r'^undelete_arch/$', UndeleteView.as_view(success_url='postman_archives'), name='postman_undelete_with_success_url_to_archives'), # 'max' url(r'^write_max/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(max=1), name='postman_write_with_max'), url(r'^reply_max/(?P<message_id>[\d]+)/$', ReplyView.as_view(max=1), name='postman_reply_with_max'), # 'user_filter' on write url(r'^write_user_filter_reason/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(user_filter=user_filter_reason), name='postman_write_with_user_filter_reason'), 
url(r'^write_user_filter_no_reason/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(user_filter=user_filter_no_reason), name='postman_write_with_user_filter_no_reason'), url(r'^write_user_filter_false/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(user_filter=user_filter_false), name='postman_write_with_user_filter_false'), url(r'^write_user_filter_exception/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(user_filter=user_filter_exception), name='postman_write_with_user_filter_exception'), # 'user_filter' on reply url(r'^reply_user_filter_reason/(?P<message_id>[\d]+)/$', ReplyView.as_view(user_filter=user_filter_reason), name='postman_reply_with_user_filter_reason'), url(r'^reply_user_filter_no_reason/(?P<message_id>[\d]+)/$', ReplyView.as_view(user_filter=user_filter_no_reason), name='postman_reply_with_user_filter_no_reason'), url(r'^reply_user_filter_false/(?P<message_id>[\d]+)/$', ReplyView.as_view(user_filter=user_filter_false), name='postman_reply_with_user_filter_false'), url(r'^reply_user_filter_exception/(?P<message_id>[\d]+)/$', ReplyView.as_view(user_filter=user_filter_exception), name='postman_reply_with_user_filter_exception'), # 'exchange_filter' on write url(r'^write_exch_filter_reason/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(exchange_filter=exch_filter_reason), name='postman_write_with_exch_filter_reason'), url(r'^write_exch_filter_no_reason/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(exchange_filter=exch_filter_no_reason), name='postman_write_with_exch_filter_no_reason'), url(r'^write_exch_filter_false/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(exchange_filter=exch_filter_false), name='postman_write_with_exch_filter_false'), url(r'^write_exch_filter_exception/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(exchange_filter=exch_filter_exception), name='postman_write_with_exch_filter_exception'), # 'exchange_filter' on reply url(r'^reply_exch_filter_reason/(?P<message_id>[\d]+)/$', 
ReplyView.as_view(exchange_filter=exch_filter_reason), name='postman_reply_with_exch_filter_reason'), url(r'^reply_exch_filter_no_reason/(?P<message_id>[\d]+)/$', ReplyView.as_view(exchange_filter=exch_filter_no_reason), name='postman_reply_with_exch_filter_no_reason'), url(r'^reply_exch_filter_false/(?P<message_id>[\d]+)/$', ReplyView.as_view(exchange_filter=exch_filter_false), name='postman_reply_with_exch_filter_false'), url(r'^reply_exch_filter_exception/(?P<message_id>[\d]+)/$', ReplyView.as_view(exchange_filter=exch_filter_exception), name='postman_reply_with_exch_filter_exception'), # 'auto_moderators' url(r'^write_moderate/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(auto_moderators=(moderate_as_51,moderate_as_48)), name='postman_write_moderate'), url(r'^reply_moderate/(?P<message_id>[\d]+)/$', ReplyView.as_view(auto_moderators=(moderate_as_51,moderate_as_48)), name='postman_reply_moderate'), # 'formatters' url(r'^reply_formatters/(?P<message_id>[\d]+)/$', ReplyView.as_view(formatters=(format_subject, format_body)), name='postman_reply_formatters'), url(r'^view_formatters/(?P<message_id>[\d]+)/$', MessageView.as_view(formatters=(format_subject, format_body)), name='postman_view_formatters'), # auto-complete url(r'^write_ac/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(autocomplete_channels=('postman_multiple_as1-1', None)), name='postman_write_auto_complete'), url(r'^reply_ac/(?P<message_id>[\d]+)/$', ReplyView.as_view(autocomplete_channel='postman_multiple_as1-1'), name='postman_reply_auto_complete'), # 'template_name' url(r'^inbox_template/(?:(?P<option>'+OPTIONS+')/)?$', InboxView.as_view(template_name='postman/fake.html'), name='postman_inbox_template'), url(r'^sent_template/(?:(?P<option>'+OPTIONS+')/)?$', SentView.as_view(template_name='postman/fake.html'), name='postman_sent_template'), url(r'^archives_template/(?:(?P<option>'+OPTIONS+')/)?$', ArchivesView.as_view(template_name='postman/fake.html'), name='postman_archives_template'), 
url(r'^trash_template/(?:(?P<option>'+OPTIONS+')/)?$', TrashView.as_view(template_name='postman/fake.html'), name='postman_trash_template'), url(r'^write_template/(?:(?P<recipients>[^/#]+)/)?$', WriteView.as_view(template_name='postman/fake.html'), name='postman_write_template'), url(r'^reply_template/(?P<message_id>[\d]+)/$', ReplyView.as_view(template_name='postman/fake.html'), name='postman_reply_template'), url(r'^view_template/(?P<message_id>[\d]+)/$', MessageView.as_view(template_name='postman/fake.html'), name='postman_view_template'), url(r'^view_template/t/(?P<thread_id>[\d]+)/$', ConversationView.as_view(template_name='postman/fake.html'), name='postman_view_conversation_template'), ) urlpatterns = patterns('', (r'^accounts/login/$', 'django.contrib.auth.views.login'), # because of the login_required decorator (r'^messages/', include(postman_patterns)), ) # because of fields.py/AutoCompleteWidget/render()/reverse() if 'ajax_select' in settings.INSTALLED_APPS: urlpatterns += patterns('', (r'^ajax_select/', include('ajax_select.urls')), # django-ajax-selects ) # optional if 'notification' in settings.INSTALLED_APPS: urlpatterns += patterns('', (r'^notification/', include('notification.urls')), # django-notification )
bsd-3-clause
waseem18/oh-mainline
vendor/packages/python-social-auth/social/strategies/base.py
22
6910
import time import random import hashlib from social.utils import setting_name, module_member from social.store import OpenIdStore, OpenIdSessionWrapper from social.pipeline import DEFAULT_AUTH_PIPELINE, DEFAULT_DISCONNECT_PIPELINE from social.pipeline.utils import partial_from_session, partial_to_session class BaseTemplateStrategy(object): def __init__(self, strategy): self.strategy = strategy def render(self, tpl=None, html=None, context=None): if not tpl and not html: raise ValueError('Missing template or html parameters') context = context or {} if tpl: return self.render_template(tpl, context) else: return self.render_string(html, context) def render_template(self, tpl, context): raise NotImplementedError('Implement in subclass') def render_string(self, html, context): raise NotImplementedError('Implement in subclass') class BaseStrategy(object): ALLOWED_CHARS = 'abcdefghijklmnopqrstuvwxyz' \ 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' \ '0123456789' DEFAULT_TEMPLATE_STRATEGY = BaseTemplateStrategy def __init__(self, storage=None, tpl=None): self.storage = storage self.tpl = (tpl or self.DEFAULT_TEMPLATE_STRATEGY)(self) def setting(self, name, default=None, backend=None): names = [setting_name(name), name] if backend: names.insert(0, setting_name(backend.name, name)) for name in names: try: return self.get_setting(name) except (AttributeError, KeyError): pass return default def create_user(self, *args, **kwargs): return self.storage.user.create_user(*args, **kwargs) def get_user(self, *args, **kwargs): return self.storage.user.get_user(*args, **kwargs) def session_setdefault(self, name, value): self.session_set(name, value) return self.session_get(name) def openid_session_dict(self, name): # Many frameworks are switching the session serialization from Pickle # to JSON to avoid code execution risks. Flask did this from Flask # 0.10, Django is switching to JSON by default from version 1.6. 
# # Sadly python-openid stores classes instances in the session which # fails the JSON serialization, the classes are: # # openid.yadis.manager.YadisServiceManager # openid.consumer.discover.OpenIDServiceEndpoint # # This method will return a wrapper over the session value used with # openid (a dict) which will automatically keep a pickled value for the # mentioned classes. return OpenIdSessionWrapper(self.session_setdefault(name, {})) def to_session_value(self, val): return val def from_session_value(self, val): return val def partial_to_session(self, next, backend, request=None, *args, **kwargs): return partial_to_session(self, next, backend, request=request, *args, **kwargs) def partial_from_session(self, session): return partial_from_session(self, session) def clean_partial_pipeline(self, name='partial_pipeline'): self.session_pop(name) def openid_store(self): return OpenIdStore(self) def get_pipeline(self): return self.setting('PIPELINE', DEFAULT_AUTH_PIPELINE) def get_disconnect_pipeline(self): return self.setting('DISCONNECT_PIPELINE', DEFAULT_DISCONNECT_PIPELINE) def random_string(self, length=12, chars=ALLOWED_CHARS): # Implementation borrowed from django 1.4 try: random.SystemRandom() except NotImplementedError: key = self.setting('SECRET_KEY', '') seed = '{0}{1}{2}'.format(random.getstate(), time.time(), key) random.seed(hashlib.sha256(seed.encode()).digest()) return ''.join([random.choice(chars) for i in range(length)]) def absolute_uri(self, path=None): uri = self.build_absolute_uri(path) if uri and self.setting('REDIRECT_IS_HTTPS'): uri = uri.replace('http://', 'https://') return uri def get_language(self): """Return current language""" return '' def send_email_validation(self, backend, email): email_validation = self.setting('EMAIL_VALIDATION_FUNCTION') send_email = module_member(email_validation) code = self.storage.code.make_code(email) send_email(self, backend, code) return code def validate_email(self, email, code): verification_code = 
self.storage.code.get_code(code) if not verification_code or verification_code.code != code: return False else: verification_code.verify() return True def render_html(self, tpl=None, html=None, context=None): """Render given template or raw html with given context""" return self.tpl.render(tpl, html, context) def authenticate(self, backend, *args, **kwargs): """Trigger the authentication mechanism tied to the current framework""" kwargs['strategy'] = self kwargs['storage'] = self.storage kwargs['backend'] = backend return backend.authenticate(*args, **kwargs) def get_backends(self): """Return configured backends""" return self.setting('AUTHENTICATION_BACKENDS', []) # Implement the following methods on strategies sub-classes def redirect(self, url): """Return a response redirect to the given URL""" raise NotImplementedError('Implement in subclass') def get_setting(self, name): """Return value for given setting name""" raise NotImplementedError('Implement in subclass') def html(self, content): """Return HTTP response with given content""" raise NotImplementedError('Implement in subclass') def request_data(self, merge=True): """Return current request data (POST or GET)""" raise NotImplementedError('Implement in subclass') def request_host(self): """Return current host value""" raise NotImplementedError('Implement in subclass') def session_get(self, name, default=None): """Return session value for given key""" raise NotImplementedError('Implement in subclass') def session_set(self, name, value): """Set session value for given key""" raise NotImplementedError('Implement in subclass') def session_pop(self, name): """Pop session value for given key""" raise NotImplementedError('Implement in subclass') def build_absolute_uri(self, path=None): """Build absolute URI with given (optional) path""" raise NotImplementedError('Implement in subclass')
agpl-3.0
beatscoindev/BeatsCoin
share/qt/make_spinner.py
4415
1035
#!/usr/bin/env python # W.J. van der Laan, 2011 # Make spinning .mng animation from a .png # Requires imagemagick 6.7+ from __future__ import division from os import path from PIL import Image from subprocess import Popen SRC='img/reload_scaled.png' DST='../../src/qt/res/movies/update_spinner.mng' TMPDIR='/tmp' TMPNAME='tmp-%03i.png' NUMFRAMES=35 FRAMERATE=10.0 CONVERT='convert' CLOCKWISE=True DSIZE=(16,16) im_src = Image.open(SRC) if CLOCKWISE: im_src = im_src.transpose(Image.FLIP_LEFT_RIGHT) def frame_to_filename(frame): return path.join(TMPDIR, TMPNAME % frame) frame_files = [] for frame in xrange(NUMFRAMES): rotation = (frame + 0.5) / NUMFRAMES * 360.0 if CLOCKWISE: rotation = -rotation im_new = im_src.rotate(rotation, Image.BICUBIC) im_new.thumbnail(DSIZE, Image.ANTIALIAS) outfile = frame_to_filename(frame) im_new.save(outfile, 'png') frame_files.append(outfile) p = Popen([CONVERT, "-delay", str(FRAMERATE), "-dispose", "2"] + frame_files + [DST]) p.communicate()
mit
xshotD/pyglet
pyglet/canvas/xlib.py
40
8646
#!/usr/bin/env python ''' ''' __docformat__ = 'restructuredtext' __version__ = '$Id: $' from ctypes import * import ctypes from pyglet import app from pyglet.app.xlib import XlibSelectDevice from base import Display, Screen, ScreenMode, Canvas import xlib_vidmoderestore # XXX #from pyglet.window import NoSuchDisplayException class NoSuchDisplayException(Exception): pass from pyglet.libs.x11 import xlib try: from pyglet.libs.x11 import xinerama _have_xinerama = True except: _have_xinerama = False try: from pyglet.libs.x11 import xsync _have_xsync = True except: _have_xsync = False try: from pyglet.libs.x11 import xf86vmode _have_xf86vmode = True except: _have_xf86vmode = False # Set up error handler def _error_handler(display, event): # By default, all errors are silently ignored: this has a better chance # of working than the default behaviour of quitting ;-) # # We've actually never seen an error that was our fault; they're always # driver bugs (and so the reports are useless). Nevertheless, set # environment variable PYGLET_DEBUG_X11 to 1 to get dumps of the error # and a traceback (execution will continue). import pyglet if pyglet.options['debug_x11']: event = event.contents buf = c_buffer(1024) xlib.XGetErrorText(display, event.error_code, buf, len(buf)) print 'X11 error:', buf.value print ' serial:', event.serial print ' request:', event.request_code print ' minor:', event.minor_code print ' resource:', event.resourceid import traceback print 'Python stack trace (innermost last):' traceback.print_stack() return 0 _error_handler_ptr = xlib.XErrorHandler(_error_handler) xlib.XSetErrorHandler(_error_handler_ptr) class XlibDisplay(XlibSelectDevice, Display): _display = None # POINTER(xlib.Display) _x_im = None # X input method # TODO close _x_im when display connection closed. 
_enable_xsync = False _screens = None # Cache of get_screens() def __init__(self, name=None, x_screen=None): if x_screen is None: x_screen = 0 self._display = xlib.XOpenDisplay(name) if not self._display: raise NoSuchDisplayException('Cannot connect to "%s"' % name) screen_count = xlib.XScreenCount(self._display) if x_screen >= screen_count: raise NoSuchDisplayException( 'Display "%s" has no screen %d' % (name, x_screen)) super(XlibDisplay, self).__init__() self.name = name self.x_screen = x_screen self._fileno = xlib.XConnectionNumber(self._display) self._window_map = {} # Initialise XSync if _have_xsync: event_base = c_int() error_base = c_int() if xsync.XSyncQueryExtension(self._display, byref(event_base), byref(error_base)): major_version = c_int() minor_version = c_int() if xsync.XSyncInitialize(self._display, byref(major_version), byref(minor_version)): self._enable_xsync = True # Add to event loop select list. Assume we never go away. app.platform_event_loop._select_devices.add(self) def get_screens(self): if self._screens: return self._screens if _have_xinerama and xinerama.XineramaIsActive(self._display): number = c_int() infos = xinerama.XineramaQueryScreens(self._display, byref(number)) infos = cast(infos, POINTER(xinerama.XineramaScreenInfo * number.value)).contents self._screens = [] using_xinerama = number.value > 1 for info in infos: self._screens.append(XlibScreen(self, info.x_org, info.y_org, info.width, info.height, using_xinerama)) xlib.XFree(infos) else: # No xinerama screen_info = xlib.XScreenOfDisplay(self._display, self.x_screen) screen = XlibScreen(self, 0, 0, screen_info.contents.width, screen_info.contents.height, False) self._screens = [screen] return self._screens # XlibSelectDevice interface def fileno(self): return self._fileno def select(self): e = xlib.XEvent() while xlib.XPending(self._display): xlib.XNextEvent(self._display, e) # Key events are filtered by the xlib window event # handler so they get a shot at the prefiltered event. 
if e.xany.type not in (xlib.KeyPress, xlib.KeyRelease): if xlib.XFilterEvent(e, e.xany.window): continue try: dispatch = self._window_map[e.xany.window] except KeyError: continue dispatch(e) def poll(self): return xlib.XPending(self._display) class XlibScreen(Screen): _initial_mode = None def __init__(self, display, x, y, width, height, xinerama): super(XlibScreen, self).__init__(display, x, y, width, height) self._xinerama = xinerama def get_matching_configs(self, template): canvas = XlibCanvas(self.display, None) configs = template.match(canvas) # XXX deprecate for config in configs: config.screen = self return configs def get_modes(self): if not _have_xf86vmode: return [] if self._xinerama: # If Xinerama/TwinView is enabled, xf86vidmode's modelines # correspond to metamodes, which don't distinguish one screen from # another. XRandR (broken) or NV (complicated) extensions needed. return [] count = ctypes.c_int() info_array = \ ctypes.POINTER(ctypes.POINTER(xf86vmode.XF86VidModeModeInfo))() xf86vmode.XF86VidModeGetAllModeLines( self.display._display, self.display.x_screen, count, info_array) # Copy modes out of list and free list modes = [] for i in range(count.value): info = xf86vmode.XF86VidModeModeInfo() ctypes.memmove(ctypes.byref(info), ctypes.byref(info_array.contents[i]), ctypes.sizeof(info)) modes.append(XlibScreenMode(self, info)) if info.privsize: xlib.XFree(info.private) xlib.XFree(info_array) return modes def get_mode(self): modes = self.get_modes() if modes: return modes[0] return None def set_mode(self, mode): assert mode.screen is self if not self._initial_mode: self._initial_mode = self.get_mode() xlib_vidmoderestore.set_initial_mode(self._initial_mode) xf86vmode.XF86VidModeSwitchToMode(self.display._display, self.display.x_screen, mode.info) xlib.XFlush(self.display._display) xf86vmode.XF86VidModeSetViewPort(self.display._display, self.display.x_screen, 0, 0) xlib.XFlush(self.display._display) self.width = mode.width self.height = mode.height def 
restore_mode(self): if self._initial_mode: self.set_mode(self._initial_mode) def __repr__(self): return 'XlibScreen(display=%r, x=%d, y=%d, ' \ 'width=%d, height=%d, xinerama=%d)' % \ (self.display, self.x, self.y, self.width, self.height, self._xinerama) class XlibScreenMode(ScreenMode): def __init__(self, screen, info): super(XlibScreenMode, self).__init__(screen) self.info = info self.width = info.hdisplay self.height = info.vdisplay self.rate = info.dotclock self.depth = None class XlibCanvas(Canvas): def __init__(self, display, x_window): super(XlibCanvas, self).__init__(display) self.x_window = x_window
bsd-3-clause
M4rtinK/pyside-bb10
tests/QtCore/qrect_test.py
6
3363
#!/usr/bin/python
'''Test cases for QRect'''

import unittest

from PySide.QtCore import QPoint, QRect, QRectF


class RectConstructor(unittest.TestCase):
    '''Test case for QRect constructors'''

    def testDefault(self):
        # QRect() with no arguments must be a null rectangle.
        obj = QRect()
        # assertTrue replaces the deprecated assert_ alias.
        self.assertTrue(obj.isNull())

    def testConstructorQPoint(self):
        # Two rects built from the same corner points must compare equal.
        topLeft = QPoint(3, 0)
        bottomRight = QPoint(0, 3)

        rect1 = QRect(topLeft, bottomRight)
        rect2 = QRect(topLeft, bottomRight)

        self.assertEqual(rect1, rect2)


class RectOperator(unittest.TestCase):
    '''Test case for QRect operators'''

    def testEqual(self):
        '''QRect == QRect
        Note: operator == must be working as it's the main check
        for correctness'''
        rect1 = QRect()
        rect2 = QRect()
        self.assertEqual(rect1, rect2)

        rect1 = QRect(0, 4, 100, 300)
        rect2 = QRect(0, 4, 100, 300)
        self.assertEqual(rect1, rect2)

    def testNullRectIntersection(self):
        # QRect & QRect for null rects
        rect1 = QRect()
        rect2 = QRect()
        rect3 = rect1 & rect2
        self.assertEqual(rect3, rect1)
        self.assertEqual(rect3, rect2)

    def testNoIntersect(self):
        '''QRect & QRect for non-intersecting QRects
        Non-intersecting QRects return a 'null' QRect for operator &'''
        rect1 = QRect(10, 10, 5, 5)
        rect2 = QRect(20, 20, 5, 5)
        rect3 = rect1 & rect2
        self.assertEqual(rect3, QRect())

    def testIntersectPartial(self):
        # QRect & QRect for partial intersections
        rect1 = QRect(10, 10, 10, 10)
        rect2 = QRect(15, 15, 10, 10)
        rect3 = rect1 & rect2
        self.assertEqual(rect3, QRect(15, 15, 5, 5))

    def testIntersectEnclosed(self):
        # QRect & QRect for a qrect inside another
        # (method name fixed from the original typo "testIntersetEnclosed")
        rect1 = QRect(10, 10, 20, 20)
        rect2 = QRect(15, 15, 5, 5)
        rect3 = rect1 & rect2
        self.assertEqual(rect3, rect2)

    def testNullRectIntersectBounding(self):
        # QRect | QRect for null rects
        # BUG FIX: this test used operator & and merely duplicated
        # testNullRectIntersection; the bounding union is operator |.
        rect1 = QRect()
        rect2 = QRect()
        rect3 = rect1 | rect2
        self.assertEqual(rect3, rect1)
        self.assertEqual(rect3, rect2)

    def testNoIntersectBounding(self):
        '''QRect | QRect for non-intersecting QRects
        Non-intersecting QRects return a greater QRect for operator |'''
        rect1 = QRect(10, 10, 5, 5)
        rect2 = QRect(20, 20, 5, 5)
        rect3 = rect1 | rect2
        self.assertEqual(rect3, QRect(10, 10, 15, 15))

    def testBoundingPartialIntersection(self):
        # QRect | QRect for partial intersections
        rect1 = QRect(10, 10, 10, 10)
        rect2 = QRect(15, 15, 10, 10)
        rect3 = rect1 | rect2
        self.assertEqual(rect3, QRect(10, 10, 15, 15))

    def testBoundingEnclosed(self):
        # QRect | QRect for a qrect inside another
        rect1 = QRect(10, 10, 20, 20)
        rect2 = QRect(15, 15, 5, 5)
        rect3 = rect1 | rect2
        self.assertEqual(rect3, rect1)

    def testGetCoordsAndRect(self):
        # getRect() returns (x, y, w, h); getCoords() returns corner
        # coordinates, which differ between QRect (integer, inclusive
        # right/bottom) and QRectF.
        rect1 = QRect(1, 2, 3, 4)
        self.assertEqual(rect1.getRect(), (1, 2, 3, 4))
        self.assertEqual(rect1.getCoords(), (1, 2, 3, 5))

        rect1 = QRectF(1, 2, 3, 4)
        self.assertEqual(rect1.getRect(), (1, 2, 3, 4))
        self.assertEqual(rect1.getCoords(), (1, 2, 4, 6))


if __name__ == '__main__':
    unittest.main()
lgpl-2.1
lmregus/Portfolio
python/design_patterns/env/lib/python3.7/site-packages/flake8/main/options.py
3
8657
"""Contains the logic for all of the default options for Flake8.""" from flake8 import defaults from flake8.main import debug from flake8.main import vcs def register_default_options(option_manager): """Register the default options on our OptionManager. The default options include: - ``-v``/``--verbose`` - ``-q``/``--quiet`` - ``--count`` - ``--diff`` - ``--exclude`` - ``--filename`` - ``--format`` - ``--hang-closing`` - ``--ignore`` - ``--extend-ignore`` - ``--per-file-ignores`` - ``--max-line-length`` - ``--max-doc-length`` - ``--select`` - ``--disable-noqa`` - ``--show-source`` - ``--statistics`` - ``--enable-extensions`` - ``--exit-zero`` - ``-j``/``--jobs`` - ``--output-file`` - ``--tee`` - ``--append-config`` - ``--config`` - ``--isolated`` - ``--benchmark`` - ``--bug-report`` """ add_option = option_manager.add_option # pep8 options add_option( "-v", "--verbose", default=0, action="count", parse_from_config=True, help="Print more information about what is happening in flake8." " This option is repeatable and will increase verbosity each " "time it is repeated.", ) add_option( "-q", "--quiet", default=0, action="count", parse_from_config=True, help="Report only file names, or nothing. This option is repeatable.", ) add_option( "--count", action="store_true", parse_from_config=True, help="Print total number of errors and warnings to standard error and" " set the exit code to 1 if total is not empty.", ) add_option( "--diff", action="store_true", help="Report changes only within line number ranges in the unified " "diff provided on standard in by the user.", ) add_option( "--exclude", metavar="patterns", default=",".join(defaults.EXCLUDE), comma_separated_list=True, parse_from_config=True, normalize_paths=True, help="Comma-separated list of files or directories to exclude." 
" (Default: %default)", ) add_option( "--filename", metavar="patterns", default="*.py", parse_from_config=True, comma_separated_list=True, help="Only check for filenames matching the patterns in this comma-" "separated list. (Default: %default)", ) add_option( "--stdin-display-name", default="stdin", help="The name used when reporting errors from code passed via stdin." " This is useful for editors piping the file contents to flake8." " (Default: %default)", ) # TODO(sigmavirus24): Figure out --first/--repeat # NOTE(sigmavirus24): We can't use choices for this option since users can # freely provide a format string and that will break if we restrict their # choices. add_option( "--format", metavar="format", default="default", parse_from_config=True, help="Format errors according to the chosen formatter.", ) add_option( "--hang-closing", action="store_true", parse_from_config=True, help="Hang closing bracket instead of matching indentation of opening" " bracket's line.", ) add_option( "--ignore", metavar="errors", default=",".join(defaults.IGNORE), parse_from_config=True, comma_separated_list=True, help="Comma-separated list of errors and warnings to ignore (or skip)." " For example, ``--ignore=E4,E51,W234``. (Default: %default)", ) add_option( "--extend-ignore", metavar="errors", default="", parse_from_config=True, comma_separated_list=True, help="Comma-separated list of errors and warnings to add to the list" " of ignored ones. For example, ``--extend-ignore=E4,E51,W234``.", ) add_option( "--per-file-ignores", default="", parse_from_config=True, help="A pairing of filenames and violation codes that defines which " "violations to ignore in a particular file. 
The filenames can be " "specified in a manner similar to the ``--exclude`` option and the " "violations work similarly to the ``--ignore`` and ``--select`` " "options.", ) add_option( "--max-line-length", type="int", metavar="n", default=defaults.MAX_LINE_LENGTH, parse_from_config=True, help="Maximum allowed line length for the entirety of this run. " "(Default: %default)", ) add_option( "--max-doc-length", type="int", metavar="n", default=None, parse_from_config=True, help="Maximum allowed doc line length for the entirety of this run. " "(Default: %default)", ) add_option( "--select", metavar="errors", default=",".join(defaults.SELECT), parse_from_config=True, comma_separated_list=True, help="Comma-separated list of errors and warnings to enable." " For example, ``--select=E4,E51,W234``. (Default: %default)", ) add_option( "--disable-noqa", default=False, parse_from_config=True, action="store_true", help='Disable the effect of "# noqa". This will report errors on ' 'lines with "# noqa" at the end.', ) # TODO(sigmavirus24): Decide what to do about --show-pep8 add_option( "--show-source", action="store_true", parse_from_config=True, help="Show the source generate each error or warning.", ) add_option( "--statistics", action="store_true", parse_from_config=True, help="Count errors and warnings.", ) # Flake8 options add_option( "--enable-extensions", default="", parse_from_config=True, comma_separated_list=True, type="string", help="Enable plugins and extensions that are otherwise disabled " "by default", ) add_option( "--exit-zero", action="store_true", help='Exit with status code "0" even if there are errors.', ) add_option( "--install-hook", action="callback", type="choice", choices=vcs.choices(), callback=vcs.install, help="Install a hook that is run prior to a commit for the supported " "version control system.", ) add_option( "-j", "--jobs", type="string", default="auto", parse_from_config=True, help="Number of subprocesses to use to run checks in parallel. 
" 'This is ignored on Windows. The default, "auto", will ' "auto-detect the number of processors available to use." " (Default: %default)", ) add_option( "--output-file", default=None, type="string", parse_from_config=True, # callback=callbacks.redirect_stdout, help="Redirect report to a file.", ) add_option( "--tee", default=False, parse_from_config=True, action="store_true", help="Write to stdout and output-file.", ) # Config file options add_option( "--append-config", action="append", help="Provide extra config files to parse in addition to the files " "found by Flake8 by default. These files are the last ones read " "and so they take the highest precedence when multiple files " "provide the same option.", ) add_option( "--config", default=None, help="Path to the config file that will be the authoritative config " "source. This will cause Flake8 to ignore all other " "configuration files.", ) add_option( "--isolated", default=False, action="store_true", help="Ignore all configuration files.", ) # Benchmarking add_option( "--benchmark", default=False, action="store_true", help="Print benchmark information about this run of Flake8", ) # Debugging add_option( "--bug-report", action="callback", callback=debug.print_information, callback_kwargs={"option_manager": option_manager}, help="Print information necessary when preparing a bug report", )
mit
rehsack/linux-curie
arch/ia64/scripts/unwcheck.py
13143
1714
#!/usr/bin/python # # Usage: unwcheck.py FILE # # This script checks the unwind info of each function in file FILE # and verifies that the sum of the region-lengths matches the total # length of the function. # # Based on a shell/awk script originally written by Harish Patil, # which was converted to Perl by Matthew Chapman, which was converted # to Python by David Mosberger. # import os import re import sys if len(sys.argv) != 2: print "Usage: %s FILE" % sys.argv[0] sys.exit(2) readelf = os.getenv("READELF", "readelf") start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]") rlen_pattern = re.compile(".*rlen=([0-9]+)") def check_func (func, slots, rlen_sum): if slots != rlen_sum: global num_errors num_errors += 1 if not func: func = "[%#x-%#x]" % (start, end) print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum) return num_funcs = 0 num_errors = 0 func = False slots = 0 rlen_sum = 0 for line in os.popen("%s -u %s" % (readelf, sys.argv[1])): m = start_pattern.match(line) if m: check_func(func, slots, rlen_sum) func = m.group(1) start = long(m.group(2), 16) end = long(m.group(3), 16) slots = 3 * (end - start) / 16 rlen_sum = 0L num_funcs += 1 else: m = rlen_pattern.match(line) if m: rlen_sum += long(m.group(1)) check_func(func, slots, rlen_sum) if num_errors == 0: print "No errors detected in %u functions." % num_funcs else: if num_errors > 1: err="errors" else: err="error" print "%u %s detected in %u functions." % (num_errors, err, num_funcs) sys.exit(1)
gpl-2.0
infobloxopen/infoblox-netmri
infoblox_netmri/api/broker/v2_3_0/spm_devices_default_grid_broker.py
4
5890
from ..broker import Broker class SpmDevicesDefaultGridBroker(Broker): controller = "spm_devices_default_grids" def index(self, **kwargs): """Lists the available spm devices default grids. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` today :param starttime: The data returned will represent the spm devices default grids with this date and time as lower boundary. If omitted, the result will indicate the most recently collected data. :type starttime: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` tomorrow :param endtime: The data returned will represent the spm devices default grids with this date and time as upper boundary. If omitted, the result will indicate the most recently collected data. :type endtime: DateTime | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. :type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` id :param sort: The data field(s) to use for sorting the output. Default is id. 
Valid values are id, VirtualNetworkID, DeviceID, DeviceName, DeviceIPDotted, DeviceIPNumeric, Network, DeviceDNSName, TotalPorts, UsedTrunkPorts, UsedAccessPorts, FreePorts, FreePortsPercentage, AvailPorts, AvailPortsPercentage, PoEPorts, DeviceSysLocation, DeviceVendor, DeviceModel, PhysicalSerialNum, DeviceSysDescr, DeviceType, DeviceAssurance, FirstSeen, LastSeen, LastChanged, PollDuration, SwitchingInd. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each SpmDevicesDefaultGrid. Valid values are id, VirtualNetworkID, DeviceID, DeviceName, DeviceIPDotted, DeviceIPNumeric, Network, DeviceDNSName, TotalPorts, UsedTrunkPorts, UsedAccessPorts, FreePorts, FreePortsPercentage, AvailPorts, AvailPortsPercentage, PoEPorts, DeviceSysLocation, DeviceVendor, DeviceModel, PhysicalSerialNum, DeviceSysDescr, DeviceType, DeviceAssurance, FirstSeen, LastSeen, LastChanged, PollDuration, SwitchingInd. If empty or omitted, all attributes will be returned. :type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. :type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. :type goto_value: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` False :param refresh_ind: If true, the grid will be regenerated, rather than using any available cached grid data. 
:type refresh_ind: Boolean | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` False :param async_ind: If true and if grid data is not yet available, it will return immediately with 202 status. User should retry again later. :type async_ind: Boolean **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return spm_devices_default_grids: An array of the SpmDevicesDefaultGrid objects that match the specified input criteria. :rtype spm_devices_default_grids: Array of SpmDevicesDefaultGrid | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return summary: A summary of calculation of selected columns, when applicable. :rtype summary: Hash """ return self.api_list_request(self._get_method_fullname("index"), kwargs)
apache-2.0
rahul67/hue
desktop/core/ext-py/Django-1.6.10/tests/generic_inline_admin/tests.py
49
17643
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.conf import settings from django.contrib import admin from django.contrib.admin.sites import AdminSite from django.contrib.contenttypes.generic import ( generic_inlineformset_factory, GenericTabularInline) from django.forms.formsets import DEFAULT_MAX_NUM from django.forms.models import ModelForm from django.test import TestCase from django.test.utils import override_settings # local test models from .admin import MediaInline, MediaPermanentInline from .models import (Episode, EpisodeExtra, EpisodeMaxNum, Media, EpisodePermanent, Category) @override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',)) class GenericAdminViewTest(TestCase): urls = "generic_inline_admin.urls" fixtures = ['users.xml'] def setUp(self): # set TEMPLATE_DEBUG to True to ensure {% include %} will raise # exceptions since that is how inlines are rendered and #9498 will # bubble up if it is an issue. self.original_template_debug = settings.TEMPLATE_DEBUG settings.TEMPLATE_DEBUG = True self.client.login(username='super', password='secret') # Can't load content via a fixture (since the GenericForeignKey # relies on content type IDs, which will vary depending on what # other tests have been run), thus we do it here. e = Episode.objects.create(name='This Week in Django') self.episode_pk = e.pk m = Media(content_object=e, url='http://example.com/podcast.mp3') m.save() self.mp3_media_pk = m.pk m = Media(content_object=e, url='http://example.com/logo.png') m.save() self.png_media_pk = m.pk def tearDown(self): self.client.logout() settings.TEMPLATE_DEBUG = self.original_template_debug def testBasicAddGet(self): """ A smoke test to ensure GET on the add_view works. 
""" response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episode/add/') self.assertEqual(response.status_code, 200) def testBasicEditGet(self): """ A smoke test to ensure GET on the change_view works. """ response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episode/%d/' % self.episode_pk) self.assertEqual(response.status_code, 200) def testBasicAddPost(self): """ A smoke test to ensure POST on add_view works. """ post_data = { "name": "This Week in Django", # inline data "generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "1", "generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "0", "generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0", } response = self.client.post('/generic_inline_admin/admin/generic_inline_admin/episode/add/', post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def testBasicEditPost(self): """ A smoke test to ensure POST on edit_view works. """ post_data = { "name": "This Week in Django", # inline data "generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "3", "generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "2", "generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0", "generic_inline_admin-media-content_type-object_id-0-id": "%d" % self.mp3_media_pk, "generic_inline_admin-media-content_type-object_id-0-url": "http://example.com/podcast.mp3", "generic_inline_admin-media-content_type-object_id-1-id": "%d" % self.png_media_pk, "generic_inline_admin-media-content_type-object_id-1-url": "http://example.com/logo.png", "generic_inline_admin-media-content_type-object_id-2-id": "", "generic_inline_admin-media-content_type-object_id-2-url": "", } url = '/generic_inline_admin/admin/generic_inline_admin/episode/%d/' % self.episode_pk response = self.client.post(url, post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def testGenericInlineFormset(self): EpisodeMediaFormSet = 
generic_inlineformset_factory(Media, can_delete=False, exclude=['description', 'keywords'], extra=3) e = Episode.objects.get(name='This Week in Django') # Works with no queryset formset = EpisodeMediaFormSet(instance=e) self.assertEqual(len(formset.forms), 5) self.assertHTMLEqual(formset.forms[0].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="url" name="generic_inline_admin-media-content_type-object_id-0-url" value="http://example.com/podcast.mp3" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>' % self.mp3_media_pk) self.assertHTMLEqual(formset.forms[1].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="url" name="generic_inline_admin-media-content_type-object_id-1-url" value="http://example.com/logo.png" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>' % self.png_media_pk) self.assertHTMLEqual(formset.forms[2].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-2-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-2-url" type="url" name="generic_inline_admin-media-content_type-object_id-2-url" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-2-id" id="id_generic_inline_admin-media-content_type-object_id-2-id" /></p>') # A queryset can be used to alter display ordering formset = EpisodeMediaFormSet(instance=e, queryset=Media.objects.order_by('url')) self.assertEqual(len(formset.forms), 5) self.assertHTMLEqual(formset.forms[0].as_p(), '<p><label 
for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="url" name="generic_inline_admin-media-content_type-object_id-0-url" value="http://example.com/logo.png" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>' % self.png_media_pk) self.assertHTMLEqual(formset.forms[1].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="url" name="generic_inline_admin-media-content_type-object_id-1-url" value="http://example.com/podcast.mp3" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>' % self.mp3_media_pk) self.assertHTMLEqual(formset.forms[2].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-2-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-2-url" type="url" name="generic_inline_admin-media-content_type-object_id-2-url" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-2-id" id="id_generic_inline_admin-media-content_type-object_id-2-id" /></p>') # Works with a queryset that omits items formset = EpisodeMediaFormSet(instance=e, queryset=Media.objects.filter(url__endswith=".png")) self.assertEqual(len(formset.forms), 4) self.assertHTMLEqual(formset.forms[0].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="url" name="generic_inline_admin-media-content_type-object_id-0-url" value="http://example.com/logo.png" maxlength="200" /><input type="hidden" 
name="generic_inline_admin-media-content_type-object_id-0-id" value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>' % self.png_media_pk) self.assertHTMLEqual(formset.forms[1].as_p(), '<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="url" name="generic_inline_admin-media-content_type-object_id-1-url" maxlength="200" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>') def testGenericInlineFormsetFactory(self): # Regression test for #10522. inline_formset = generic_inlineformset_factory(Media, exclude=('url',)) # Regression test for #12340. e = Episode.objects.get(name='This Week in Django') formset = inline_formset(instance=e) self.assertTrue(formset.get_queryset().ordered) @override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',)) class GenericInlineAdminParametersTest(TestCase): urls = "generic_inline_admin.urls" fixtures = ['users.xml'] def setUp(self): self.client.login(username='super', password='secret') def tearDown(self): self.client.logout() def _create_object(self, model): """ Create a model with an attached Media object via GFK. We can't load content via a fixture (since the GenericForeignKey relies on content type IDs, which will vary depending on what other tests have been run), thus we do it here. """ e = model.objects.create(name='This Week in Django') Media.objects.create(content_object=e, url='http://example.com/podcast.mp3') return e def testNoParam(self): """ With one initial form, extra (default) at 3, there should be 4 forms. 
""" e = self._create_object(Episode) response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episode/%s/' % e.pk) formset = response.context['inline_admin_formsets'][0].formset self.assertEqual(formset.total_form_count(), 4) self.assertEqual(formset.initial_form_count(), 1) def testExtraParam(self): """ With extra=0, there should be one form. """ e = self._create_object(EpisodeExtra) response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episodeextra/%s/' % e.pk) formset = response.context['inline_admin_formsets'][0].formset self.assertEqual(formset.total_form_count(), 1) self.assertEqual(formset.initial_form_count(), 1) def testMaxNumParam(self): """ With extra=5 and max_num=2, there should be only 2 forms. """ e = self._create_object(EpisodeMaxNum) inline_form_data = '<input type="hidden" name="generic_inline_admin-media-content_type-object_id-TOTAL_FORMS" value="2" id="id_generic_inline_admin-media-content_type-object_id-TOTAL_FORMS" /><input type="hidden" name="generic_inline_admin-media-content_type-object_id-INITIAL_FORMS" value="1" id="id_generic_inline_admin-media-content_type-object_id-INITIAL_FORMS" />' response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/episodemaxnum/%s/' % e.pk) formset = response.context['inline_admin_formsets'][0].formset self.assertEqual(formset.total_form_count(), 2) self.assertEqual(formset.initial_form_count(), 1) @override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',)) class GenericInlineAdminWithUniqueTogetherTest(TestCase): urls = "generic_inline_admin.urls" fixtures = ['users.xml'] def setUp(self): self.client.login(username='super', password='secret') def tearDown(self): self.client.logout() def testAdd(self): category_id = Category.objects.create(name='male').pk post_data = { "name": "John Doe", # inline data "generic_inline_admin-phonenumber-content_type-object_id-TOTAL_FORMS": "1", 
"generic_inline_admin-phonenumber-content_type-object_id-INITIAL_FORMS": "0", "generic_inline_admin-phonenumber-content_type-object_id-MAX_NUM_FORMS": "0", "generic_inline_admin-phonenumber-content_type-object_id-0-id": "", "generic_inline_admin-phonenumber-content_type-object_id-0-phone_number": "555-555-5555", "generic_inline_admin-phonenumber-content_type-object_id-0-category": "%s" % category_id, } response = self.client.get('/generic_inline_admin/admin/generic_inline_admin/contact/add/') self.assertEqual(response.status_code, 200) response = self.client.post('/generic_inline_admin/admin/generic_inline_admin/contact/add/', post_data) self.assertEqual(response.status_code, 302) # redirect somewhere class NoInlineDeletionTest(TestCase): urls = "generic_inline_admin.urls" def test_no_deletion(self): fake_site = object() inline = MediaPermanentInline(EpisodePermanent, fake_site) fake_request = object() formset = inline.get_formset(fake_request) self.assertFalse(formset.can_delete) class MockRequest(object): pass class MockSuperUser(object): def has_perm(self, perm): return True request = MockRequest() request.user = MockSuperUser() class GenericInlineModelAdminTest(TestCase): urls = "generic_inline_admin.urls" def setUp(self): self.site = AdminSite() def test_get_formset_kwargs(self): media_inline = MediaInline(Media, AdminSite()) # Create a formset with default arguments formset = media_inline.get_formset(request) self.assertEqual(formset.max_num, DEFAULT_MAX_NUM) self.assertEqual(formset.can_order, False) # Create a formset with custom keyword arguments formset = media_inline.get_formset(request, max_num=100, can_order=True) self.assertEqual(formset.max_num, 100) self.assertEqual(formset.can_order, True) def test_custom_form_meta_exclude_with_readonly(self): """ Ensure that the custom ModelForm's `Meta.exclude` is respected when used in conjunction with `GenericInlineModelAdmin.readonly_fields` and when no `ModelAdmin.exclude` is defined. 
""" class MediaForm(ModelForm): class Meta: model = Media exclude = ['url'] class MediaInline(GenericTabularInline): readonly_fields = ['description'] form = MediaForm model = Media class EpisodeAdmin(admin.ModelAdmin): inlines = [ MediaInline ] ma = EpisodeAdmin(Episode, self.site) self.assertEqual( list(list(ma.get_formsets(request))[0]().forms[0].fields), ['keywords', 'id', 'DELETE']) def test_custom_form_meta_exclude(self): """ Ensure that the custom ModelForm's `Meta.exclude` is respected by `GenericInlineModelAdmin.get_formset`, and overridden if `ModelAdmin.exclude` or `GenericInlineModelAdmin.exclude` are defined. Refs #15907. """ # First with `GenericInlineModelAdmin` ----------------- class MediaForm(ModelForm): class Meta: model = Media exclude = ['url'] class MediaInline(GenericTabularInline): exclude = ['description'] form = MediaForm model = Media class EpisodeAdmin(admin.ModelAdmin): inlines = [ MediaInline ] ma = EpisodeAdmin(Episode, self.site) self.assertEqual( list(list(ma.get_formsets(request))[0]().forms[0].fields), ['url', 'keywords', 'id', 'DELETE']) # Then, only with `ModelForm` ----------------- class MediaInline(GenericTabularInline): form = MediaForm model = Media class EpisodeAdmin(admin.ModelAdmin): inlines = [ MediaInline ] ma = EpisodeAdmin(Episode, self.site) self.assertEqual( list(list(ma.get_formsets(request))[0]().forms[0].fields), ['description', 'keywords', 'id', 'DELETE']) def test_get_fieldsets(self): # Test that get_fieldsets is called when figuring out form fields. # Refs #18681. class MediaForm(ModelForm): class Meta: model = Media fields = '__all__' class MediaInline(GenericTabularInline): form = MediaForm model = Media can_delete = False def get_fieldsets(self, request, obj=None): return [(None, {'fields': ['url', 'description']})] ma = MediaInline(Media, self.site) form = ma.get_formset(None).form self.assertEqual(form._meta.fields, ['url', 'description'])
apache-2.0
seaotterman/tensorflow
tensorflow/python/kernel_tests/atrous_convolution_test.py
58
8382
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for atrous convolution functionality in tensorflow.ops.nn.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.ops import gradient_checker from tensorflow.python.ops import nn_ops import tensorflow.python.ops.nn_grad # pylint: disable=unused-import from tensorflow.python.platform import test def upsample_filters(filters, rate): """Upsamples the filters by a factor of rate along the spatial dimensions. Args: filters: spatial_shape + [in_channels, out_channels] Original filters. rate: A list of len(spatial_shape) positive ints, specifying the upsampling rate. Returns: filters_up: output_spatial_shape + [in_channels, out_channels]. Upsampled filters with output_spatial_shape[i] = (spatial_shape[i] - 1) * rate[i] + 1 containing (rate[i] - 1) zeros between consecutive filter values along spatial dimension i. 
""" num_spatial_dims = len(rate) spatial_shape = np.array(filters.shape[:num_spatial_dims]) output_spatial_shape = (spatial_shape - 1) * rate + 1 output = np.zeros( tuple(output_spatial_shape) + tuple(filters.shape[-2:]), filters.dtype) output[tuple(np.s_[::rate[i]] for i in range(num_spatial_dims))] = filters return output class AtrousConvolutionTest(test.TestCase): def _test_atrous_convolution(self, input_shape, filter_shape, dilation_rate, **kwargs): filters = np.arange( np.prod(filter_shape), dtype=np.float32).reshape(filter_shape) filters_upsampled = upsample_filters(filters, dilation_rate) x = np.arange(np.prod(input_shape), dtype=np.float32).reshape(input_shape) y1 = nn_ops.convolution( input=x, filter=filters, dilation_rate=dilation_rate, **kwargs) y2 = nn_ops.convolution(input=x, filter=filters_upsampled, **kwargs) self.assertAllClose(y1.eval(), y2.eval(), rtol=1e-2, atol=1e-2) def testAtrousConvolution2D(self): with self.test_session(): for padding in ["SAME", "VALID"]: for height, width in [[9, 9], [9, 10]]: for kernel_height, kernel_width in [[1, 1], [2, 2], [2, 3]]: for dilation_rate in [[1, 1], [3, 2], [2, 1]]: self._test_atrous_convolution( input_shape=[2, height, width, 2], filter_shape=[kernel_height, kernel_width, 2, 2], padding=padding, dilation_rate=dilation_rate) def testAtrousConvolution3D(self): with self.test_session(): for padding in ["SAME", "VALID"]: for depth, height, width in [[9, 9, 10], [9, 10, 9]]: for kernel_depth, kernel_height, kernel_width in [[3, 3, 3], [3, 2, 2], [2, 1, 3]]: for dilation_rate in [[1, 1, 1], [3, 3, 3], [3, 2, 3], [3, 1, 2]]: self._test_atrous_convolution( input_shape=[2, depth, height, width, 2], filter_shape=[ kernel_depth, kernel_height, kernel_width, 2, 2 ], padding=padding, dilation_rate=dilation_rate) def testAtrousConvolution1D(self): with self.test_session(): for padding in ["SAME", "VALID"]: for width in [9, 10]: for kernel_width in range(1, 4): for rate in range(1, 4): self._test_atrous_convolution( 
input_shape=[2, width, 2], filter_shape=[kernel_width, 2, 2], padding=padding, dilation_rate=[rate]) def testAtrousConvolutionNC(self): if test.is_gpu_available(cuda_only=True): # "NCW" and "NCHW" formats are currently supported only on CUDA. with self.test_session(use_gpu=True): for padding in ["SAME", "VALID"]: self._test_atrous_convolution( input_shape=[2, 2, 9], padding=padding, filter_shape=[3, 2, 2], dilation_rate=[2], data_format="NCW") self._test_atrous_convolution( input_shape=[2, 2, 9, 5], padding=padding, filter_shape=[3, 3, 2, 2], dilation_rate=[2, 1], data_format="NCHW") def testAtrousSequence(self): """Tests optimization of sequence of atrous convolutions. See the documentation of with_space_to_batch. """ with self.test_session(): for padding in ["SAME", "VALID"]: for height in range(15, 17): for width in range(15, 17): x_shape = [3, height, width, 2] x = np.random.random_sample(x_shape).astype(np.float32) kernel_sizes = [1, 3] if padding == "SAME" else range(1, 3) for kernel in kernel_sizes: f_shape = [kernel, kernel, 2, 2] f1 = 1e-2 * np.random.random_sample(f_shape).astype(np.float32) f2 = 1e-2 * np.random.random_sample(f_shape).astype(np.float32) def combined_op(converted_input, num_spatial_dims, padding_arg): # pylint: disable=unused-argument result = nn_ops.convolution( input=converted_input, filter=f1, padding=padding) # pylint: disable=cell-var-from-loop result = nn_ops.convolution( input=result, filter=f2, padding=padding) # pylint: disable=cell-var-from-loop return result for rate_height in range(2, 4): for rate_width in range(2, 4): dilation_rate = [rate_height, rate_width] y1 = nn_ops.convolution( input=x, filter=f1, padding=padding, dilation_rate=dilation_rate) y1 = nn_ops.convolution( input=y1, filter=f2, padding=padding, dilation_rate=dilation_rate) y2 = nn_ops.with_space_to_batch( input=x, dilation_rate=dilation_rate, op=combined_op, padding="VALID") self.assertAllClose( y1.eval(), y2.eval(), rtol=1e-2, atol=1e-2) def 
_test_gradient(self, x_shape, f_shape, dilation_rate, padding): x_val = np.random.random_sample(x_shape).astype(np.float32) f_val = np.random.random_sample(f_shape).astype(np.float32) x = constant_op.constant(x_val, name="x", dtype=dtypes.float32) f = constant_op.constant(f_val, name="f", dtype=dtypes.float32) output = nn_ops.convolution( input=x, filter=f, dilation_rate=dilation_rate, padding=padding) y_shape = output.get_shape().as_list() err = gradient_checker.compute_gradient_error([x, f], [x_shape, f_shape], output, y_shape) err_tolerance = 1e-3 self.assertLess(err, err_tolerance) def testGradient(self): with self.test_session(): for padding in ["SAME", "VALID"]: for rate_width in range(1, 3): for rate_height in range(1, 3): self._test_gradient( x_shape=[2, 5, 6, 2], f_shape=[3, 3, 2, 2], dilation_rate=[rate_height, rate_width], padding=padding) if __name__ == "__main__": test.main()
apache-2.0
fangxingli/hue
desktop/core/ext-py/Django-1.6.10/tests/model_fields/tests.py
40
24900
from __future__ import absolute_import, unicode_literals import datetime from decimal import Decimal from django import test from django import forms from django.core.exceptions import ValidationError from django.db import connection, models, IntegrityError from django.db.models.fields import ( AutoField, BigIntegerField, BinaryField, BooleanField, CharField, CommaSeparatedIntegerField, DateField, DateTimeField, DecimalField, EmailField, FilePathField, FloatField, IntegerField, IPAddressField, GenericIPAddressField, NullBooleanField, PositiveIntegerField, PositiveSmallIntegerField, SlugField, SmallIntegerField, TextField, TimeField, URLField) from django.db.models.fields.files import FileField, ImageField from django.utils import six from django.utils import unittest from .models import (Foo, Bar, Whiz, BigD, BigS, Image, BigInt, Post, NullBooleanModel, BooleanModel, DataModel, Document, RenamedField, DateTimeModel, VerboseNameField, FksToBooleans) class BasicFieldTests(test.TestCase): def test_show_hidden_initial(self): """ Regression test for #12913. Make sure fields with choices respect show_hidden_initial as a kwarg to models.Field.formfield() """ choices = [(0, 0), (1, 1)] model_field = models.Field(choices=choices) form_field = model_field.formfield(show_hidden_initial=True) self.assertTrue(form_field.show_hidden_initial) form_field = model_field.formfield(show_hidden_initial=False) self.assertFalse(form_field.show_hidden_initial) def test_nullbooleanfield_blank(self): """ Regression test for #13071: NullBooleanField should not throw a validation error when given a value of None. 
""" nullboolean = NullBooleanModel(nbfield=None) try: nullboolean.full_clean() except ValidationError as e: self.fail("NullBooleanField failed validation with value of None: %s" % e.messages) def test_field_repr(self): """ Regression test for #5931: __repr__ of a field also displays its name """ f = Foo._meta.get_field('a') self.assertEqual(repr(f), '<django.db.models.fields.CharField: a>') f = models.fields.CharField() self.assertEqual(repr(f), '<django.db.models.fields.CharField>') def test_field_name(self): """ Regression test for #14695: explicitly defined field name overwritten by model's attribute name. """ instance = RenamedField() self.assertTrue(hasattr(instance, 'get_fieldname_display')) self.assertFalse(hasattr(instance, 'get_modelname_display')) def test_field_verbose_name(self): m = VerboseNameField for i in range(1, 23): self.assertEqual(m._meta.get_field('field%d' % i).verbose_name, 'verbose field%d' % i) self.assertEqual(m._meta.get_field('id').verbose_name, 'verbose pk') def test_choices_form_class(self): """Can supply a custom choices form class. 
Regression for #20999.""" choices = [('a', 'a')] field = models.CharField(choices=choices) klass = forms.TypedMultipleChoiceField self.assertIsInstance(field.formfield(choices_form_class=klass), klass) class DecimalFieldTests(test.TestCase): def test_to_python(self): f = models.DecimalField(max_digits=4, decimal_places=2) self.assertEqual(f.to_python(3), Decimal("3")) self.assertEqual(f.to_python("3.14"), Decimal("3.14")) self.assertRaises(ValidationError, f.to_python, "abc") def test_default(self): f = models.DecimalField(default=Decimal("0.00")) self.assertEqual(f.get_default(), Decimal("0.00")) def test_format(self): f = models.DecimalField(max_digits=5, decimal_places=1) self.assertEqual(f._format(f.to_python(2)), '2.0') self.assertEqual(f._format(f.to_python('2.6')), '2.6') self.assertEqual(f._format(None), None) def test_get_db_prep_lookup(self): from django.db import connection f = models.DecimalField(max_digits=5, decimal_places=1) self.assertEqual(f.get_db_prep_lookup('exact', None, connection=connection), [None]) def test_filter_with_strings(self): """ We should be able to filter decimal fields using strings (#8023) """ Foo.objects.create(id=1, a='abc', d=Decimal("12.34")) self.assertEqual(list(Foo.objects.filter(d='1.23')), []) def test_save_without_float_conversion(self): """ Ensure decimals don't go through a corrupting float conversion during save (#5079). """ bd = BigD(d="12.9") bd.save() bd = BigD.objects.get(pk=bd.pk) self.assertEqual(bd.d, Decimal("12.9")) def test_lookup_really_big_value(self): """ Ensure that really big values can be used in a filter statement, even with older Python versions. """ # This should not crash. That counts as a win for our purposes. 
Foo.objects.filter(d__gte=100000000000) class ForeignKeyTests(test.TestCase): def test_callable_default(self): """Test the use of a lazy callable for ForeignKey.default""" a = Foo.objects.create(id=1, a='abc', d=Decimal("12.34")) b = Bar.objects.create(b="bcd") self.assertEqual(b.a, a) class DateTimeFieldTests(unittest.TestCase): def test_datetimefield_to_python_usecs(self): """DateTimeField.to_python should support usecs""" f = models.DateTimeField() self.assertEqual(f.to_python('2001-01-02 03:04:05.000006'), datetime.datetime(2001, 1, 2, 3, 4, 5, 6)) self.assertEqual(f.to_python('2001-01-02 03:04:05.999999'), datetime.datetime(2001, 1, 2, 3, 4, 5, 999999)) def test_timefield_to_python_usecs(self): """TimeField.to_python should support usecs""" f = models.TimeField() self.assertEqual(f.to_python('01:02:03.000004'), datetime.time(1, 2, 3, 4)) self.assertEqual(f.to_python('01:02:03.999999'), datetime.time(1, 2, 3, 999999)) @test.skipUnlessDBFeature("supports_microsecond_precision") def test_datetimes_save_completely(self): dat = datetime.date(2014, 3, 12) datetim = datetime.datetime(2014, 3, 12, 21, 22, 23, 240000) tim = datetime.time(21, 22, 23, 240000) DateTimeModel.objects.create(d=dat, dt=datetim, t=tim) obj = DateTimeModel.objects.first() self.assertTrue(obj) self.assertEqual(obj.d, dat) self.assertEqual(obj.dt, datetim) self.assertEqual(obj.t, tim) class BooleanFieldTests(unittest.TestCase): def _test_get_db_prep_lookup(self, f): from django.db import connection self.assertEqual(f.get_db_prep_lookup('exact', True, connection=connection), [True]) self.assertEqual(f.get_db_prep_lookup('exact', '1', connection=connection), [True]) self.assertEqual(f.get_db_prep_lookup('exact', 1, connection=connection), [True]) self.assertEqual(f.get_db_prep_lookup('exact', False, connection=connection), [False]) self.assertEqual(f.get_db_prep_lookup('exact', '0', connection=connection), [False]) self.assertEqual(f.get_db_prep_lookup('exact', 0, connection=connection), [False]) 
self.assertEqual(f.get_db_prep_lookup('exact', None, connection=connection), [None]) def _test_to_python(self, f): self.assertTrue(f.to_python(1) is True) self.assertTrue(f.to_python(0) is False) def test_booleanfield_get_db_prep_lookup(self): self._test_get_db_prep_lookup(models.BooleanField()) def test_nullbooleanfield_get_db_prep_lookup(self): self._test_get_db_prep_lookup(models.NullBooleanField()) def test_booleanfield_to_python(self): self._test_to_python(models.BooleanField()) def test_nullbooleanfield_to_python(self): self._test_to_python(models.NullBooleanField()) def test_booleanfield_choices_blank(self): """ Test that BooleanField with choices and defaults doesn't generate a formfield with the blank option (#9640, #10549). """ choices = [(1, 'Si'), (2, 'No')] f = models.BooleanField(choices=choices, default=1, null=True) self.assertEqual(f.formfield().choices, [('', '---------')] + choices) f = models.BooleanField(choices=choices, default=1, null=False) self.assertEqual(f.formfield().choices, choices) def test_return_type(self): b = BooleanModel() b.bfield = True b.save() b2 = BooleanModel.objects.get(pk=b.pk) self.assertIsInstance(b2.bfield, bool) self.assertEqual(b2.bfield, True) b3 = BooleanModel() b3.bfield = False b3.save() b4 = BooleanModel.objects.get(pk=b3.pk) self.assertIsInstance(b4.bfield, bool) self.assertEqual(b4.bfield, False) b = NullBooleanModel() b.nbfield = True b.save() b2 = NullBooleanModel.objects.get(pk=b.pk) self.assertIsInstance(b2.nbfield, bool) self.assertEqual(b2.nbfield, True) b3 = NullBooleanModel() b3.nbfield = False b3.save() b4 = NullBooleanModel.objects.get(pk=b3.pk) self.assertIsInstance(b4.nbfield, bool) self.assertEqual(b4.nbfield, False) # http://code.djangoproject.com/ticket/13293 # Verify that when an extra clause exists, the boolean # conversions are applied with an offset b5 = BooleanModel.objects.all().extra( select={'string_col': 'string'})[0] self.assertFalse(isinstance(b5.pk, bool)) def 
test_select_related(self): """ Test type of boolean fields when retrieved via select_related() (MySQL, #15040) """ bmt = BooleanModel.objects.create(bfield=True) bmf = BooleanModel.objects.create(bfield=False) nbmt = NullBooleanModel.objects.create(nbfield=True) nbmf = NullBooleanModel.objects.create(nbfield=False) m1 = FksToBooleans.objects.create(bf=bmt, nbf=nbmt) m2 = FksToBooleans.objects.create(bf=bmf, nbf=nbmf) # Test select_related('fk_field_name') ma = FksToBooleans.objects.select_related('bf').get(pk=m1.id) # verify types -- should't be 0/1 self.assertIsInstance(ma.bf.bfield, bool) self.assertIsInstance(ma.nbf.nbfield, bool) # verify values self.assertEqual(ma.bf.bfield, True) self.assertEqual(ma.nbf.nbfield, True) # Test select_related() mb = FksToBooleans.objects.select_related().get(pk=m1.id) mc = FksToBooleans.objects.select_related().get(pk=m2.id) # verify types -- shouldn't be 0/1 self.assertIsInstance(mb.bf.bfield, bool) self.assertIsInstance(mb.nbf.nbfield, bool) self.assertIsInstance(mc.bf.bfield, bool) self.assertIsInstance(mc.nbf.nbfield, bool) # verify values self.assertEqual(mb.bf.bfield, True) self.assertEqual(mb.nbf.nbfield, True) self.assertEqual(mc.bf.bfield, False) self.assertEqual(mc.nbf.nbfield, False) def test_null_default(self): """ Check that a BooleanField defaults to None -- which isn't a valid value (#15124). """ # Patch the boolean field's default value. We give it a default # value when defining the model to satisfy the check tests # #20895. 
boolean_field = BooleanModel._meta.get_field('bfield') self.assertTrue(boolean_field.has_default()) old_default = boolean_field.default try: boolean_field.default = models.NOT_PROVIDED # check patch was succcessful self.assertFalse(boolean_field.has_default()) b = BooleanModel() self.assertIsNone(b.bfield) with self.assertRaises(IntegrityError): b.save() finally: boolean_field.default = old_default nb = NullBooleanModel() self.assertIsNone(nb.nbfield) nb.save() # no error class ChoicesTests(test.TestCase): def test_choices_and_field_display(self): """ Check that get_choices and get_flatchoices interact with get_FIELD_display to return the expected values (#7913). """ self.assertEqual(Whiz(c=1).get_c_display(), 'First') # A nested value self.assertEqual(Whiz(c=0).get_c_display(), 'Other') # A top level value self.assertEqual(Whiz(c=9).get_c_display(), 9) # Invalid value self.assertEqual(Whiz(c=None).get_c_display(), None) # Blank value self.assertEqual(Whiz(c='').get_c_display(), '') # Empty value class SlugFieldTests(test.TestCase): def test_slugfield_max_length(self): """ Make sure SlugField honors max_length (#9706) """ bs = BigS.objects.create(s = 'slug'*50) bs = BigS.objects.get(pk=bs.pk) self.assertEqual(bs.s, 'slug'*50) class ValidationTest(test.TestCase): def test_charfield_raises_error_on_empty_string(self): f = models.CharField() self.assertRaises(ValidationError, f.clean, "", None) def test_charfield_cleans_empty_string_when_blank_true(self): f = models.CharField(blank=True) self.assertEqual('', f.clean('', None)) def test_integerfield_cleans_valid_string(self): f = models.IntegerField() self.assertEqual(2, f.clean('2', None)) def test_integerfield_raises_error_on_invalid_intput(self): f = models.IntegerField() self.assertRaises(ValidationError, f.clean, "a", None) def test_charfield_with_choices_cleans_valid_choice(self): f = models.CharField(max_length=1, choices=[('a','A'), ('b','B')]) self.assertEqual('a', f.clean('a', None)) def 
test_charfield_with_choices_raises_error_on_invalid_choice(self): f = models.CharField(choices=[('a','A'), ('b','B')]) self.assertRaises(ValidationError, f.clean, "not a", None) def test_choices_validation_supports_named_groups(self): f = models.IntegerField(choices=(('group',((10,'A'),(20,'B'))),(30,'C'))) self.assertEqual(10, f.clean(10, None)) def test_nullable_integerfield_raises_error_with_blank_false(self): f = models.IntegerField(null=True, blank=False) self.assertRaises(ValidationError, f.clean, None, None) def test_nullable_integerfield_cleans_none_on_null_and_blank_true(self): f = models.IntegerField(null=True, blank=True) self.assertEqual(None, f.clean(None, None)) def test_integerfield_raises_error_on_empty_input(self): f = models.IntegerField(null=False) self.assertRaises(ValidationError, f.clean, None, None) self.assertRaises(ValidationError, f.clean, '', None) def test_integerfield_validates_zero_against_choices(self): f = models.IntegerField(choices=((1, 1),)) self.assertRaises(ValidationError, f.clean, '0', None) def test_charfield_raises_error_on_empty_input(self): f = models.CharField(null=False) self.assertRaises(ValidationError, f.clean, None, None) def test_datefield_cleans_date(self): f = models.DateField() self.assertEqual(datetime.date(2008, 10, 10), f.clean('2008-10-10', None)) def test_boolean_field_doesnt_accept_empty_input(self): f = models.BooleanField() self.assertRaises(ValidationError, f.clean, None, None) class BigIntegerFieldTests(test.TestCase): def test_limits(self): # Ensure that values that are right at the limits can be saved # and then retrieved without corruption. 
maxval = 9223372036854775807 minval = -maxval - 1 BigInt.objects.create(value=maxval) qs = BigInt.objects.filter(value__gte=maxval) self.assertEqual(qs.count(), 1) self.assertEqual(qs[0].value, maxval) BigInt.objects.create(value=minval) qs = BigInt.objects.filter(value__lte=minval) self.assertEqual(qs.count(), 1) self.assertEqual(qs[0].value, minval) def test_types(self): b = BigInt(value = 0) self.assertIsInstance(b.value, six.integer_types) b.save() self.assertIsInstance(b.value, six.integer_types) b = BigInt.objects.all()[0] self.assertIsInstance(b.value, six.integer_types) def test_coercing(self): BigInt.objects.create(value ='10') b = BigInt.objects.get(value = '10') self.assertEqual(b.value, 10) class TypeCoercionTests(test.TestCase): """ Test that database lookups can accept the wrong types and convert them with no error: especially on Postgres 8.3+ which does not do automatic casting at the DB level. See #10015. """ def test_lookup_integer_in_charfield(self): self.assertEqual(Post.objects.filter(title=9).count(), 0) def test_lookup_integer_in_textfield(self): self.assertEqual(Post.objects.filter(body=24).count(), 0) class FileFieldTests(unittest.TestCase): def test_clearable(self): """ Test that FileField.save_form_data will clear its instance attribute value if passed False. """ d = Document(myfile='something.txt') self.assertEqual(d.myfile, 'something.txt') field = d._meta.get_field('myfile') field.save_form_data(d, False) self.assertEqual(d.myfile, '') def test_unchanged(self): """ Test that FileField.save_form_data considers None to mean "no change" rather than "clear". """ d = Document(myfile='something.txt') self.assertEqual(d.myfile, 'something.txt') field = d._meta.get_field('myfile') field.save_form_data(d, None) self.assertEqual(d.myfile, 'something.txt') def test_changed(self): """ Test that FileField.save_form_data, if passed a truthy value, updates its instance attribute. 
""" d = Document(myfile='something.txt') self.assertEqual(d.myfile, 'something.txt') field = d._meta.get_field('myfile') field.save_form_data(d, 'else.txt') self.assertEqual(d.myfile, 'else.txt') def test_delete_when_file_unset(self): """ Calling delete on an unset FileField should not call the file deletion process, but fail silently (#20660). """ d = Document() try: d.myfile.delete() except OSError: self.fail("Deleting an unset FileField should not raise OSError.") class BinaryFieldTests(test.TestCase): binary_data = b'\x00\x46\xFE' def test_set_and_retrieve(self): data_set = (self.binary_data, six.memoryview(self.binary_data)) for bdata in data_set: dm = DataModel(data=bdata) dm.save() dm = DataModel.objects.get(pk=dm.pk) self.assertEqual(bytes(dm.data), bytes(bdata)) # Resave (=update) dm.save() dm = DataModel.objects.get(pk=dm.pk) self.assertEqual(bytes(dm.data), bytes(bdata)) # Test default value self.assertEqual(bytes(dm.short_data), b'\x08') if connection.vendor == 'mysql' and six.PY3: # Existing MySQL DB-API drivers fail on binary data. test_set_and_retrieve = unittest.expectedFailure(test_set_and_retrieve) def test_max_length(self): dm = DataModel(short_data=self.binary_data*4) self.assertRaises(ValidationError, dm.full_clean) class GenericIPAddressFieldTests(test.TestCase): def test_genericipaddressfield_formfield_protocol(self): """ Test that GenericIPAddressField with a specified protocol does not generate a formfield with no specified protocol. See #20740. 
""" model_field = models.GenericIPAddressField(protocol='IPv4') form_field = model_field.formfield() self.assertRaises(ValidationError, form_field.clean, '::1') model_field = models.GenericIPAddressField(protocol='IPv6') form_field = model_field.formfield() self.assertRaises(ValidationError, form_field.clean, '127.0.0.1') class PrepValueTest(test.TestCase): def test_AutoField(self): self.assertIsInstance(AutoField(primary_key=True).get_prep_value(1), int) @unittest.skipIf(six.PY3, "Python 3 has no `long` type.") def test_BigIntegerField(self): self.assertIsInstance(BigIntegerField().get_prep_value(long(9999999999999999999)), long) def test_BinaryField(self): self.assertIsInstance(BinaryField().get_prep_value(b''), bytes) def test_BooleanField(self): self.assertIsInstance(BooleanField().get_prep_value(True), bool) def test_CharField(self): self.assertIsInstance(CharField().get_prep_value(''), six.text_type) self.assertIsInstance(CharField().get_prep_value(0), six.text_type) def test_CommaSeparatedIntegerField(self): self.assertIsInstance(CommaSeparatedIntegerField().get_prep_value('1,2'), six.text_type) self.assertIsInstance(CommaSeparatedIntegerField().get_prep_value(0), six.text_type) def test_DateField(self): self.assertIsInstance(DateField().get_prep_value(datetime.date.today()), datetime.date) def test_DateTimeField(self): self.assertIsInstance(DateTimeField().get_prep_value(datetime.datetime.now()), datetime.datetime) def test_DecimalField(self): self.assertIsInstance(DecimalField().get_prep_value(Decimal('1.2')), Decimal) def test_EmailField(self): self.assertIsInstance(EmailField().get_prep_value('mailbox@domain.com'), six.text_type) def test_FileField(self): self.assertIsInstance(FileField().get_prep_value('filename.ext'), six.text_type) self.assertIsInstance(FileField().get_prep_value(0), six.text_type) def test_FilePathField(self): self.assertIsInstance(FilePathField().get_prep_value('tests.py'), six.text_type) 
self.assertIsInstance(FilePathField().get_prep_value(0), six.text_type) def test_FloatField(self): self.assertIsInstance(FloatField().get_prep_value(1.2), float) def test_ImageField(self): self.assertIsInstance(ImageField().get_prep_value('filename.ext'), six.text_type) def test_IntegerField(self): self.assertIsInstance(IntegerField().get_prep_value(1), int) def test_IPAddressField(self): self.assertIsInstance(IPAddressField().get_prep_value('127.0.0.1'), six.text_type) self.assertIsInstance(IPAddressField().get_prep_value(0), six.text_type) def test_GenericIPAddressField(self): self.assertIsInstance(GenericIPAddressField().get_prep_value('127.0.0.1'), six.text_type) self.assertIsInstance(GenericIPAddressField().get_prep_value(0), six.text_type) def test_NullBooleanField(self): self.assertIsInstance(NullBooleanField().get_prep_value(True), bool) def test_PositiveIntegerField(self): self.assertIsInstance(PositiveIntegerField().get_prep_value(1), int) def test_PositiveSmallIntegerField(self): self.assertIsInstance(PositiveSmallIntegerField().get_prep_value(1), int) def test_SlugField(self): self.assertIsInstance(SlugField().get_prep_value('slug'), six.text_type) self.assertIsInstance(SlugField().get_prep_value(0), six.text_type) def test_SmallIntegerField(self): self.assertIsInstance(SmallIntegerField().get_prep_value(1), int) def test_TextField(self): self.assertIsInstance(TextField().get_prep_value('Abc'), six.text_type) self.assertIsInstance(TextField().get_prep_value(0), six.text_type) def test_TimeField(self): self.assertIsInstance( TimeField().get_prep_value(datetime.datetime.now().time()), datetime.time) def test_URLField(self): self.assertIsInstance(URLField().get_prep_value('http://domain.com'), six.text_type) class CustomFieldTests(unittest.TestCase): def test_14786(self): """ Regression test for #14786 -- Test that field values are not prepared twice in get_db_prep_lookup(). 
""" prepare_count = [0] class NoopField(models.TextField): def get_prep_value(self, value): prepare_count[0] += 1 return super(NoopField, self).get_prep_value(value) field = NoopField() field.get_db_prep_lookup('exact', 'TEST', connection=connection, prepared=False) self.assertEqual(prepare_count[0], 1)
apache-2.0
mottosso/mindbender-setup
bin/windows/python36/Lib/distutils/versionpredicate.py
55
5133
"""Module for parsing and testing package version predicate strings. """ import re import distutils.version import operator re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", re.ASCII) # (package) (rest) re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$") # (comp) (version) def splitUp(pred): """Parse a single version comparison. Return (comparison string, StrictVersion) """ res = re_splitComparison.match(pred) if not res: raise ValueError("bad package restriction syntax: %r" % pred) comp, verStr = res.groups() return (comp, distutils.version.StrictVersion(verStr)) compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq, ">": operator.gt, ">=": operator.ge, "!=": operator.ne} class VersionPredicate: """Parse and test package version predicates. >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)') The `name` attribute provides the full dotted name that is given:: >>> v.name 'pyepat.abc' The str() of a `VersionPredicate` provides a normalized human-readable version of the expression:: >>> print(v) pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3) The `satisfied_by()` method can be used to determine with a given version number is included in the set described by the version restrictions:: >>> v.satisfied_by('1.1') True >>> v.satisfied_by('1.4') True >>> v.satisfied_by('1.0') False >>> v.satisfied_by('4444.4') False >>> v.satisfied_by('1555.1b3') False `VersionPredicate` is flexible in accepting extra whitespace:: >>> v = VersionPredicate(' pat( == 0.1 ) ') >>> v.name 'pat' >>> v.satisfied_by('0.1') True >>> v.satisfied_by('0.2') False If any version numbers passed in do not conform to the restrictions of `StrictVersion`, a `ValueError` is raised:: >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)') Traceback (most recent call last): ... 
ValueError: invalid version number '1.2zb3' It the module or package name given does not conform to what's allowed as a legal module or package name, `ValueError` is raised:: >>> v = VersionPredicate('foo-bar') Traceback (most recent call last): ... ValueError: expected parenthesized list: '-bar' >>> v = VersionPredicate('foo bar (12.21)') Traceback (most recent call last): ... ValueError: expected parenthesized list: 'bar (12.21)' """ def __init__(self, versionPredicateStr): """Parse a version predicate string. """ # Fields: # name: package name # pred: list of (comparison string, StrictVersion) versionPredicateStr = versionPredicateStr.strip() if not versionPredicateStr: raise ValueError("empty package restriction") match = re_validPackage.match(versionPredicateStr) if not match: raise ValueError("bad package name in %r" % versionPredicateStr) self.name, paren = match.groups() paren = paren.strip() if paren: match = re_paren.match(paren) if not match: raise ValueError("expected parenthesized list: %r" % paren) str = match.groups()[0] self.pred = [splitUp(aPred) for aPred in str.split(",")] if not self.pred: raise ValueError("empty parenthesized list in %r" % versionPredicateStr) else: self.pred = [] def __str__(self): if self.pred: seq = [cond + " " + str(ver) for cond, ver in self.pred] return self.name + " (" + ", ".join(seq) + ")" else: return self.name def satisfied_by(self, version): """True if version is compatible with all the predicates in self. The parameter version must be acceptable to the StrictVersion constructor. It may be either a string or StrictVersion. """ for cond, ver in self.pred: if not compmap[cond](version, ver): return False return True _provision_rx = None def split_provision(value): """Return the name and optional version number of a provision. The version number, if given, will be returned as a `StrictVersion` instance, otherwise it will be `None`. 
>>> split_provision('mypkg') ('mypkg', None) >>> split_provision(' mypkg( 1.2 ) ') ('mypkg', StrictVersion ('1.2')) """ global _provision_rx if _provision_rx is None: _provision_rx = re.compile( r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", re.ASCII) value = value.strip() m = _provision_rx.match(value) if not m: raise ValueError("illegal provides specification: %r" % value) ver = m.group(2) or None if ver: ver = distutils.version.StrictVersion(ver) return m.group(1), ver
mit
autopulated/mbed
workspace_tools/export/emblocks.py
51
2509
""" mbed SDK Copyright (c) 2014 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from exporters import Exporter from os.path import splitext, basename from workspace_tools.targets import TARGETS # filter all the GCC_ARM targets out of the target list gccTargets = [] for t in TARGETS: if 'GCC_ARM' in t.supported_toolchains: gccTargets.append(t.name) class IntermediateFile(Exporter): NAME = 'EmBlocks' TOOLCHAIN = 'GCC_ARM' # we support all GCC targets (is handled on IDE side) TARGETS = gccTargets FILE_TYPES = { 'headers': 'h', 'c_sources': 'c', 's_sources': 'a', 'cpp_sources': 'cpp' } def generate(self): self.resources.win_to_unix() source_files = [] for r_type, n in IntermediateFile.FILE_TYPES.iteritems(): for file in getattr(self.resources, r_type): source_files.append({ 'name': file, 'type': n }) libraries = [] for lib in self.resources.libraries: l, _ = splitext(basename(lib)) libraries.append(l[3:]) if self.resources.linker_script is None: self.resources.linker_script = '' ctx = { 'name': self.program_name, 'target': self.target, 'toolchain': self.toolchain.name, 'source_files': source_files, 'include_paths': self.resources.inc_dirs, 'script_file': self.resources.linker_script, 'library_paths': self.resources.lib_dirs, 'libraries': libraries, 'symbols': self.get_symbols(), 'object_files': self.resources.objects, 'sys_libs': self.toolchain.sys_libs, 'cc_org': self.toolchain.cc[1:], 'ld_org': self.toolchain.ld[1:], 'cppc_org': self.toolchain.cppc[1:] } # EmBlocks intermediate file 
template self.gen_file('emblocks.eix.tmpl', ctx, '%s.eix' % self.program_name)
apache-2.0
thnee/ansible
lib/ansible/modules/network/aci/mso_schema_template_filter_entry.py
13
12049
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2018, Dag Wieers (@dagwieers) <dag@wieers.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: mso_schema_template_filter_entry short_description: Manage filter entries in schema templates description: - Manage filter entries in schema templates on Cisco ACI Multi-Site. author: - Dag Wieers (@dagwieers) version_added: '2.8' options: schema: description: - The name of the schema. type: str required: yes template: description: - The name of the template. type: str required: yes filter: description: - The name of the filter to manage. type: str required: yes filter_display_name: description: - The name as displayed on the MSO web interface. type: str entry: description: - The filter entry name to manage. type: str aliases: [ name ] display_name: description: - The name as displayed on the MSO web interface. type: str aliases: [ entry_display_name ] description: description: - The description of this filer entry. type: str aliases: [ entry_description ] ethertype: description: - The ethernet type to use for this filter entry. type: str choices: [ arp, fcoe, ip, ipv4, ipv6, mac-security, mpls-unicast, trill, unspecified ] ip_protocol: description: - The IP protocol to use for this filter entry. type: str choices: [ eigrp, egp, icmp, icmpv6, igmp, igp, l2tp, ospfigp, pim, tcp, udp, unspecified ] tcp_session_rules: description: - A list of TCP session rules. type: list choices: [ acknowledgement, established, finish, synchronize, reset, unspecified ] source_from: description: - The source port range from. type: str source_to: description: - The source port range to. type: str destination_from: description: - The destination port range from. 
type: str destination_to: description: - The destination port range to. type: str arp_flag: description: - The ARP flag to use for this filter entry. type: str choices: [ reply, request, unspecified ] stateful: description: - Whether this filter entry is stateful. type: bool default: no fragments_only: description: - Whether this filter entry only matches fragments. type: bool default: no state: description: - Use C(present) or C(absent) for adding or removing. - Use C(query) for listing an object or multiple objects. type: str choices: [ absent, present, query ] default: present seealso: - module: mso_schema_template_contract_filter notes: - Due to restrictions of the MSO REST API this module creates filters when needed, and removes them when the last entry has been removed. extends_documentation_fragment: mso '''

EXAMPLES = r''' - name: Add a new filter entry mso_schema_template_filter_entry: host: mso_host username: admin password: SomeSecretPassword schema: Schema 1 template: Template 1 filter: Filter 1 state: present delegate_to: localhost - name: Remove a filter entry mso_schema_template_filter_entry: host: mso_host username: admin password: SomeSecretPassword schema: Schema 1 template: Template 1 filter: Filter 1 state: absent delegate_to: localhost - name: Query a specific filter entry mso_schema_template_filter_entry: host: mso_host username: admin password: SomeSecretPassword schema: Schema 1 template: Template 1 filter: Filter 1 state: query delegate_to: localhost register: query_result - name: Query all filter entries mso_schema_template_filter_entry: host: mso_host username: admin password: SomeSecretPassword schema: Schema 1 template: Template 1 state: query delegate_to: localhost register: query_result '''

RETURN = r''' '''

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.mso import MSOModule, mso_argument_spec, mso_reference_spec, issubset


def main():
    """Create, update, remove or query a filter entry on an MSO schema template.

    Resolves the requested schema/template/filter/entry against the live MSO
    configuration, then (outside check mode) issues a single PATCH request
    carrying JSON-patch style operations (add/remove/replace). Per the module
    notes above, the enclosing filter is created implicitly with its first
    entry and removed when its last entry is removed.
    """
    argument_spec = mso_argument_spec()
    argument_spec.update(
        schema=dict(type='str', required=True),
        template=dict(type='str', required=True),
        filter=dict(type='str', required=True),
        filter_display_name=dict(type='str'),
        entry=dict(type='str', aliases=['name']),  # This parameter is not required for querying all objects
        description=dict(type='str', aliases=['entry_description']),
        display_name=dict(type='str', aliases=['entry_display_name']),
        ethertype=dict(type='str', choices=['arp', 'fcoe', 'ip', 'ipv4', 'ipv6', 'mac-security', 'mpls-unicast', 'trill', 'unspecified']),
        ip_protocol=dict(type='str', choices=['eigrp', 'egp', 'icmp', 'icmpv6', 'igmp', 'igp', 'l2tp', 'ospfigp', 'pim', 'tcp', 'udp', 'unspecified']),
        tcp_session_rules=dict(type='list', choices=['acknowledgement', 'established', 'finish', 'synchronize', 'reset', 'unspecified']),
        source_from=dict(type='str'),
        source_to=dict(type='str'),
        destination_from=dict(type='str'),
        destination_to=dict(type='str'),
        arp_flag=dict(type='str', choices=['reply', 'request', 'unspecified']),
        stateful=dict(type='bool'),
        fragments_only=dict(type='bool'),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        # 'entry' must be named when adding or removing; only 'query' may
        # omit it (to list every entry of a filter).
        required_if=[
            ['state', 'absent', ['entry']],
            ['state', 'present', ['entry']],
        ],
    )

    # Bind module parameters to locals for readability below.
    schema = module.params.get('schema')
    template = module.params.get('template')
    filter_name = module.params.get('filter')
    filter_display_name = module.params.get('filter_display_name')
    entry = module.params.get('entry')
    display_name = module.params.get('display_name')
    description = module.params.get('description')
    ethertype = module.params.get('ethertype')
    ip_protocol = module.params.get('ip_protocol')
    tcp_session_rules = module.params.get('tcp_session_rules')
    source_from = module.params.get('source_from')
    source_to = module.params.get('source_to')
    destination_from = module.params.get('destination_from')
    destination_to = module.params.get('destination_to')
    arp_flag = module.params.get('arp_flag')
    stateful = module.params.get('stateful')
    fragments_only = module.params.get('fragments_only')
    state = module.params.get('state')

    mso = MSOModule(module)

    # Get schema
    schema_obj = mso.get_obj('schemas', displayName=schema)
    if not schema_obj:
        mso.fail_json(msg="Provided schema '{0}' does not exist".format(schema))
    schema_path = 'schemas/{id}'.format(**schema_obj)

    # Get template
    templates = [t.get('name') for t in schema_obj.get('templates')]
    if template not in templates:
        mso.fail_json(msg="Provided template '{template}' does not exist. Existing templates: {templates}".format(template=template, templates=', '.join(templates)))
    template_idx = templates.index(template)

    # Get filters
    # filter_idx / entry_idx stay None when the filter / entry does not yet
    # exist; mso.existing holds the current entry object (if any).
    mso.existing = {}
    filter_idx = None
    entry_idx = None
    filters = [f.get('name') for f in schema_obj.get('templates')[template_idx]['filters']]
    if filter_name in filters:
        filter_idx = filters.index(filter_name)

        entries = [f.get('name') for f in schema_obj.get('templates')[template_idx]['filters'][filter_idx]['entries']]
        if entry in entries:
            entry_idx = entries.index(entry)
            mso.existing = schema_obj.get('templates')[template_idx]['filters'][filter_idx]['entries'][entry_idx]

    if state == 'query':
        if entry is None:
            # No entry named: return every entry of the filter.
            if filter_idx is None:
                mso.fail_json(msg="Filter '{filter}' not found".format(filter=filter_name))
            mso.existing = schema_obj.get('templates')[template_idx]['filters'][filter_idx]['entries']
        elif not mso.existing:
            mso.fail_json(msg="Entry '{entry}' not found".format(entry=entry))
        mso.exit_json()

    # JSON-patch paths used by the add/remove/replace operations below.
    filters_path = '/templates/{0}/filters'.format(template)
    filter_path = '/templates/{0}/filters/{1}'.format(template, filter_name)
    entries_path = '/templates/{0}/filters/{1}/entries'.format(template, filter_name)
    entry_path = '/templates/{0}/filters/{1}/entries/{2}'.format(template, filter_name, entry)
    ops = []

    mso.previous = mso.existing
    if state == 'absent':
        mso.proposed = mso.sent = {}

        if filter_idx is None:
            # There was no filter to begin with
            pass
        elif entry_idx is None:
            # There was no entry to begin with
            pass
        elif len(entries) == 1:
            # There is only one entry, remove filter
            mso.existing = {}
            ops.append(dict(op='remove', path=filter_path))
        else:
            mso.existing = {}
            ops.append(dict(op='remove', path=entry_path))

    elif state == 'present':

        if not mso.existing:
            # Creating a new entry: fill in the API defaults for every
            # attribute the user did not set. Updates skip this, so only
            # user-provided values are sent.
            if display_name is None:
                display_name = entry
            if description is None:
                description = ''
            if ethertype is None:
                ethertype = 'unspecified'
            if ip_protocol is None:
                ip_protocol = 'unspecified'
            if tcp_session_rules is None:
                tcp_session_rules = ['unspecified']
            if source_from is None:
                source_from = 'unspecified'
            if source_to is None:
                source_to = 'unspecified'
            if destination_from is None:
                destination_from = 'unspecified'
            if destination_to is None:
                destination_to = 'unspecified'
            if arp_flag is None:
                arp_flag = 'unspecified'
            if stateful is None:
                stateful = False
            if fragments_only is None:
                fragments_only = False

        payload = dict(
            name=entry,
            displayName=display_name,
            description=description,
            etherType=ethertype,
            ipProtocol=ip_protocol,
            tcpSessionRules=tcp_session_rules,
            sourceFrom=source_from,
            sourceTo=source_to,
            destinationFrom=destination_from,
            destinationTo=destination_to,
            arpFlag=arp_flag,
            stateful=stateful,
            matchOnlyFragments=fragments_only,
        )

        mso.sanitize(payload, collate=True)

        if filter_idx is None:
            # Filter does not exist, so we have to create it
            if filter_display_name is None:
                filter_display_name = filter_name
            payload = dict(
                name=filter_name,
                displayName=filter_display_name,
                entries=[mso.sent],
            )
            ops.append(dict(op='add', path=filters_path + '/-', value=payload))

        elif entry_idx is None:
            # Entry does not exist, so we have to add it
            ops.append(dict(op='add', path=entries_path + '/-', value=mso.sent))

        else:
            # Entry exists, we have to update it
            for (key, value) in mso.sent.items():
                ops.append(dict(op='replace', path=entry_path + '/' + key, value=value))

        mso.existing = mso.proposed

    if not module.check_mode:
        mso.request(schema_path, method='PATCH', data=ops)

    mso.exit_json()


if __name__ == "__main__":
    main()
gpl-3.0
Comunitea/OCB
addons/product_extended/product_extended.py
185
5460
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2014 OpenERP S.A. (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields
from openerp.osv import osv


class product_template(osv.osv):
    # Extends product.template with BoM-driven cost (standard price) computation.
    _name = 'product.template'
    _inherit = 'product.template'

    def compute_price(self, cr, uid, product_ids, template_ids=False, recursive=False, test=False, real_time_accounting = False, context=None):
        ''' Compute (and normally write) the standard price of the given
        products or templates from their bill of materials.

        :param product_ids: product.product ids to process; if falsy,
            ``template_ids`` (product.template ids) are processed instead
        :param recursive: also compute the price of each component's own BoM
            first, bottom-up
        :param test: if truthy, do NOT write anything; return a dict mapping
            each processed id to its computed price (to inform the user about
            the changes that would be made)
        :param real_time_accounting: when the product uses real-time
            valuation, route the price change through the standard-price
            wizard instead of a direct write (see _calc_price)
        :return: dict of id -> price when ``test`` is truthy, else True
        '''
        testdict = {}
        # Choose which model the supplied ids belong to.
        if product_ids:
            ids = product_ids
            model = 'product.product'
        else:
            ids = template_ids
            model = 'product.template'
        for prod_id in ids:
            bom_obj = self.pool.get('mrp.bom')
            if model == 'product.product':
                bom_id = bom_obj._bom_find(cr, uid, product_id=prod_id, context=context)
            else:
                bom_id = bom_obj._bom_find(cr, uid, product_tmpl_id=prod_id, context=context)
            if bom_id:
                # In recursive mode, it will first compute the prices of child boms
                if recursive:
                    # Search the products that are components of this bom of prod_id
                    bom = bom_obj.browse(cr, uid, bom_id, context=context)
                    # Call compute_price on these subproducts
                    prod_set = set([x.product_id.id for x in bom.bom_line_ids])
                    res = self.compute_price(cr, uid, list(prod_set), recursive=recursive, test=test, real_time_accounting = real_time_accounting, context=context)
                    if test:
                        testdict.update(res)
                # Use calc price to calculate and put the price on the product of the BoM if necessary
                price = self._calc_price(cr, uid, bom_obj.browse(cr, uid, bom_id, context=context), test=test, real_time_accounting = real_time_accounting, context=context)
                if test:
                    testdict.update({prod_id : price})
        if test:
            return testdict
        else:
            return True

    def _calc_price(self, cr, uid, bom, test = False, real_time_accounting=False, context=None):
        '''Compute the cost of one BoM from its component lines and routing,
        and (unless ``test``) store it on the product template.

        :param bom: browse record of the mrp.bom to price
        :return: the computed price, expressed in the product's UoM
        '''
        if context is None:
            context={}
        price = 0
        uom_obj = self.pool.get("product.uom")
        tmpl_obj = self.pool.get('product.template')
        for sbom in bom.bom_line_ids:
            # NOTE(review): assumes product_efficiency is non-zero — a zero
            # value would raise ZeroDivisionError here; confirm upstream.
            my_qty = sbom.product_qty / sbom.product_efficiency
            if not sbom.attribute_value_ids:
                # No attribute_value_ids means the bom line is not variant specific
                price += uom_obj._compute_price(cr, uid, sbom.product_id.uom_id.id, sbom.product_id.standard_price, sbom.product_uom.id) * my_qty

        if bom.routing_id:
            # Add work-center costs: per-cycle cost plus hourly cost for the
            # total time (setup + teardown + cycles), scaled by efficiency.
            for wline in bom.routing_id.workcenter_lines:
                wc = wline.workcenter_id
                cycle = wline.cycle_nbr
                hour = (wc.time_start + wc.time_stop + cycle * wc.time_cycle) * (wc.time_efficiency or 1.0)
                price += wc.costs_cycle * cycle + wc.costs_hour * hour
            price = self.pool.get('product.uom')._compute_price(cr,uid,bom.product_uom.id, price, bom.product_id.uom_id.id)

        # Convert on product UoM quantities
        if price > 0:
            price = uom_obj._compute_price(cr, uid, bom.product_uom.id, price / bom.product_qty, bom.product_id.uom_id.id)
        product = tmpl_obj.browse(cr, uid, bom.product_tmpl_id.id, context=context)
        if not test:
            if (product.valuation != "real_time" or not real_time_accounting):
                # Plain write is enough when no real-time valuation entries
                # need to be generated.
                tmpl_obj.write(cr, uid, [product.id], {'standard_price' : price}, context=context)
            else:
                # Call wizard function here — the wizard creates the
                # accounting entries that a direct write would skip.
                wizard_obj = self.pool.get("stock.change.standard.price")
                ctx = context.copy()
                ctx.update({'active_id': product.id, 'active_model': 'product.template'})
                wiz_id = wizard_obj.create(cr, uid, {'new_price': price}, context=ctx)
                wizard_obj.change_price(cr, uid, [wiz_id], context=ctx)
        return price


class product_bom(osv.osv):
    # Expose the product template's standard price on the BoM for display.
    _inherit = 'mrp.bom'

    _columns = {
        'standard_price': fields.related('product_tmpl_id','standard_price',type="float",relation="product.product",string="Standard Price",store=False)
    }

product_bom()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
ryfeus/lambda-packs
Tensorflow_OpenCV_Nightly/source/tensorflow/contrib/learn/python/learn/learn_io/numpy_io.py
91
1572
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Methods to allow dict of numpy arrays."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.estimator.inputs.numpy_io import numpy_input_fn as core_numpy_input_fn


def numpy_input_fn(x,
                   y=None,
                   batch_size=128,
                   num_epochs=1,
                   shuffle=True,
                   queue_capacity=1000,
                   num_threads=1):
  """Contrib alias of the core `numpy_input_fn`.

  Behaves exactly like the core implementation, except that `shuffle`
  defaults to `True` here (the historical contrib default).
  """
  # Gather every argument once, then forward the whole set to the core
  # implementation in a single call.
  forwarded = dict(
      x=x,
      y=y,
      batch_size=batch_size,
      shuffle=shuffle,
      num_epochs=num_epochs,
      queue_capacity=queue_capacity,
      num_threads=num_threads)
  return core_numpy_input_fn(**forwarded)
mit
tdtrask/ansible
lib/ansible/modules/network/netscaler/netscaler_lb_vserver.py
101
72334
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2017 Citrix Systems # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: netscaler_lb_vserver short_description: Manage load balancing vserver configuration description: - Manage load balancing vserver configuration - This module is intended to run either on the ansible control node or a bastion (jumpserver) with access to the actual netscaler instance version_added: "2.4" author: George Nikolopoulos (@giorgos-nikolopoulos) options: name: description: - >- Name for the virtual server. Must begin with an ASCII alphanumeric or underscore C(_) character, and must contain only ASCII alphanumeric, underscore, hash C(#), period C(.), space C( ), colon C(:), at sign C(@), equal sign C(=), and hyphen C(-) characters. Can be changed after the virtual server is created. - "Minimum length = 1" servicetype: choices: - 'HTTP' - 'FTP' - 'TCP' - 'UDP' - 'SSL' - 'SSL_BRIDGE' - 'SSL_TCP' - 'DTLS' - 'NNTP' - 'DNS' - 'DHCPRA' - 'ANY' - 'SIP_UDP' - 'SIP_TCP' - 'SIP_SSL' - 'DNS_TCP' - 'RTSP' - 'PUSH' - 'SSL_PUSH' - 'RADIUS' - 'RDP' - 'MYSQL' - 'MSSQL' - 'DIAMETER' - 'SSL_DIAMETER' - 'TFTP' - 'ORACLE' - 'SMPP' - 'SYSLOGTCP' - 'SYSLOGUDP' - 'FIX' - 'SSL_FIX' description: - "Protocol used by the service (also called the service type)." ipv46: description: - "IPv4 or IPv6 address to assign to the virtual server." ippattern: description: - >- IP address pattern, in dotted decimal notation, for identifying packets to be accepted by the virtual server. The IP Mask parameter specifies which part of the destination IP address is matched against the pattern. Mutually exclusive with the IP Address parameter. 
- >- For example, if the IP pattern assigned to the virtual server is C(198.51.100.0) and the IP mask is C(255.255.240.0) (a forward mask), the first 20 bits in the destination IP addresses are matched with the first 20 bits in the pattern. The virtual server accepts requests with IP addresses that range from C(198.51.96.1) to C(198.51.111.254). You can also use a pattern such as C(0.0.2.2) and a mask such as C(0.0.255.255) (a reverse mask). - >- If a destination IP address matches more than one IP pattern, the pattern with the longest match is selected, and the associated virtual server processes the request. For example, if virtual servers C(vs1) and C(vs2) have the same IP pattern, C(0.0.100.128), but different IP masks of C(0.0.255.255) and C(0.0.224.255), a destination IP address of C(198.51.100.128) has the longest match with the IP pattern of vs1. If a destination IP address matches two or more virtual servers to the same extent, the request is processed by the virtual server whose port number matches the port number in the request. ipmask: description: - >- IP mask, in dotted decimal notation, for the IP Pattern parameter. Can have leading or trailing non-zero octets (for example, C(255.255.240.0) or C(0.0.255.255)). Accordingly, the mask specifies whether the first n bits or the last n bits of the destination IP address in a client request are to be matched with the corresponding bits in the IP pattern. The former is called a forward mask. The latter is called a reverse mask. port: description: - "Port number for the virtual server." - "Range C(1) - C(65535)" - "* in CLI is represented as C(65535) in NITRO API" range: description: - >- Number of IP addresses that the appliance must generate and assign to the virtual server. The virtual server then functions as a network virtual server, accepting traffic on any of the generated IP addresses. 
The IP addresses are generated automatically, as follows: - >- * For a range of n, the last octet of the address specified by the IP Address parameter increments n-1 times. - "* If the last octet exceeds 255, it rolls over to 0 and the third octet increments by 1." - >- Note: The Range parameter assigns multiple IP addresses to one virtual server. To generate an array of virtual servers, each of which owns only one IP address, use brackets in the IP Address and Name parameters to specify the range. For example: - "add lb vserver my_vserver[1-3] HTTP 192.0.2.[1-3] 80." - "Minimum value = C(1)" - "Maximum value = C(254)" persistencetype: choices: - 'SOURCEIP' - 'COOKIEINSERT' - 'SSLSESSION' - 'RULE' - 'URLPASSIVE' - 'CUSTOMSERVERID' - 'DESTIP' - 'SRCIPDESTIP' - 'CALLID' - 'RTSPSID' - 'DIAMETER' - 'FIXSESSION' - 'NONE' description: - "Type of persistence for the virtual server. Available settings function as follows:" - "* C(SOURCEIP) - Connections from the same client IP address belong to the same persistence session." - >- * C(COOKIEINSERT) - Connections that have the same HTTP Cookie, inserted by a Set-Cookie directive from a server, belong to the same persistence session. - "* C(SSLSESSION) - Connections that have the same SSL Session ID belong to the same persistence session." - >- * C(CUSTOMSERVERID) - Connections with the same server ID form part of the same session. For this persistence type, set the Server ID (CustomServerID) parameter for each service and configure the Rule parameter to identify the server ID in a request. - "* C(RULE) - All connections that match a user defined rule belong to the same persistence session." - >- * C(URLPASSIVE) - Requests that have the same server ID in the URL query belong to the same persistence session. The server ID is the hexadecimal representation of the IP address and port of the service to which the request must be forwarded. This persistence type requires a rule to identify the server ID in the request. 
- "* C(DESTIP) - Connections to the same destination IP address belong to the same persistence session." - >- * C(SRCIPDESTIP) - Connections that have the same source IP address and destination IP address belong to the same persistence session. - "* C(CALLID) - Connections that have the same CALL-ID SIP header belong to the same persistence session." - "* C(RTSPSID) - Connections that have the same RTSP Session ID belong to the same persistence session." - >- * FIXSESSION - Connections that have the same SenderCompID and TargetCompID values belong to the same persistence session. timeout: description: - "Time period for which a persistence session is in effect." - "Minimum value = C(0)" - "Maximum value = C(1440)" persistencebackup: choices: - 'SOURCEIP' - 'NONE' description: - >- Backup persistence type for the virtual server. Becomes operational if the primary persistence mechanism fails. backuppersistencetimeout: description: - "Time period for which backup persistence is in effect." - "Minimum value = C(2)" - "Maximum value = C(1440)" lbmethod: choices: - 'ROUNDROBIN' - 'LEASTCONNECTION' - 'LEASTRESPONSETIME' - 'URLHASH' - 'DOMAINHASH' - 'DESTINATIONIPHASH' - 'SOURCEIPHASH' - 'SRCIPDESTIPHASH' - 'LEASTBANDWIDTH' - 'LEASTPACKETS' - 'TOKEN' - 'SRCIPSRCPORTHASH' - 'LRTM' - 'CALLIDHASH' - 'CUSTOMLOAD' - 'LEASTREQUEST' - 'AUDITLOGHASH' - 'STATICPROXIMITY' description: - "Load balancing method. The available settings function as follows:" - >- * C(ROUNDROBIN) - Distribute requests in rotation, regardless of the load. Weights can be assigned to services to enforce weighted round robin distribution. - "* C(LEASTCONNECTION) (default) - Select the service with the fewest connections." - "* C(LEASTRESPONSETIME) - Select the service with the lowest average response time." - "* C(LEASTBANDWIDTH) - Select the service currently handling the least traffic." - "* C(LEASTPACKETS) - Select the service currently serving the lowest number of packets per second." 
- "* C(CUSTOMLOAD) - Base service selection on the SNMP metrics obtained by custom load monitors." - >- * C(LRTM) - Select the service with the lowest response time. Response times are learned through monitoring probes. This method also takes the number of active connections into account. - >- Also available are a number of hashing methods, in which the appliance extracts a predetermined portion of the request, creates a hash of the portion, and then checks whether any previous requests had the same hash value. If it finds a match, it forwards the request to the service that served those previous requests. Following are the hashing methods: - "* C(URLHASH) - Create a hash of the request URL (or part of the URL)." - >- * C(DOMAINHASH) - Create a hash of the domain name in the request (or part of the domain name). The domain name is taken from either the URL or the Host header. If the domain name appears in both locations, the URL is preferred. If the request does not contain a domain name, the load balancing method defaults to C(LEASTCONNECTION). - "* C(DESTINATIONIPHASH) - Create a hash of the destination IP address in the IP header." - "* C(SOURCEIPHASH) - Create a hash of the source IP address in the IP header." - >- * C(TOKEN) - Extract a token from the request, create a hash of the token, and then select the service to which any previous requests with the same token hash value were sent. - >- * C(SRCIPDESTIPHASH) - Create a hash of the string obtained by concatenating the source IP address and destination IP address in the IP header. - "* C(SRCIPSRCPORTHASH) - Create a hash of the source IP address and source port in the IP header." - "* C(CALLIDHASH) - Create a hash of the SIP Call-ID header." hashlength: description: - >- Number of bytes to consider for the hash value used in the URLHASH and DOMAINHASH load balancing methods. 
- "Minimum value = C(1)" - "Maximum value = C(4096)" netmask: description: - >- IPv4 subnet mask to apply to the destination IP address or source IP address when the load balancing method is C(DESTINATIONIPHASH) or C(SOURCEIPHASH). - "Minimum length = 1" v6netmasklen: description: - >- Number of bits to consider in an IPv6 destination or source IP address, for creating the hash that is required by the C(DESTINATIONIPHASH) and C(SOURCEIPHASH) load balancing methods. - "Minimum value = C(1)" - "Maximum value = C(128)" backuplbmethod: choices: - 'ROUNDROBIN' - 'LEASTCONNECTION' - 'LEASTRESPONSETIME' - 'SOURCEIPHASH' - 'LEASTBANDWIDTH' - 'LEASTPACKETS' - 'CUSTOMLOAD' description: - "Backup load balancing method. Becomes operational if the primary load balancing me" - "thod fails or cannot be used." - "Valid only if the primary method is based on static proximity." cookiename: description: - >- Use this parameter to specify the cookie name for C(COOKIE) peristence type. It specifies the name of cookie with a maximum of 32 characters. If not specified, cookie name is internally generated. listenpolicy: description: - >- Default syntax expression identifying traffic accepted by the virtual server. Can be either an expression (for example, C(CLIENT.IP.DST.IN_SUBNET(192.0.2.0/24)) or the name of a named expression. In the above example, the virtual server accepts all requests whose destination IP address is in the 192.0.2.0/24 subnet. listenpriority: description: - >- Integer specifying the priority of the listen policy. A higher number specifies a lower priority. If a request matches the listen policies of more than one virtual server the virtual server whose listen policy has the highest priority (the lowest priority number) accepts the request. - "Minimum value = C(0)" - "Maximum value = C(101)" resrule: description: - >- Default syntax expression specifying which part of a server's response to use for creating rule based persistence sessions (persistence type RULE). 
Can be either an expression or the name of a named expression. - "Example:" - "C(HTTP.RES.HEADER(\\"setcookie\\").VALUE(0).TYPECAST_NVLIST_T('=',';').VALUE(\\"server1\\"))." persistmask: description: - "Persistence mask for IP based persistence types, for IPv4 virtual servers." - "Minimum length = 1" v6persistmasklen: description: - "Persistence mask for IP based persistence types, for IPv6 virtual servers." - "Minimum value = C(1)" - "Maximum value = C(128)" rtspnat: description: - "Use network address translation (NAT) for RTSP data connections." type: bool m: choices: - 'IP' - 'MAC' - 'IPTUNNEL' - 'TOS' description: - "Redirection mode for load balancing. Available settings function as follows:" - >- * C(IP) - Before forwarding a request to a server, change the destination IP address to the server's IP address. - >- * C(MAC) - Before forwarding a request to a server, change the destination MAC address to the server's MAC address. The destination IP address is not changed. MAC-based redirection mode is used mostly in firewall load balancing deployments. - >- * C(IPTUNNEL) - Perform IP-in-IP encapsulation for client IP packets. In the outer IP headers, set the destination IP address to the IP address of the server and the source IP address to the subnet IP (SNIP). The client IP packets are not modified. Applicable to both IPv4 and IPv6 packets. - "* C(TOS) - Encode the virtual server's TOS ID in the TOS field of the IP header." - "You can use either the C(IPTUNNEL) or the C(TOS) option to implement Direct Server Return (DSR)." tosid: description: - >- TOS ID of the virtual server. Applicable only when the load balancing redirection mode is set to TOS. - "Minimum value = C(1)" - "Maximum value = C(63)" datalength: description: - >- Length of the token to be extracted from the data segment of an incoming packet, for use in the token method of load balancing. The length of the token, specified in bytes, must not be greater than 24 KB. 
Applicable to virtual servers of type TCP. - "Minimum value = C(1)" - "Maximum value = C(100)" dataoffset: description: - >- Offset to be considered when extracting a token from the TCP payload. Applicable to virtual servers, of type TCP, using the token method of load balancing. Must be within the first 24 KB of the TCP payload. - "Minimum value = C(0)" - "Maximum value = C(25400)" sessionless: choices: - 'enabled' - 'disabled' description: - >- Perform load balancing on a per-packet basis, without establishing sessions. Recommended for load balancing of intrusion detection system (IDS) servers and scenarios involving direct server return (DSR), where session information is unnecessary. connfailover: choices: - 'DISABLED' - 'STATEFUL' - 'STATELESS' description: - >- Mode in which the connection failover feature must operate for the virtual server. After a failover, established TCP connections and UDP packet flows are kept active and resumed on the secondary appliance. Clients remain connected to the same servers. Available settings function as follows: - >- * C(STATEFUL) - The primary appliance shares state information with the secondary appliance, in real time, resulting in some runtime processing overhead. - >- * C(STATELESS) - State information is not shared, and the new primary appliance tries to re-create the packet flow on the basis of the information contained in the packets it receives. - "* C(DISABLED) - Connection failover does not occur." redirurl: description: - "URL to which to redirect traffic if the virtual server becomes unavailable." - >- WARNING! Make sure that the domain in the URL does not match the domain specified for a content switching policy. If it does, requests are continuously redirected to the unavailable virtual server. - "Minimum length = 1" cacheable: description: - >- Route cacheable requests to a cache redirection virtual server. 
The load balancing virtual server can forward requests only to a transparent cache redirection virtual server that has an IP address and port combination of *:80, so such a cache redirection virtual server must be configured on the appliance. type: bool clttimeout: description: - "Idle time, in seconds, after which a client connection is terminated." - "Minimum value = C(0)" - "Maximum value = C(31536000)" somethod: choices: - 'CONNECTION' - 'DYNAMICCONNECTION' - 'BANDWIDTH' - 'HEALTH' - 'NONE' description: - "Type of threshold that, when exceeded, triggers spillover. Available settings function as follows:" - "* C(CONNECTION) - Spillover occurs when the number of client connections exceeds the threshold." - >- * DYNAMICCONNECTION - Spillover occurs when the number of client connections at the virtual server exceeds the sum of the maximum client (Max Clients) settings for bound services. Do not specify a spillover threshold for this setting, because the threshold is implied by the Max Clients settings of bound services. - >- * C(BANDWIDTH) - Spillover occurs when the bandwidth consumed by the virtual server's incoming and outgoing traffic exceeds the threshold. - >- * C(HEALTH) - Spillover occurs when the percentage of weights of the services that are UP drops below the threshold. For example, if services svc1, svc2, and svc3 are bound to a virtual server, with weights 1, 2, and 3, and the spillover threshold is 50%, spillover occurs if svc1 and svc3 or svc2 and svc3 transition to DOWN. - "* C(NONE) - Spillover does not occur." sopersistence: choices: - 'enabled' - 'disabled' description: - >- If spillover occurs, maintain source IP address based persistence for both primary and backup virtual servers. sopersistencetimeout: description: - "Timeout for spillover persistence, in minutes." - "Minimum value = C(2)" - "Maximum value = C(1440)" healththreshold: description: - >- Threshold in percent of active services below which vserver state is made down. 
If this threshold is 0, vserver state will be up even if one bound service is up. - "Minimum value = C(0)" - "Maximum value = C(100)" sothreshold: description: - >- Threshold at which spillover occurs. Specify an integer for the C(CONNECTION) spillover method, a bandwidth value in kilobits per second for the C(BANDWIDTH) method (do not enter the units), or a percentage for the C(HEALTH) method (do not enter the percentage symbol). - "Minimum value = C(1)" - "Maximum value = C(4294967287)" sobackupaction: choices: - 'DROP' - 'ACCEPT' - 'REDIRECT' description: - >- Action to be performed if spillover is to take effect, but no backup chain to spillover is usable or exists. redirectportrewrite: choices: - 'enabled' - 'disabled' description: - "Rewrite the port and change the protocol to ensure successful HTTP redirects from services." downstateflush: choices: - 'enabled' - 'disabled' description: - >- Flush all active transactions associated with a virtual server whose state transitions from UP to DOWN. Do not enable this option for applications that must complete their transactions. disableprimaryondown: choices: - 'enabled' - 'disabled' description: - >- If the primary virtual server goes down, do not allow it to return to primary status until manually enabled. insertvserveripport: choices: - 'OFF' - 'VIPADDR' - 'V6TOV4MAPPING' description: - >- Insert an HTTP header, whose value is the IP address and port number of the virtual server, before forwarding a request to the server. The format of the header is <vipHeader>: <virtual server IP address>_<port number >, where vipHeader is the name that you specify for the header. If the virtual server has an IPv6 address, the address in the header is enclosed in brackets ([ and ]) to separate it from the port number. 
If you have mapped an IPv4 address to a virtual server's IPv6 address, the value of this parameter determines which IP address is inserted in the header, as follows: - >- * C(VIPADDR) - Insert the IP address of the virtual server in the HTTP header regardless of whether the virtual server has an IPv4 address or an IPv6 address. A mapped IPv4 address, if configured, is ignored. - >- * C(V6TOV4MAPPING) - Insert the IPv4 address that is mapped to the virtual server's IPv6 address. If a mapped IPv4 address is not configured, insert the IPv6 address. - "* C(OFF) - Disable header insertion." vipheader: description: - "Name for the inserted header. The default name is vip-header." - "Minimum length = 1" authenticationhost: description: - >- Fully qualified domain name (FQDN) of the authentication virtual server to which the user must be redirected for authentication. Make sure that the Authentication parameter is set to C(yes). - "Minimum length = 3" - "Maximum length = 252" authentication: description: - "Enable or disable user authentication." type: bool authn401: description: - "Enable or disable user authentication with HTTP 401 responses." type: bool authnvsname: description: - "Name of an authentication virtual server with which to authenticate users." - "Minimum length = 1" - "Maximum length = 252" push: choices: - 'enabled' - 'disabled' description: - "Process traffic with the push virtual server that is bound to this load balancing virtual server." pushvserver: description: - >- Name of the load balancing virtual server, of type PUSH or SSL_PUSH, to which the server pushes updates received on the load balancing virtual server that you are configuring. - "Minimum length = 1" pushlabel: description: - >- Expression for extracting a label from the server's response. Can be either an expression or the name of a named expression. 
pushmulticlients: description: - >- Allow multiple Web 2.0 connections from the same client to connect to the virtual server and expect updates. type: bool tcpprofilename: description: - "Name of the TCP profile whose settings are to be applied to the virtual server." - "Minimum length = 1" - "Maximum length = 127" httpprofilename: description: - "Name of the HTTP profile whose settings are to be applied to the virtual server." - "Minimum length = 1" - "Maximum length = 127" dbprofilename: description: - "Name of the DB profile whose settings are to be applied to the virtual server." - "Minimum length = 1" - "Maximum length = 127" comment: description: - "Any comments that you might want to associate with the virtual server." l2conn: description: - >- Use Layer 2 parameters (channel number, MAC address, and VLAN ID) in addition to the 4-tuple (<source IP>:<source port>::<destination IP>:<destination port>) that is used to identify a connection. Allows multiple TCP and non-TCP connections with the same 4-tuple to co-exist on the NetScaler appliance. type: bool oracleserverversion: choices: - '10G' - '11G' description: - "Oracle server version." mssqlserverversion: choices: - '70' - '2000' - '2000SP1' - '2005' - '2008' - '2008R2' - '2012' - '2014' description: - >- For a load balancing virtual server of type C(MSSQL), the Microsoft SQL Server version. Set this parameter if you expect some clients to run a version different from the version of the database. This setting provides compatibility between the client-side and server-side connections by ensuring that all communication conforms to the server's version. mysqlprotocolversion: description: - "MySQL protocol version that the virtual server advertises to clients." mysqlserverversion: description: - "MySQL server version string that the virtual server advertises to clients." 
- "Minimum length = 1" - "Maximum length = 31" mysqlcharacterset: description: - "Character set that the virtual server advertises to clients." mysqlservercapabilities: description: - "Server capabilities that the virtual server advertises to clients." appflowlog: choices: - 'enabled' - 'disabled' description: - "Apply AppFlow logging to the virtual server." netprofile: description: - >- Name of the network profile to associate with the virtual server. If you set this parameter, the virtual server uses only the IP addresses in the network profile as source IP addresses when initiating connections with servers. - "Minimum length = 1" - "Maximum length = 127" icmpvsrresponse: choices: - 'PASSIVE' - 'ACTIVE' description: - >- How the NetScaler appliance responds to ping requests received for an IP address that is common to one or more virtual servers. Available settings function as follows: - >- * If set to C(PASSIVE) on all the virtual servers that share the IP address, the appliance always responds to the ping requests. - >- * If set to C(ACTIVE) on all the virtual servers that share the IP address, the appliance responds to the ping requests if at least one of the virtual servers is UP. Otherwise, the appliance does not respond. - >- * If set to C(ACTIVE) on some virtual servers and PASSIVE on the others, the appliance responds if at least one virtual server with the ACTIVE setting is UP. Otherwise, the appliance does not respond. - >- Note: This parameter is available at the virtual server level. A similar parameter, ICMP Response, is available at the IP address level, for IPv4 addresses of type VIP. To set that parameter, use the add ip command in the CLI or the Create IP dialog box in the GUI. rhistate: choices: - 'PASSIVE' - 'ACTIVE' description: - >- Route Health Injection (RHI) functionality of the NetSaler appliance for advertising the route of the VIP address associated with the virtual server. 
When Vserver RHI Level (RHI) parameter is set to VSVR_CNTRLD, the following are different RHI behaviors for the VIP address on the basis of RHIstate (RHI STATE) settings on the virtual servers associated with the VIP address: - >- * If you set C(rhistate) to C(PASSIVE) on all virtual servers, the NetScaler ADC always advertises the route for the VIP address. - >- * If you set C(rhistate) to C(ACTIVE) on all virtual servers, the NetScaler ADC advertises the route for the VIP address if at least one of the associated virtual servers is in UP state. - >- * If you set C(rhistate) to C(ACTIVE) on some and PASSIVE on others, the NetScaler ADC advertises the route for the VIP address if at least one of the associated virtual servers, whose C(rhistate) set to C(ACTIVE), is in UP state. newservicerequest: description: - >- Number of requests, or percentage of the load on existing services, by which to increase the load on a new service at each interval in slow-start mode. A non-zero value indicates that slow-start is applicable. A zero value indicates that the global RR startup parameter is applied. Changing the value to zero will cause services currently in slow start to take the full traffic as determined by the LB method. Subsequently, any new services added will use the global RR factor. newservicerequestunit: choices: - 'PER_SECOND' - 'PERCENT' description: - "Units in which to increment load at each interval in slow-start mode." newservicerequestincrementinterval: description: - >- Interval, in seconds, between successive increments in the load on a new service or a service whose state has just changed from DOWN to UP. A value of 0 (zero) specifies manual slow start. - "Minimum value = C(0)" - "Maximum value = C(3600)" minautoscalemembers: description: - "Minimum number of members expected to be present when vserver is used in Autoscale." 
- "Minimum value = C(0)" - "Maximum value = C(5000)" maxautoscalemembers: description: - "Maximum number of members expected to be present when vserver is used in Autoscale." - "Minimum value = C(0)" - "Maximum value = C(5000)" persistavpno: description: - "Persist AVP number for Diameter Persistency." - "In case this AVP is not defined in Base RFC 3588 and it is nested inside a Grouped AVP," - "define a sequence of AVP numbers (max 3) in order of parent to child. So say persist AVP number X" - "is nested inside AVP Y which is nested in Z, then define the list as Z Y X." - "Minimum value = C(1)" skippersistency: choices: - 'Bypass' - 'ReLb' - 'None' description: - >- This argument decides the behavior incase the service which is selected from an existing persistence session has reached threshold. td: description: - >- Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0. - "Minimum value = C(0)" - "Maximum value = C(4094)" authnprofile: description: - "Name of the authentication profile to be used when authentication is turned on." macmoderetainvlan: choices: - 'enabled' - 'disabled' description: - "This option is used to retain vlan information of incoming packet when macmode is enabled." dbslb: choices: - 'enabled' - 'disabled' description: - "Enable database specific load balancing for MySQL and MSSQL service types." dns64: choices: - 'enabled' - 'disabled' description: - "This argument is for enabling/disabling the C(dns64) on lbvserver." bypassaaaa: description: - >- If this option is enabled while resolving DNS64 query AAAA queries are not sent to back end dns server. type: bool recursionavailable: description: - >- When set to YES, this option causes the DNS replies from this vserver to have the RA bit turned on. 
Typically one would set this option to YES, when the vserver is load balancing a set of DNS servers thatsupport recursive queries. type: bool processlocal: choices: - 'enabled' - 'disabled' description: - >- By turning on this option packets destined to a vserver in a cluster will not under go any steering. Turn this option for single packet request response mode or when the upstream device is performing a proper RSS for connection based distribution. dnsprofilename: description: - >- Name of the DNS profile to be associated with the VServer. DNS profile properties will be applied to the transactions processed by a VServer. This parameter is valid only for DNS and DNS-TCP VServers. - "Minimum length = 1" - "Maximum length = 127" servicebindings: description: - List of services along with the weights that are load balanced. - The following suboptions are available. suboptions: servicename: description: - "Service to bind to the virtual server." - "Minimum length = 1" weight: description: - "Weight to assign to the specified service." - "Minimum value = C(1)" - "Maximum value = C(100)" servicegroupbindings: description: - List of service groups along with the weights that are load balanced. - The following suboptions are available. suboptions: servicegroupname: description: - "The service group name bound to the selected load balancing virtual server." weight: description: - >- Integer specifying the weight of the service. A larger number specifies a greater weight. Defines the capacity of the service relative to the other services in the load balancing configuration. Determines the priority given to the service in load balancing decisions. - "Minimum value = C(1)" - "Maximum value = C(100)" ssl_certkey: description: - The name of the ssl certificate that is bound to this service. - The ssl certificate must already exist. - Creating the certificate can be done with the M(netscaler_ssl_certkey) module. 
- This option is only applicable only when C(servicetype) is C(SSL). disabled: description: - When set to C(yes) the lb vserver will be disabled. - When set to C(no) the lb vserver will be enabled. - >- Note that due to limitations of the underlying NITRO API a C(disabled) state change alone does not cause the module result to report a changed status. type: bool default: 'no' extends_documentation_fragment: netscaler requirements: - nitro python sdk ''' EXAMPLES = ''' # Netscaler services service-http-1, service-http-2 must have been already created with the netscaler_service module - name: Create a load balancing vserver bound to services delegate_to: localhost netscaler_lb_vserver: nsip: 172.18.0.2 nitro_user: nsroot nitro_pass: nsroot validate_certs: no state: present name: lb_vserver_1 servicetype: HTTP timeout: 12 ipv46: 6.93.3.3 port: 80 servicebindings: - servicename: service-http-1 weight: 80 - servicename: service-http-2 weight: 20 # Service group service-group-1 must have been already created with the netscaler_servicegroup module - name: Create load balancing vserver bound to servicegroup delegate_to: localhost netscaler_lb_vserver: nsip: 172.18.0.2 nitro_user: nsroot nitro_pass: nsroot validate_certs: no state: present name: lb_vserver_2 servicetype: HTTP ipv46: 6.92.2.2 port: 80 timeout: 10 servicegroupbindings: - servicegroupname: service-group-1 ''' RETURN = ''' loglines: description: list of logged messages by the module returned: always type: list sample: ['message 1', 'message 2'] msg: description: Message detailing the failure reason returned: failure type: str sample: "Action does not exist" diff: description: List of differences between the actual configured object and the configuration specified in the module returned: failure type: dict sample: { 'clttimeout': 'difference. 
ours: (float) 10.0 other: (float) 20.0' } ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.netscaler.netscaler import ( ConfigProxy, get_nitro_client, netscaler_common_arguments, log, loglines, get_immutables_intersection, ensure_feature_is_enabled ) import copy try: from nssrc.com.citrix.netscaler.nitro.resource.config.lb.lbvserver import lbvserver from nssrc.com.citrix.netscaler.nitro.resource.config.lb.lbvserver_servicegroup_binding import lbvserver_servicegroup_binding from nssrc.com.citrix.netscaler.nitro.resource.config.lb.lbvserver_service_binding import lbvserver_service_binding from nssrc.com.citrix.netscaler.nitro.resource.config.ssl.sslvserver_sslcertkey_binding import sslvserver_sslcertkey_binding from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception PYTHON_SDK_IMPORTED = True except ImportError as e: IMPORT_ERROR = str(e) PYTHON_SDK_IMPORTED = False def lb_vserver_exists(client, module): log('Checking if lb vserver exists') if lbvserver.count_filtered(client, 'name:%s' % module.params['name']) > 0: return True else: return False def lb_vserver_identical(client, module, lbvserver_proxy): log('Checking if configured lb vserver is identical') lbvserver_list = lbvserver.get_filtered(client, 'name:%s' % module.params['name']) if lbvserver_proxy.has_equal_attributes(lbvserver_list[0]): return True else: return False def lb_vserver_diff(client, module, lbvserver_proxy): lbvserver_list = lbvserver.get_filtered(client, 'name:%s' % module.params['name']) return lbvserver_proxy.diff_object(lbvserver_list[0]) def get_configured_service_bindings(client, module): log('Getting configured service bindings') readwrite_attrs = [ 'weight', 'name', 'servicename', 'servicegroupname' ] readonly_attrs = [ 'preferredlocation', 'vserverid', 'vsvrbindsvcip', 'servicetype', 'cookieipport', 'port', 'vsvrbindsvcport', 'curstate', 'ipv46', 'dynamicweight', ] configured_bindings = {} if 'servicebindings' in 
module.params and module.params['servicebindings'] is not None: for binding in module.params['servicebindings']: attribute_values_dict = copy.deepcopy(binding) attribute_values_dict['name'] = module.params['name'] key = binding['servicename'].strip() configured_bindings[key] = ConfigProxy( actual=lbvserver_service_binding(), client=client, attribute_values_dict=attribute_values_dict, readwrite_attrs=readwrite_attrs, readonly_attrs=readonly_attrs, ) return configured_bindings def get_configured_servicegroup_bindings(client, module): log('Getting configured service group bindings') readwrite_attrs = [ 'weight', 'name', 'servicename', 'servicegroupname', ] readonly_attrs = [] configured_bindings = {} if 'servicegroupbindings' in module.params and module.params['servicegroupbindings'] is not None: for binding in module.params['servicegroupbindings']: attribute_values_dict = copy.deepcopy(binding) attribute_values_dict['name'] = module.params['name'] key = binding['servicegroupname'].strip() configured_bindings[key] = ConfigProxy( actual=lbvserver_servicegroup_binding(), client=client, attribute_values_dict=attribute_values_dict, readwrite_attrs=readwrite_attrs, readonly_attrs=readonly_attrs, ) return configured_bindings def get_actual_service_bindings(client, module): log('Getting actual service bindings') bindings = {} try: if lbvserver_service_binding.count(client, module.params['name']) == 0: return bindings except nitro_exception as e: if e.errorcode == 258: return bindings else: raise bindigs_list = lbvserver_service_binding.get(client, module.params['name']) for item in bindigs_list: key = item.servicename bindings[key] = item return bindings def get_actual_servicegroup_bindings(client, module): log('Getting actual service group bindings') bindings = {} try: if lbvserver_servicegroup_binding.count(client, module.params['name']) == 0: return bindings except nitro_exception as e: if e.errorcode == 258: return bindings else: raise bindigs_list = 
lbvserver_servicegroup_binding.get(client, module.params['name']) for item in bindigs_list: key = item.servicegroupname bindings[key] = item return bindings def service_bindings_identical(client, module): log('service_bindings_identical') # Compare service keysets configured_service_bindings = get_configured_service_bindings(client, module) service_bindings = get_actual_service_bindings(client, module) configured_keyset = set(configured_service_bindings.keys()) service_keyset = set(service_bindings.keys()) if len(configured_keyset ^ service_keyset) > 0: return False # Compare service item to item for key in configured_service_bindings.keys(): conf = configured_service_bindings[key] serv = service_bindings[key] log('s diff %s' % conf.diff_object(serv)) if not conf.has_equal_attributes(serv): return False # Fallthrough to success return True def servicegroup_bindings_identical(client, module): log('servicegroup_bindings_identical') # Compare servicegroup keysets configured_servicegroup_bindings = get_configured_servicegroup_bindings(client, module) servicegroup_bindings = get_actual_servicegroup_bindings(client, module) configured_keyset = set(configured_servicegroup_bindings.keys()) service_keyset = set(servicegroup_bindings.keys()) log('len %s' % len(configured_keyset ^ service_keyset)) if len(configured_keyset ^ service_keyset) > 0: return False # Compare servicegroup item to item for key in configured_servicegroup_bindings.keys(): conf = configured_servicegroup_bindings[key] serv = servicegroup_bindings[key] log('sg diff %s' % conf.diff_object(serv)) if not conf.has_equal_attributes(serv): return False # Fallthrough to success return True def sync_service_bindings(client, module): log('sync_service_bindings') actual_bindings = get_actual_service_bindings(client, module) configured_bindigns = get_configured_service_bindings(client, module) # Delete actual but not configured delete_keys = list(set(actual_bindings.keys()) - set(configured_bindigns.keys())) for key 
in delete_keys: log('Deleting service binding %s' % key) actual_bindings[key].servicegroupname = '' actual_bindings[key].delete(client, actual_bindings[key]) # Add configured but not in actual add_keys = list(set(configured_bindigns.keys()) - set(actual_bindings.keys())) for key in add_keys: log('Adding service binding %s' % key) configured_bindigns[key].add() # Update existing if changed modify_keys = list(set(configured_bindigns.keys()) & set(actual_bindings.keys())) for key in modify_keys: if not configured_bindigns[key].has_equal_attributes(actual_bindings[key]): log('Updating service binding %s' % key) actual_bindings[key].servicegroupname = '' actual_bindings[key].delete(client, actual_bindings[key]) configured_bindigns[key].add() def sync_servicegroup_bindings(client, module): log('sync_servicegroup_bindings') actual_bindings = get_actual_servicegroup_bindings(client, module) configured_bindigns = get_configured_servicegroup_bindings(client, module) # Delete actual but not configured delete_keys = list(set(actual_bindings.keys()) - set(configured_bindigns.keys())) for key in delete_keys: log('Deleting servicegroup binding %s' % key) actual_bindings[key].servicename = None actual_bindings[key].delete(client, actual_bindings[key]) # Add configured but not in actual add_keys = list(set(configured_bindigns.keys()) - set(actual_bindings.keys())) for key in add_keys: log('Adding servicegroup binding %s' % key) configured_bindigns[key].add() # Update existing if changed modify_keys = list(set(configured_bindigns.keys()) & set(actual_bindings.keys())) for key in modify_keys: if not configured_bindigns[key].has_equal_attributes(actual_bindings[key]): log('Updating servicegroup binding %s' % key) actual_bindings[key].servicename = None actual_bindings[key].delete(client, actual_bindings[key]) configured_bindigns[key].add() def ssl_certkey_bindings_identical(client, module): log('Entering ssl_certkey_bindings_identical') vservername = module.params['name'] if 
sslvserver_sslcertkey_binding.count(client, vservername) == 0: bindings = [] else: bindings = sslvserver_sslcertkey_binding.get(client, vservername) log('Existing certs %s' % bindings) if module.params['ssl_certkey'] is None: if len(bindings) == 0: return True else: return False else: certificate_list = [item.certkeyname for item in bindings] log('certificate_list %s' % certificate_list) if certificate_list == [module.params['ssl_certkey']]: return True else: return False def ssl_certkey_bindings_sync(client, module): log('Syncing ssl certificates') vservername = module.params['name'] if sslvserver_sslcertkey_binding.count(client, vservername) == 0: bindings = [] else: bindings = sslvserver_sslcertkey_binding.get(client, vservername) log('bindings len is %s' % len(bindings)) # Delete existing bindings for binding in bindings: sslvserver_sslcertkey_binding.delete(client, binding) # Add binding if appropriate if module.params['ssl_certkey'] is not None: binding = sslvserver_sslcertkey_binding() binding.vservername = module.params['name'] binding.certkeyname = module.params['ssl_certkey'] sslvserver_sslcertkey_binding.add(client, binding) def do_state_change(client, module, lbvserver_proxy): if module.params['disabled']: log('Disabling lb server') result = lbvserver.disable(client, lbvserver_proxy.actual) else: log('Enabling lb server') result = lbvserver.enable(client, lbvserver_proxy.actual) return result def main(): module_specific_arguments = dict( name=dict(type='str'), servicetype=dict( type='str', choices=[ 'HTTP', 'FTP', 'TCP', 'UDP', 'SSL', 'SSL_BRIDGE', 'SSL_TCP', 'DTLS', 'NNTP', 'DNS', 'DHCPRA', 'ANY', 'SIP_UDP', 'SIP_TCP', 'SIP_SSL', 'DNS_TCP', 'RTSP', 'PUSH', 'SSL_PUSH', 'RADIUS', 'RDP', 'MYSQL', 'MSSQL', 'DIAMETER', 'SSL_DIAMETER', 'TFTP', 'ORACLE', 'SMPP', 'SYSLOGTCP', 'SYSLOGUDP', 'FIX', 'SSL_FIX', ] ), ipv46=dict(type='str'), ippattern=dict(type='str'), ipmask=dict(type='str'), port=dict(type='int'), range=dict(type='float'), persistencetype=dict( 
type='str', choices=[ 'SOURCEIP', 'COOKIEINSERT', 'SSLSESSION', 'RULE', 'URLPASSIVE', 'CUSTOMSERVERID', 'DESTIP', 'SRCIPDESTIP', 'CALLID', 'RTSPSID', 'DIAMETER', 'FIXSESSION', 'NONE', ] ), timeout=dict(type='float'), persistencebackup=dict( type='str', choices=[ 'SOURCEIP', 'NONE', ] ), backuppersistencetimeout=dict(type='float'), lbmethod=dict( type='str', choices=[ 'ROUNDROBIN', 'LEASTCONNECTION', 'LEASTRESPONSETIME', 'URLHASH', 'DOMAINHASH', 'DESTINATIONIPHASH', 'SOURCEIPHASH', 'SRCIPDESTIPHASH', 'LEASTBANDWIDTH', 'LEASTPACKETS', 'TOKEN', 'SRCIPSRCPORTHASH', 'LRTM', 'CALLIDHASH', 'CUSTOMLOAD', 'LEASTREQUEST', 'AUDITLOGHASH', 'STATICPROXIMITY', ] ), hashlength=dict(type='float'), netmask=dict(type='str'), v6netmasklen=dict(type='float'), backuplbmethod=dict( type='str', choices=[ 'ROUNDROBIN', 'LEASTCONNECTION', 'LEASTRESPONSETIME', 'SOURCEIPHASH', 'LEASTBANDWIDTH', 'LEASTPACKETS', 'CUSTOMLOAD', ] ), cookiename=dict(type='str'), listenpolicy=dict(type='str'), listenpriority=dict(type='float'), persistmask=dict(type='str'), v6persistmasklen=dict(type='float'), rtspnat=dict(type='bool'), m=dict( type='str', choices=[ 'IP', 'MAC', 'IPTUNNEL', 'TOS', ] ), tosid=dict(type='float'), datalength=dict(type='float'), dataoffset=dict(type='float'), sessionless=dict( type='str', choices=[ 'enabled', 'disabled', ] ), connfailover=dict( type='str', choices=[ 'DISABLED', 'STATEFUL', 'STATELESS', ] ), redirurl=dict(type='str'), cacheable=dict(type='bool'), clttimeout=dict(type='float'), somethod=dict( type='str', choices=[ 'CONNECTION', 'DYNAMICCONNECTION', 'BANDWIDTH', 'HEALTH', 'NONE', ] ), sopersistence=dict( type='str', choices=[ 'enabled', 'disabled', ] ), sopersistencetimeout=dict(type='float'), healththreshold=dict(type='float'), sothreshold=dict(type='float'), sobackupaction=dict( type='str', choices=[ 'DROP', 'ACCEPT', 'REDIRECT', ] ), redirectportrewrite=dict( type='str', choices=[ 'enabled', 'disabled', ] ), downstateflush=dict( type='str', choices=[ 'enabled', 
'disabled', ] ), disableprimaryondown=dict( type='str', choices=[ 'enabled', 'disabled', ] ), insertvserveripport=dict( type='str', choices=[ 'OFF', 'VIPADDR', 'V6TOV4MAPPING', ] ), vipheader=dict(type='str'), authenticationhost=dict(type='str'), authentication=dict(type='bool'), authn401=dict(type='bool'), authnvsname=dict(type='str'), push=dict( type='str', choices=[ 'enabled', 'disabled', ] ), pushvserver=dict(type='str'), pushlabel=dict(type='str'), pushmulticlients=dict(type='bool'), tcpprofilename=dict(type='str'), httpprofilename=dict(type='str'), dbprofilename=dict(type='str'), comment=dict(type='str'), l2conn=dict(type='bool'), oracleserverversion=dict( type='str', choices=[ '10G', '11G', ] ), mssqlserverversion=dict( type='str', choices=[ '70', '2000', '2000SP1', '2005', '2008', '2008R2', '2012', '2014', ] ), mysqlprotocolversion=dict(type='float'), mysqlserverversion=dict(type='str'), mysqlcharacterset=dict(type='float'), mysqlservercapabilities=dict(type='float'), appflowlog=dict( type='str', choices=[ 'enabled', 'disabled', ] ), netprofile=dict(type='str'), icmpvsrresponse=dict( type='str', choices=[ 'PASSIVE', 'ACTIVE', ] ), rhistate=dict( type='str', choices=[ 'PASSIVE', 'ACTIVE', ] ), newservicerequest=dict(type='float'), newservicerequestunit=dict( type='str', choices=[ 'PER_SECOND', 'PERCENT', ] ), newservicerequestincrementinterval=dict(type='float'), minautoscalemembers=dict(type='float'), maxautoscalemembers=dict(type='float'), skippersistency=dict( type='str', choices=[ 'Bypass', 'ReLb', 'None', ] ), authnprofile=dict(type='str'), macmoderetainvlan=dict( type='str', choices=[ 'enabled', 'disabled', ] ), dbslb=dict( type='str', choices=[ 'enabled', 'disabled', ] ), dns64=dict( type='str', choices=[ 'enabled', 'disabled', ] ), bypassaaaa=dict(type='bool'), recursionavailable=dict(type='bool'), processlocal=dict( type='str', choices=[ 'enabled', 'disabled', ] ), dnsprofilename=dict(type='str'), ) hand_inserted_arguments = dict( 
servicebindings=dict(type='list'), servicegroupbindings=dict(type='list'), ssl_certkey=dict(type='str'), disabled=dict( type='bool', default=False ), ) argument_spec = dict() argument_spec.update(netscaler_common_arguments) argument_spec.update(module_specific_arguments) argument_spec.update(hand_inserted_arguments) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) module_result = dict( changed=False, failed=False, loglines=loglines, ) # Fail the module if imports failed if not PYTHON_SDK_IMPORTED: module.fail_json(msg='Could not load nitro python sdk') # Fallthrough to rest of execution client = get_nitro_client(module) try: client.login() except nitro_exception as e: msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message) module.fail_json(msg=msg) except Exception as e: if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>": module.fail_json(msg='Connection error %s' % str(e)) elif str(type(e)) == "<class 'requests.exceptions.SSLError'>": module.fail_json(msg='SSL Error %s' % str(e)) else: module.fail_json(msg='Unexpected error during login %s' % str(e)) readwrite_attrs = [ 'name', 'servicetype', 'ipv46', 'ippattern', 'ipmask', 'port', 'range', 'persistencetype', 'timeout', 'persistencebackup', 'backuppersistencetimeout', 'lbmethod', 'hashlength', 'netmask', 'v6netmasklen', 'backuplbmethod', 'cookiename', 'listenpolicy', 'listenpriority', 'persistmask', 'v6persistmasklen', 'rtspnat', 'm', 'tosid', 'datalength', 'dataoffset', 'sessionless', 'connfailover', 'redirurl', 'cacheable', 'clttimeout', 'somethod', 'sopersistence', 'sopersistencetimeout', 'healththreshold', 'sothreshold', 'sobackupaction', 'redirectportrewrite', 'downstateflush', 'disableprimaryondown', 'insertvserveripport', 'vipheader', 'authenticationhost', 'authentication', 'authn401', 'authnvsname', 'push', 'pushvserver', 'pushlabel', 'pushmulticlients', 'tcpprofilename', 'httpprofilename', 'dbprofilename', 'comment', 
'l2conn', 'oracleserverversion', 'mssqlserverversion', 'mysqlprotocolversion', 'mysqlserverversion', 'mysqlcharacterset', 'mysqlservercapabilities', 'appflowlog', 'netprofile', 'icmpvsrresponse', 'rhistate', 'newservicerequest', 'newservicerequestunit', 'newservicerequestincrementinterval', 'minautoscalemembers', 'maxautoscalemembers', 'skippersistency', 'authnprofile', 'macmoderetainvlan', 'dbslb', 'dns64', 'bypassaaaa', 'recursionavailable', 'processlocal', 'dnsprofilename', ] readonly_attrs = [ 'value', 'ipmapping', 'ngname', 'type', 'curstate', 'effectivestate', 'status', 'lbrrreason', 'redirect', 'precedence', 'homepage', 'dnsvservername', 'domain', 'policyname', 'cachevserver', 'health', 'gotopriorityexpression', 'ruletype', 'groupname', 'cookiedomain', 'map', 'gt2gb', 'consolidatedlconn', 'consolidatedlconngbl', 'thresholdvalue', 'bindpoint', 'invoke', 'labeltype', 'labelname', 'version', 'totalservices', 'activeservices', 'statechangetimesec', 'statechangetimeseconds', 'statechangetimemsec', 'tickssincelaststatechange', 'isgslb', 'vsvrdynconnsothreshold', 'backupvserverstatus', '__count', ] immutable_attrs = [ 'name', 'servicetype', 'ipv46', 'port', 'range', 'state', 'redirurl', 'vipheader', 'newservicerequestunit', 'td', ] transforms = { 'rtspnat': ['bool_on_off'], 'authn401': ['bool_on_off'], 'bypassaaaa': ['bool_yes_no'], 'authentication': ['bool_on_off'], 'cacheable': ['bool_yes_no'], 'l2conn': ['bool_on_off'], 'pushmulticlients': ['bool_yes_no'], 'recursionavailable': ['bool_yes_no'], 'sessionless': [lambda v: v.upper()], 'sopersistence': [lambda v: v.upper()], 'redirectportrewrite': [lambda v: v.upper()], 'downstateflush': [lambda v: v.upper()], 'disableprimaryondown': [lambda v: v.upper()], 'push': [lambda v: v.upper()], 'appflowlog': [lambda v: v.upper()], 'macmoderetainvlan': [lambda v: v.upper()], 'dbslb': [lambda v: v.upper()], 'dns64': [lambda v: v.upper()], 'processlocal': [lambda v: v.upper()], } lbvserver_proxy = ConfigProxy( 
actual=lbvserver(), client=client, attribute_values_dict=module.params, readwrite_attrs=readwrite_attrs, readonly_attrs=readonly_attrs, immutable_attrs=immutable_attrs, transforms=transforms, ) try: ensure_feature_is_enabled(client, 'LB') if module.params['state'] == 'present': log('Applying actions for state present') if not lb_vserver_exists(client, module): log('Add lb vserver') if not module.check_mode: lbvserver_proxy.add() if module.params['save_config']: client.save_config() module_result['changed'] = True elif not lb_vserver_identical(client, module, lbvserver_proxy): # Check if we try to change value of immutable attributes diff_dict = lb_vserver_diff(client, module, lbvserver_proxy) immutables_changed = get_immutables_intersection(lbvserver_proxy, diff_dict.keys()) if immutables_changed != []: msg = 'Cannot update immutable attributes %s. Must delete and recreate entity.' % (immutables_changed,) module.fail_json(msg=msg, diff=diff_dict, **module_result) log('Update lb vserver') if not module.check_mode: lbvserver_proxy.update() if module.params['save_config']: client.save_config() module_result['changed'] = True else: log('Present noop') if not service_bindings_identical(client, module): if not module.check_mode: sync_service_bindings(client, module) if module.params['save_config']: client.save_config() module_result['changed'] = True if not servicegroup_bindings_identical(client, module): if not module.check_mode: sync_servicegroup_bindings(client, module) if module.params['save_config']: client.save_config() module_result['changed'] = True if module.params['servicetype'] != 'SSL' and module.params['ssl_certkey'] is not None: module.fail_json(msg='ssl_certkey is applicable only to SSL vservers', **module_result) # Check if SSL certkey is sane if module.params['servicetype'] == 'SSL': if not ssl_certkey_bindings_identical(client, module): if not module.check_mode: ssl_certkey_bindings_sync(client, module) module_result['changed'] = True if not 
module.check_mode: res = do_state_change(client, module, lbvserver_proxy) if res.errorcode != 0: msg = 'Error when setting disabled state. errorcode: %s message: %s' % (res.errorcode, res.message) module.fail_json(msg=msg, **module_result) # Sanity check log('Sanity checks for state present') if not module.check_mode: if not lb_vserver_exists(client, module): module.fail_json(msg='Did not create lb vserver', **module_result) if not lb_vserver_identical(client, module, lbvserver_proxy): msg = 'lb vserver is not configured correctly' module.fail_json(msg=msg, diff=lb_vserver_diff(client, module, lbvserver_proxy), **module_result) if not service_bindings_identical(client, module): module.fail_json(msg='service bindings are not identical', **module_result) if not servicegroup_bindings_identical(client, module): module.fail_json(msg='servicegroup bindings are not identical', **module_result) if module.params['servicetype'] == 'SSL': if not ssl_certkey_bindings_identical(client, module): module.fail_json(msg='sll certkey bindings not identical', **module_result) elif module.params['state'] == 'absent': log('Applying actions for state absent') if lb_vserver_exists(client, module): if not module.check_mode: log('Delete lb vserver') lbvserver_proxy.delete() if module.params['save_config']: client.save_config() module_result['changed'] = True else: log('Absent noop') module_result['changed'] = False # Sanity check log('Sanity checks for state absent') if not module.check_mode: if lb_vserver_exists(client, module): module.fail_json(msg='lb vserver still exists', **module_result) except nitro_exception as e: msg = "nitro exception errorcode=%s, message=%s" % (str(e.errorcode), e.message) module.fail_json(msg=msg, **module_result) client.logout() module.exit_json(**module_result) if __name__ == "__main__": main()
gpl-3.0
loopCM/chromium
tools/telemetry/telemetry/core/chrome/android_browser_finder_unittest.py
5
2778
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest

from telemetry.core import browser_options
from telemetry.core.chrome import android_browser_finder
from telemetry.test import system_stub


class LoggingStub(object):
  """Logger double that records warn() messages for later inspection."""

  def __init__(self):
    self.warnings = []

  def info(self, msg, *args):
    # Informational output is irrelevant to these tests.
    pass

  def warn(self, msg, *args):
    self.warnings.append(msg % args)


class AndroidBrowserFinderTest(unittest.TestCase):
  """Exercises FindAllAvailableBrowsers against stubbed adb/subprocess."""

  def setUp(self):
    self._stubs = system_stub.Override(android_browser_finder,
                                       ['adb_commands', 'subprocess'])

  def tearDown(self):
    self._stubs.Restore()

  def test_no_adb(self):
    finder_options = browser_options.BrowserOptions()

    def NoAdb(*args, **kargs):  # pylint: disable=W0613
      raise OSError('not found')
    self._stubs.subprocess.Popen = NoAdb

    found = android_browser_finder.FindAllAvailableBrowsers(finder_options)
    self.assertEqual(0, len(found))

  def test_adb_no_devices(self):
    finder_options = browser_options.BrowserOptions()

    found = android_browser_finder.FindAllAvailableBrowsers(finder_options)
    self.assertEqual(0, len(found))

  def test_adb_permissions_error(self):
    finder_options = browser_options.BrowserOptions()
    self._stubs.subprocess.Popen.communicate_result = (
        """List of devices attached
????????????\tno permissions""",
        """* daemon not running. starting it now on port 5037 *
* daemon started successfully *
""")

    log_stub = LoggingStub()
    found = android_browser_finder.FindAllAvailableBrowsers(
        finder_options, log_stub)
    self.assertEqual(3, len(log_stub.warnings))
    self.assertEqual(0, len(found))

  def test_adb_two_devices(self):
    finder_options = browser_options.BrowserOptions()
    self._stubs.adb_commands.attached_devices = ['015d14fec128220c',
                                                 '015d14fec128220d']

    log_stub = LoggingStub()
    found = android_browser_finder.FindAllAvailableBrowsers(
        finder_options, log_stub)
    # Multiple attached devices should produce a warning and no browsers.
    self.assertEqual(1, len(log_stub.warnings))
    self.assertEqual(0, len(found))

  def test_adb_one_device(self):
    finder_options = browser_options.BrowserOptions()
    self._stubs.adb_commands.attached_devices = ['015d14fec128220c']

    def OnPM(args):
      assert args[0] == 'pm'
      assert args[1] == 'list'
      assert args[2] == 'packages'
      return ['package:org.chromium.content_shell_apk',
              'package.com.google.android.setupwizard']
    self._stubs.adb_commands.shell_command_handlers['pm'] = OnPM

    found = android_browser_finder.FindAllAvailableBrowsers(finder_options)
    self.assertEqual(1, len(found))
bsd-3-clause
swegener/libsigrokdecode
decoders/mlx90614/pd.py
3
2741
##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2012 Uwe Hermann <uwe@hermann-uwe.de>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##

import sigrokdecode as srd

class Decoder(srd.Decoder):
    """Stacked decoder: turns I2C traffic from an MLX90614 IR thermometer
    into temperature annotations (degrees Celsius and Kelvin rows)."""
    api_version = 3
    id = 'mlx90614'
    name = 'MLX90614'
    longname = 'Melexis MLX90614'
    desc = 'Melexis MLX90614 infrared thermometer protocol.'
    license = 'gplv2+'
    inputs = ['i2c']
    outputs = []
    tags = ['IC', 'Sensor']
    annotations = (
        ('celsius', 'Temperature / °C'),
        ('kelvin', 'Temperature / K'),
    )
    annotation_rows = (
        ('temps-celsius', 'Temperature / °C', (0,)),
        ('temps-kelvin', 'Temperature / K', (1,)),
    )

    def __init__(self):
        self.reset()

    def reset(self):
        # self.state: current state-machine state (see decode()).
        # self.data: raw temperature bytes collected so far (LSB first).
        self.state = 'IGNORE START REPEAT'
        self.data = []

    def start(self):
        # Register the annotation output once decoding begins.
        self.out_ann = self.register(srd.OUTPUT_ANN)

    def putx(self, data):
        # Emit an annotation spanning the currently collected sample range.
        self.put(self.ss, self.es, self.out_ann, data)

    # Quick hack implementation! This needs to be improved a lot!
    def decode(self, ss, es, data):
        """Consume one (command, byte) event from the stacked I2C decoder.

        Walks a three-state machine: wait for a repeated START, then the
        (write) address, then collect two data bytes that form the raw
        temperature word.
        """
        cmd, databyte = data

        # State machine.
        if self.state == 'IGNORE START REPEAT':
            if cmd != 'START REPEAT':
                return
            self.state = 'IGNORE ADDRESS WRITE'
        elif self.state == 'IGNORE ADDRESS WRITE':
            if cmd != 'ADDRESS WRITE':
                return
            self.state = 'GET TEMPERATURE'
        elif self.state == 'GET TEMPERATURE':
            if cmd != 'DATA WRITE':
                return
            if len(self.data) == 0:
                # First (low) byte: remember the start sample.
                self.data.append(databyte)
                self.ss = ss
            elif len(self.data) == 1:
                # Second (high) byte: remember the end sample.
                self.data.append(databyte)
                self.es = es
            else:
                # Third byte seen (presumably the PEC -- TODO confirm):
                # convert the 16-bit little-endian raw value. Per the data
                # the code assumes, one LSB equals 0.02 K.
                kelvin = (self.data[0] | (self.data[1] << 8)) * 0.02
                celsius = kelvin - 273.15
                self.putx([0, ['Temperature: %3.2f °C' % celsius]])
                self.putx([1, ['Temperature: %3.2f K' % kelvin]])
                # Start over for the next transaction.
                self.state = 'IGNORE START REPEAT'
                self.data = []
gpl-3.0
flowroute/xhtml2pdf
xhtml2pdf/__init__.py
27
1577
# -*- coding: utf-8 -*-
import logging

# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__reversion__ = "$Revision: 238 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2008-06-26 20:06:02 +0200 (Do, 26 Jun 2008) $"

# Message shown (with the triggering ImportError interpolated into %s) when
# the mandatory dependencies are missing.
REQUIRED_INFO = """
****************************************************
IMPORT ERROR!
%s
****************************************************
The following Python packages are required for PISA:
- Reportlab Toolkit >= 2.2 <http://www.reportlab.org/>
- HTML5lib >= 0.11.1 <http://code.google.com/p/html5lib/>
Optional packages:
- PyPDF2 <https://pypi.python.org/pypi/PyPDF2>
- PIL <http://www.pythonware.com/products/pil/>
""".lstrip()

log = logging.getLogger(__name__)

# Verify the Reportlab dependency at import time; on failure, print the
# requirements banner to stderr and the log, then re-raise so the import
# of xhtml2pdf fails loudly.
try:
    from xhtml2pdf.util import REPORTLAB22

    if not REPORTLAB22:
        # Parenthesized form: the original "raise ImportError, ..." is
        # Python-2-only syntax and a SyntaxError on Python 3.
        raise ImportError("Reportlab Toolkit Version 2.2 or higher needed")
except ImportError as e:  # "as" clause is valid on Python 2.6+ and Python 3
    import sys

    sys.stderr.write(REQUIRED_INFO % e)
    log.error(REQUIRED_INFO % e)
    raise

from xhtml2pdf.version import VERSION

__version__ = VERSION
apache-2.0
mbauskar/erpnext
erpnext/hr/doctype/employee_loan_application/test_employee_loan_application.py
23
1815
# -*- coding: utf-8 -*- # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # See license.txt from __future__ import unicode_literals import frappe import unittest from erpnext.hr.doctype.salary_structure.test_salary_structure import make_employee class TestEmployeeLoanApplication(unittest.TestCase): def setUp(self): self.create_loan_type() self.employee = make_employee("kate_loan@loan.com") self.create_loan_application() def create_loan_type(self): if not frappe.db.get_value("Loan Type", "Home Loan"): frappe.get_doc({ "doctype": "Loan Type", "loan_name": "Home Loan", "maximum_loan_amount": 500000, "rate_of_interest": 9.2 }).insert() def create_loan_application(self): if not frappe.db.get_value("Employee Loan Application", {"employee":self.employee}, "name"): loan_application = frappe.new_doc("Employee Loan Application") loan_application.update({ "employee": self.employee, "loan_type": "Home Loan", "rate_of_interest": 9.2, "loan_amount": 250000, "repayment_method": "Repay Over Number of Periods", "repayment_periods": 24 }) loan_application.insert() def test_loan_totals(self): loan_application = frappe.get_doc("Employee Loan Application", {"employee":self.employee}) self.assertEquals(loan_application.repayment_amount, 11445) self.assertEquals(loan_application.total_payable_interest, 24657) self.assertEquals(loan_application.total_payable_amount, 274657) loan_application.repayment_method = "Repay Fixed Amount per Period" loan_application.repayment_amount = 15000 loan_application.save() self.assertEquals(loan_application.repayment_periods, 18) self.assertEquals(loan_application.total_payable_interest, 18506) self.assertEquals(loan_application.total_payable_amount, 268506)
gpl-3.0
yufengg/tensorflow
tensorflow/contrib/keras/python/keras/layers/merge_test.py
52
6534
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for merge layers."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.contrib.keras.python import keras
from tensorflow.python.platform import test


class MergeLayersTest(test.TestCase):
  """Functional-API tests for keras merge layers (add, multiply, average,
  maximum, concatenate, dot): static shape inference, numeric output
  against the NumPy equivalent, and error handling for bad inputs."""

  def test_merge_add(self):
    # add() must sum elementwise and keep the (batch, 4, 5) shape.
    with self.test_session():
      i1 = keras.layers.Input(shape=(4, 5))
      i2 = keras.layers.Input(shape=(4, 5))
      i3 = keras.layers.Input(shape=(4, 5))
      o = keras.layers.add([i1, i2, i3])
      self.assertListEqual(o.get_shape().as_list(), [None, 4, 5])
      model = keras.models.Model([i1, i2, i3], o)
      x1 = np.random.random((2, 4, 5))
      x2 = np.random.random((2, 4, 5))
      x3 = np.random.random((2, 4, 5))
      out = model.predict([x1, x2, x3])
      self.assertEqual(out.shape, (2, 4, 5))
      self.assertAllClose(out, x1 + x2 + x3, atol=1e-4)

      # test masking: Add must propagate the mask from a Masking input.
      i1 = keras.layers.Input(shape=(4, 5))
      i2 = keras.layers.Input(shape=(4, 5))
      m1 = keras.layers.Masking()(i1)
      layer = keras.layers.Add()
      o = layer([m1, i2])
      self.assertListEqual(o.get_shape().as_list(), [None, 4, 5])
      mask = layer.output_mask
      self.assertListEqual(mask.get_shape().as_list(), [None, 4])

  def test_merge_elementwise_errors(self):
    # Mismatched shapes and non-list inputs must raise ValueError.
    i1 = keras.layers.Input(shape=(4, 5))
    i2 = keras.layers.Input(shape=(4, 6))
    with self.assertRaises(ValueError):
      keras.layers.add([i1, i2])
    with self.assertRaises(ValueError):
      keras.layers.add(i1)
    with self.assertRaises(ValueError):
      keras.layers.add([i1])

  def test_merge_multiply(self):
    # multiply() is elementwise product of all inputs.
    with self.test_session():
      i1 = keras.layers.Input(shape=(4, 5))
      i2 = keras.layers.Input(shape=(4, 5))
      i3 = keras.layers.Input(shape=(4, 5))
      o = keras.layers.multiply([i1, i2, i3])
      self.assertListEqual(o.get_shape().as_list(), [None, 4, 5])
      model = keras.models.Model([i1, i2, i3], o)
      x1 = np.random.random((2, 4, 5))
      x2 = np.random.random((2, 4, 5))
      x3 = np.random.random((2, 4, 5))
      out = model.predict([x1, x2, x3])
      self.assertEqual(out.shape, (2, 4, 5))
      self.assertAllClose(out, x1 * x2 * x3, atol=1e-4)

  def test_merge_average(self):
    # average() of two inputs equals 0.5 * (x1 + x2).
    with self.test_session():
      i1 = keras.layers.Input(shape=(4, 5))
      i2 = keras.layers.Input(shape=(4, 5))
      o = keras.layers.average([i1, i2])
      self.assertListEqual(o.get_shape().as_list(), [None, 4, 5])
      model = keras.models.Model([i1, i2], o)
      x1 = np.random.random((2, 4, 5))
      x2 = np.random.random((2, 4, 5))
      out = model.predict([x1, x2])
      self.assertEqual(out.shape, (2, 4, 5))
      self.assertAllClose(out, 0.5 * (x1 + x2), atol=1e-4)

  def test_merge_maximum(self):
    # maximum() matches np.maximum elementwise.
    with self.test_session():
      i1 = keras.layers.Input(shape=(4, 5))
      i2 = keras.layers.Input(shape=(4, 5))
      o = keras.layers.maximum([i1, i2])
      self.assertListEqual(o.get_shape().as_list(), [None, 4, 5])
      model = keras.models.Model([i1, i2], o)
      x1 = np.random.random((2, 4, 5))
      x2 = np.random.random((2, 4, 5))
      out = model.predict([x1, x2])
      self.assertEqual(out.shape, (2, 4, 5))
      self.assertAllClose(out, np.maximum(x1, x2), atol=1e-4)

  def test_merge_concatenate(self):
    # concatenate(axis=1) stacks the second dimension: (4,5)+(4,5) -> (8,5).
    with self.test_session():
      i1 = keras.layers.Input(shape=(4, 5))
      i2 = keras.layers.Input(shape=(4, 5))
      o = keras.layers.concatenate([i1, i2], axis=1)
      self.assertListEqual(o.get_shape().as_list(), [None, 8, 5])
      model = keras.models.Model([i1, i2], o)
      x1 = np.random.random((2, 4, 5))
      x2 = np.random.random((2, 4, 5))
      out = model.predict([x1, x2])
      self.assertEqual(out.shape, (2, 8, 5))
      self.assertAllClose(out, np.concatenate([x1, x2], axis=1), atol=1e-4)

  def test_concatenate_errors(self):
    # Incompatible non-concat dims and non-list inputs must raise.
    i1 = keras.layers.Input(shape=(4, 5))
    i2 = keras.layers.Input(shape=(3, 5))
    with self.assertRaises(ValueError):
      keras.layers.concatenate([i1, i2], axis=-1)
    with self.assertRaises(ValueError):
      keras.layers.concatenate(i1, axis=-1)
    with self.assertRaises(ValueError):
      keras.layers.concatenate([i1], axis=-1)

  def test_merge_dot(self):
    # dot(axes=1) on (batch, 4) vectors gives a (batch, 1) inner product.
    with self.test_session():
      i1 = keras.layers.Input(shape=(4,))
      i2 = keras.layers.Input(shape=(4,))
      o = keras.layers.dot([i1, i2], axes=1)
      self.assertListEqual(o.get_shape().as_list(), [None, 1])
      model = keras.models.Model([i1, i2], o)
      x1 = np.random.random((2, 4))
      x2 = np.random.random((2, 4))
      out = model.predict([x1, x2])
      self.assertEqual(out.shape, (2, 1))
      expected = np.zeros((2, 1))
      expected[0, 0] = np.dot(x1[0], x2[0])
      expected[1, 0] = np.dot(x1[1], x2[1])
      self.assertAllClose(out, expected, atol=1e-4)

      # Test with negative tuple of axes.
      o = keras.layers.dot([i1, i2], axes=(-1, -1))
      self.assertListEqual(o.get_shape().as_list(), [None, 1])
      model = keras.models.Model([i1, i2], o)
      out = model.predict([x1, x2])
      self.assertEqual(out.shape, (2, 1))
      self.assertAllClose(out, expected, atol=1e-4)
      # test _compute_output_shape
      layer = keras.layers.Dot(axes=-1)
      self.assertEqual(layer._compute_output_shape([(4, 5), (4, 5)]), (4, 1))

  def test_dot_errors(self):
    # Mismatched contraction dims, non-list input, wrong arity must raise.
    i1 = keras.layers.Input(shape=(4, 5))
    i2 = keras.layers.Input(shape=(4, 6))
    i3 = keras.layers.Input(shape=(4, 6))
    with self.assertRaises(ValueError):
      keras.layers.dot([i1, i2], axes=-1)
    with self.assertRaises(ValueError):
      keras.layers.dot(i1, axes=-1)
    with self.assertRaises(ValueError):
      keras.layers.dot([i1], axes=-1)
    with self.assertRaises(ValueError):
      keras.layers.dot([i1, i2, i3], axes=-1)


if __name__ == '__main__':
  test.main()
apache-2.0
gcd0318/python-oauth2
vagrant/create_testclient.py
5
2908
import mysql.connector
from pymongo import MongoClient

# Fixture data for the test OAuth2 client, written to both backends below.
client_id = "tc"
client_secret = "abc"
authorized_grants = ["authorization_code", "client_credentials", "password", "refresh_token"]
authorized_response_types = ["code", "token"]
redirect_uris = ["http://127.0.0.1/index.html"]


def create_in_mongodb():
    # Insert the test client document into testdb.clients unless a document
    # with this identifier already exists (idempotent provisioning).
    # NOTE(review): collection.insert is the legacy pymongo API -- confirm
    # the target pymongo version before modernizing to insert_one.
    client = MongoClient()
    db = client.testdb
    clients = db.clients

    client = clients.find_one({"identifier": client_id})
    if client is None:
        print("Creating test client in mongodb...")
        clients.insert({"identifier": client_id,
                        "secret": client_secret,
                        "authorized_grants": authorized_grants,
                        "authorized_response_types": authorized_response_types,
                        "redirect_uris": redirect_uris})


def create_in_mysql():
    # Insert the test client row plus its grants, response types, and
    # redirect URIs, unless a row with this identifier already exists.
    # NOTE(review): the connection is never closed and nothing is closed on
    # the "already exists" path -- presumably acceptable for this one-shot
    # vagrant provisioning script; confirm.
    connection = mysql.connector.connect(host="127.0.0.1", user="root",
                                         passwd="", db="testdb")

    check_client = connection.cursor()
    check_client.execute("SELECT * FROM clients WHERE identifier = %s",
                         (client_id,))
    client_data = check_client.fetchone()
    check_client.close()

    if client_data is None:
        print("Creating client in mysql...")
        create_client = connection.cursor()
        create_client.execute("""
            INSERT INTO clients (
                identifier, secret
            )
            VALUES (
                %s, %s
            )""", (client_id, client_secret))
        # AUTO_INCREMENT id of the new row; used as the FK below.
        client_id_in_mysql = create_client.lastrowid
        connection.commit()
        create_client.close()

        for authorized_grant in authorized_grants:
            create_grant = connection.cursor()
            create_grant.execute("""
                INSERT INTO client_grants (
                    name, client_id
                )
                VALUES (
                    %s, %s
                )""", (authorized_grant, client_id_in_mysql))
            connection.commit()
            create_grant.close()

        for response_type in authorized_response_types:
            create_response_type = connection.cursor()
            create_response_type.execute("""
                INSERT INTO client_response_types (
                    response_type, client_id
                )
                VALUES (
                    %s, %s
                )""", (response_type, client_id_in_mysql))
            connection.commit()
            create_response_type.close()

        for redirect_uri in redirect_uris:
            create_redirect_uri = connection.cursor()
            create_redirect_uri.execute("""
                INSERT INTO client_redirect_uris (
                    redirect_uri, client_id
                )
                VALUES (
                    %s, %s
                )""", (redirect_uri, client_id_in_mysql))
            connection.commit()
            create_redirect_uri.close()


create_in_mysql()
create_in_mongodb()
mit
KiranJKurian/XScheduler
venv/lib/python2.7/site-packages/pip/utils/outdated.py
191
5555
from __future__ import absolute_import

import datetime
import json
import logging
import os.path
import sys

from pip._vendor import lockfile
from pip._vendor.packaging import version as packaging_version

from pip.compat import total_seconds, WINDOWS
from pip.index import PyPI
from pip.locations import USER_CACHE_DIR, running_under_virtualenv
from pip.utils import ensure_dir, get_installed_version
from pip.utils.filesystem import check_path_owner


SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"


logger = logging.getLogger(__name__)


class VirtualenvSelfCheckState(object):
    """Self-check state stored in <sys.prefix>/pip-selfcheck.json.

    Used inside a virtualenv, where the state file is private to the env
    and needs no locking or per-prefix keying.
    """

    def __init__(self):
        self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")

        # Load the existing state; any unreadable/corrupt file is treated
        # as "never checked".
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)
        except (IOError, ValueError):
            self.state = {}

    def save(self, pypi_version, current_time):
        """Persist the latest known PyPI version and check timestamp."""
        # Attempt to write out our version check file
        with open(self.statefile_path, "w") as statefile:
            json.dump(
                {
                    "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                    "pypi_version": pypi_version,
                },
                statefile,
                sort_keys=True,
                separators=(",", ":")
            )


class GlobalSelfCheckState(object):
    """Self-check state stored in the shared USER_CACHE_DIR selfcheck.json.

    The file is shared by every interpreter on the machine, so entries are
    keyed by sys.prefix and writes are serialized with a lockfile.
    """

    def __init__(self):
        self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")

        # Load the existing state for this interpreter prefix only.
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)[sys.prefix]
        except (IOError, ValueError, KeyError):
            self.state = {}

    def save(self, pypi_version, current_time):
        """Persist this prefix's entry, merging with other prefixes' data."""
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file
        with lockfile.LockFile(self.statefile_path):
            # Re-read under the lock so concurrent pips don't clobber
            # each other's prefix entries.
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))


def load_selfcheck_statefile():
    """Return the state object appropriate for this environment."""
    if running_under_virtualenv():
        return VirtualenvSelfCheckState()
    else:
        return GlobalSelfCheckState()


def pip_version_check(session):
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if installed_version is None:
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    # Best-effort: any failure below is logged at debug level and swallowed
    # so a broken network/state file never breaks the actual pip command.
    try:
        state = load_selfcheck_statefile()

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            if total_seconds(current_time - last_check) < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            resp = session.get(
                PyPI.pip_json_url,
                headers={"Accept": "application/json"},
            )
            resp.raise_for_status()
            # Newest non-prerelease version listed on PyPI.
            pypi_version = [
                v for v in sorted(
                    list(resp.json()["releases"]),
                    key=packaging_version.parse,
                )
                if not packaging_version.parse(v).is_prerelease
            ][-1]

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command." % (pip_version,
                                                         pypi_version,
                                                         pip_cmd)
            )
    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
mit
AdrianHuang/rt-thread-for-vmm
tools/wizard.py
46
2514
#! /usr/bin/env python
#coding=utf-8
#
# File      : wizard.py
# This file is part of RT-Thread RTOS
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Change Logs:
# Date           Author       Notes
# 2015-01-20     Bernard      Add copyright information
#

"""
wizard.py - a script to generate SConscript in RT-Thread RTOS.

`wizard --component name' to generate SConscript for name component.
`wizard --bridge' to generate SConscript as a bridge to connect each
SConscript script file of sub-directory.
"""

import sys

# Template written for a leaf component directory.
SConscript_com = '''# RT-Thread building script for component

from building import *

cwd = GetCurrentDir()
src = Glob('*.c') + Glob('*.cpp')
CPPPATH = [cwd]

group = DefineGroup('COMPONENT_NAME', src, depend = [''], CPPPATH = CPPPATH)

Return('group')
'''

# Template written for a directory that only aggregates sub-directories.
SConscript_bridge = '''# RT-Thread building script for bridge

import os
from building import *

cwd = GetCurrentDir()
objs = []
list = os.listdir(cwd)

for d in list:
    path = os.path.join(cwd, d)
    if os.path.isfile(os.path.join(path, 'SConscript')):
        objs = objs + SConscript(os.path.join(d, 'SConscript'))

Return('objs')
'''


def usage():
    """Print command-line usage."""
    # print() with a single argument behaves the same on Python 2 and 3;
    # the original used Python-2-only print statements and the removed
    # file() builtin, which made the script fail on Python 3.
    print('wizard --component name')
    print('wizard --bridge')


def gen_component(name):
    """Write a component SConscript for *name* into the current directory."""
    print('generate SConscript for ' + name)
    text = SConscript_com.replace('COMPONENT_NAME', name)
    # "with open(...)" replaces the removed file() builtin and guarantees
    # the handle is closed even if the write fails.
    with open('SConscript', 'w') as f:
        f.write(text)


def gen_bridge():
    """Write a bridge SConscript into the current directory."""
    print('generate SConscript for bridge')
    with open('SConscript', 'w') as f:
        f.write(SConscript_bridge)


if __name__ == '__main__':
    if len(sys.argv) == 1:
        usage()
        sys.exit(2)

    if sys.argv[1] == '--component':
        # Guard against "--component" with no name (was an IndexError).
        if len(sys.argv) < 3:
            usage()
            sys.exit(2)
        gen_component(sys.argv[2])
    elif sys.argv[1] == '--bridge':
        gen_bridge()
    else:
        usage()
gpl-2.0
orion1024/Sick-Beard
lib/hachoir_parser/image/tga.py
90
2927
""" Truevision Targa Graphic (TGA) picture parser. Author: Victor Stinner Creation: 18 december 2006 """ from lib.hachoir_parser import Parser from lib.hachoir_core.field import FieldSet, UInt8, UInt16, Enum, RawBytes from lib.hachoir_core.endian import LITTLE_ENDIAN from lib.hachoir_parser.image.common import PaletteRGB class Line(FieldSet): def __init__(self, *args): FieldSet.__init__(self, *args) self._size = self["/width"].value * self["/bpp"].value def createFields(self): for x in xrange(self["/width"].value): yield UInt8(self, "pixel[]") class Pixels(FieldSet): def __init__(self, *args): FieldSet.__init__(self, *args) self._size = self["/width"].value * self["/height"].value * self["/bpp"].value def createFields(self): if self["/options"].value == 0: RANGE = xrange(self["/height"].value-1,-1,-1) else: RANGE = xrange(self["/height"].value) for y in RANGE: yield Line(self, "line[%u]" % y) class TargaFile(Parser): PARSER_TAGS = { "id": "targa", "category": "image", "file_ext": ("tga",), "mime": (u"image/targa", u"image/tga", u"image/x-tga"), "min_size": 18*8, "description": u"Truevision Targa Graphic (TGA)" } CODEC_NAME = { 1: u"8-bit uncompressed", 2: u"24-bit uncompressed", 9: u"8-bit RLE", 10: u"24-bit RLE", } endian = LITTLE_ENDIAN def validate(self): if self["version"].value != 1: return "Unknown version" if self["codec"].value not in self.CODEC_NAME: return "Unknown codec" if self["x_min"].value != 0 or self["y_min"].value != 0: return "(x_min, y_min) is not (0,0)" if self["bpp"].value not in (8, 24): return "Unknown bits/pixel value" return True def createFields(self): yield UInt8(self, "hdr_size", "Header size in bytes") yield UInt8(self, "version", "Targa version (always one)") yield Enum(UInt8(self, "codec", "Pixels encoding"), self.CODEC_NAME) yield UInt16(self, "palette_ofs", "Palette absolute file offset") yield UInt16(self, "nb_color", "Number of color") yield UInt8(self, "color_map_size", "Color map entry size") yield UInt16(self, "x_min") yield 
UInt16(self, "y_min") yield UInt16(self, "width") yield UInt16(self, "height") yield UInt8(self, "bpp", "Bits per pixel") yield UInt8(self, "options", "Options (0: vertical mirror)") if self["bpp"].value == 8: yield PaletteRGB(self, "palette", 256) if self["codec"].value == 1: yield Pixels(self, "pixels") else: size = (self.size - self.current_size) // 8 if size: yield RawBytes(self, "raw_pixels", size)
gpl-3.0
cloudbau/nova
nova/virt/storage_users.py
4
4122
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 Michael Still and Canonical Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import os
import time

from nova import utils

from oslo.config import cfg

CONF = cfg.CONF

TWENTY_FOUR_HOURS = 3600 * 24


# NOTE(morganfainberg): Due to circular import dependencies, the use of the
# CONF.instances_path needs to be wrapped so that it can be resolved at the
# appropriate time. Because compute.manager imports this file, we end up in
# a rather ugly dependency loop without moving this into a wrapped function.
# This issue mostly stems from the use of a decorator for the lock
# synchronize and the implications of how decorators wrap the wrapped function
# or method. If this needs to be used outside of compute.manager, it should
# be refactored to eliminate this circular dependency loop.
def register_storage_use(storage_path, hostname):
    """Identify the id of this instance storage."""

    # NOTE(morganfainberg): config option import is avoided here since it is
    # explicitly imported from compute.manager and may cause issues with
    # defining options after config has been processed with the
    # wrapped-function style used here.
    LOCK_PATH = os.path.join(CONF.instances_path, 'locks')

    @utils.synchronized('storage-registry-lock', external=True,
                        lock_path=LOCK_PATH)
    def do_register_storage_use(storage_path, hostname):
        """Record, under the registry lock, that *hostname* last used this
        storage path now (timestamped entry in the compute_nodes file)."""
        # NOTE(mikal): this is required to determine if the instance storage is
        # shared, which is something that the image cache manager needs to
        # know. I can imagine other uses as well though.
        d = {}
        id_path = os.path.join(storage_path, 'compute_nodes')
        if os.path.exists(id_path):
            with open(id_path) as f:
                d = json.loads(f.read())
        d[hostname] = time.time()
        with open(id_path, 'w') as f:
            f.write(json.dumps(d))

    return do_register_storage_use(storage_path, hostname)


# NOTE(morganfainberg): Due to circular import dependencies, the use of the
# CONF.instances_path needs to be wrapped so that it can be resolved at the
# appropriate time. Because compute.manager imports this file, we end up in
# a rather ugly dependency loop without moving this into a wrapped function.
# This issue mostly stems from the use of a decorator for the lock
# synchronize and the implications of how decorators wrap the wrapped function
# or method. If this needs to be used outside of compute.manager, it should
# be refactored to eliminate this circular dependency loop.
def get_storage_users(storage_path):
    """Get a list of all the users of this storage path."""

    # NOTE(morganfainberg): config option import is avoided here since it is
    # explicitly imported from compute.manager and may cause issues with
    # defining options after config has been processed with the
    # wrapped-function style used here.
    LOCK_PATH = os.path.join(CONF.instances_path, 'locks')

    @utils.synchronized('storage-registry-lock', external=True,
                        lock_path=LOCK_PATH)
    def do_get_storage_users(storage_path):
        """Return, under the registry lock, the hostnames that touched this
        storage path within the last TWENTY_FOUR_HOURS seconds."""
        d = {}
        id_path = os.path.join(storage_path, 'compute_nodes')
        if os.path.exists(id_path):
            with open(id_path) as f:
                d = json.loads(f.read())

        recent_users = []
        for node in d:
            if time.time() - d[node] < TWENTY_FOUR_HOURS:
                recent_users.append(node)

        return recent_users

    return do_get_storage_users(storage_path)
apache-2.0
vFense/vFenseAgent-nix
agent/deps/rpm/Python-2.7.5/lib/python2.7/encodings/palmos.py
647
2936
""" Python Character Mapping Codec for PalmOS 3.5. Written by Sjoerd Mullender (sjoerd@acm.org); based on iso8859_15.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_map) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_map) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_map)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_map)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='palmos', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Map decoding_map = codecs.make_identity_dict(range(256)) # The PalmOS character set is mostly iso-8859-1 with some differences. 
decoding_map.update({ 0x0080: 0x20ac, # EURO SIGN 0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK 0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK 0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK 0x0085: 0x2026, # HORIZONTAL ELLIPSIS 0x0086: 0x2020, # DAGGER 0x0087: 0x2021, # DOUBLE DAGGER 0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT 0x0089: 0x2030, # PER MILLE SIGN 0x008a: 0x0160, # LATIN CAPITAL LETTER S WITH CARON 0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK 0x008c: 0x0152, # LATIN CAPITAL LIGATURE OE 0x008d: 0x2666, # BLACK DIAMOND SUIT 0x008e: 0x2663, # BLACK CLUB SUIT 0x008f: 0x2665, # BLACK HEART SUIT 0x0090: 0x2660, # BLACK SPADE SUIT 0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK 0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK 0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK 0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK 0x0095: 0x2022, # BULLET 0x0096: 0x2013, # EN DASH 0x0097: 0x2014, # EM DASH 0x0098: 0x02dc, # SMALL TILDE 0x0099: 0x2122, # TRADE MARK SIGN 0x009a: 0x0161, # LATIN SMALL LETTER S WITH CARON 0x009c: 0x0153, # LATIN SMALL LIGATURE OE 0x009f: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS }) ### Encoding Map encoding_map = codecs.make_encoding_map(decoding_map)
lgpl-3.0
neliogodoi/IC_IOT
4_AcionaRele.py
1
1956
# MicroPython (ESP8266) Wi-Fi relay switch: joins the local network,
# serves a tiny web page with On/Off buttons, and drives a relay on
# GPIO5 in response to requests containing "LightStatus=".
from machine import Pin
import webrepl
import socket
import time
import network
import ure

# Join the local Wi-Fi network in station mode.
# NOTE(review): credentials are hard-coded in source -- consider loading
# them from a file that is not committed to version control.
sta = network.WLAN(network.STA_IF)
sta.active(True)
sta.connect('PERNINHA', 'Manicomiof4022')
sta.ifconfig()
webrepl.start()

# Relay control pin.
light = Pin(5, Pin.OUT)

# Listen on all interfaces, port 80, one pending connection.
addr = socket.getaddrinfo('0.0.0.0', 80)[0][-1]
s = socket.socket()
s.bind(addr)
s.listen(1)
print('listening on', addr)

# Page served for every request.  The buttons fire an XMLHttpRequest
# whose URL carries the desired relay state.
# Fix: removed a stray "</h1>" closing tag that had no opening tag.
html = """<!DOCTYPE html>
<html>
 <head> <title>Interruptor Wi-Fi</title>
 <style type="text/css">
 body{ background-color: #fc0;}
 #Lampada{ margin: 2px solid ; height: 30px; position: absolute; left: 30%;}
 </style>
 </head>
 <body>
 <script type="text/javascript">
 function request(url) {
   var xhttp;
   if (window.XMLHttpRequest) {
     xhttp = new XMLHttpRequest();
   } else {
     xhttp = new ActiveXObject("Microsoft.XMLHTTP");
   }
   xhttp.onreadystatechange = function() {
     if (this.readyState == 4 && this.status == 200) {
       alert('Finished!');
     }
   };
   xhttp.open("GET", url, true);
   xhttp.send();
 }
 </script>
 <div id="Lampada">
 <button type="submit" onclick="request('LightStatus=On');">Ligar</button>
 <button type="submit" onclick="request('LightStatus=Off');">Desligar</button>
 </div></body>
</html>
"""

while True:
    cl, addr = s.accept()
    print('client connected from', addr)
    cl_file = cl.makefile('rwb', 0)
    while True:
        line = cl_file.readline()
        strLine = str(line)
        # Blank line (or closed socket) marks the end of the request
        # headers: just reply with the page.
        if not line or line == b'\r\n':
            cl.send(html)
            break
        if not ure.search('LightStatus=', strLine):
            continue
        else:
            # Request line looks like "GET /LightStatus=On HTTP/1.1":
            # the second space-separated token carries the state after '='.
            status = strLine.split(' ')[1].split('=')[1]
            cl.send(html)
            if status == 'On':
                light.value(1)
            else:
                light.value(0)
            break
    cl.close()
gpl-3.0
kenshay/ImageScripter
ProgramData/Android/ADB/platform-tools/systrace/catapult/telemetry/telemetry/internal/platform/profiler/android_traceview_profiler.py
5
3012
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os

from telemetry.internal.backends.chrome import android_browser_finder
from telemetry.internal.platform import profiler

import py_utils

try:
  from devil.android import device_errors  # pylint: disable=import-error
except ImportError:
  device_errors = None


class AndroidTraceviewProfiler(profiler.Profiler):
  """Collects a Traceview on Android.

  On construction, starts an `am profile` trace for every browser
  process; CollectProfile() stops the traces and pulls the resulting
  .trace files off the device.
  """

  # Scratch directory on the device where per-pid .trace files are written.
  _DEFAULT_DEVICE_DIR = '/data/local/tmp/traceview'

  def __init__(self, browser_backend, platform_backend, output_path, state):
    super(AndroidTraceviewProfiler, self).__init__(
        browser_backend, platform_backend, output_path, state)
    # Ensure the device scratch dir exists and is empty and writable.
    if self._browser_backend.device.FileExists(self._DEFAULT_DEVICE_DIR):
      # Note: command must be passed as a string to expand wildcards.
      self._browser_backend.device.RunShellCommand(
          'rm ' + os.path.join(self._DEFAULT_DEVICE_DIR, '*'),
          check_return=True, shell=True)
    else:
      self._browser_backend.device.RunShellCommand(
          ['mkdir', '-p', self._DEFAULT_DEVICE_DIR], check_return=True)
      # World-writable so the (non-root) browser processes can write traces.
      self._browser_backend.device.RunShellCommand(
          ['chmod', '777', self._DEFAULT_DEVICE_DIR], check_return=True)
    # Start one trace per browser process; remember (pid, path) pairs so
    # CollectProfile() can stop and gather them later.
    self._trace_files = []
    for pid in self._GetProcessOutputFileMap().iterkeys():
      device_dump_file = '%s/%s.trace' % (self._DEFAULT_DEVICE_DIR, pid)
      self._trace_files.append((pid, device_dump_file))
      self._browser_backend.device.RunShellCommand(
          ['am', 'profile', str(pid), 'start', device_dump_file],
          check_return=True)

  @classmethod
  def name(cls):
    # Identifier used on the command line to select this profiler.
    return 'android-traceview'

  @classmethod
  def is_supported(cls, browser_type):
    # Only meaningful for Android browsers ('any' counts if one is found).
    if browser_type == 'any':
      return android_browser_finder.CanFindAvailableBrowsers()
    return browser_type.startswith('android')

  def CollectProfile(self):
    """Stop all traces, pull them to the host and return their device paths."""
    output_files = []
    for pid, trace_file in self._trace_files:
      self._browser_backend.device.RunShellCommand(
          ['am', 'profile', str(pid), 'stop'], check_return=True)
      # Wait until 'am profile stop' has actually flushed the trace file.
      # pylint: disable=cell-var-from-loop
      py_utils.WaitFor(lambda: self._FileSize(trace_file) > 0, timeout=10)
      output_files.append(trace_file)
    # self._output_path is presumably set by the Profiler base class --
    # TODO(review): confirm against profiler.Profiler.
    self._browser_backend.device.PullFile(
        self._DEFAULT_DEVICE_DIR, self._output_path)
    # Note: command must be passed as a string to expand wildcards.
    self._browser_backend.device.RunShellCommand(
        'rm ' + os.path.join(self._DEFAULT_DEVICE_DIR, '*'),
        check_return=True, shell=True)
    print 'Traceview profiles available in ', self._output_path
    print 'Use third_party/android_tools/sdk/tools/monitor '
    print 'then use "File->Open File" to visualize them.'
    return output_files

  def _FileSize(self, file_name):
    # Treat a failed stat (e.g. file not created yet) as size 0 so the
    # WaitFor() polling loop above keeps retrying.
    try:
      return self._browser_backend.device.FileSize(file_name)
    except device_errors.CommandFailedError:
      return 0
gpl-3.0
mrquim/mrquimrepo
script.module.schism.common/lib/js2py/host/jsfunctions.py
39
2314
from js2py.base import *

# Digit value of every character parseInt accepts for radixes up to 36;
# both lower- and upper-case letters map to the same value.
_DIGITS = '0123456789abcdefghijklmnopqrstuvwxyz'
RADIX_CHARS = {}
for _value, _char in enumerate(_DIGITS):
    RADIX_CHARS[_char] = _value
    RADIX_CHARS[_char.upper()] = _value


@Js
def parseInt(string, radix):
    """JS parseInt: parse a leading integer from *string* in *radix*.

    ECMAScript semantics: optional sign, optional 0x/0X prefix (which
    forces radix 16), parsing stops at the first non-digit, and NaN is
    returned when the radix is out of range or no digit was consumed.
    """
    string = string.to_string().value.lstrip()
    sign = 1
    if string and string[0] in ['+', '-']:
        if string[0] == '-':
            sign = -1
        string = string[1:]
    r = radix.to_int32()
    strip_prefix = True
    if r:
        if r < 2 or r > 36:
            return NaN
        if r != 16:
            strip_prefix = False
    else:
        r = 10  # default radix when none (or 0) is given
    if strip_prefix:
        if len(string) >= 2 and string[:2] in ['0x', '0X']:
            string = string[2:]
            r = 16
    n = 0
    num = 0
    while n < len(string):
        cand = RADIX_CHARS.get(string[n])
        if cand is None or not cand < r:
            break  # first char that is not a valid digit in this radix
        num = cand + num * r
        n += 1
    if not n:
        return NaN  # no digits were consumed at all
    return sign * num


@Js
def parseFloat(string):
    """JS parseFloat: parse the longest numeric prefix of *string*.

    Tries successively longer prefixes with float() and remembers the
    longest one that parsed; gives up after several consecutive failures
    (a prefix that stopped parsing cannot become a number again).
    """
    string = string.to_string().value.strip()
    sign = 1
    if string and string[0] in ['+', '-']:
        if string[0] == '-':
            sign = -1
        string = string[1:]
    num = None
    length = 1
    max_len = None
    failed = 0
    while length <= len(string):
        try:
            num = float(string[:length])
            max_len = length
            failed = 0
        # Fix: was a bare "except:" which also swallowed KeyboardInterrupt
        # and SystemExit; float() only raises ValueError on bad input here.
        except ValueError:
            failed += 1
            if failed > 4:  # cant be a number anymore
                break
        length += 1
    if num is None:
        return NaN
    return sign * float(string[:max_len])


@Js
def isNaN(number):
    """JS isNaN: true when the argument converts to NaN."""
    if number.to_number().is_nan():
        return true
    return false


@Js
def isFinite(number):
    """JS isFinite: false for NaN and +/-Infinity, true otherwise."""
    num = number.to_number()
    if num.is_nan() or num.is_infinity():
        return false
    return true

#todo URI handling!
gpl-2.0
3v1n0/pywws
src/pywws/examples/scripts/data_mangling/temperature_despike.py
2
2927
#!/usr/bin/env python from __future__ import print_function __usage__ = """ Remove temperature spikes from raw data. usage: %s [options] data_dir options are: -h or --help display this help -n or --noaction show what would be done but don't modify data data_dir is the root directory of the weather data (e.g. $(HOME)/weather/data) """ % __file__ from datetime import datetime, timedelta import getopt import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), '../../..')) from pywws.constants import SECOND import pywws.storage def main(argv=None): if argv is None: argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "hn", ['help', 'noaction']) except getopt.error as msg: print('Error: %s\n' % msg, file=sys.stderr) print(__usage__.strip(), file=sys.stderr) return 1 # process options noaction = False for o, a in opts: if o == '-h' or o == '--help': print(__usage__.strip()) return 0 elif o == '-n' or o == '--noaction': noaction = True # check arguments if len(args) != 1: print('Error: 1 argument required\n', file=sys.stderr) print(__usage__.strip(), file=sys.stderr) return 2 data_dir = args[0] # date & time range of data to be changed, in UTC! 
start = datetime(2013, 10, 27, 11, 21) stop = datetime(2013, 10, 29, 18, 32) # open data store via PywwsContext context = pywws.storage.PywwsContext(data_dir,False) raw_data = context.raw_data # process the data aperture = timedelta(minutes=14, seconds=30) # make list of changes to apply after examining the data changes = [] for data in raw_data[start:stop]: if data['temp_out'] is None: continue # get temperatures at nearby times idx = data['idx'] temp_list = [] for local_data in raw_data[idx-aperture:idx+aperture]: temp = local_data['temp_out'] if temp is not None: temp_list.append(temp) if len(temp_list) < 3: continue # get median temp_list.sort() median = temp_list[len(temp_list) / 2] # remove anything too far from median if abs(data['temp_out'] - median) >= 2.5: print(str(idx), temp_list, data['temp_out']) changed = dict(data) changed['temp_out'] = None changes.append(changed) # store the changed data if changes and not noaction: for changed in changes: raw_data[changed['idx']] = changed # make sure it's saved raw_data.flush() # clear calibrated data that needs to be regenerated calib_data = context.calib_data del calib_data[changes[0]['idx']:] calib_data.flush() # done return 0 if __name__ == "__main__": sys.exit(main())
gpl-2.0
FHannes/intellij-community
python/lib/Lib/codecs.py
116
33297
""" codecs -- Python Codec Registry, API and helpers. Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """#" import __builtin__, sys ### Registry and builtin stateless codec functions try: from _codecs import * except ImportError, why: raise SystemError('Failed to load the builtin codecs: %s' % why) __all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE", "BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE", "BOM_UTF8", "BOM_UTF16", "BOM_UTF16_LE", "BOM_UTF16_BE", "BOM_UTF32", "BOM_UTF32_LE", "BOM_UTF32_BE", "strict_errors", "ignore_errors", "replace_errors", "xmlcharrefreplace_errors", "register_error", "lookup_error"] ### Constants # # Byte Order Mark (BOM = ZERO WIDTH NO-BREAK SPACE = U+FEFF) # and its possible byte string values # for UTF8/UTF16/UTF32 output and little/big endian machines # # UTF-8 BOM_UTF8 = '\xef\xbb\xbf' # UTF-16, little endian BOM_LE = BOM_UTF16_LE = '\xff\xfe' # UTF-16, big endian BOM_BE = BOM_UTF16_BE = '\xfe\xff' # UTF-32, little endian BOM_UTF32_LE = '\xff\xfe\x00\x00' # UTF-32, big endian BOM_UTF32_BE = '\x00\x00\xfe\xff' if sys.byteorder == 'little': # UTF-16, native endianness BOM = BOM_UTF16 = BOM_UTF16_LE # UTF-32, native endianness BOM_UTF32 = BOM_UTF32_LE else: # UTF-16, native endianness BOM = BOM_UTF16 = BOM_UTF16_BE # UTF-32, native endianness BOM_UTF32 = BOM_UTF32_BE # Old broken names (don't use in new code) BOM32_LE = BOM_UTF16_LE BOM32_BE = BOM_UTF16_BE BOM64_LE = BOM_UTF32_LE BOM64_BE = BOM_UTF32_BE ### Codec base classes (defining the API) class CodecInfo(tuple): def __new__(cls, encode, decode, streamreader=None, streamwriter=None, incrementalencoder=None, incrementaldecoder=None, name=None): self = tuple.__new__(cls, (encode, decode, streamreader, streamwriter)) self.name = name self.encode = encode self.decode = decode self.incrementalencoder = incrementalencoder self.incrementaldecoder = incrementaldecoder self.streamwriter = streamwriter 
self.streamreader = streamreader return self def __repr__(self): return "<%s.%s object for encoding %s at 0x%x>" % (self.__class__.__module__, self.__class__.__name__, self.name, id(self)) class Codec: """ Defines the interface for stateless encoders/decoders. The .encode()/.decode() methods may use different error handling schemes by providing the errors argument. These string values are predefined: 'strict' - raise a ValueError error (or a subclass) 'ignore' - ignore the character and continue with the next 'replace' - replace with a suitable replacement character; Python will use the official U+FFFD REPLACEMENT CHARACTER for the builtin Unicode codecs on decoding and '?' on encoding. 'xmlcharrefreplace' - Replace with the appropriate XML character reference (only for encoding). 'backslashreplace' - Replace with backslashed escape sequences (only for encoding). The set of allowed values can be extended via register_error. """ def encode(self, input, errors='strict'): """ Encodes the object input and returns a tuple (output object, length consumed). errors defines the error handling to apply. It defaults to 'strict' handling. The method may not store state in the Codec instance. Use StreamCodec for codecs which have to keep state in order to make encoding/decoding efficient. The encoder must be able to handle zero length input and return an empty object of the output object type in this situation. """ raise NotImplementedError def decode(self, input, errors='strict'): """ Decodes the object input and returns a tuple (output object, length consumed). input must be an object which provides the bf_getreadbuf buffer slot. Python strings, buffer objects and memory mapped files are examples of objects providing this slot. errors defines the error handling to apply. It defaults to 'strict' handling. The method may not store state in the Codec instance. Use StreamCodec for codecs which have to keep state in order to make encoding/decoding efficient. 
The decoder must be able to handle zero length input and return an empty object of the output object type in this situation. """ raise NotImplementedError class IncrementalEncoder(object): """ An IncrementalEncoder encodes an input in multiple steps. The input can be passed piece by piece to the encode() method. The IncrementalEncoder remembers the state of the Encoding process between calls to encode(). """ def __init__(self, errors='strict'): """ Creates an IncrementalEncoder instance. The IncrementalEncoder may use different error handling schemes by providing the errors keyword argument. See the module docstring for a list of possible values. """ self.errors = errors self.buffer = "" def encode(self, input, final=False): """ Encodes input and returns the resulting object. """ raise NotImplementedError def reset(self): """ Resets the encoder to the initial state. """ class BufferedIncrementalEncoder(IncrementalEncoder): """ This subclass of IncrementalEncoder can be used as the baseclass for an incremental encoder if the encoder must keep some of the output in a buffer between calls to encode(). """ def __init__(self, errors='strict'): IncrementalEncoder.__init__(self, errors) self.buffer = "" # unencoded input that is kept between calls to encode() def _buffer_encode(self, input, errors, final): # Overwrite this method in subclasses: It must encode input # and return an (output, length consumed) tuple raise NotImplementedError def encode(self, input, final=False): # encode input (taking the buffer into account) data = self.buffer + input (result, consumed) = self._buffer_encode(data, self.errors, final) # keep unencoded input until the next call self.buffer = data[consumed:] return result def reset(self): IncrementalEncoder.reset(self) self.buffer = "" class IncrementalDecoder(object): """ An IncrementalDecoder decodes an input in multiple steps. The input can be passed piece by piece to the decode() method. 
The IncrementalDecoder remembers the state of the decoding process between calls to decode(). """ def __init__(self, errors='strict'): """ Creates a IncrementalDecoder instance. The IncrementalDecoder may use different error handling schemes by providing the errors keyword argument. See the module docstring for a list of possible values. """ self.errors = errors def decode(self, input, final=False): """ Decodes input and returns the resulting object. """ raise NotImplementedError def reset(self): """ Resets the decoder to the initial state. """ class BufferedIncrementalDecoder(IncrementalDecoder): """ This subclass of IncrementalDecoder can be used as the baseclass for an incremental decoder if the decoder must be able to handle incomplete byte sequences. """ def __init__(self, errors='strict'): IncrementalDecoder.__init__(self, errors) self.buffer = "" # undecoded input that is kept between calls to decode() def _buffer_decode(self, input, errors, final): # Overwrite this method in subclasses: It must decode input # and return an (output, length consumed) tuple raise NotImplementedError def decode(self, input, final=False): # decode input (taking the buffer into account) data = self.buffer + input (result, consumed) = self._buffer_decode(data, self.errors, final) # keep undecoded input until the next call self.buffer = data[consumed:] return result def reset(self): IncrementalDecoder.reset(self) self.buffer = "" # # The StreamWriter and StreamReader class provide generic working # interfaces which can be used to implement new encoding submodules # very easily. See encodings/utf_8.py for an example on how this is # done. # class StreamWriter(Codec): def __init__(self, stream, errors='strict'): """ Creates a StreamWriter instance. stream must be a file-like object open for writing (binary) data. The StreamWriter may use different error handling schemes by providing the errors keyword argument. 
These parameters are predefined: 'strict' - raise a ValueError (or a subclass) 'ignore' - ignore the character and continue with the next 'replace'- replace with a suitable replacement character 'xmlcharrefreplace' - Replace with the appropriate XML character reference. 'backslashreplace' - Replace with backslashed escape sequences (only for encoding). The set of allowed parameter values can be extended via register_error. """ self.stream = stream self.errors = errors def write(self, object): """ Writes the object's contents encoded to self.stream. """ data, consumed = self.encode(object, self.errors) self.stream.write(data) def writelines(self, list): """ Writes the concatenated list of strings to the stream using .write(). """ self.write(''.join(list)) def reset(self): """ Flushes and resets the codec buffers used for keeping state. Calling this method should ensure that the data on the output is put into a clean state, that allows appending of new fresh data without having to rescan the whole stream to recover state. """ pass def __getattr__(self, name, getattr=getattr): """ Inherit all other methods from the underlying stream. """ return getattr(self.stream, name) def __enter__(self): return self def __exit__(self, type, value, tb): self.stream.close() ### class StreamReader(Codec): def __init__(self, stream, errors='strict'): """ Creates a StreamReader instance. stream must be a file-like object open for reading (binary) data. The StreamReader may use different error handling schemes by providing the errors keyword argument. These parameters are predefined: 'strict' - raise a ValueError (or a subclass) 'ignore' - ignore the character and continue with the next 'replace'- replace with a suitable replacement character; The set of allowed parameter values can be extended via register_error. 
""" self.stream = stream self.errors = errors self.bytebuffer = "" # For str->str decoding this will stay a str # For str->unicode decoding the first read will promote it to unicode self.charbuffer = "" self.linebuffer = None def decode(self, input, errors='strict'): raise NotImplementedError def read(self, size=-1, chars=-1, firstline=False): """ Decodes data from the stream self.stream and returns the resulting object. chars indicates the number of characters to read from the stream. read() will never return more than chars characters, but it might return less, if there are not enough characters available. size indicates the approximate maximum number of bytes to read from the stream for decoding purposes. The decoder can modify this setting as appropriate. The default value -1 indicates to read and decode as much as possible. size is intended to prevent having to decode huge files in one step. If firstline is true, and a UnicodeDecodeError happens after the first line terminator in the input only the first line will be returned, the rest of the input will be kept until the next call to read(). The method should use a greedy read strategy meaning that it should read as much data as is allowed within the definition of the encoding and the given size, e.g. if optional encoding endings or state markers are available on the stream, these should be read too. """ # If we have lines cached, first merge them back into characters if self.linebuffer: self.charbuffer = "".join(self.linebuffer) self.linebuffer = None # read until we get the required number of characters (if available) while True: # can the request can be satisfied from the character buffer? 
if chars < 0: if size < 0: if self.charbuffer: break elif len(self.charbuffer) >= size: break else: if len(self.charbuffer) >= chars: break # we need more data if size < 0: newdata = self.stream.read() else: newdata = self.stream.read(size) # decode bytes (those remaining from the last call included) data = self.bytebuffer + newdata try: newchars, decodedbytes = self.decode(data, self.errors) except UnicodeDecodeError, exc: if firstline: newchars, decodedbytes = self.decode(data[:exc.start], self.errors) lines = newchars.splitlines(True) if len(lines)<=1: raise else: raise # keep undecoded bytes until the next call self.bytebuffer = data[decodedbytes:] # put new characters in the character buffer self.charbuffer += newchars # there was no data available if not newdata: break if chars < 0: # Return everything we've got result = self.charbuffer self.charbuffer = "" else: # Return the first chars characters result = self.charbuffer[:chars] self.charbuffer = self.charbuffer[chars:] return result def readline(self, size=None, keepends=True): """ Read one line from the input stream and return the decoded data. size, if given, is passed as size argument to the read() method. """ # If we have lines cached from an earlier read, return # them unconditionally if self.linebuffer: line = self.linebuffer[0] del self.linebuffer[0] if len(self.linebuffer) == 1: # revert to charbuffer mode; we might need more data # next time self.charbuffer = self.linebuffer[0] self.linebuffer = None if not keepends: line = line.splitlines(False)[0] return line readsize = size or 72 line = "" # If size is given, we call read() only once while True: data = self.read(readsize, firstline=True) if data: # If we're at a "\r" read one extra character (which might # be a "\n") to get a proper line ending. If the stream is # temporarily exhausted we return the wrong line ending. 
if data.endswith("\r"): data += self.read(size=1, chars=1) line += data lines = line.splitlines(True) if lines: if len(lines) > 1: # More than one line result; the first line is a full line # to return line = lines[0] del lines[0] if len(lines) > 1: # cache the remaining lines lines[-1] += self.charbuffer self.linebuffer = lines self.charbuffer = None else: # only one remaining line, put it back into charbuffer self.charbuffer = lines[0] + self.charbuffer if not keepends: line = line.splitlines(False)[0] break line0withend = lines[0] line0withoutend = lines[0].splitlines(False)[0] if line0withend != line0withoutend: # We really have a line end # Put the rest back together and keep it until the next call self.charbuffer = "".join(lines[1:]) + self.charbuffer if keepends: line = line0withend else: line = line0withoutend break # we didn't get anything or this was our only try if not data or size is not None: if line and not keepends: line = line.splitlines(False)[0] break if readsize<8000: readsize *= 2 return line def readlines(self, sizehint=None, keepends=True): """ Read all lines available on the input stream and return them as list of lines. Line breaks are implemented using the codec's decoder method and are included in the list entries. sizehint, if given, is ignored since there is no efficient way to finding the true end-of-line. """ data = self.read() return data.splitlines(keepends) def reset(self): """ Resets the codec buffers used for keeping state. Note that no stream repositioning should take place. This method is primarily intended to be able to recover from decoding errors. """ self.bytebuffer = "" self.charbuffer = u"" self.linebuffer = None def seek(self, offset, whence=0): """ Set the input stream's current position. Resets the codec buffers used for keeping state. 
""" self.reset() self.stream.seek(offset, whence) def next(self): """ Return the next decoded line from the input stream.""" line = self.readline() if line: return line raise StopIteration def __iter__(self): return self def __getattr__(self, name, getattr=getattr): """ Inherit all other methods from the underlying stream. """ return getattr(self.stream, name) def __enter__(self): return self def __exit__(self, type, value, tb): self.stream.close() ### class StreamReaderWriter: """ StreamReaderWriter instances allow wrapping streams which work in both read and write modes. The design is such that one can use the factory functions returned by the codec.lookup() function to construct the instance. """ # Optional attributes set by the file wrappers below encoding = 'unknown' def __init__(self, stream, Reader, Writer, errors='strict'): """ Creates a StreamReaderWriter instance. stream must be a Stream-like object. Reader, Writer must be factory functions or classes providing the StreamReader, StreamWriter interface resp. Error handling is done in the same way as defined for the StreamWriter/Readers. """ self.stream = stream self.reader = Reader(stream, errors) self.writer = Writer(stream, errors) self.errors = errors def read(self, size=-1): return self.reader.read(size) def readline(self, size=None): return self.reader.readline(size) def readlines(self, sizehint=None): return self.reader.readlines(sizehint) def next(self): """ Return the next decoded line from the input stream.""" return self.reader.next() def __iter__(self): return self def write(self, data): return self.writer.write(data) def writelines(self, list): return self.writer.writelines(list) def reset(self): self.reader.reset() self.writer.reset() def __getattr__(self, name, getattr=getattr): """ Inherit all other methods from the underlying stream. 
""" return getattr(self.stream, name) # these are needed to make "with codecs.open(...)" work properly def __enter__(self): return self def __exit__(self, type, value, tb): self.stream.close() ### class StreamRecoder: """ StreamRecoder instances provide a frontend - backend view of encoding data. They use the complete set of APIs returned by the codecs.lookup() function to implement their task. Data written to the stream is first decoded into an intermediate format (which is dependent on the given codec combination) and then written to the stream using an instance of the provided Writer class. In the other direction, data is read from the stream using a Reader instance and then return encoded data to the caller. """ # Optional attributes set by the file wrappers below data_encoding = 'unknown' file_encoding = 'unknown' def __init__(self, stream, encode, decode, Reader, Writer, errors='strict'): """ Creates a StreamRecoder instance which implements a two-way conversion: encode and decode work on the frontend (the input to .read() and output of .write()) while Reader and Writer work on the backend (reading and writing to the stream). You can use these objects to do transparent direct recodings from e.g. latin-1 to utf-8 and back. stream must be a file-like object. encode, decode must adhere to the Codec interface, Reader, Writer must be factory functions or classes providing the StreamReader, StreamWriter interface resp. encode and decode are needed for the frontend translation, Reader and Writer for the backend translation. Unicode is used as intermediate encoding. Error handling is done in the same way as defined for the StreamWriter/Readers. 
""" self.stream = stream self.encode = encode self.decode = decode self.reader = Reader(stream, errors) self.writer = Writer(stream, errors) self.errors = errors def read(self, size=-1): data = self.reader.read(size) data, bytesencoded = self.encode(data, self.errors) return data def readline(self, size=None): if size is None: data = self.reader.readline() else: data = self.reader.readline(size) data, bytesencoded = self.encode(data, self.errors) return data def readlines(self, sizehint=None): data = self.reader.read() data, bytesencoded = self.encode(data, self.errors) return data.splitlines(1) def next(self): """ Return the next decoded line from the input stream.""" data = self.reader.next() data, bytesencoded = self.encode(data, self.errors) return data def __iter__(self): return self def write(self, data): data, bytesdecoded = self.decode(data, self.errors) return self.writer.write(data) def writelines(self, list): data = ''.join(list) data, bytesdecoded = self.decode(data, self.errors) return self.writer.write(data) def reset(self): self.reader.reset() self.writer.reset() def __getattr__(self, name, getattr=getattr): """ Inherit all other methods from the underlying stream. """ return getattr(self.stream, name) def __enter__(self): return self def __exit__(self, type, value, tb): self.stream.close() ### Shortcuts def open(filename, mode='rb', encoding=None, errors='strict', buffering=1): """ Open an encoded file using the given mode and return a wrapped version providing transparent encoding/decoding. Note: The wrapped version will only accept the object format defined by the codecs, i.e. Unicode objects for most builtin codecs. Output is also codec dependent and will usually be Unicode as well. Files are always opened in binary mode, even if no binary mode was specified. This is done to avoid data loss due to encodings using 8-bit values. The default file mode is 'rb' meaning to open the file in binary read mode. 
encoding specifies the encoding which is to be used for the file. errors may be given to define the error handling. It defaults to 'strict' which causes ValueErrors to be raised in case an encoding error occurs. buffering has the same meaning as for the builtin open() API. It defaults to line buffered. The returned wrapped file object provides an extra attribute .encoding which allows querying the used encoding. This attribute is only available if an encoding was specified as parameter. """ if encoding is not None and \ 'b' not in mode: # Force opening of the file in binary mode mode = mode + 'b' file = __builtin__.open(filename, mode, buffering) if encoding is None: return file info = lookup(encoding) srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors) # Add attributes to simplify introspection srw.encoding = encoding return srw def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'): """ Return a wrapped version of file which provides transparent encoding translation. Strings written to the wrapped file are interpreted according to the given data_encoding and then written to the original file as string using file_encoding. The intermediate encoding will usually be Unicode but depends on the specified codecs. Strings are read from the file using file_encoding and then passed back to the caller as string using data_encoding. If file_encoding is not given, it defaults to data_encoding. errors may be given to define the error handling. It defaults to 'strict' which causes ValueErrors to be raised in case an encoding error occurs. The returned wrapped file object provides two extra attributes .data_encoding and .file_encoding which reflect the given parameters of the same name. The attributes can be used for introspection by Python programs. 
""" if file_encoding is None: file_encoding = data_encoding data_info = lookup(data_encoding) file_info = lookup(file_encoding) sr = StreamRecoder(file, data_info.encode, data_info.decode, file_info.streamreader, file_info.streamwriter, errors) # Add attributes to simplify introspection sr.data_encoding = data_encoding sr.file_encoding = file_encoding return sr ### Helpers for codec lookup def getencoder(encoding): """ Lookup up the codec for the given encoding and return its encoder function. Raises a LookupError in case the encoding cannot be found. """ return lookup(encoding).encode def getdecoder(encoding): """ Lookup up the codec for the given encoding and return its decoder function. Raises a LookupError in case the encoding cannot be found. """ return lookup(encoding).decode def getincrementalencoder(encoding): """ Lookup up the codec for the given encoding and return its IncrementalEncoder class or factory function. Raises a LookupError in case the encoding cannot be found or the codecs doesn't provide an incremental encoder. """ encoder = lookup(encoding).incrementalencoder if encoder is None: raise LookupError(encoding) return encoder def getincrementaldecoder(encoding): """ Lookup up the codec for the given encoding and return its IncrementalDecoder class or factory function. Raises a LookupError in case the encoding cannot be found or the codecs doesn't provide an incremental decoder. """ decoder = lookup(encoding).incrementaldecoder if decoder is None: raise LookupError(encoding) return decoder def getreader(encoding): """ Lookup up the codec for the given encoding and return its StreamReader class or factory function. Raises a LookupError in case the encoding cannot be found. """ return lookup(encoding).streamreader def getwriter(encoding): """ Lookup up the codec for the given encoding and return its StreamWriter class or factory function. Raises a LookupError in case the encoding cannot be found. 
""" return lookup(encoding).streamwriter def iterencode(iterator, encoding, errors='strict', **kwargs): """ Encoding iterator. Encodes the input strings from the iterator using a IncrementalEncoder. errors and kwargs are passed through to the IncrementalEncoder constructor. """ encoder = getincrementalencoder(encoding)(errors, **kwargs) for input in iterator: output = encoder.encode(input) if output: yield output output = encoder.encode("", True) if output: yield output def iterdecode(iterator, encoding, errors='strict', **kwargs): """ Decoding iterator. Decodes the input strings from the iterator using a IncrementalDecoder. errors and kwargs are passed through to the IncrementalDecoder constructor. """ decoder = getincrementaldecoder(encoding)(errors, **kwargs) for input in iterator: output = decoder.decode(input) if output: yield output output = decoder.decode("", True) if output: yield output ### Helpers for charmap-based codecs def make_identity_dict(rng): """ make_identity_dict(rng) -> dict Return a dictionary where elements of the rng sequence are mapped to themselves. """ res = {} for i in rng: res[i]=i return res def make_encoding_map(decoding_map): """ Creates an encoding map from a decoding map. If a target mapping in the decoding map occurs multiple times, then that target is mapped to None (undefined mapping), causing an exception when encountered by the charmap codec during translation. One example where this happens is cp875.py which decodes multiple character to \u001a. 
""" m = {} for k,v in decoding_map.items(): if not v in m: m[v] = k else: m[v] = None return m ### error handlers try: strict_errors = lookup_error("strict") ignore_errors = lookup_error("ignore") replace_errors = lookup_error("replace") xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace") backslashreplace_errors = lookup_error("backslashreplace") except LookupError: # In --disable-unicode builds, these error handler are missing strict_errors = None ignore_errors = None replace_errors = None xmlcharrefreplace_errors = None backslashreplace_errors = None # Tell modulefinder that using codecs probably needs the encodings # package _false = 0 if _false: import encodings ### Tests if __name__ == '__main__': # Make stdout translate Latin-1 output into UTF-8 output sys.stdout = EncodedFile(sys.stdout, 'latin-1', 'utf-8') # Have stdin translate Latin-1 input into UTF-8 input sys.stdin = EncodedFile(sys.stdin, 'utf-8', 'latin-1')
apache-2.0
kustodian/ansible
lib/ansible/executor/stats.py
88
3295
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.utils.vars import merge_hash


class AggregateStats:

    ''' holds stats about per-host activity during playbook runs '''

    def __init__(self):
        # Each counter below maps host name -> integer count.
        self.processed = {}   # hosts that have had at least one stat bumped
        self.failures = {}
        self.ok = {}
        self.dark = {}        # "dark" == unreachable (see summarize())
        self.changed = {}
        self.skipped = {}
        self.rescued = {}
        self.ignored = {}

        # user defined stats, which can be per host or global
        self.custom = {}

    def increment(self, what, host):
        ''' helper function to bump a statistic '''

        # Mark the host as seen, then bump the named counter by one.
        # `what` must be the name of one of the dict attributes above.
        self.processed[host] = 1
        prev = (getattr(self, what)).get(host, 0)
        getattr(self, what)[host] = prev + 1

    def decrement(self, what, host):
        ''' helper function to decrement a statistic, clamping at zero '''
        _what = getattr(self, what)
        try:
            if _what[host] - 1 < 0:
                # This should never happen, but let's be safe
                raise KeyError("Don't be so negative")

            _what[host] -= 1
        except KeyError:
            # Unknown host, or the count would have gone negative: reset to 0.
            _what[host] = 0

    def summarize(self, host):
        ''' return information about a particular host '''

        return dict(
            ok=self.ok.get(host, 0),
            failures=self.failures.get(host, 0),
            unreachable=self.dark.get(host, 0),
            changed=self.changed.get(host, 0),
            skipped=self.skipped.get(host, 0),
            rescued=self.rescued.get(host, 0),
            ignored=self.ignored.get(host, 0),
        )

    def set_custom_stats(self, which, what, host=None):
        ''' allow setting of a custom stat'''

        # host=None means the stat is global; stored under the '_run' key.
        if host is None:
            host = '_run'
        if host not in self.custom:
            self.custom[host] = {which: what}
        else:
            self.custom[host][which] = what

    def update_custom_stats(self, which, what, host=None):
        ''' allow aggregation of a custom stat'''

        if host is None:
            host = '_run'
        # First value for this stat: fall back to a plain set.
        if host not in self.custom or which not in self.custom[host]:
            return self.set_custom_stats(which, what, host)

        # mismatching types
        if not isinstance(what, type(self.custom[host][which])):
            return None

        if isinstance(what, MutableMapping):
            # Dict-like stats are deep-merged rather than replaced.
            self.custom[host][which] = merge_hash(self.custom[host][which], what)
        else:
            # let overloaded + take care of other types
            self.custom[host][which] += what
gpl-3.0
timur-enikeev/ispmanager5-samba
samba.py
1
6791
#!/usr/bin/python # -*- coding: utf-8 -*- #Configuration ENABLE_FTP=True # Enable creating samba users for ftp accounts (only for ProFTPd). Disalbe this option if you use another FTP server! import logging, os, subprocess, sys, pwd, traceback, time logging.basicConfig(level=logging.INFO) # , filename='/samba_addon.log') env = os.environ def create_or_edit(): """Create or change password of samba user""" smbpasswd_cmdlist = ['smbpasswd'] + [env['PARAM_name']] + ['-s'] # smbpasswd command with parameters if subprocess.call('pdbedit -u {}'.format(env['PARAM_name']).split(), stdout=sys.stderr): # check if user exists in SMB database smbpasswd_cmdlist.append('-a') # if user doesn't exist say smbpasswd to create this user smbpasswd = subprocess.Popen(smbpasswd_cmdlist, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Launching smbpasswd smbpasswd.stdin.write(env['PARAM_passwd'] + '\n') # Entering password and confirmation smbpasswd.stdin.write(env['PARAM_passwd'] + '\n') logging.info(smbpasswd.communicate()) if smbpasswd.returncode == 0: enable_user(env['PARAM_name']) def enable_user(user): """Enable SMB user""" subprocess.call('smbpasswd -e {}'.format(user).split(), stdout=sys.stderr) def disable_user(user): """Disable SMB user""" subprocess.call('smbpasswd -d {}'.format(user).split(), stdout=sys.stderr) def suspend(): """Disable SMB accounts for suspended users""" users = env['PARAM_elids'].split(', ') for user in users: disable_user(user) def resume(): """Enable SMB accounts for enabled users""" users = env['PARAM_elids'].split(', ') for user in users: enable_user(user) def delete(): """Delete samba users""" logging.info('env: ' + str(env)) users = env['PARAM_elids'].split(', ') for user in users: subprocess.call('smbpasswd -x {}'.format(user).split(), stdout=sys.stderr) if env['PARAM_func'] == 'ftp.user.delete': del_system_user(user) def get_ftp_users(): fp = open('/etc/proftpd/proftpd.passwd') fpl = fp.readlines() ftp_accounts = 
[] for l in fpl: ftp_acc = {} l_splitted = l.split(':') if len(l_splitted) >= 7: ftp_acc['name'] = l_splitted[0] ftp_acc['uid'] = l_splitted[2] ftp_acc['gid'] = l_splitted[3] ftp_acc['homedir'] = l_splitted[5] ftp_accounts.append(ftp_acc) return ftp_accounts def get_owner_of_ftp(ftpuser): #Get username, UID and GID of owner of the FTP account. Returns first user in passwd file which is not in proftpd.passwd file try: ftp_users = get_ftp_users() ftp_account = None ftp_list = [] for ftp_user in ftp_users: if ftp_user['name'] == ftpuser: ftp_account = ftp_user break else: return 1 ftp_list = [ftp_user['name'] for ftp_user in ftp_users if ftp_user['uid'] == ftp_account['uid']] system_list = [u.pw_name for u in pwd.getpwall() if u.pw_uid == int(ftp_account['uid'])] for i, u in enumerate(system_list): if u in ftp_list: del system_list[i] return system_list[0] except: return None def user_for_ftp(): #Create system user for ftp account. Return error if user with this name but different parameters (UID, home dir) already exists. 
Returns 0 if success, returns 1 if user wasn't created but FTP user can be created, returns 2 if FTP user can't be created, returns 3 if user with same name exists try: logging.info(str(env)) ftpuser = env['PARAM_name'] if 'PARAM_elid' in env: return 0 if 'PARAM_owner' in env and env['PARAM_owner']: #If function was called by root and has "owner" parameter owner = env['PARAM_owner'] elif int(env['AUTH_LEVEL']) < 30: owner = env['AUTH_USER'] else: owner = get_owner_of_ftp(ftpuser) if not owner: return 1 if owner == ftpuser: return 1 owner_pwd = pwd.getpwnam(owner) uid = owner_pwd.pw_uid gid = owner_pwd.pw_gid homedir = os.path.normpath(owner_pwd.pw_dir + '/' + env['PARAM_home']) try: ftpuser_pwd = pwd.getpwnam(ftpuser) except: subprocess.call('useradd {} -u {} -g {} -o --shell /bin/false --home-dir={}'.format(ftpuser, uid, gid, homedir).split(), stdout=sys.stderr) else: if ftpuser_pwd.pw_uid != uid or ftpuser_pwd.pw_gid != gid: return 3 elif os.path.normpath(ftpuser_pwd.pw_dir) != homedir: subprocess.call('usermod --home {} {}'.format(homedir, ftpuser).split(), stdout=sys.stderr) except: logging.error(traceback.format_exc()) return 2 return 0 def del_system_user(ftpuser): owner = get_owner_of_ftp(ftpuser) if owner and pwd.getpwnam(owner).pw_uid == pwd.getpwnam(ftpuser).pw_uid: subprocess.call('userdel -f {}'.format(ftpuser).split()) def success(): print """<?xml version="1.0" encoding="UTF-8"?> <doc> </doc> """ def user_exists(): print """<?xml version="1.0" encoding="UTF-8"?> <doc> <error type="exists" object="user" lang="ru"> <msg>Пользователь уже существует. 
User already exists.</msg> </error> </doc>""" def other_error(): print """<?xml version="1.0" encoding="UTF-8"?> <doc> <error /> </doc>""" logging.info(str(env)) try: if env['PARAM_func'] == 'user.delete' or (env['PARAM_func'] == 'ftp.user.delete' and ENABLE_FTP): delete() success() elif env['PARAM_func'] == 'user.suspend' or (env['PARAM_func'] == 'ftp.user.suspend' and ENABLE_FTP): suspend() success() elif env['PARAM_func'] == 'user.resume' or (env['PARAM_func'] == 'ftp.user.resume' and ENABLE_FTP): logging.info(str(env)) resume() success() elif not 'PARAM_sok' in env or not env['PARAM_sok']: success() elif env['PARAM_func'] in ('user.add.finish', 'user.edit') and env['PARAM_passwd']: create_or_edit() success() elif ENABLE_FTP and env['PARAM_func'] == 'ftp.user.edit' and 'PARAM_sok' in env and env['PARAM_passwd']: r = user_for_ftp() if r == 0: create_or_edit() success() elif r == 1: success() elif r == 3: user_exists() # success() else: other_error() else: success() except: logging.error(traceback.format_exc()) success() logging.shutdown()
lgpl-2.1
pypot/scikit-learn
examples/linear_model/plot_ridge_path.py
254
1655
""" =========================================================== Plot Ridge coefficients as a function of the regularization =========================================================== Shows the effect of collinearity in the coefficients of an estimator. .. currentmodule:: sklearn.linear_model :class:`Ridge` Regression is the estimator used in this example. Each color represents a different feature of the coefficient vector, and this is displayed as a function of the regularization parameter. At the end of the path, as alpha tends toward zero and the solution tends towards the ordinary least squares, coefficients exhibit big oscillations. """ # Author: Fabian Pedregosa -- <fabian.pedregosa@inria.fr> # License: BSD 3 clause print(__doc__) import numpy as np import matplotlib.pyplot as plt from sklearn import linear_model # X is the 10x10 Hilbert matrix X = 1. / (np.arange(1, 11) + np.arange(0, 10)[:, np.newaxis]) y = np.ones(10) ############################################################################### # Compute paths n_alphas = 200 alphas = np.logspace(-10, -2, n_alphas) clf = linear_model.Ridge(fit_intercept=False) coefs = [] for a in alphas: clf.set_params(alpha=a) clf.fit(X, y) coefs.append(clf.coef_) ############################################################################### # Display results ax = plt.gca() ax.set_color_cycle(['b', 'r', 'g', 'c', 'k', 'y', 'm']) ax.plot(alphas, coefs) ax.set_xscale('log') ax.set_xlim(ax.get_xlim()[::-1]) # reverse axis plt.xlabel('alpha') plt.ylabel('weights') plt.title('Ridge coefficients as a function of the regularization') plt.axis('tight') plt.show()
bsd-3-clause
jasonseminara/OpenSourceFinal
venv/lib/python2.7/site-packages/pip/_vendor/requests/status_codes.py
926
3200
# -*- coding: utf-8 -*-
"""
Mapping of HTTP status codes to human-friendly attribute names.

Builds the module-level ``codes`` object so callers can write e.g.
``codes.ok == 200`` or ``codes.not_found == 404``.  Each numeric status
code may have several aliases; every alias (and, for non-symbol aliases,
its upper-case form) is set as an attribute on ``codes``.
"""

from .structures import LookupDict

# code -> tuple of attribute aliases.  The first alias is the canonical
# name; the rest are convenience spellings.
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('already_reported',),
    226: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('permanent_redirect',
          'resume_incomplete', 'resume',),  # These 2 to be removed in 3.0

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    # NOTE(review): 'precondition' is also an alias of 412 above; the loop
    # below runs in dict order, so which code 'precondition' ends up naming
    # is an upstream quirk — verify before relying on that alias.
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
}

codes = LookupDict(name='status_codes')

# Populate `codes` with every alias; skip the upper-case variant for
# symbol aliases (those starting with a backslash, e.g. '\\o/').
for (code, titles) in list(_codes.items()):
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith('\\'):
            setattr(codes, title.upper(), code)
mit
danknights/OTUsearch
bin/precompute.py
1
1229
# Contains functions for precomputing data structures for otusearch import numpy as np import sys from otusearch import alignmentToBinaryMatrix from pysparse.sparse import spmatrix import optparse import os def get_opts(): p = optparse.OptionParser() p.add_option("-r", "--ref_fp", type="string", \ default=None, help="Reference alignment file [required].") p.add_option("-o", "--output", type="string", \ default=None, help="Compressed output file path [default <ref_fp base name>.npz].") p.add_option("--suppress_random_order", action="store_true", \ help="Suppress randomization of order of sequences.") p.add_option("--verbose", action="store_true", \ help="Print all output.") opts, args = p.parse_args(sys.argv) return opts, args def check_opts(opts): if opts.ref_fp is None: raise ValueError('\n\nPlease include an input reference alignment.') if __name__ == '__main__': opts, args = get_opts() check_opts(opts) if opts.output is None: opts.output = os.path.splitext(os.path.split(opts.ref_fp)[1])[0] + '.npz' refmat = alignmentToBinaryMatrix(opts.ref_fp, transpose=False, random_order = not opts.suppress_random_order, save_to_fp=opts.output)
gpl-3.0
eunchong/build
third_party/twisted_10_2/twisted/words/test/test_jabberclient.py
52
13953
# Copyright (c) 2001-2009 Twisted Matrix Laboratories. # See LICENSE for details. """ Tests for L{twisted.words.protocols.jabber.client} """ from twisted.internet import defer from twisted.python.hashlib import sha1 from twisted.trial import unittest from twisted.words.protocols.jabber import client, error, jid, xmlstream from twisted.words.protocols.jabber.sasl import SASLInitiatingInitializer from twisted.words.xish import utility IQ_AUTH_GET = '/iq[@type="get"]/query[@xmlns="jabber:iq:auth"]' IQ_AUTH_SET = '/iq[@type="set"]/query[@xmlns="jabber:iq:auth"]' NS_BIND = 'urn:ietf:params:xml:ns:xmpp-bind' IQ_BIND_SET = '/iq[@type="set"]/bind[@xmlns="%s"]' % NS_BIND NS_SESSION = 'urn:ietf:params:xml:ns:xmpp-session' IQ_SESSION_SET = '/iq[@type="set"]/session[@xmlns="%s"]' % NS_SESSION class CheckVersionInitializerTest(unittest.TestCase): def setUp(self): a = xmlstream.Authenticator() xs = xmlstream.XmlStream(a) self.init = client.CheckVersionInitializer(xs) def testSupported(self): """ Test supported version number 1.0 """ self.init.xmlstream.version = (1, 0) self.init.initialize() def testNotSupported(self): """ Test unsupported version number 0.0, and check exception. """ self.init.xmlstream.version = (0, 0) exc = self.assertRaises(error.StreamError, self.init.initialize) self.assertEquals('unsupported-version', exc.condition) class InitiatingInitializerHarness(object): """ Testing harness for interacting with XML stream initializers. This sets up an L{utility.XmlPipe} to create a communication channel between the initializer and the stubbed receiving entity. It features a sink and source side that both act similarly to a real L{xmlstream.XmlStream}. The sink is augmented with an authenticator to which initializers can be added. The harness also provides some utility methods to work with event observers and deferreds. 
""" def setUp(self): self.output = [] self.pipe = utility.XmlPipe() self.xmlstream = self.pipe.sink self.authenticator = xmlstream.ConnectAuthenticator('example.org') self.xmlstream.authenticator = self.authenticator def waitFor(self, event, handler): """ Observe an output event, returning a deferred. The returned deferred will be fired when the given event has been observed on the source end of the L{XmlPipe} tied to the protocol under test. The handler is added as the first callback. @param event: The event to be observed. See L{utility.EventDispatcher.addOnetimeObserver}. @param handler: The handler to be called with the observed event object. @rtype: L{defer.Deferred}. """ d = defer.Deferred() d.addCallback(handler) self.pipe.source.addOnetimeObserver(event, d.callback) return d class IQAuthInitializerTest(InitiatingInitializerHarness, unittest.TestCase): """ Tests for L{client.IQAuthInitializer}. """ def setUp(self): super(IQAuthInitializerTest, self).setUp() self.init = client.IQAuthInitializer(self.xmlstream) self.authenticator.jid = jid.JID('user@example.com/resource') self.authenticator.password = 'secret' def testPlainText(self): """ Test plain-text authentication. Act as a server supporting plain-text authentication and expect the C{password} field to be filled with the password. Then act as if authentication succeeds. """ def onAuthGet(iq): """ Called when the initializer sent a query for authentication methods. The response informs the client that plain-text authentication is supported. """ # Create server response response = xmlstream.toResponse(iq, 'result') response.addElement(('jabber:iq:auth', 'query')) response.query.addElement('username') response.query.addElement('password') response.query.addElement('resource') # Set up an observer for the next request we expect. 
d = self.waitFor(IQ_AUTH_SET, onAuthSet) # Send server response self.pipe.source.send(response) return d def onAuthSet(iq): """ Called when the initializer sent the authentication request. The server checks the credentials and responds with an empty result signalling success. """ self.assertEquals('user', unicode(iq.query.username)) self.assertEquals('secret', unicode(iq.query.password)) self.assertEquals('resource', unicode(iq.query.resource)) # Send server response response = xmlstream.toResponse(iq, 'result') self.pipe.source.send(response) # Set up an observer for the request for authentication fields d1 = self.waitFor(IQ_AUTH_GET, onAuthGet) # Start the initializer d2 = self.init.initialize() return defer.gatherResults([d1, d2]) def testDigest(self): """ Test digest authentication. Act as a server supporting digest authentication and expect the C{digest} field to be filled with a sha1 digest of the concatenated stream session identifier and password. Then act as if authentication succeeds. """ def onAuthGet(iq): """ Called when the initializer sent a query for authentication methods. The response informs the client that digest authentication is supported. """ # Create server response response = xmlstream.toResponse(iq, 'result') response.addElement(('jabber:iq:auth', 'query')) response.query.addElement('username') response.query.addElement('digest') response.query.addElement('resource') # Set up an observer for the next request we expect. d = self.waitFor(IQ_AUTH_SET, onAuthSet) # Send server response self.pipe.source.send(response) return d def onAuthSet(iq): """ Called when the initializer sent the authentication request. The server checks the credentials and responds with an empty result signalling success. 
""" self.assertEquals('user', unicode(iq.query.username)) self.assertEquals(sha1('12345secret').hexdigest(), unicode(iq.query.digest).encode('utf-8')) self.assertEquals('resource', unicode(iq.query.resource)) # Send server response response = xmlstream.toResponse(iq, 'result') self.pipe.source.send(response) # Digest authentication relies on the stream session identifier. Set it. self.xmlstream.sid = u'12345' # Set up an observer for the request for authentication fields d1 = self.waitFor(IQ_AUTH_GET, onAuthGet) # Start the initializer d2 = self.init.initialize() return defer.gatherResults([d1, d2]) def testFailRequestFields(self): """ Test initializer failure of request for fields for authentication. """ def onAuthGet(iq): """ Called when the initializer sent a query for authentication methods. The server responds that the client is not authorized to authenticate. """ response = error.StanzaError('not-authorized').toResponse(iq) self.pipe.source.send(response) # Set up an observer for the request for authentication fields d1 = self.waitFor(IQ_AUTH_GET, onAuthGet) # Start the initializer d2 = self.init.initialize() # The initialized should fail with a stanza error. self.assertFailure(d2, error.StanzaError) return defer.gatherResults([d1, d2]) def testFailAuth(self): """ Test initializer failure to authenticate. """ def onAuthGet(iq): """ Called when the initializer sent a query for authentication methods. The response informs the client that plain-text authentication is supported. """ # Send server response response = xmlstream.toResponse(iq, 'result') response.addElement(('jabber:iq:auth', 'query')) response.query.addElement('username') response.query.addElement('password') response.query.addElement('resource') # Set up an observer for the next request we expect. d = self.waitFor(IQ_AUTH_SET, onAuthSet) # Send server response self.pipe.source.send(response) return d def onAuthSet(iq): """ Called when the initializer sent the authentication request. 
The server checks the credentials and responds with a not-authorized stanza error. """ response = error.StanzaError('not-authorized').toResponse(iq) self.pipe.source.send(response) # Set up an observer for the request for authentication fields d1 = self.waitFor(IQ_AUTH_GET, onAuthGet) # Start the initializer d2 = self.init.initialize() # The initializer should fail with a stanza error. self.assertFailure(d2, error.StanzaError) return defer.gatherResults([d1, d2]) class BindInitializerTest(InitiatingInitializerHarness, unittest.TestCase): """ Tests for L{client.BindInitializer}. """ def setUp(self): super(BindInitializerTest, self).setUp() self.init = client.BindInitializer(self.xmlstream) self.authenticator.jid = jid.JID('user@example.com/resource') def testBasic(self): """ Set up a stream, and act as if resource binding succeeds. """ def onBind(iq): response = xmlstream.toResponse(iq, 'result') response.addElement((NS_BIND, 'bind')) response.bind.addElement('jid', content='user@example.com/other resource') self.pipe.source.send(response) def cb(result): self.assertEquals(jid.JID('user@example.com/other resource'), self.authenticator.jid) d1 = self.waitFor(IQ_BIND_SET, onBind) d2 = self.init.start() d2.addCallback(cb) return defer.gatherResults([d1, d2]) def testFailure(self): """ Set up a stream, and act as if resource binding fails. """ def onBind(iq): response = error.StanzaError('conflict').toResponse(iq) self.pipe.source.send(response) d1 = self.waitFor(IQ_BIND_SET, onBind) d2 = self.init.start() self.assertFailure(d2, error.StanzaError) return defer.gatherResults([d1, d2]) class SessionInitializerTest(InitiatingInitializerHarness, unittest.TestCase): """ Tests for L{client.SessionInitializer}. """ def setUp(self): super(SessionInitializerTest, self).setUp() self.init = client.SessionInitializer(self.xmlstream) def testSuccess(self): """ Set up a stream, and act as if session establishment succeeds. 
""" def onSession(iq): response = xmlstream.toResponse(iq, 'result') self.pipe.source.send(response) d1 = self.waitFor(IQ_SESSION_SET, onSession) d2 = self.init.start() return defer.gatherResults([d1, d2]) def testFailure(self): """ Set up a stream, and act as if session establishment fails. """ def onSession(iq): response = error.StanzaError('forbidden').toResponse(iq) self.pipe.source.send(response) d1 = self.waitFor(IQ_SESSION_SET, onSession) d2 = self.init.start() self.assertFailure(d2, error.StanzaError) return defer.gatherResults([d1, d2]) class XMPPAuthenticatorTest(unittest.TestCase): """ Test for both XMPPAuthenticator and XMPPClientFactory. """ def testBasic(self): """ Test basic operations. Setup an XMPPClientFactory, which sets up an XMPPAuthenticator, and let it produce a protocol instance. Then inspect the instance variables of the authenticator and XML stream objects. """ self.client_jid = jid.JID('user@example.com/resource') # Get an XmlStream instance. Note that it gets initialized with the # XMPPAuthenticator (that has its associateWithXmlStream called) that # is in turn initialized with the arguments to the factory. xs = client.XMPPClientFactory(self.client_jid, 'secret').buildProtocol(None) # test authenticator's instance variables self.assertEqual('example.com', xs.authenticator.otherHost) self.assertEqual(self.client_jid, xs.authenticator.jid) self.assertEqual('secret', xs.authenticator.password) # test list of initializers version, tls, sasl, bind, session = xs.initializers self.assert_(isinstance(tls, xmlstream.TLSInitiatingInitializer)) self.assert_(isinstance(sasl, SASLInitiatingInitializer)) self.assert_(isinstance(bind, client.BindInitializer)) self.assert_(isinstance(session, client.SessionInitializer)) self.assertFalse(tls.required) self.assertTrue(sasl.required) self.assertFalse(bind.required) self.assertFalse(session.required)
bsd-3-clause
sivaprakashniet/push_pull
p2p/lib/python2.7/site-packages/html5lib/treewalkers/dom.py
505
1421
from __future__ import absolute_import, division, unicode_literals from xml.dom import Node from . import _base class TreeWalker(_base.NonRecursiveTreeWalker): def getNodeDetails(self, node): if node.nodeType == Node.DOCUMENT_TYPE_NODE: return _base.DOCTYPE, node.name, node.publicId, node.systemId elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE): return _base.TEXT, node.nodeValue elif node.nodeType == Node.ELEMENT_NODE: attrs = {} for attr in list(node.attributes.keys()): attr = node.getAttributeNode(attr) if attr.namespaceURI: attrs[(attr.namespaceURI, attr.localName)] = attr.value else: attrs[(None, attr.name)] = attr.value return (_base.ELEMENT, node.namespaceURI, node.nodeName, attrs, node.hasChildNodes()) elif node.nodeType == Node.COMMENT_NODE: return _base.COMMENT, node.nodeValue elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE): return (_base.DOCUMENT,) else: return _base.UNKNOWN, node.nodeType def getFirstChild(self, node): return node.firstChild def getNextSibling(self, node): return node.nextSibling def getParentNode(self, node): return node.parentNode
bsd-3-clause
Lussarn/sleipnir-velocity-system
python/sleipnir-base/src/sleipnir.py
1
25784
import PySide2 from PySide2 import QtCore, QtGui from PySide2.QtWidgets import QApplication, QMainWindow, QMessageBox import time from SleipnirWindow import SleipnirWindow import CameraServer from Video import Video from CamerasData import CamerasData import CameraServer from Configuration import Configuration from database.DB import DB from Announcements import Announcements, Announcement import database.announcement_dao as announcement_dao from Frame import Frame from Sound import Sound from function_timer import timer import sys import logging import logger logger = logging.getLogger(__name__) class WindowMain(QMainWindow): videos = {} # type: dict[int, Video] def __init__(self): try: self.configuration = Configuration("sleipnir.yml") except IOError as e: logger.error("Unable to open configuration file: " + str(e)) exit(1) self.__db = DB(self.configuration.get_or_throw('save_path')) self.__max_dive_angle = float(self.configuration.get('max_dive_angle', 10.0)) logger.info("Max dive angle is set at " + str(self.__max_dive_angle) + "°") # Data for the cameras self.__flight = 1 self.cameras_data = CamerasData(self.__db, self.__flight) # none / "Left" / "Right" self.run_direction = None self.run_frame_number_cam1 = None self.run_frame_number_cam2 = None # time to abort run self.run_abort_timestamp = 0 # Currently shooting self.__shooting = False # Frame number from shooting cameras self.shooting_frame_number_cam1 = 0 self.shooting_frame_number_cam2 = 0 # Waiting for cameras to stop self.stop_camera_wait = False # Distance self.distance = 100 self.run_tell_speed_timestamp = 0 self.run_tell_speed = 0 # Aligning cameras 1/2 self.aligning_cam1 = False self.aligning_cam2 = False # Sound effects self.__sound = Sound() QMainWindow.__init__(self) self.ui = SleipnirWindow() self.ui.setupUi(self) self.setWindowTitle("Sleipnir Velocity - Go Fast!") self.ui.label_video1_online.setText("Cam1: Offline") self.ui.label_video2_online.setText("Cam2: Offline") self.announcements = 
Announcements() self.model_announcements = QtGui.QStandardItemModel() self.ui.listView_anouncements.setModel(self.model_announcements) self.__update_announcements_gui() self.ui.verticalSlider_groundlevel.sliderMoved.connect(self.__on_groundlevel_changed) for radio_buttons_flight in self.ui.radio_buttons_flights: radio_buttons_flight.clicked.connect(self.__flight_number_clicked) # Init the videos self.videos[0] = Video( self.__db, "cam1", 1, self.__max_dive_angle, self.ui.label_video1, self.ui.pushbutton_video1_playforward, self.ui.pushbutton_video1_playbackward, self.ui.pushbutton_video1_pause, self.ui.pushbutton_video1_find, self.ui.pushbutton_video1_forwardstep, self.ui.pushbutton_video1_backstep, self.ui.slider_video1, self.ui.pushbutton_video1_copy, self.ui.label_time_video1) self.videos[1] = Video( self.__db, "cam2", 1, self.__max_dive_angle, self.ui.label_video2, self.ui.pushbutton_video2_playforward, self.ui.pushbutton_video2_playbackward, self.ui.pushbutton_video2_pause, self.ui.pushbutton_video2_find, self.ui.pushbutton_video2_forwardstep, self.ui.pushbutton_video2_backstep, self.ui.slider_video2, self.ui.pushbutton_video2_copy, self.ui.label_time_video2) self.videos[0].set_sibling_video(self.videos[1]) self.videos[1].set_sibling_video(self.videos[0]) # Load flight number 1 self.load_flight(1) self.ui.label_speed.setText("") # Start / Stop connects self.ui.pushbutton_start.clicked.connect(self.startCameras) self.ui.pushbutton_stop.clicked.connect(self.stopCameras) # Align cameras connects self.ui.pushButton_video1_align.clicked.connect(self.align_cam1) self.ui.pushButton_video2_align.clicked.connect(self.align_cam2) # distance connect self.ui.lineEdit_distance.setText(str(self.distance)) self.ui.lineEdit_distance.textChanged.connect(self.__on_distance_changed) self.ui.listView_anouncements.clicked.connect(self.__on_announcement_changed) self.ui.pushButton_remove_announcement.clicked.connect(self.__on_remove_announcement) # Show GUI self.show() 
self.raise_() # Start camera server CameraServer.start_server(self.__db) # Run Gui self.timer = QtCore.QTimer(self) self.timer.timeout.connect(self.__timerGui) self.timer.start(20) def load_flight(self, flight): self.__flight = flight self.cameras_data = CamerasData(self.__db, self.__flight) self.ui.radio_buttons_flights[self.__flight - 1].setChecked(True) self.cameras_data.load(self.__db, self.__flight) self.__load_announcements(self.__flight) self.__update_announcements_gui() # FIXME: Clean this shit up to some kind of API self.videos[0].cameras_data = self.cameras_data self.videos[1].cameras_data = self.cameras_data self.videos[0].set_flight(self.__flight) self.videos[1].set_flight(self.__flight) self.videos[0].slider.setMinimum(1) self.videos[0].slider.setMaximum(0 if not self.cameras_data.get_last_frame("cam1") else (self.cameras_data.get_last_frame("cam1").get_position() or 0)) self.videos[1].slider.setMinimum(1) self.videos[1].slider.setMaximum(0 if not self.cameras_data.get_last_frame("cam2") else (self.cameras_data.get_last_frame("cam2").get_position() or 0)) self.videos[0].setStartTimestamp(self.cameras_data.get_start_timestamp()) self.videos[1].setStartTimestamp(self.cameras_data.get_start_timestamp()) self.videos[0].comparison_image_cv = None self.videos[1].comparison_image_cv = None self.videos[0].view_frame(1) self.videos[1].view_frame(1) def __flight_number_clicked(self): for i in range(0,20): if self.ui.radio_buttons_flights[i].isChecked(): break self.__flight = i + 1 self.load_flight(i + 1) def __on_distance_changed(self, value): try: value = int(value) except: value = 100 self.distance = value def __on_groundlevel_changed(self, value): # Forward ground level to videos self.videos[0].groundlevel = value self.videos[1].groundlevel = value self.videos[0].view_frame(self.videos[0].get_current_frame_number()) self.videos[1].view_frame(self.videos[1].get_current_frame_number()) def __on_announcement_changed(self, event): 
self.videos[0].view_frame(self.announcements.get_announcement_by_index(event.row()).get_cam1_position()) self.videos[1].view_frame(self.announcements.get_announcement_by_index(event.row()).get_cam2_position()) def __on_remove_announcement(self, event): index = self.ui.listView_anouncements.currentIndex().row() if index == -1: QMessageBox.information(self, 'Sleipnir Information', 'Select announcement to delete') return ret = QMessageBox.question(self, 'Sleipnir Information', "Confirm removing announcement", QMessageBox.Ok | QMessageBox.Cancel) if ret == QMessageBox.Cancel: return self.announcements.remove_announcement_by_index(index) self.__update_announcements_gui() @timer("Time to run gui", logging.INFO, None, average=1000) def __timerGui(self): online = CameraServer.is_online("cam1") and CameraServer.is_online("cam2") if CameraServer.is_online("cam1"): self.ui.label_video1_online.setText("Cam1: Online") if not self.aligning_cam2 and not self.__shooting: self.ui.pushButton_video1_align.setEnabled(True) if CameraServer.is_online("cam2"): self.ui.label_video2_online.setText("Cam2: Online") if not self.aligning_cam1 and not self.__shooting: self.ui.pushButton_video2_align.setEnabled(True) if not CameraServer.is_online("cam1"): self.ui.label_video1_online.setText("Cam1: Offline") self.ui.pushButton_video1_align.setEnabled(False) if not CameraServer.is_online("cam2"): self.ui.label_video2_online.setText("Cam2: Offline") self.ui.pushButton_video2_align.setEnabled(False) if (self.__shooting and not online): # Camera lost? 
self.__shooting = False CameraServer.stop_shooting() if not online: self.ui.pushbutton_start.setEnabled(False) self.ui.pushbutton_stop.setEnabled(False) elif self.__shooting: self.ui.pushbutton_start.setEnabled(False) self.ui.pushbutton_stop.setEnabled(True) else: self.ui.pushbutton_start.setEnabled(True) self.ui.pushbutton_stop.setEnabled(False) if (self.stop_camera_wait): if self.aligning_cam1: logger.info("Stop aligning camera 1") self.ui.pushButton_video1_align.setEnabled(False) if not CameraServer.is_shooting(): self.aligning_cam1 = False self.ui.pushButton_video1_align.setEnabled(True) self.stop_camera_wait = False self.videos[0].set_shooting(False) self.ui.pushButton_video1_align.setText("Align Camera") elif self.aligning_cam2: logger.info("Stop aligning camera 2") self.ui.pushButton_video2_align.setEnabled(False) if not CameraServer.is_shooting(): self.aligning_cam2 = False self.ui.pushButton_video2_align.setEnabled(True) self.stop_camera_wait = False self.videos[1].set_shooting(False) self.ui.pushButton_video2_align.setText("Align Camera") else: self.ui.pushbutton_stop.setText("Waiting...") self.ui.pushbutton_stop.setEnabled(False) if not CameraServer.is_shooting(): self.stop_camera_wait = False self.__shooting = False self.ui.pushbutton_stop.setText("Stop cameras") self.ui.pushbutton_start.setEnabled(True) self.videos[0].view_frame(1) self.videos[1].view_frame(1) self.videos[0].set_shooting(False) self.videos[1].set_shooting(False) self.timer.start(20) self.enable_all_gui_elements(True) # Update the video view if CameraServer.is_shooting(): if self.aligning_cam1: ''' Align cam 1 ''' frame_number = self.cameras_data.get_last_frame("cam1").get_position() if frame_number > 0: self.videos[0].view_frame(frame_number) elif self.aligning_cam2: ''' Align cam 2 ''' frame_number = self.cameras_data.get_last_frame("cam2").get_position() if frame_number > 0: self.videos[1].view_frame(frame_number) else: ''' Motion Track ''' if 
self.ui.checkBox_motion_track.isChecked(): for i in range(2): cam = 'cam' + str(i+1) video = self.videos[i] video.setStartTimestamp(self.cameras_data.get_start_timestamp()) if not video.is_analyzer_running(): last_frame = self.cameras_data.get_last_frame(cam) if last_frame is not None: frame_to_motion_check = self.get_frame_allow_lag(cam, last_frame.get_position()) if frame_to_motion_check is not None: motion = video.view_frame_motion_track( frame_to_motion_check.get_position(), self.ui.checkBox_live.isChecked()) if motion is not None: self.check_run(cam, motion) else: logger.warning("Frame to motioncheck is None on camera " +cam) else: logger.warning("Last frame is None on camera " +cam) else: ''' No motion track ''' for i in range(2): cam = 'cam' + str(i+1) self.videos[i].setStartTimestamp(self.cameras_data.get_start_timestamp()) if self.ui.checkBox_live.isChecked(): last_frame = self.cameras_data.get_last_frame(cam) if last_frame is not None: self.videos[i].view_frame(last_frame.get_position()) if self.run_direction is not None and self.run_abort_timestamp < int(round(time.time() * 1000)): # Abort run logger.info("Aborting run due to timeout") self.run_direction = None self.__sound.play_error() if self.run_tell_speed != 0 and self.run_tell_speed_timestamp < int(round(time.time() * 1000)): self.__sound.play_number(self.run_tell_speed) self.run_tell_speed = 0 if self.cameras_data \ and not self.__shooting \ and (self.cameras_data.get_frame_count('cam1') or 0) >= 90 \ and (self.cameras_data.get_frame_count('cam2') or 0) >= 90 \ and not self.aligning_cam1 and not self.aligning_cam2: # Calculate the speed cam1_frame_number = self.videos[0].get_current_frame_number() cam2_frame_number = self.videos[1].get_current_frame_number() self.set_speed(cam1_frame_number, cam2_frame_number) __last_served_frame = { 'cam1': 0, 'cam2': 0 } def get_frame_allow_lag(self, cam: str, position: int) -> Frame: # Served frame can be larger than position if we stop and start camera, detect 
this # and reset served frame if position == 0: return None if self.__last_served_frame[cam] > position: self.__last_served_frame[cam] = 0 if self.__last_served_frame[cam] < position - 30: # lag detected, jump self.__last_served_frame[cam] = position logger.warning("Lag detected when motion tracking " + cam + ": " + str(position)) else: # Clamp __last_served_frame to position self.__last_served_frame[cam] = min(self.__last_served_frame[cam] + 1, position) return self.cameras_data.get_frame(cam, self.__last_served_frame[cam]) def set_speed(self, cam1_frame_number, cam2_frame_number): """ Set speed from camera frame numbers """ cam1_timestamp = self.cameras_data.get_frame('cam1', cam1_frame_number).get_timestamp() cam2_timestamp = self.cameras_data.get_frame('cam2', cam2_frame_number).get_timestamp() milliseconds = abs((cam1_timestamp or 0)- (cam2_timestamp or 0)) kilometer = float(self.distance) / 1000 hours = float(milliseconds) / 1000 / 60 / 60 if (hours > 0): kmh = kilometer / hours else: kmh = 0 if (kmh > 999 or kmh < 10): speed_text = "Out of range" time_text = "Out of range" else: speed_text = '{1:.{0}f} km/h'.format(1, kmh) time_text = '{1:.{0}f} sec'.format(3, float(milliseconds) / 1000) self.ui.label_speed.setText(speed_text) self.ui.label_time.setText(time_text) return int(kmh) def align_cam1(self): """ Align camera one """ if (self.aligning_cam1): self.stop_camera_wait = True CameraServer.stop_shooting() else: self.__flight = 1 self.aligning_cam1 = True self.videos[0].set_shooting(True) self.cameras_data = CamerasData(self.__db, self.__flight) self.videos[0].cameras_data = self.cameras_data CameraServer.start_shooting(self.cameras_data, 1) self.enable_all_gui_elements(False) self.ui.pushButton_video1_align.setText("Stop") def align_cam2(self): """ Align camera two """ if (self.aligning_cam2): self.stop_camera_wait = True CameraServer.stop_shooting() else: self.__flight = 1 self.aligning_cam2 = True self.videos[1].set_shooting(True) self.cameras_data = 
CamerasData(self.__db, self.__flight) self.videos[1].cameras_data = self.cameras_data CameraServer.start_shooting(self.cameras_data, 1) self.enable_all_gui_elements(False) self.ui.pushButton_video2_align.setText("Stop") def startCameras(self): logger.info("Starting Cameras") if not CameraServer.is_ready_to_shoot(): return False for i in range(0,20): if self.ui.radio_buttons_flights[i].isChecked(): break self.__flight = i + 1 self.enable_all_gui_elements(False) self.announcements.clear() self.__update_announcements_gui() self.shooting_frame_number_cam1 = 1 self.shooting_frame_number_cam2 = 1 self.timer.start(10) self.ui.label_speed.setText("") self.stop_camera_wait = False self.__shooting = True self.videos[0].reset() self.videos[1].reset() self.videos[0].set_shooting(True) self.videos[1].set_shooting(True) self.cameras_data = CamerasData(self.__db, self.__flight) self.videos[0].cameras_data = self.cameras_data self.videos[1].cameras_data = self.cameras_data CameraServer.ServerData.flight = self.__flight CameraServer.start_shooting(self.cameras_data, self.__flight) def stopCameras(self): logger.info("Stoping Cameras") self.stop_camera_wait = True CameraServer.stop_shooting() self.__save_announcements() def enable_all_gui_elements(self, enabled): self.ui.pushbutton_video1_playforward.setEnabled(enabled) self.ui.pushbutton_video1_playbackward.setEnabled(enabled) self.ui.pushbutton_video1_pause.setEnabled(enabled) self.ui.pushbutton_video1_find.setEnabled(enabled) self.ui.pushbutton_video1_forwardstep.setEnabled(enabled) self.ui.pushbutton_video1_backstep.setEnabled(enabled) self.ui.pushButton_video1_align.setEnabled(enabled) self.ui.slider_video1.setEnabled(enabled) self.ui.pushbutton_video1_copy.setEnabled(enabled) self.ui.pushbutton_video2_playforward.setEnabled(enabled) self.ui.pushbutton_video2_playbackward.setEnabled(enabled) self.ui.pushbutton_video2_pause.setEnabled(enabled) self.ui.pushbutton_video2_find.setEnabled(enabled) 
self.ui.pushbutton_video2_forwardstep.setEnabled(enabled) self.ui.pushbutton_video2_backstep.setEnabled(enabled) self.ui.pushButton_video2_align.setEnabled(enabled) self.ui.slider_video2.setEnabled(enabled) self.ui.pushbutton_video2_copy.setEnabled(enabled) self.ui.pushbutton_stop.setEnabled(enabled) self.ui.pushbutton_start.setEnabled(enabled) self.ui.checkBox_motion_track.setEnabled(enabled) self.ui.listView_anouncements.setEnabled(enabled) self.ui.verticalSlider_groundlevel.setEnabled(enabled) self.ui.pushButton_remove_announcement.setEnabled(enabled) for i in range(0, len(self.ui.radio_buttons_flights)): self.ui.radio_buttons_flights[i].setEnabled(enabled) def check_run(self, cam, motion): """ Checking the motion tracking """ if cam == "cam1" and self.run_direction == None and motion["direction"] == -1: # Camera 1 triggered LEFT run without being on a timed run. Reset the camera self.videos[0].currently_tracking = 0 logger.info("Camera 1 triggered the wrong way for start of run, reseting") if cam == "cam2" and self.run_direction == None and motion["direction"] == 1: # Camera 1 triggered RIGHT run without being on a timed run. Reset the camera self.videos[1].currently_tracking = 0 logger.info("Camera 2 triggered the wrong way for start of run, reseting") if cam == "cam1" and self.run_direction == 'LEFT' and motion["direction"] == 1: # Camera 1 triggered LEFT run without being on a timed run. Reset the camera self.videos[0].currently_tracking = 0 logger.info("Camera 1 triggered the wrong way in run, reseting") if cam == "cam2" and self.run_direction == 'RIGHT' and motion["direction"] == -1: # Camera 1 triggered RIGHT run without being on a timed run. 
Reset the camera self.videos[1].currently_tracking = 0 logger.info("Camera 2 triggered the wrong way in run, reseting") # Check right run if cam == "cam1" and self.run_direction == None and motion["direction"] == 1: # Starting run from cam 1 self.run_frame_number_cam1 = motion["frame_number"] self.run_frame_number_cam2 = 0 self.run_direction = "RIGHT" # Max 6 second run self.run_abort_timestamp = int(round(time.time() * 1000)) + 6000 self.__sound.play_gate_1() logger.info("Initiating time run from cam 1 -->") if cam == "cam2" and self.run_direction == "RIGHT" and motion["direction"] == 1: # Ending run on Cam 2 self.run_frame_number_cam2 = motion["frame_number"] self.run_direction = None kmh = self.set_speed(self.run_frame_number_cam1, self.run_frame_number_cam2) logger.info("Timed run completed on cam 2 -->") self.__sound.play_gate_2() if (kmh < 500): self.run_tell_speed_timestamp = int(round(time.time() * 1000)) + 1000 self.run_tell_speed = kmh logger.info("Adding announcement --> " + str(kmh) + " km/h") self.add_announcement(self.run_frame_number_cam1, self.run_frame_number_cam2, kmh, 1) else: logger.warning("Do not add announcement over 500 km/h") # Check left run if cam == "cam2" and self.run_direction == None and motion["direction"] == -1: # Starting run from cam 2 self.run_frame_number_cam2 = motion["frame_number"] self.run_frame_number_cam1 = 0 self.run_direction = "LEFT" # Max 6 second run self.run_abort_timestamp = int(round(time.time() * 1000)) + 6000 self.__sound.play_gate_1() logger.info("Initiating time run from cam 2 <--") if cam == "cam1" and self.run_direction == "LEFT" and motion["direction"] == -1: # Ending run on Cam 1 self.run_frame_number_cam1 = motion["frame_number"] self.run_direction = None kmh = self.set_speed(self.run_frame_number_cam1, self.run_frame_number_cam2) self.__sound.play_gate_2() logger.info("Timed run completed on cam 2 <--") if (kmh < 500): self.run_tell_speed_timestamp = int(round(time.time() * 1000)) + 1000 
self.run_tell_speed = kmh self.add_announcement(self.run_frame_number_cam1, self.run_frame_number_cam2, kmh, -1) logger.info("Adding announcement <-- " + str(kmh) + " km/h") else: logger.warning("Do not add announcement over 500 km/h") def add_announcement(self, cam1_frame_number, cam2_frame_number, speed, direction): cam1_timestamp = self.cameras_data.get_frame("cam1", cam1_frame_number).get_timestamp() cam2_timestamp = self.cameras_data.get_frame("cam2", cam2_frame_number).get_timestamp() milliseconds = abs(cam1_timestamp - cam2_timestamp) self.announcements.append(Announcement( cam1_frame_number, cam2_frame_number, milliseconds, speed, direction )) self.__update_announcements_gui() def __update_announcements_gui(self): self.model_announcements.clear() max_left = 0 max_right = 0 for announcement in self.announcements.get_announcements(): out = ("--> " if (announcement.get_direction() == 1) else "<-- ") + \ "%.3f" % (float(announcement.get_duration()) / 1000) + "s " + \ str(announcement.get_speed()) + " km/h " self.model_announcements.appendRow(QtGui.QStandardItem(out)) if announcement.get_direction() == 1: max_right = max(max_right, announcement.get_speed()) if announcement.get_direction() == -1: max_left = max(max_left, announcement.get_speed()) average = (max_left + max_right) / 2 if max_left > 0 and max_right > 0 else 0 self.ui.label_average.setText("Average: " + "%.1f" % average + " km/h") def __save_announcements(self): logger.info("Saving announcements") announcement_dao.store(self.__db, self.__flight, self.announcements) def __load_announcements(self, flight): logger.info("Loading announcements") self.announcements = announcement_dao.fetch(self.__db, flight) def __del__(self): logger.debug("Mainwindow destructor called") self.__db.stop() if __name__ == '__main__': import sys app = QApplication(sys.argv) try: window = WindowMain() ret = app.exec_() del window sys.exit(ret) except Exception: import traceback var = traceback.format_exc() msg_box = 
QMessageBox() msg_box.setIcon(QMessageBox.Critical) msg_box.setWindowTitle("Sleipnir message") msg_box.setText("UNRECOVERABLE ERROR!\n\n" + var) msg_box.exec_()
gpl-3.0
quheng/scikit-learn
sklearn/utils/testing.py
71
26178
"""Testing utilities.""" # Copyright (c) 2011, 2012 # Authors: Pietro Berkes, # Andreas Muller # Mathieu Blondel # Olivier Grisel # Arnaud Joly # Denis Engemann # License: BSD 3 clause import os import inspect import pkgutil import warnings import sys import re import platform import scipy as sp import scipy.io from functools import wraps try: # Python 2 from urllib2 import urlopen from urllib2 import HTTPError except ImportError: # Python 3+ from urllib.request import urlopen from urllib.error import HTTPError import tempfile import shutil import os.path as op import atexit # WindowsError only exist on Windows try: WindowsError except NameError: WindowsError = None import sklearn from sklearn.base import BaseEstimator from sklearn.externals import joblib # Conveniently import all assertions in one place. from nose.tools import assert_equal from nose.tools import assert_not_equal from nose.tools import assert_true from nose.tools import assert_false from nose.tools import assert_raises from nose.tools import raises from nose import SkipTest from nose import with_setup from numpy.testing import assert_almost_equal from numpy.testing import assert_array_equal from numpy.testing import assert_array_almost_equal from numpy.testing import assert_array_less import numpy as np from sklearn.base import (ClassifierMixin, RegressorMixin, TransformerMixin, ClusterMixin) __all__ = ["assert_equal", "assert_not_equal", "assert_raises", "assert_raises_regexp", "raises", "with_setup", "assert_true", "assert_false", "assert_almost_equal", "assert_array_equal", "assert_array_almost_equal", "assert_array_less", "assert_less", "assert_less_equal", "assert_greater", "assert_greater_equal"] try: from nose.tools import assert_in, assert_not_in except ImportError: # Nose < 1.0.0 def assert_in(x, container): assert_true(x in container, msg="%r in %r" % (x, container)) def assert_not_in(x, container): assert_false(x in container, msg="%r in %r" % (x, container)) try: from nose.tools import 
assert_raises_regex except ImportError: # for Python 2 def assert_raises_regex(expected_exception, expected_regexp, callable_obj=None, *args, **kwargs): """Helper function to check for message patterns in exceptions""" not_raised = False try: callable_obj(*args, **kwargs) not_raised = True except expected_exception as e: error_message = str(e) if not re.compile(expected_regexp).search(error_message): raise AssertionError("Error message should match pattern " "%r. %r does not." % (expected_regexp, error_message)) if not_raised: raise AssertionError("%s not raised by %s" % (expected_exception.__name__, callable_obj.__name__)) # assert_raises_regexp is deprecated in Python 3.4 in favor of # assert_raises_regex but lets keep the bacward compat in scikit-learn with # the old name for now assert_raises_regexp = assert_raises_regex def _assert_less(a, b, msg=None): message = "%r is not lower than %r" % (a, b) if msg is not None: message += ": " + msg assert a < b, message def _assert_greater(a, b, msg=None): message = "%r is not greater than %r" % (a, b) if msg is not None: message += ": " + msg assert a > b, message def assert_less_equal(a, b, msg=None): message = "%r is not lower than or equal to %r" % (a, b) if msg is not None: message += ": " + msg assert a <= b, message def assert_greater_equal(a, b, msg=None): message = "%r is not greater than or equal to %r" % (a, b) if msg is not None: message += ": " + msg assert a >= b, message def assert_warns(warning_class, func, *args, **kw): """Test that a certain warning occurs. Parameters ---------- warning_class : the warning class The class to test for, e.g. UserWarning. func : callable Calable object to trigger warnings. *args : the positional arguments to `func`. 
**kw : the keyword arguments to `func` Returns ------- result : the return value of `func` """ # very important to avoid uncontrolled state propagation clean_warning_registry() with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. warnings.simplefilter("always") # Trigger a warning. result = func(*args, **kw) if hasattr(np, 'VisibleDeprecationWarning'): # Filter out numpy-specific warnings in numpy >= 1.9 w = [e for e in w if e.category is not np.VisibleDeprecationWarning] # Verify some things if not len(w) > 0: raise AssertionError("No warning raised when calling %s" % func.__name__) found = any(warning.category is warning_class for warning in w) if not found: raise AssertionError("%s did not give warning: %s( is %s)" % (func.__name__, warning_class, w)) return result def assert_warns_message(warning_class, message, func, *args, **kw): # very important to avoid uncontrolled state propagation """Test that a certain warning occurs and with a certain message. Parameters ---------- warning_class : the warning class The class to test for, e.g. UserWarning. message : str | callable The entire message or a substring to test for. If callable, it takes a string as argument and will trigger an assertion error if it returns `False`. func : callable Calable object to trigger warnings. *args : the positional arguments to `func`. **kw : the keyword arguments to `func`. Returns ------- result : the return value of `func` """ clean_warning_registry() with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. warnings.simplefilter("always") if hasattr(np, 'VisibleDeprecationWarning'): # Let's not catch the numpy internal DeprecationWarnings warnings.simplefilter('ignore', np.VisibleDeprecationWarning) # Trigger a warning. 
result = func(*args, **kw) # Verify some things if not len(w) > 0: raise AssertionError("No warning raised when calling %s" % func.__name__) found = [issubclass(warning.category, warning_class) for warning in w] if not any(found): raise AssertionError("No warning raised for %s with class " "%s" % (func.__name__, warning_class)) message_found = False # Checks the message of all warnings belong to warning_class for index in [i for i, x in enumerate(found) if x]: # substring will match, the entire message with typo won't msg = w[index].message # For Python 3 compatibility msg = str(msg.args[0] if hasattr(msg, 'args') else msg) if callable(message): # add support for certain tests check_in_message = message else: check_in_message = lambda msg: message in msg if check_in_message(msg): message_found = True break if not message_found: raise AssertionError("Did not receive the message you expected " "('%s') for <%s>, got: '%s'" % (message, func.__name__, msg)) return result # To remove when we support numpy 1.7 def assert_no_warnings(func, *args, **kw): # XXX: once we may depend on python >= 2.6, this can be replaced by the # warnings module context manager. # very important to avoid uncontrolled state propagation clean_warning_registry() with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') result = func(*args, **kw) if hasattr(np, 'VisibleDeprecationWarning'): # Filter out numpy-specific warnings in numpy >= 1.9 w = [e for e in w if e.category is not np.VisibleDeprecationWarning] if len(w) > 0: raise AssertionError("Got warnings when calling %s: %s" % (func.__name__, w)) return result def ignore_warnings(obj=None): """ Context manager and decorator to ignore warnings Note. Using this (in both variants) will clear all warnings from all python modules loaded. In case you need to test cross-module-warning-logging this is not your tool of choice. Examples -------- >>> with ignore_warnings(): ... warnings.warn('buhuhuhu') >>> def nasty_warn(): ... 
warnings.warn('buhuhuhu') ... print(42) >>> ignore_warnings(nasty_warn)() 42 """ if callable(obj): return _ignore_warnings(obj) else: return _IgnoreWarnings() def _ignore_warnings(fn): """Decorator to catch and hide warnings without visual nesting""" @wraps(fn) def wrapper(*args, **kwargs): # very important to avoid uncontrolled state propagation clean_warning_registry() with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') return fn(*args, **kwargs) w[:] = [] return wrapper class _IgnoreWarnings(object): """Improved and simplified Python warnings context manager Copied from Python 2.7.5 and modified as required. """ def __init__(self): """ Parameters ========== category : warning class The category to filter. Defaults to Warning. If None, all categories will be muted. """ self._record = True self._module = sys.modules['warnings'] self._entered = False self.log = [] def __repr__(self): args = [] if self._record: args.append("record=True") if self._module is not sys.modules['warnings']: args.append("module=%r" % self._module) name = type(self).__name__ return "%s(%s)" % (name, ", ".join(args)) def __enter__(self): clean_warning_registry() # be safe and not propagate state + chaos warnings.simplefilter('always') if self._entered: raise RuntimeError("Cannot enter %r twice" % self) self._entered = True self._filters = self._module.filters self._module.filters = self._filters[:] self._showwarning = self._module.showwarning if self._record: self.log = [] def showwarning(*args, **kwargs): self.log.append(warnings.WarningMessage(*args, **kwargs)) self._module.showwarning = showwarning return self.log else: return None def __exit__(self, *exc_info): if not self._entered: raise RuntimeError("Cannot exit %r without entering first" % self) self._module.filters = self._filters self._module.showwarning = self._showwarning self.log[:] = [] clean_warning_registry() # be safe and not propagate state + chaos try: from nose.tools import assert_less except 
ImportError: assert_less = _assert_less try: from nose.tools import assert_greater except ImportError: assert_greater = _assert_greater def _assert_allclose(actual, desired, rtol=1e-7, atol=0, err_msg='', verbose=True): actual, desired = np.asanyarray(actual), np.asanyarray(desired) if np.allclose(actual, desired, rtol=rtol, atol=atol): return msg = ('Array not equal to tolerance rtol=%g, atol=%g: ' 'actual %s, desired %s') % (rtol, atol, actual, desired) raise AssertionError(msg) if hasattr(np.testing, 'assert_allclose'): assert_allclose = np.testing.assert_allclose else: assert_allclose = _assert_allclose def assert_raise_message(exceptions, message, function, *args, **kwargs): """Helper function to test error messages in exceptions Parameters ---------- exceptions : exception or tuple of exception Name of the estimator func : callable Calable object to raise error *args : the positional arguments to `func`. **kw : the keyword arguments to `func` """ try: function(*args, **kwargs) except exceptions as e: error_message = str(e) if message not in error_message: raise AssertionError("Error message does not include the expected" " string: %r. Observed error message: %r" % (message, error_message)) else: # concatenate exception names if isinstance(exceptions, tuple): names = " or ".join(e.__name__ for e in exceptions) else: names = exceptions.__name__ raise AssertionError("%s not raised by %s" % (names, function.__name__)) def fake_mldata(columns_dict, dataname, matfile, ordering=None): """Create a fake mldata data set. Parameters ---------- columns_dict : dict, keys=str, values=ndarray Contains data as columns_dict[column_name] = array of data. dataname : string Name of data set. matfile : string or file object The file name string or the file-like object of the output file. ordering : list, default None List of column_names, determines the ordering in the data set. 
Notes ----- This function transposes all arrays, while fetch_mldata only transposes 'data', keep that into account in the tests. """ datasets = dict(columns_dict) # transpose all variables for name in datasets: datasets[name] = datasets[name].T if ordering is None: ordering = sorted(list(datasets.keys())) # NOTE: setting up this array is tricky, because of the way Matlab # re-packages 1D arrays datasets['mldata_descr_ordering'] = sp.empty((1, len(ordering)), dtype='object') for i, name in enumerate(ordering): datasets['mldata_descr_ordering'][0, i] = name scipy.io.savemat(matfile, datasets, oned_as='column') class mock_mldata_urlopen(object): def __init__(self, mock_datasets): """Object that mocks the urlopen function to fake requests to mldata. `mock_datasets` is a dictionary of {dataset_name: data_dict}, or {dataset_name: (data_dict, ordering). `data_dict` itself is a dictionary of {column_name: data_array}, and `ordering` is a list of column_names to determine the ordering in the data set (see `fake_mldata` for details). When requesting a dataset with a name that is in mock_datasets, this object creates a fake dataset in a StringIO object and returns it. Otherwise, it raises an HTTPError. 
""" self.mock_datasets = mock_datasets def __call__(self, urlname): dataset_name = urlname.split('/')[-1] if dataset_name in self.mock_datasets: resource_name = '_' + dataset_name from io import BytesIO matfile = BytesIO() dataset = self.mock_datasets[dataset_name] ordering = None if isinstance(dataset, tuple): dataset, ordering = dataset fake_mldata(dataset, resource_name, matfile, ordering) matfile.seek(0) return matfile else: raise HTTPError(urlname, 404, dataset_name + " is not available", [], None) def install_mldata_mock(mock_datasets): # Lazy import to avoid mutually recursive imports from sklearn import datasets datasets.mldata.urlopen = mock_mldata_urlopen(mock_datasets) def uninstall_mldata_mock(): # Lazy import to avoid mutually recursive imports from sklearn import datasets datasets.mldata.urlopen = urlopen # Meta estimators need another estimator to be instantiated. META_ESTIMATORS = ["OneVsOneClassifier", "OutputCodeClassifier", "OneVsRestClassifier", "RFE", "RFECV", "BaseEnsemble"] # estimators that there is no way to default-construct sensibly OTHER = ["Pipeline", "FeatureUnion", "GridSearchCV", "RandomizedSearchCV"] # some trange ones DONT_TEST = ['SparseCoder', 'EllipticEnvelope', 'DictVectorizer', 'LabelBinarizer', 'LabelEncoder', 'MultiLabelBinarizer', 'TfidfTransformer', 'TfidfVectorizer', 'IsotonicRegression', 'OneHotEncoder', 'RandomTreesEmbedding', 'FeatureHasher', 'DummyClassifier', 'DummyRegressor', 'TruncatedSVD', 'PolynomialFeatures', 'GaussianRandomProjectionHash', 'HashingVectorizer', 'CheckingClassifier', 'PatchExtractor', 'CountVectorizer', # GradientBoosting base estimators, maybe should # exclude them in another way 'ZeroEstimator', 'ScaledLogOddsEstimator', 'QuantileEstimator', 'MeanEstimator', 'LogOddsEstimator', 'PriorProbabilityEstimator', '_SigmoidCalibration', 'VotingClassifier'] def all_estimators(include_meta_estimators=False, include_other=False, type_filter=None, include_dont_test=False): """Get a list of all estimators 
from sklearn. This function crawls the module and gets all classes that inherit from BaseEstimator. Classes that are defined in test-modules are not included. By default meta_estimators such as GridSearchCV are also not included. Parameters ---------- include_meta_estimators : boolean, default=False Whether to include meta-estimators that can be constructed using an estimator as their first argument. These are currently BaseEnsemble, OneVsOneClassifier, OutputCodeClassifier, OneVsRestClassifier, RFE, RFECV. include_other : boolean, default=False Wether to include meta-estimators that are somehow special and can not be default-constructed sensibly. These are currently Pipeline, FeatureUnion and GridSearchCV include_dont_test : boolean, default=False Whether to include "special" label estimator or test processors. type_filter : string, list of string, or None, default=None Which kind of estimators should be returned. If None, no filter is applied and all estimators are returned. Possible values are 'classifier', 'regressor', 'cluster' and 'transformer' to get estimators only of these specific types, or a list of these to get the estimators that fit at least one of the types. Returns ------- estimators : list of tuples List of (name, class), where ``name`` is the class name as string and ``class`` is the actuall type of the class. """ def is_abstract(c): if not(hasattr(c, '__abstractmethods__')): return False if not len(c.__abstractmethods__): return False return True all_classes = [] # get parent folder path = sklearn.__path__ for importer, modname, ispkg in pkgutil.walk_packages( path=path, prefix='sklearn.', onerror=lambda x: None): if ".tests." 
in modname: continue module = __import__(modname, fromlist="dummy") classes = inspect.getmembers(module, inspect.isclass) all_classes.extend(classes) all_classes = set(all_classes) estimators = [c for c in all_classes if (issubclass(c[1], BaseEstimator) and c[0] != 'BaseEstimator')] # get rid of abstract base classes estimators = [c for c in estimators if not is_abstract(c[1])] if not include_dont_test: estimators = [c for c in estimators if not c[0] in DONT_TEST] if not include_other: estimators = [c for c in estimators if not c[0] in OTHER] # possibly get rid of meta estimators if not include_meta_estimators: estimators = [c for c in estimators if not c[0] in META_ESTIMATORS] if type_filter is not None: if not isinstance(type_filter, list): type_filter = [type_filter] else: type_filter = list(type_filter) # copy filtered_estimators = [] filters = {'classifier': ClassifierMixin, 'regressor': RegressorMixin, 'transformer': TransformerMixin, 'cluster': ClusterMixin} for name, mixin in filters.items(): if name in type_filter: type_filter.remove(name) filtered_estimators.extend([est for est in estimators if issubclass(est[1], mixin)]) estimators = filtered_estimators if type_filter: raise ValueError("Parameter type_filter must be 'classifier', " "'regressor', 'transformer', 'cluster' or None, got" " %s." % repr(type_filter)) # drop duplicates, sort for reproducibility return sorted(set(estimators)) def set_random_state(estimator, random_state=0): if "random_state" in estimator.get_params().keys(): estimator.set_params(random_state=random_state) def if_matplotlib(func): """Test decorator that skips test if matplotlib not installed. 
""" @wraps(func) def run_test(*args, **kwargs): try: import matplotlib matplotlib.use('Agg', warn=False) # this fails if no $DISPLAY specified import matplotlib.pyplot as plt plt.figure() except ImportError: raise SkipTest('Matplotlib not available.') else: return func(*args, **kwargs) return run_test def if_not_mac_os(versions=('10.7', '10.8', '10.9'), message='Multi-process bug in Mac OS X >= 10.7 ' '(see issue #636)'): """Test decorator that skips test if OS is Mac OS X and its major version is one of ``versions``. """ warnings.warn("if_not_mac_os is deprecated in 0.17 and will be removed" " in 0.19: use the safer and more generic" " if_safe_multiprocessing_with_blas instead", DeprecationWarning) mac_version, _, _ = platform.mac_ver() skip = '.'.join(mac_version.split('.')[:2]) in versions def decorator(func): if skip: @wraps(func) def func(*args, **kwargs): raise SkipTest(message) return func return decorator def if_safe_multiprocessing_with_blas(func): """Decorator for tests involving both BLAS calls and multiprocessing Under Python < 3.4 and POSIX (e.g. Linux or OSX), using multiprocessing in conjunction with some implementation of BLAS (or other libraries that manage an internal posix thread pool) can cause a crash or a freeze of the Python process. Under Python 3.4 and later, joblib uses the forkserver mode of multiprocessing which does not trigger this problem. In practice all known packaged distributions (from Linux distros or Anaconda) of BLAS under Linux seems to be safe. So we this problem seems to only impact OSX users. This wrapper makes it possible to skip tests that can possibly cause this crash under OSX with. 
""" @wraps(func) def run_test(*args, **kwargs): if sys.platform == 'darwin' and sys.version_info[:2] < (3, 4): raise SkipTest( "Possible multi-process bug with some BLAS under Python < 3.4") return func(*args, **kwargs) return run_test def clean_warning_registry(): """Safe way to reset warnings """ warnings.resetwarnings() reg = "__warningregistry__" for mod_name, mod in list(sys.modules.items()): if 'six.moves' in mod_name: continue if hasattr(mod, reg): getattr(mod, reg).clear() def check_skip_network(): if int(os.environ.get('SKLEARN_SKIP_NETWORK_TESTS', 0)): raise SkipTest("Text tutorial requires large dataset download") def check_skip_travis(): """Skip test if being run on Travis.""" if os.environ.get('TRAVIS') == "true": raise SkipTest("This test needs to be skipped on Travis") def _delete_folder(folder_path, warn=False): """Utility function to cleanup a temporary folder if still existing. Copy from joblib.pool (for independance)""" try: if os.path.exists(folder_path): # This can fail under windows, # but will succeed when called by atexit shutil.rmtree(folder_path) except WindowsError: if warn: warnings.warn("Could not delete temporary folder %s" % folder_path) class TempMemmap(object): def __init__(self, data, mmap_mode='r'): self.temp_folder = tempfile.mkdtemp(prefix='sklearn_testing_') self.mmap_mode = mmap_mode self.data = data def __enter__(self): fpath = op.join(self.temp_folder, 'data.pkl') joblib.dump(self.data, fpath) data_read_only = joblib.load(fpath, mmap_mode=self.mmap_mode) atexit.register(lambda: _delete_folder(self.temp_folder, warn=True)) return data_read_only def __exit__(self, exc_type, exc_val, exc_tb): _delete_folder(self.temp_folder) with_network = with_setup(check_skip_network) with_travis = with_setup(check_skip_travis)
bsd-3-clause
geometrybase/gensim
gensim/parsing/porter.py
86
10907
#!/usr/bin/env python """Porter Stemming Algorithm This is the Porter stemming algorithm, ported to Python from the version coded up in ANSI C by the author. It may be be regarded as canonical, in that it follows the algorithm presented in Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14, no. 3, pp 130-137, only differing from it at the points maked --DEPARTURE-- below. See also http://www.tartarus.org/~martin/PorterStemmer The algorithm as described in the paper could be exactly replicated by adjusting the points of DEPARTURE, but this is barely necessary, because (a) the points of DEPARTURE are definitely improvements, and (b) no encoding of the Porter stemmer I have seen is anything like as exact as this version, even with the points of DEPARTURE! Vivake Gupta (v@nano.com) Release 1: January 2001 Further adjustments by Santiago Bruno (bananabruno@gmail.com) to allow word input not restricted to one word per line, leading to: Release 2: July 2008 Optimizations and cleanup of the code by Lars Buitinck, July 2012. """ from six.moves import xrange class PorterStemmer(object): def __init__(self): """The main part of the stemming algorithm starts here. b is a buffer holding a word to be stemmed. The letters are in b[0], b[1] ... ending at b[k]. k is readjusted downwards as the stemming progresses. Note that only lower case sequences are stemmed. Forcing to lower case should be done before stem(...) is called. """ self.b = "" # buffer for word to be stemmed self.k = 0 self.j = 0 # j is a general offset into the string def _cons(self, i): """True <=> b[i] is a consonant.""" ch = self.b[i] if ch in "aeiou": return False if ch == 'y': return i == 0 or not self._cons(i - 1) return True def _m(self): """Returns the number of consonant sequences between 0 and j. If c is a consonant sequence and v a vowel sequence, and <..> indicates arbitrary presence, <c><v> gives 0 <c>vc<v> gives 1 <c>vcvc<v> gives 2 <c>vcvcvc<v> gives 3 .... 
""" i = 0 while True: if i > self.j: return 0 if not self._cons(i): break i += 1 i += 1 n = 0 while True: while True: if i > self.j: return n if self._cons(i): break i += 1 i += 1 n += 1 while 1: if i > self.j: return n if not self._cons(i): break i += 1 i += 1 def _vowelinstem(self): """True <=> 0,...j contains a vowel""" return not all(self._cons(i) for i in xrange(self.j + 1)) def _doublec(self, j): """True <=> j,(j-1) contain a double consonant.""" return j > 0 and self.b[j] == self.b[j-1] and self._cons(j) def _cvc(self, i): """True <=> i-2,i-1,i has the form consonant - vowel - consonant and also if the second c is not w,x or y. This is used when trying to restore an e at the end of a short word, e.g. cav(e), lov(e), hop(e), crim(e), but snow, box, tray. """ if i < 2 or not self._cons(i) or self._cons(i-1) or not self._cons(i-2): return False return self.b[i] not in "wxy" def _ends(self, s): """True <=> 0,...k ends with the string s.""" if s[-1] != self.b[self.k]: # tiny speed-up return 0 length = len(s) if length > (self.k + 1): return 0 if self.b[self.k-length+1:self.k+1] != s: return 0 self.j = self.k - length return 1 def _setto(self, s): """Set (j+1),...k to the characters in the string s, adjusting k.""" self.b = self.b[:self.j+1] + s self.k = len(self.b) - 1 def _r(self, s): if self._m() > 0: self._setto(s) def _step1ab(self): """Get rid of plurals and -ed or -ing. 
E.g., caresses -> caress ponies -> poni ties -> ti caress -> caress cats -> cat feed -> feed agreed -> agree disabled -> disable matting -> mat mating -> mate meeting -> meet milling -> mill messing -> mess meetings -> meet """ if self.b[self.k] == 's': if self._ends("sses"): self.k -= 2 elif self._ends("ies"): self._setto("i") elif self.b[self.k - 1] != 's': self.k -= 1 if self._ends("eed"): if self._m() > 0: self.k -= 1 elif (self._ends("ed") or self._ends("ing")) and self._vowelinstem(): self.k = self.j if self._ends("at"): self._setto("ate") elif self._ends("bl"): self._setto("ble") elif self._ends("iz"): self._setto("ize") elif self._doublec(self.k): if self.b[self.k - 1] not in "lsz": self.k -= 1 elif self._m() == 1 and self._cvc(self.k): self._setto("e") def _step1c(self): """Turn terminal y to i when there is another vowel in the stem.""" if self._ends("y") and self._vowelinstem(): self.b = self.b[:self.k] + 'i' def _step2(self): """Map double suffices to single ones. So, -ization ( = -ize plus -ation) maps to -ize etc. Note that the string before the suffix must give _m() > 0. 
""" ch = self.b[self.k - 1] if ch == 'a': if self._ends("ational"): self._r("ate") elif self._ends("tional"): self._r("tion") elif ch == 'c': if self._ends("enci"): self._r("ence") elif self._ends("anci"): self._r("ance") elif ch == 'e': if self._ends("izer"): self._r("ize") elif ch == 'l': if self._ends("bli"): self._r("ble") # --DEPARTURE-- # To match the published algorithm, replace this phrase with # if self._ends("abli"): self._r("able") elif self._ends("alli"): self._r("al") elif self._ends("entli"): self._r("ent") elif self._ends("eli"): self._r("e") elif self._ends("ousli"): self._r("ous") elif ch == 'o': if self._ends("ization"): self._r("ize") elif self._ends("ation"): self._r("ate") elif self._ends("ator"): self._r("ate") elif ch == 's': if self._ends("alism"): self._r("al") elif self._ends("iveness"): self._r("ive") elif self._ends("fulness"): self._r("ful") elif self._ends("ousness"): self._r("ous") elif ch == 't': if self._ends("aliti"): self._r("al") elif self._ends("iviti"): self._r("ive") elif self._ends("biliti"): self._r("ble") elif ch == 'g': # --DEPARTURE-- if self._ends("logi"): self._r("log") # To match the published algorithm, delete this phrase def _step3(self): """Deal with -ic-, -full, -ness etc. 
Similar strategy to _step2.""" ch = self.b[self.k] if ch == 'e': if self._ends("icate"): self._r("ic") elif self._ends("ative"): self._r("") elif self._ends("alize"): self._r("al") elif ch == 'i': if self._ends("iciti"): self._r("ic") elif ch == 'l': if self._ends("ical"): self._r("ic") elif self._ends("ful"): self._r("") elif ch == 's': if self._ends("ness"): self._r("") def _step4(self): """_step4() takes off -ant, -ence etc., in context <c>vcvc<v>.""" ch = self.b[self.k - 1] if ch == 'a': if not self._ends("al"): return elif ch == 'c': if not self._ends("ance") and not self._ends("ence"): return elif ch == 'e': if not self._ends("er"): return elif ch == 'i': if not self._ends("ic"): return elif ch == 'l': if not self._ends("able") and not self._ends("ible"): return elif ch == 'n': if self._ends("ant"): pass elif self._ends("ement"): pass elif self._ends("ment"): pass elif self._ends("ent"): pass else: return elif ch == 'o': if self._ends("ion") and self.b[self.j] in "st": pass elif self._ends("ou"): pass # takes care of -ous else: return elif ch == 's': if not self._ends("ism"): return elif ch == 't': if not self._ends("ate") and not self._ends("iti"): return elif ch == 'u': if not self._ends("ous"): return elif ch == 'v': if not self._ends("ive"): return elif ch == 'z': if not self._ends("ize"): return else: return if self._m() > 1: self.k = self.j def _step5(self): """Remove a final -e if _m() > 1, and change -ll to -l if m() > 1. """ k = self.j = self.k if self.b[k] == 'e': a = self._m() if a > 1 or (a == 1 and not self._cvc(k - 1)): self.k -= 1 if self.b[self.k] == 'l' and self._doublec(self.k) and self._m() > 1: self.k -= 1 def stem(self, w): """Stem the word w, return the stemmed form.""" w = w.lower() k = len(w) - 1 if k <= 1: return w # --DEPARTURE-- # With this line, strings of length 1 or 2 don't go through the # stemming process, although no mention is made of this in the # published algorithm. Remove the line to match the published # algorithm. 
self.b = w self.k = k self._step1ab() self._step1c() self._step2() self._step3() self._step4() self._step5() return self.b[:self.k+1] def stem_sentence(self, txt): return " ".join(map(self.stem, txt.split())) def stem_documents(self, docs): return map(self.stem_sentence, docs) if __name__ == '__main__': import sys p = PorterStemmer() for f in sys.argv[1:]: with open(f) as infile: for line in infile: print(p.stem_sentence(line))
gpl-3.0
carthage-college/django-djforms
djforms/polisci/mun/forms.py
2
4001
#) -*- coding: utf-8 -*- from django import forms from djforms.core.models import STATE_CHOICES from djforms.core.models import BINARY_CHOICES from djforms.polisci.mun import COUNTRIES from djtools.fields.localflavor import USPhoneNumberField from localflavor.us.forms import USZipCodeField DELEGATIONS = ( ('1', '1'), ('2', '2'), ('3', '3'), ('4', '4'), ('5', '5'), ) class AttenderForm(forms.Form): """ A form to collect registration data for the Model United Nations """ school_name = forms.CharField( max_length=100, label="School name" ) first_name = forms.CharField( max_length=128, label="Faculty advisor first name" ) last_name = forms.CharField( max_length=128 ) address1 = forms.CharField( max_length=128, label = "Address", required=True ) address2 = forms.CharField( max_length=128, label = "", required=False ) city = forms.CharField( max_length=128, required=True ) state = forms.CharField( widget=forms.Select(choices=STATE_CHOICES), required=True ) postal_code = USZipCodeField(label="Zip Code") office = forms.CharField(max_length=100) phone = USPhoneNumberField( help_text="Format: XXX-XXX-XXXX" ) email = forms.EmailField() number_of_del = forms.TypedChoiceField( choices=DELEGATIONS, label="Number of delegations" ) number_of_stu = forms.CharField( max_length=3, label="Number of students" ) comments = forms.CharField( label="Questions/Comments", help_text=""" Feel free to list alternate countries in the space above (include your choice and delegation number) """, widget=forms.Textarea, required=False ) missle_crisis = forms.TypedChoiceField( label=""" Do you want to be entered into the random draw for participation in the United States Senate: Border Crisis simulation? 
""", choices=BINARY_CHOICES, widget=forms.RadioSelect ) class CountryForm(forms.Form): # delegation 1 d1c1 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d1c2 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d1c3 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d1c4 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d1c5 = forms.TypedChoiceField(choices=COUNTRIES, required=False) # delegation 2 d2c1 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d2c2 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d2c3 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d2c4 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d2c5 = forms.TypedChoiceField(choices=COUNTRIES, required=False) # delegation 3 d3c1 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d3c2 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d3c3 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d3c4 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d3c5 = forms.TypedChoiceField(choices=COUNTRIES, required=False) # delegation 4 d4c1 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d4c2 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d4c3 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d4c4 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d4c5 = forms.TypedChoiceField(choices=COUNTRIES, required=False) # delegation 5 d5c1 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d5c2 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d5c3 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d5c4 = forms.TypedChoiceField(choices=COUNTRIES, required=False) d5c5 = forms.TypedChoiceField(choices=COUNTRIES, required=False)
unlicense
lumig242/Hue-Integration-with-CDAP
desktop/core/ext-py/lxml-3.3.6/doc/s5/ep2008/atom.py
50
18452
# ET is 80's! #import elementtree as etree # LXML is 00's! from lxml import etree from lxml.etree import tostring #from dateutil.parser import parse as parse_date from datetime import datetime import uuid import cgi import copy __all__ = [ 'ATOM', 'atom_ns', 'Element', 'tostring'] ATOM_NAMESPACE = atom_ns = 'http://www.w3.org/2005/Atom' app_ns = 'http://www.w3.org/2007/app' xhtml_ns = 'http://www.w3.org/1999/xhtml' nsmap = {'': atom_ns, 'app': app_ns} _rel_alternate_xpath = etree.XPath( "./atom:link[not(@rel) or @rel = 'alternate']", namespaces=dict(atom=atom_ns)) _rel_other_xpath = etree.XPath( "./atom:link[@rel = $rel]", namespaces=dict(atom=atom_ns)) class AtomLookup(etree.CustomElementClassLookup): _elements = {} _app_elements = {} def lookup(self, node_type, document, namespace, name): if node_type == 'element': if namespace == atom_ns: return self._elements.get(name, AtomElement) elif namespace == app_ns: return self._app_elements.get(name, APPElement) ## FIXME: is this default good? return AtomElement # Otherwise normal lookup return None atom_parser = etree.XMLParser() atom_parser.setElementClassLookup(AtomLookup()) def parse(input): return etree.parse(input, atom_parser) def ATOM(atom): """ Parse an Atom document """ return etree.XML(atom, atom_parser) def Element(tag, *args, **kw): """ Create an Atom element. Adds the Atom namespace if no namespace is given. """ if '{' not in tag: # No namespace means the atom namespace tag = '{%s}%s' % (atom_ns, tag) return atom_parser.makeelement(tag, *args, **kw) def _strftime(d): """ Format a date the way Atom likes it (RFC3339?) 
""" return d.strftime('%Y-%m-%dT%H:%M:%SZ%z') ## try: ## from lxml import builder ## except ImportError: ## pass ## else: ## E = builder.ElementMaker(parser=atom_parser, ## typemap={datetime: lambda e, v: _strftime(v)}) from lxml import builder E = builder.ElementMaker(#parser=atom_parser, typemap={datetime: lambda e, v: _strftime(v)}) __all__.append('E') class NoDefault: pass class _LiveList(list): """ This list calls on_add or on_remove whenever the list is modified. """ on_add = on_remove = None name = None def __init__(self, *args, **kw): on_add = on_remove = name = None if 'on_add' in kw: on_add = kw.pop('on_add') if 'on_remove' in kw: on_remove = kw.pop('on_remove') if 'name' in kw: name = kw.pop('name') list.__init__(self, *args, **kw) self.on_add = on_add self.on_remove = on_remove self.name = name def _make_list(self, obj): if not isinstance(obj, (list, tuple)): obj = list(obj) return obj def _do_add(self, items): if self.on_add is not None: for item in items: self.on_add(self, item) def _do_remove(self, items): if self.on_remove is not None: for item in items: self.on_remove(self, item) def __setslice__(self, i, j, other): other = self._make_list(other) old = self[i:j] list.__setslice__(self, i, j, other) self._do_remove(old) self._do_add(other) def __delslice__(self, i, j): old = self[i:j] list.__delslice__(self, i, j) self._do_remove(old) def __iadd__(self, other): other = self._make_list(other) list.__iadd__(self, other) self._do_add(other) def __imul__(self, n): while n > 0: self += self n -= 1 def append(self, item): list.append(self, item) self._do_add([item]) def insert(self, i, item): list.insert(self, i, item) self._do_add([item]) def pop(self, i=-1): item = self[i] result = list.pop(self, i) self._do_remove([item]) return result def remove(self, item): list.remove(self, item) self._do_remove([item]) def extend(self, other): for item in other: self.append(item) def __repr__(self): name = self.name if name is None: name = '_LiveList' return 
'%s(%s)' % (name, list.__repr__(self)) class _findall_property(object): """ Returns a LiveList of all the objects with the given tag. You can append or remove items to the list to add or remove them from the containing tag. """ def __init__(self, tag, ns=atom_ns): self.tag = tag self.ns = ns self.__doc__ = 'Return live list of all the <atom:%s> element' % self.tag def __get__(self, obj, type=None): if obj is None: return self def add(lst, item): # FIXME: shouldn't just be an append obj.append(item) def remove(lst, item): obj.remove(item) return _LiveList(obj._atom_iter(self.tag, ns=self.ns), on_add=add, on_remove=remove, name='live_%s_list' % self.tag) def __set__(self, obj, value): cur = self.__get__(obj) cur[:] = value class _text_element_property(object): """ Creates an attribute that returns the text content of the given subelement. E.g., ``title = _text_element_property('title')`` will make ``obj.title`` return the contents of the ``<title>``. Similarly setting the attribute sets the text content of the attribute. """ def __init__(self, tag, strip=True): self.tag = tag self.strip = strip self.__doc__ = 'Access the <atom:%s> element as text' % self.tag def __get__(self, obj, type=None): if obj is None: return self v = obj._atom_findtext(self.tag) if self.strip: if v is not None: v = v.strip() else: return '' return v def __set__(self, obj, value): el = obj._get_or_create(self.tag) el.text = value def __delete__(self, obj): el = obj._atom_get(self.tag) if el: # FIXME: should it be an error if it doesn't exist? obj.remove(el) class _element_property(object): """ Returns a single subelement based on tag. Setting the attribute removes the element and adds a new one. Deleting it removes the element. 
""" def __init__(self, tag): self.tag = tag self.__doc__ = 'Get the <atom:%s> element' % self.tag def __get__(self, obj, type=None): if obj is None: return self return obj._atom_get(self.tag) def __set__(self, obj, value): el = obj._atom_get(self.tag) if el is not None: parent = el.getparent() index = parent.index(el) parent[index] = value else: obj.append(value) def __delete__(self): el = obj._atom_get(self.tag) if el is not None: obj.remove(el) class _attr_element_property(object): """ Get/set the value of the attribute on this element. """ def __init__(self, attr, default=NoDefault): self.attr = attr self.default = default self.__doc__ = 'Access the %s attribute' % self.attr def __get__(self, obj, type=None): if obj is None: return self try: return obj.attrib[self.attr] except KeyError: if self.default is not NoDefault: return self.default raise AttributeError(self.attr) def __set__(self, obj, value): if value is None: self.__delete__(obj) else: obj.attrib[self.attr] = value def __delete__(self, obj): if self.attr in obj.attrib: del obj.attrib[self.attr] class _date_element_property(object): """ Get/set the parsed date value of the text content of a tag. 
""" def __init__(self, tag, ns=atom_ns): self.tag = tag self.ns = ns self.__doc__ = 'Access the date in %s' % self.tag def __get__(self, obj, type=None): if obj is None: return self el = obj._atom_get(self.tag, ns=self.ns) if el is None: return None return el.date def __set__(self, obj, value): el = obj._get_or_create(self.tag, ns=self.ns) el.date = value def __delete__(self): el = obj._atom_get(self.tag) if el is not None: obj.remove(el) class _date_text_property(object): def __get__(self, obj, type=None): if obj is None: return self return parse_date(obj.text) def __set__(self, obj, value): if not value: obj.text = None return if isinstance(value, datetime): value = _strftime(value) obj.text = value def __del__(self, obj): obj.text = None class AtomElement(etree.ElementBase): def _get_or_create(self, tag, ns=atom_ns): el = self.find('{%s}%s' % (ns, tag)) if el is None: el = self.makeelement('{%s}%s' % (ns, tag)) self.append(el) return el def _atom_get(self, tag, ns=atom_ns): for item in self._atom_iter(tag, ns=ns): return item return None def _atom_iter(self, tag, ns=atom_ns): return self.getiterator('{%s}%s' % (ns, tag)) def _atom_findtext(self, tag, ns=atom_ns): return self.findtext('{%s}%s' % (ns, tag)) def _get_parent(self, tag, ns=atom_ns): parent = self while 1: if parent.tag == '{%s}%s' % (ns, tag): return parent parent = parent.getparent() if parent is None: return None @property def feed(self): return self._get_parent('feed') def rel_links(self, rel='alternate'): """ Return all the links with the given ``rel`` attribute. The default relation is ``'alternate'``, and as specified for Atom links with no ``rel`` attribute are assumed to mean alternate. 
""" if rel is None: return self._atom_iter('link') return [ el for el in self._atom_iter('link') if el.get('rel') == rel or rel == 'alternate' and not el.get('rel')] def __repr__(self): tag = self.tag if '}' in tag: tag = tag.split('}', 1)[1] return '<%s.%s atom:%s at %s>' % ( self.__class__.__module__, self.__class__.__name__, tag, hex(abs(id(self)))[2:]) class Feed(AtomElement): """ For ``<feed>`` elements. """ @property def feed(self): return self entries = _findall_property('entry') title = _text_element_property('title') author = _element_property('author') class Entry(AtomElement): """ For ``<entry>`` elements. """ @property def entry(self): return self id = _text_element_property('id') title = _text_element_property('title') published = _date_element_property('published') updated = _date_element_property('updated') edited = _date_element_property('edited', ns=app_ns) def update_edited(self): """ Set app:edited to current time """ self.edited = datetime.utcnow() def update_updated(self): """ Set atom:updated to the current time """ self.updated = datetime.utcnow() def make_id(self): """ Create an artificial id for this entry """ assert not self.id, ( "You cannot make an id if one already exists") self.id = 'uuid:%s' % uuid.uuid4() def author__get(self): el = self._atom_get('author') if el is None: if self.feed is not None: return self.feed.author return el def author__set(self, value): el = self._atom_get('author') if el is not None: self.remove(el) self.append(value) def author__del(self): el = self._atom_get('author') if el is not None: self.remove(el) author = property(author__get, author__set, author__del) categories = _findall_property('category') class _EntryElement(AtomElement): @property def entry(self): return self._get_parent('entry') class Category(_EntryElement): """ For ``<category>`` elements. 
""" term = _attr_element_property('term') scheme = _attr_element_property('scheme', None) label = _attr_element_property('label', None) def as_string(self): """ Returns the string representation of the category, using the GData convention of ``{scheme}term`` """ if self.scheme is not None: return '{%s}%s' % (self.scheme, self.term) else: return self.term class PersonElement(_EntryElement): """ Represents authors and contributors """ email = _text_element_property('email') uri = _text_element_property('uri') name = _text_element_property('name') class DateElement(_EntryElement): """ For elements that contain a date in their text content. """ date = _date_text_property() class TextElement(_EntryElement): type = _attr_element_property('type', None) src = _attr_element_property('src', None) def _html__get(self): """ Gives the parsed HTML of element's content. May return an HtmlElement (from lxml.html) or an XHTML tree. If the element is ``type="text"`` then it is returned as quoted HTML. You can also set this attribute to either an lxml.html element, an XHTML element, or an HTML string. Raises AttributeError if this is not HTML content. """ ## FIXME: should this handle text/html types? 
if self.type == 'html': content = self.text elif self.type == 'text': content = cgi.escape(self.text) elif self.type == 'xhtml': div = copy.deepcopy(self[0]) # Now remove the namespaces: for el in div.getiterator(): if el.tag.startswith('{'): el.tag = el.tag.split('}', 1)[1] if div.tag.startswith('{'): div.tag = el.tag.split('}', 1)[1] from lxml.html import tostring content = tostring(div) else: raise AttributeError( "Not an HTML or text content (type=%r)" % self.type) from lxml.html import fromstring return fromstring(content) def _html__set(self, value): if value is None: del self.html return if isinstance(value, basestring): # Some HTML text self.type = 'html' self.text = value return if value.tag.startswith('{%s}' % xhtml_ns): if value.tag != '{%s}div' % xhtml_ns: # Need to wrap it in a <div> el = self.makeelement('{%s}div' % xhtml_ns) el.append(value) value = el self[:] = [] self.type = 'xhtml' self.append(value) return from lxml import html if isinstance(value, html.HtmlElement): value = tostring(value) self[:] = [] self.type = 'html' self.text = value return raise TypeError( "Unknown HTML type: %s" % type(value)) def _html__del(self): self.text = None html = property(_html__get, _html__set, _html__del, doc=_html__get.__doc__) def _binary__get(self): """ Gets/sets the binary content, which is base64 encoded in the text. """ text = self.text if text is None: raise AttributeError( "No text (maybe in src?)") text = text.decode('base64') return text def _binary__set(self, value): if isinstance(value, unicode): ## FIXME: is this kosher? value = value.encode('utf8') if not isinstance(value, str): raise TypeError( "Must set .binary to a str or unicode object (not %s)" % type(value)) value = value.encode('base64') self.text = value def _binary__del(self): self.text = None binary = property(_binary__get, _binary__set, _binary__del, doc=_binary__get.__doc__) class LinkElement(_EntryElement): """ For ``<link>`` elements. 
""" href = _attr_element_property('href', None) rel = _attr_element_property('rel', None) type = _attr_element_property('type', None) title = _attr_element_property('title', None) def __repr__(self): return '<%s.%s at %s rel=%r href=%r>' % ( self.__class__.__module__, self.__class__.__name__, hex(abs(id(self)))[2:], self.rel, self.href) AtomLookup._elements.update(dict( feed=Feed, entry=Entry, category=Category, author=PersonElement, contributor=PersonElement, published=DateElement, updated=DateElement, content=TextElement, summary=TextElement, title=TextElement, rights=TextElement, subtitle=TextElement, link=LinkElement, )) class APPElement(etree.ElementBase): def __repr__(self): tag = self.tag if '}' in tag: tag = tag.split('}', 1)[1] return '<%s.%s app:%s at %s>' % ( self.__class__.__module__, self.__class__.__name__, tag, hex(abs(id(self)))[2:]) class Service(APPElement): workspaces = _findall_property('workspace', ns=app_ns) class Workspace(APPElement): collections = _findall_property('collection', ns=app_ns) class Collection(APPElement): pass class Edited(APPElement): date = _date_text_property() AtomLookup._app_elements.update(dict( service=Service, workspace=Workspace, collection=Collection, edited=Edited, ))
apache-2.0
dan1/horizon-proto
horizon/test/settings.py
27
5519
# Copyright 2012 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import socket import sys import django from django.utils import html_parser from openstack_dashboard.static_settings import get_staticfiles_dirs # noqa from horizon.test import patches STATICFILES_DIRS = get_staticfiles_dirs() # Patch django.utils.html_parser.HTMLParser as a workaround for bug 1273943 if django.get_version() == '1.4' and sys.version_info[:3] > (2, 7, 3): html_parser.HTMLParser.parse_starttag = patches.parse_starttag_patched socket.setdefaulttimeout(1) LOGIN_URL = '/auth/login/' LOGOUT_URL = '/auth/logout/' LOGIN_REDIRECT_URL = '/' ROOT_PATH = os.path.dirname(os.path.abspath(__file__)) STATIC_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'static')) DEBUG = False TEMPLATE_DEBUG = DEBUG TESTSERVER = 'http://testserver' SECRET_KEY = 'elj1IWiLoWHgcyYxFVLj7cM5rGOOxWl0' USE_I18N = True USE_L10N = True USE_TZ = True DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}} DEFAULT_EXCEPTION_REPORTER_FILTER = 'horizon.exceptions.HorizonReporterFilter' INSTALLED_APPS = ( 'django.contrib.sessions', 'django.contrib.staticfiles', 'django.contrib.messages', 'django.contrib.humanize', 'django.contrib.auth', 'django.contrib.contenttypes', 'django_nose', 'django_pyscss', 'compressor', 'horizon', 'horizon.test', 
'horizon.test.test_dashboards.cats', 'horizon.test.test_dashboards.dogs' ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ) if django.VERSION >= (1, 8, 0): MIDDLEWARE_CLASSES += ( 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',) else: MIDDLEWARE_CLASSES += ('django.middleware.doc.XViewMiddleware',) MIDDLEWARE_CLASSES += ( 'horizon.middleware.HorizonMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) TEMPLATE_CONTEXT_PROCESSORS = ( 'django.core.context_processors.debug', 'django.core.context_processors.i18n', 'django.core.context_processors.request', 'django.core.context_processors.media', 'django.core.context_processors.static', 'django.contrib.messages.context_processors.messages', 'horizon.context_processors.horizon') TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', 'horizon.loaders.TemplateLoader' ) STATIC_URL = '/static/' MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage' ROOT_URLCONF = 'horizon.test.urls' TEMPLATE_DIRS = (os.path.join(ROOT_PATH, 'tests', 'templates'),) SITE_ID = 1 SITE_BRANDING = 'Horizon' TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--nocapture', '--nologcapture', '--exclude-dir=horizon/conf/', '--exclude-dir=horizon/test/customization', '--cover-package=horizon', '--cover-inclusive', '--all-modules'] EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies' SESSION_COOKIE_HTTPONLY = True SESSION_EXPIRE_AT_BROWSER_CLOSE = True SESSION_COOKIE_SECURE = False HORIZON_CONFIG = { 'dashboards': ('cats', 'dogs'), 'default_dashboard': 'cats', 
"password_validator": { "regex": '^.{8,18}$', "help_text": "Password must be between 8 and 18 characters." }, 'user_home': None, 'help_url': "http://example.com", } COMPRESS_ENABLED = True COMPRESS_OFFLINE = False COMPRESS_ROOT = "/tmp/" COMPRESS_PARSER = 'compressor.parser.HtmlParser' STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', 'compressor.finders.CompressorFinder', ) LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'handlers': { 'null': { 'level': 'DEBUG', 'class': 'django.utils.log.NullHandler', }, 'test': { 'level': 'ERROR', 'class': 'logging.StreamHandler', } }, 'loggers': { 'django.db.backends': { 'handlers': ['null'], 'propagate': False, }, 'horizon': { 'handlers': ['test'], 'propagate': False, }, 'nose.plugins.manager': { 'handlers': ['null'], 'propagate': False, }, 'selenium': { 'handlers': ['null'], 'propagate': False, } } }
apache-2.0
nvoron23/arangodb
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxserialtest.py
17
3020
# ocxserialtest.py # # Sample that uses the mscomm OCX to talk to a serial # device. # Very simple - queries a modem for ATI responses import win32ui, win32uiole import win32con from pywin.mfc import dialog, activex from win32com.client import gencache import pythoncom SERIAL_SETTINGS = '19200,n,8,1' SERIAL_PORT = 2 win32ui.DoWaitCursor(1) serialModule = gencache.EnsureModule("{648A5603-2C6E-101B-82B6-000000000014}", 0, 1, 1) win32ui.DoWaitCursor(0) if serialModule is None: raise ImportError, "MS COMM Control does not appear to be installed on the PC" def MakeDlgTemplate(): style = win32con.DS_MODALFRAME | win32con.WS_POPUP \ | win32con.WS_VISIBLE | win32con.WS_CAPTION \ | win32con.WS_SYSMENU | win32con.DS_SETFONT cs = win32con.WS_CHILD | win32con.WS_VISIBLE dlg = [ ["Very Basic Terminal", (0, 0, 350, 180), style, None, (8, "MS Sans Serif")], ] s = win32con.WS_TABSTOP | cs dlg.append(["RICHEDIT", None, 132, (5, 5, 340, 170),s | win32con.ES_WANTRETURN | win32con.ES_MULTILINE | win32con.ES_AUTOVSCROLL | win32con.WS_VSCROLL]) return dlg #################################### # # Serial Control # class MySerialControl(activex.Control, serialModule.MSComm): def __init__(self, parent): activex.Control.__init__(self) serialModule.MSComm.__init__(self) self.parent = parent def OnComm(self): self.parent.OnComm() class TestSerDialog(dialog.Dialog): def __init__(self, *args): apply( dialog.Dialog.__init__, (self,)+args ) self.olectl = None def OnComm(self): event = self.olectl.CommEvent if event == serialModule.OnCommConstants.comEvReceive: self.editwindow.ReplaceSel(self.olectl.Input) def OnKey(self, key): if self.olectl: self.olectl.Output = chr(key) def OnInitDialog(self): rc = dialog.Dialog.OnInitDialog(self) self.editwindow = self.GetDlgItem(132) self.editwindow.HookAllKeyStrokes(self.OnKey) self.olectl = MySerialControl(self) try: self.olectl.CreateControl("OCX", win32con.WS_TABSTOP | win32con.WS_VISIBLE, (7,43,500,300), self._obj_, 131) except win32ui.error: 
self.MessageBox("The Serial Control could not be created") self.olectl = None self.EndDialog(win32con.IDCANCEL) if self.olectl: self.olectl.Settings = SERIAL_SETTINGS self.olectl.CommPort = SERIAL_PORT self.olectl.RThreshold = 1 try: self.olectl.PortOpen = 1 except pythoncom.com_error, details: print "Could not open the specified serial port - %s" % (details[2][2]) self.EndDialog(win32con.IDCANCEL) return rc def OnDestroy(self, msg): if self.olectl: try: self.olectl.PortOpen = 0 except pythoncom.com_error, details: print "Error closing port - %s" % (details[2][2]) return dialog.Dialog.OnDestroy(self, msg) def test(): d = TestSerDialog(MakeDlgTemplate() ) d.DoModal() if __name__ == "__main__": import demoutils if demoutils.NeedGoodGUI(): test()
apache-2.0
PandaWei/tp-libvirt
libvirt/tests/src/virsh_cmd/virsh_help.py
8
2956
import logging from autotest.client.shared import error from virttest import virsh def run(test, params, env): """ Test command: virsh help. 1.Get all parameters from configuration. 2.Perform virsh help operation. 3.Check help information valid or not. 4.Check result. """ extra = params.get("help_extra", "") cmd = params.get("help_command", "") test_target = params.get("help_target", "") status_error = params.get("status_error", "no") def help_check(test_target): """ Check all virsh commands or groups's help information :param test_target: Test target,all virsh or all virsh groups :return: True if check successfully """ help_list = [] if test_target == "all_command": help_list = virsh.help_command_only("", False, ignore_status=True) elif test_target == "all_group": help_list = virsh.help_command_group("", False, ignore_status=True) if len(help_list) == 0: raise error.TestError("Cannot get any virsh command/group!") fail_list = [] # If any command or group's check failed, the test failed check_result = True for virsh_cmd_group in help_list: logging.info("Test command or group: '%s'", virsh_cmd_group) result = virsh.help(virsh_cmd_group, ignore_status=True) status = result.exit_status output = result.stdout.strip() if status != 0: fail_list.append(virsh_cmd_group) # No need to check output continue if not output: fail_list.append(virsh_cmd_group) # List all failed commands or groups if len(fail_list) > 0: check_result = False logging.info("These commands or groups' check failed!!!") for fail_cmd in fail_list: logging.info("%s", fail_cmd) return check_result if test_target == "": cmd = "%s %s" % (cmd, extra) result = virsh.help(cmd, ignore_status=True) else: check_result = help_check(test_target) if test_target == "": status = result.exit_status output = result.stdout.strip() # Check status_error if status_error == "yes": if test_target == "": if status == 0: raise error.TestFail("Run successfully with wrong command!") elif status_error == "no": if test_target == "": 
if status != 0: raise error.TestFail("Run failed with right command") if output == "": raise error.TestFail("Cannot see help information") else: if not check_result: raise error.TestFail( "virsh help command or groups test failed")
gpl-2.0
orionzhou/robin
apps/venn3.py
1
2679
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Readlist utilities """ import os.path as op import sys import re import logging import pandas as pd from matplotlib import pyplot as plt import numpy as np from matplotlib_venn import venn3, venn3_circles def venn3_coord(args): fhi = open(args.fi, 'r') s1 = fhi.readline().strip().split(",") s2 = fhi.readline().strip().split(",") s3 = fhi.readline().strip().split(",") fhi.close() s1, s2, s3 = set(s1), set(s2), set(s3) v = venn3([s1, s2, s3], ('A','B','C')) fho1 = open(args.fo1, 'w') for xy, l in zip(v.centers, v.radii): x, y = xy fho1.write("%s\t%s\t%s\n" % (x, y, l)) fho1.close() fho2 = open(args.fo2, 'w') for xyl in v.subset_labels: x, y = xyl.get_position() l = xyl.get_text() fho2.write("%s\t%s\t%s\n" % (x, y, l)) fho2.close() def add_stat(args): cvt = {k: int for k in 'Replicate'.split()} sl = pd.read_csv(args.fi, sep="\t", header=0, converters=cvt) firstN = 10000 sl['spots'] = [0] * len(sl.index) sl['avgLength'] = [0] * len(sl.index) for i in range(len(sl)): sid = sl['SampleID'][i] fq = '' if sl['paired'][i]: r1, r2 = sl['r1'][i], sl['r2'][i] fq = r1 else: fq = sl['r0'][i] nrcd = 0 L = [] for rec in iter_fastq(fq): if not rec: break nrcd += 1 if nrcd <= firstN: L.append(len(rec)) avgLength = SummaryStats(L).mean if sl['paired'][i]: avgLength = avgLength * 2 print("\t".join(str(x) for x in (sid, nrcd, avgLength))) sl.at[i, 'spots'] = nrcd sl.at[i, 'avgLength'] = avgLength sl.to_csv(args.fo, sep="\t", header=True, index=False) def main(): import argparse ps = argparse.ArgumentParser( formatter_class = argparse.ArgumentDefaultsHelpFormatter, description = '3-way venn-diagram' ) sp = ps.add_subparsers(title = 'available commands', dest = 'command') sp1 = sp.add_parser('coord', help='compute venn3 coordinates', formatter_class = argparse.ArgumentDefaultsHelpFormatter) sp1.add_argument('fi', help = 'input file containing sets') sp1.add_argument('fo1', help = 'output circle coordinates') sp1.add_argument('fo2', help = 
'output label coordinates') sp1.set_defaults(func = venn3_coord) args = ps.parse_args() if args.command: args.func(args) else: print('Error: need to specify a sub command\n') parser.print_help() if __name__ == '__main__': main()
gpl-2.0
tvalacarta/tvalacarta
python/main-classic/servers/rtva.py
1
1030
# -*- coding: utf-8 -*- #------------------------------------------------------------ # pelisalacarta - XBMC Plugin # Conector para rtva # http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/ #------------------------------------------------------------ import urlparse,urllib2,urllib,re import os from core import scrapertools from core import logger from core import config def get_video_url( page_url , premium = False , user="" , password="", video_password="", page_data="" ): logger.info("tvalacarta.servers.rtva get_video_url(page_url='%s')" % page_url) data = scrapertools.cache_page(page_url) url = scrapertools.get_match(data,'file\:"([^"]+)"') video_urls = [] video_urls.append( [ "(mp4) [rtva]" , url ] ) for video_url in video_urls: logger.info("tvalacarta.servers.rtva %s - %s" % (video_url[0],video_url[1])) return video_urls # Encuentra vídeos del servidor en el texto pasado def find_videos(data): encontrados = set() devuelve = [] return devuelve
gpl-3.0
fulmicoton/pylearn2
pylearn2/utils/track_version.py
33
7516
#!/usr/bin/env python """ Script to obtain version of Python modules and basic information on the experiment setup (e.g. cpu, os), e.g. * numpy: 1.6.1 | pylearn: a6e634b83d | pylearn2: 57a156beb0 * CPU: x86_64 * OS: Linux-2.6.35.14-106.fc14.x86_64-x86_64-with-fedora-14-Laughlin You can also define the modules to be tracked with the environment variable `PYLEARN2_TRACK_MODULES`. Use ":" to separate module names between them, e.g. `PYLEARN2_TRACK_MODULES = module1:module2:module3` By default, the following modules are tracked: pylearn2, theano, numpy, scipy """ __authors__ = "Olivier Dellaleau and Raul Chandias Ferrari" __copyright__ = "Copyright 2013, Universite de Montreal" __credits__ = ["Olivier Dellaleau", "Raul Chandias Ferrari"] __license__ = "3-clause BSD" __maintainer__ = "Raul Chandias Ferrari" __email__ = "chandiar@iro" import copy import logging import os import platform import socket import subprocess import sys import warnings from theano.compat import six logger = logging.getLogger(__name__) class MetaLibVersion(type): """ Constructor that will be called everytime another's class constructor is called (if the "__metaclass__ = MetaLibVersion" line is present in the other class definition). Parameters ---------- cls : WRITEME name : WRITEME bases : WRITEME dict : WRITEME """ def __init__(cls, name, bases, dict): type.__init__(cls, name, bases, dict) cls.libv = LibVersion() class LibVersion(object): """ Initialize a LibVersion object that will store the version of python packages in a dictionary (versions). The python packages that are supported are: pylearn, pylearn2, theano, jobman, numpy and scipy. The key for the versions dict is the name of the package and the associated value is the version number. 
""" def __init__(self): self.versions = {} self.str_versions = '' self.exp_env_info = {} self._get_lib_versions() self._get_exp_env_info() def _get_exp_env_info(self): """ Get information about the experimental environment such as the cpu, os and the hostname of the machine on which the experiment is running. """ self.exp_env_info['host'] = socket.gethostname() self.exp_env_info['cpu'] = platform.processor() self.exp_env_info['os'] = platform.platform() if 'theano' in sys.modules: self.exp_env_info['theano_config'] = sys.modules['theano'].config else: self.exp_env_info['theano_config'] = None def _get_lib_versions(self): """Get version of Python packages.""" repos = os.getenv('PYLEARN2_TRACK_MODULES', '') default_repos = 'pylearn2:theano:numpy:scipy' repos = default_repos + ":" + repos repos = set(repos.split(':')) for repo in repos: try: if repo == '': continue __import__(repo) if hasattr(sys.modules[repo], '__version__'): v = sys.modules[repo].__version__ if v != 'unknown': self.versions[repo] = v continue self.versions[repo] = self._get_git_version( self._get_module_parent_path(sys.modules[repo])) except ImportError: self.versions[repo] = None known = copy.copy(self.versions) # Put together all modules with unknown versions. unknown = [k for k, w in known.items() if not w] known = dict((k, w) for k, w in known.items() if w) # Print versions. self.str_versions = ' | '.join( ['%s:%s' % (k, w) for k, w in sorted(six.iteritems(known))] + ['%s:?' % ','.join(sorted(unknown))]) def __str__(self): """ Return version of the Python packages as a string. e.g. numpy:1.6.1 | pylearn:a6e634b83d | pylearn2:57a156beb0 """ return self.str_versions def _get_git_version(self, root): """ Return the git revision of a repository with the letter 'M' appended to the revision if the repo was modified. e.g. 
10d3046e85 M Parameters ---------- root : str Root folder of the repository Returns ------- rval : str or None A string with the revision hash, or None if it could not be retrieved (e.g. if it is not actually a git repository) """ if not os.path.isdir(os.path.join(root, '.git')): return None cwd_backup = os.getcwd() try: os.chdir(root) sub_p = subprocess.Popen(['git', 'rev-parse', 'HEAD'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) version = sub_p.communicate()[0][0:10].strip() sub_p = subprocess.Popen(['git', 'diff', '--name-only'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) modified = sub_p.communicate()[0] if len(modified): version += ' M' return version except Exception: pass finally: try: os.chdir(cwd_backup) except Exception: warnings.warn("Could not chdir back to " + cwd_backup) def _get_hg_version(self, root): """Same as `get_git_version` but for a Mercurial repository.""" if not os.path.isdir(os.path.join(root, '.hg')): return None cwd_backup = os.getcwd() try: os.chdir(root) sub_p = subprocess.Popen(['hg', 'parents'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) sub_p_output = sub_p.communicate()[0] finally: os.chdir(cwd_backup) first_line = sub_p_output.split('\n')[0] # The first line looks like: # changeset: 1517:a6e634b83d88 return first_line.split(':')[2][0:10] def _get_module_path(self, module): """Return path to a given module.""" return os.path.realpath(module.__path__[0]) def _get_module_parent_path(self, module): """Return path to the parent directory of a given module.""" return os.path.dirname(self._get_module_path(module)) def print_versions(self): """ Print version of the Python packages as a string. e.g. numpy:1.6.1 | pylearn:a6e634b83d | pylearn2:57a156beb0 """ logger.info(self.__str__()) def print_exp_env_info(self, print_theano_config=False): """ Return basic information about the experiment setup such as the hostname of the machine the experiment was run on, the operating system installed on the machine. 
Parameters ---------- print_theano_config : bool, optional If True, information about the theano configuration will be displayed. """ logger.info('HOST: {0}'.format(self.exp_env_info['host'])) logger.info('CPU: {0}'.format(self.exp_env_info['cpu'])) logger.info('OS: {0}'.format(self.exp_env_info['os'])) if print_theano_config: logger.info(self.exp_env_info['theano_config'])
bsd-3-clause
yencarnacion/jaikuengine
.google_appengine/lib/django-1.4/django/db/models/related.py
102
3162
from django.utils.encoding import smart_unicode from django.db.models.fields import BLANK_CHOICE_DASH class BoundRelatedObject(object): def __init__(self, related_object, field_mapping, original): self.relation = related_object self.field_mappings = field_mapping[related_object.name] def template_name(self): raise NotImplementedError def __repr__(self): return repr(self.__dict__) class RelatedObject(object): def __init__(self, parent_model, model, field): self.parent_model = parent_model self.model = model self.opts = model._meta self.field = field self.name = '%s:%s' % (self.opts.app_label, self.opts.module_name) self.var_name = self.opts.object_name.lower() def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_to_currently_related=False): """Returns choices with a default blank choices included, for use as SelectField choices for this field. Analogue of django.db.models.fields.Field.get_choices, provided initially for utilisation by RelatedFieldListFilter. """ first_choice = include_blank and blank_choice or [] queryset = self.model._default_manager.all() if limit_to_currently_related: queryset = queryset.complex_filter( {'%s__isnull' % self.parent_model._meta.module_name: False}) lst = [(x._get_pk_val(), smart_unicode(x)) for x in queryset] return first_choice + lst def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False): # Defer to the actual field definition for db prep return self.field.get_db_prep_lookup(lookup_type, value, connection=connection, prepared=prepared) def editable_fields(self): "Get the fields in this class that should be edited inline." 
return [f for f in self.opts.fields + self.opts.many_to_many if f.editable and f != self.field] def __repr__(self): return "<RelatedObject: %s related to %s>" % (self.name, self.field.name) def bind(self, field_mapping, original, bound_related_object_class=BoundRelatedObject): return bound_related_object_class(self, field_mapping, original) def get_accessor_name(self): # This method encapsulates the logic that decides what name to give an # accessor descriptor that retrieves related many-to-one or # many-to-many objects. It uses the lower-cased object_name + "_set", # but this can be overridden with the "related_name" option. if self.field.rel.multiple: # If this is a symmetrical m2m relation on self, there is no reverse accessor. if getattr(self.field.rel, 'symmetrical', False) and self.model == self.parent_model: return None return self.field.rel.related_name or (self.opts.object_name.lower() + '_set') else: return self.field.rel.related_name or (self.opts.object_name.lower()) def get_cache_name(self): return "_%s_cache" % self.get_accessor_name()
apache-2.0
conorsch/septapy
septapy/stop.py
1
1560
import utils import requests class Stop(object): """Represents a single location where public transit vehicles pick up and drop off passengers. Has associated attributes such as: .latitude .longitude .coords # latitude and longitude as tuple .route """ def __init__(self, jsonArgs, route=None): self.latitude = jsonArgs['lat'] self.longitude = jsonArgs['lng'] self.coords = (self.latitude, self.longitude) self.stopID = jsonArgs['stopid'] self.name = jsonArgs['stopname'] self.title = self.name self.route = route def __str__(self): representation = """\ Route: %(route)s Stop Name: %(stopName)s Stop ID: %(stopID)s Location: %(lat)s, %(long)s """ % { 'route': self.route, 'stopName': self.name, 'stopID': self.stopID, 'lat': self.latitude, 'long': self.longitude, } return representation def getStopsByRoute(routeIdentifier): stopsURL = 'http://www3.septa.org/hackathon/Stops/' + routeIdentifier r = requests.get(stopsURL) j = r.json() return [Stop(s, route=routeIdentifier) for s in j] def getNearestStops(latitude, longitude, route=None): stops = getStopsByRoute(route) stops.sort(key=lambda s: utils.getDistance(s.latitude, s.longitude, latitude, longitude)) return stops def getNearestStop(latitude, longitude, route=None): return getNearestStops(latitude, longitude, route)[0]
mit
eduNEXT/edunext-platform
lms/djangoapps/courseware/course_tools.py
3
4660
""" Platform plugins to support a verified upgrade tool. """ import datetime import pytz from crum import get_current_request from django.conf import settings from django.utils.translation import ugettext as _ from django.urls import reverse from course_modes.models import CourseMode from lms.djangoapps.courseware.utils import verified_upgrade_deadline_link from openedx.features.course_experience.course_tools import CourseTool from student.models import CourseEnrollment from openedx.core.djangoapps.content.course_overviews.models import CourseOverview class VerifiedUpgradeTool(CourseTool): """ The verified upgrade tool. """ @classmethod def analytics_id(cls): """ Returns an id to uniquely identify this tool in analytics events. """ return 'edx.tool.verified_upgrade' @classmethod def is_enabled(cls, request, course_key): """ Show this tool to all learners who are eligible to upgrade. """ enrollment = CourseEnrollment.get_enrollment(request.user, course_key) if enrollment is None: return False if enrollment.dynamic_upgrade_deadline is None: return False if not enrollment.is_active: return False if enrollment.mode not in CourseMode.UPSELL_TO_VERIFIED_MODES: return False if enrollment.course_upgrade_deadline is None: return False if datetime.datetime.now(pytz.UTC) >= enrollment.course_upgrade_deadline: return False return True @classmethod def title(cls): """ Returns the title of this tool. """ return _('Upgrade to Verified') @classmethod def icon_classes(cls): """ Returns the icon classes needed to represent this tool. """ return 'fa fa-certificate' @classmethod def url(cls, course_key): """ Returns the URL for this tool for the specified course key. """ request = get_current_request() return verified_upgrade_deadline_link(request.user, course_id=course_key) class FinancialAssistanceTool(CourseTool): """ The financial assistance tool. """ @classmethod def analytics_id(cls): """ Returns an id to uniquely identify this tool in analytics events. 
""" return 'edx.tool.financial_assistance' @classmethod def is_enabled(cls, request, course_key): """ Show this link for active courses where financial assistance is available, unless upgrade deadline has passed """ now = datetime.datetime.now(pytz.UTC) feature_flags = None try: course_overview = CourseOverview.objects.get(id=course_key) except CourseOverview.DoesNotExist: course_overview = None # hide link if there's no ENABLE_FINANCIAL_ASSISTANCE_FORM setting (ex: Edge) or if it's False subset_name = 'FEATURES' feature_flags = getattr(settings, subset_name) if feature_flags is None or not feature_flags.get('ENABLE_FINANCIAL_ASSISTANCE_FORM'): return False # hide link for archived courses if course_overview is not None and course_overview.end_date is not None and now > course_overview.end_date: return False # hide link if not logged in or user not enrolled in the course if not request.user or not CourseEnrollment.is_enrolled(request.user, course_key): return False enrollment = CourseEnrollment.get_enrollment(request.user, course_key) # hide if we're no longer in an upsell mode (already upgraded) if enrollment.mode not in CourseMode.UPSELL_TO_VERIFIED_MODES: return False # hide if there's no course_upgrade_deadline, or one with a value in the past if enrollment.course_upgrade_deadline: if now > enrollment.course_upgrade_deadline: return False else: return False return bool(course_overview.eligible_for_financial_aid) @classmethod def title(cls, course_key=None): """ Returns the title of this tool. """ return _('Financial Assistance') @classmethod def icon_classes(cls, course_key=None): """ Returns the icon classes needed to represent this tool. """ return 'fa fa-info' @classmethod def url(cls, course_key): """ Returns the URL for this tool for the specified course key. """ return reverse('financial_assistance')
agpl-3.0
ericzhou2008/WinObjC
deps/scripts/freetype2.py
159
9307
# python 2.7.9 import subprocess, os.path, os, shutil DEBUG = False BUILD_CONFIGURATION = "RelWithDebInfo" PATCH_FILE = os.path.realpath(os.path.join(".", "freetype2.patch")) BUILD_DIR = os.path.realpath(os.path.join(".", "temp_build")) PATH_FILE = "paths.txt" OUTPUT_DIR = os.path.realpath(os.path.join("..", "prebuilt")) HEADER_DIR = os.path.realpath(os.path.join(OUTPUT_DIR, "include", "freetype")) MSBUILD_PATH = None CMAKE_PATH = None ARCH_TO_DIR_NAME = {"Win32": "x86", "Win64": "x64", "ARM" : "ARM"} PLATFORM_TO_DIR_NAME = {"Windows8.1": "Windows 8.1", "WindowsPhone8.1": "Windows Phone 8.1", "Windows10.0": "Windows Universal"} class Configuration: def __init__(self, generator, platform, arch, system_name, system_version): self.generator = generator self.platform = platform self.arch = arch self.system_name = system_name self.system_version = system_version def generate_cmake_args(self): '''returns list of configuration specific args to pass to cmake''' args = [] # generator args.append("-G") if self.arch == "Win32": args.append(self.generator) else: args.append("%s %s" % (self.generator, self.arch)) # system name args.append('-DCMAKE_SYSTEM_NAME=%s' % self.system_name) # system version args.append('-DCMAKE_SYSTEM_VERSION=%s' % self.system_version) return args def output_directory(self, prefix): return os.path.join(prefix, PLATFORM_TO_DIR_NAME[self.platform], ARCH_TO_DIR_NAME[self.arch]) def __repr__(self): return "Configuration(%s, %s, %s, %s, %s)" % (self.generator, self.platform, self.arch, self.system_name, self.system_version) def create_configurations(): '''returns list of predefined Configuration objects for builds''' configs = [ Configuration("Visual Studio 12 2013", "WindowsPhone8.1", "Win32", "WindowsPhone", "8.1"), Configuration("Visual Studio 12 2013", "WindowsPhone8.1", "ARM", "WindowsPhone", "8.1"), Configuration("Visual Studio 12 2013", "Windows8.1", "Win32", "WindowsStore", "8.1"), Configuration("Visual Studio 12 2013", "Windows8.1", "Win64", 
"WindowsStore", "8.1"), Configuration("Visual Studio 12 2013", "Windows8.1", "ARM", "WindowsStore", "8.1"), Configuration("Visual Studio 14 2015", "Windows10.0", "Win32", "WindowsStore", "10.0"), Configuration("Visual Studio 14 2015", "Windows10.0", "Win64", "WindowsStore", "10.0"), Configuration("Visual Studio 14 2015", "Windows10.0", "ARM", "WindowsStore", "10.0"), ] return configs def check_path_file(): '''returns True if path file meets requirements, False otherwise''' if not os.path.exists(PATH_FILE): return False fp = open(PATH_FILE, 'r') lines = fp.readlines() fp.close() if len(lines) < 2: return False return True def import_paths(): '''reads PATH_FILE if it exists, otherwise queries user for paths''' while not check_path_file(): query_paths() fp = open(PATH_FILE, 'r') lines = fp.readlines() fp.close() if len(lines) >= 2: global MSBUILD_PATH global CMAKE_PATH MSBUILD_PATH = ' '.join(lines[0].split(' ')[2:]).rstrip() CMAKE_PATH = ' '.join(lines[1].split(' ')[2:]).rstrip() if DEBUG: print "MSBUILD_PATH: %s" % MSBUILD_PATH print "CMAKE_PATH: %s" % CMAKE_PATH def query_paths(): '''queries user for paths and writes them to PATH_FILE''' msbuild_path = raw_input("msbuild path: ") while not os.path.exists(msbuild_path): msbuild_path = raw_input("msbuild path: ") cmake_path = raw_input("cmake path: ") while not os.path.exists(msbuild_path): cmake_path = raw_input("cmake path: ") fp = open(PATH_FILE, 'w') fp.write("MSBUILD_PATH = %s\n" % msbuild_path) fp.write("CMAKE_PATH = %s\n" % cmake_path) fp.close() def make_prebuilt_dirs(configs): '''creates output directory for each config''' if not os.path.exists(HEADER_DIR): os.makedirs(HEADER_DIR) for config in configs: if not os.path.exists(config.output_directory(OUTPUT_DIR)): os.makedirs(config.output_directory(OUTPUT_DIR)) def cmake_generate_project_files(config, source_code_dir): '''calls CMAKE_PATH to generates vcxproj files for the given config. 
returns True if cmake exited with a result of 0, False otherwise''' cmake_command = [CMAKE_PATH] + config.generate_cmake_args() + [source_code_dir] if DEBUG: print cmake_command wait = raw_input() result = subprocess.call(cmake_command) return result == 0 def compile_project(config, project_filename): '''calls MSBUILD_PATH to compile the existing vcxproj. returns True if the process exited with a result of 0, False otherwise''' msbuild_command = [MSBUILD_PATH, project_filename, "/p:configuration=%s" % BUILD_CONFIGURATION] if DEBUG: print msbuild_command wait = raw_input() result = subprocess.call(msbuild_command) return result == 0 def add_to_prebuilt(output_dir, filename, output_filename=None): '''copies filename to output_dir. can optionally change copied file name''' source_file = os.path.join(BUILD_CONFIGURATION, filename) if output_filename == None: output_filename = filename shutil.copyfile(source_file, os.path.join(output_dir, output_filename)) def clean_build(build_dir): '''cleans generated files''' shutil.rmtree(build_dir, ignore_errors=True) def add_header(input_directory, filename): '''copies file in input_directory to HEADER_DIR''' source_file = os.path.join(input_directory, filename) shutil.copyfile(source_file, os.path.join(HEADER_DIR, filename)) def apply_patch(patch_file, git_dir): '''uses git to apply patch_file to git_dir''' cwd = os.getcwd() os.chdir(git_dir) result = subprocess.call(["git", "apply", patch_file, "--ignore-whitespace"]) os.chdir(cwd) return result == 0 def need_to_apply_patch(git_dir): '''returns True if patch needs to be applied (which means no changes have happened to git_dir)''' cwd = os.getcwd() os.chdir(git_dir) result = subprocess.call(["git", "diff", "--exit-code"]) os.chdir(cwd) return result == 0 def copy_dir_contents(input_dir, target_dir): '''copies input_dir contents into target_dir. will create new subdirectories as needed but target_dir must already exist. 
Will not check before overwriting files''' for item in os.listdir(input_dir): item_path = os.path.join(input_dir, item) target_path = os.path.join(target_dir, item) if not os.path.exists(target_path): if os.path.isdir(item_path): os.makedirs(target_path) copy_dir_contents(item_path, target_path) else: shutil.copy2(item_path, target_path) def run(): '''runs builds on all configurations, places binaries in prebuilt dir''' original_dir = os.path.realpath(".") import_paths() configs = create_configurations() # make sure prebuilt dir tree exists make_prebuilt_dirs(configs) src_dir = os.path.realpath(os.path.join("..", "3rdParty", "freetype2")) # make sure patch is applied if need_to_apply_patch(src_dir): result = apply_patch(os.path.realpath(PATCH_FILE), src_dir) if not result: print "Patch failed to apply, won't be able to generate correct builds" exit(1) # remove old build dir if it exists if os.path.exists(BUILD_DIR): shutil.rmtree(BUILD_DIR) os.mkdir(BUILD_DIR) os.chdir(BUILD_DIR) build_dir = os.path.realpath(".") failed_builds = [] for config in configs: os.chdir(build_dir) os.makedirs(config.output_directory(".")) os.chdir(config.output_directory(".")) result = cmake_generate_project_files(config, src_dir) if result: result = compile_project(config, "freetype.vcxproj") if result: output_dir = config.output_directory(OUTPUT_DIR) add_to_prebuilt(output_dir, "freetype.dll") add_to_prebuilt(output_dir, "freetype.lib") if BUILD_CONFIGURATION in ["Debug", "RelWithDebInfo"]: add_to_prebuilt(output_dir, "freetype.pdb") continue # cmake or build failed failed_builds.append(config) if DEBUG: print "FAILED: %s" % config wait = raw_input() os.chdir(original_dir) # copy relevant header files to right directory copy_dir_contents(os.path.join(src_dir, "include"), HEADER_DIR) clean_build(build_dir) if len(failed_builds) == 0: print "Successful" exit() else: print "%d failed builds!" 
% len(failed_builds) for config in failed_builds: print config exit(1) if __name__ == "__main__": run()
mit
Sunsoo/ecogwiki
lib/pyatom.py
4
15700
# -*- coding: utf-8 -*- """ This module provides a class called :class:`AtomFeed` which can be used to generate feeds in the Atom syndication format (see :rfc:`4287`). Example:: from pyatom import AtomFeed import datetime feed = AtomFeed(title="My Blog", subtitle="My example blog for a feed test.", feed_url="http://example.org/feed", url="http://example.org", author="Me") # Do this for each feed entry feed.add(title="My Post", content="Body of my post", content_type="html", author="Me", url="http://example.org/entry1", updated=datetime.datetime.utcnow()) print feed.to_string() :copyright: (c) 2010 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from datetime import datetime XHTML_NAMESPACE = 'http://www.w3.org/1999/xhtml' def escape(s, quote=False): """Replace special characters "&", "<" and ">" to HTML-safe sequences. If the optional flag `quote` is `True`, the quotation mark character (") is also translated. There is a special handling for `None` which escapes to an empty string. :param s: the string to escape. :param quote: set to true to also escape double quotes. 
""" if s is None: return '' elif hasattr(s, '__html__'): return s.__html__() elif not isinstance(s, basestring): s = unicode(s) s = s.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;') if quote: s = s.replace('"', "&quot;") return s def _make_text_block(name, content, content_type=None): """Helper function for the builder that creates an XML text block.""" if content_type == 'xhtml': return u'<%s type="xhtml"><div xmlns="%s">%s</div></%s>\n' % \ (name, XHTML_NAMESPACE, content, name) if not content_type: return u'<%s>%s</%s>\n' % (name, escape(content), name) return u'<%s type="%s">%s</%s>\n' % (name, content_type, escape(content), name) def format_iso8601(obj, timezone): """Format a datetime object for iso8601""" updated = '%Y-%m-%dT%H:%M:%S' + timezone return obj.strftime(updated) class AtomFeed(object): """A helper class that creates Atom feeds. :param title: the title of the feed. Required. :param title_type: the type attribute for the title element. One of ``'html'``, ``'text'`` or ``'xhtml'``. :param url: the url for the feed (not the url *of* the feed) :param id: a globally unique id for the feed. Must be an URI. If not present the `feed_url` is used, but one of both is required. :param updated: the time the feed was modified the last time. Must be a :class:`datetime.datetime` object. If not present the latest entry's `updated` is used. :param timezone: the timezone is based on utc. format the "+0900" :param feed_url: the URL to the feed. Should be the URL that was requested. :param author: the author of the feed. Must be either a string (the name) or a dict with name (required) and uri or email (both optional). Can be a list of (may be mixed, too) strings and dicts, too, if there are multiple authors. Required if not every entry has an author element. :param icon: an icon for the feed. :param logo: a logo for the feed. :param rights: copyright information for the feed. :param rights_type: the type attribute for the rights element. 
One of ``'html'``, ``'text'`` or ``'xhtml'``. Default is ``'text'``. :param subtitle: a short description of the feed. :param subtitle_type: the type attribute for the subtitle element. One of ``'text'``, ``'html'``, ``'text'`` or ``'xhtml'``. Default is ``'text'``. :param links: additional links. Must be a list of dictionaries with href (required) and rel, type, hreflang, title, length (all optional) :param generator: the software that generated this feed. This must be a tuple in the form ``(name, url, version)``. If you don't want to specify one of them, set the item to `None`. :param entries: a list with the entries for the feed. Entries can also be added later with :meth:`add`. For more information on the elements see http://www.atomenabled.org/developers/syndication/ Everywhere where a list is demanded, any iterable can be used. """ default_generator = ('PyAtom', None, None) def __init__(self, title=None, entries=None, **kwargs): self.title = title self.title_type = kwargs.get('title_type', 'text') self.url = kwargs.get('url') self.feed_url = kwargs.get('feed_url', self.url) self.id = kwargs.get('id', self.feed_url) self.updated = kwargs.get('updated') self.timezone = kwargs.get('timezone', 'Z') self.author = kwargs.get('author', ()) self.icon = kwargs.get('icon') self.logo = kwargs.get('logo') self.rights = kwargs.get('rights') self.rights_type = kwargs.get('rights_type') self.subtitle = kwargs.get('subtitle') self.subtitle_type = kwargs.get('subtitle_type', 'text') self.generator = kwargs.get('generator') if self.generator is None: self.generator = self.default_generator self.links = kwargs.get('links', []) self.entries = entries and list(entries) or [] if not hasattr(self.author, '__iter__') \ or isinstance(self.author, (basestring, dict)): self.author = [self.author] for i, author in enumerate(self.author): if not isinstance(author, dict): self.author[i] = {'name': author} if not self.title: raise ValueError('title is required') if not self.id: raise 
ValueError('id is required') for author in self.author: if 'name' not in author: raise TypeError('author must contain at least a name') def add(self, *args, **kwargs): """Add a new entry to the feed. This function can either be called with a :class:`FeedEntry` or some keyword and positional arguments that are forwarded to the :class:`FeedEntry` constructor. """ if len(args) == 1 and not kwargs and isinstance(args[0], FeedEntry): self.entries.append(args[0]) else: kwargs['feed_url'] = self.feed_url kwargs['timezone'] = self.timezone self.entries.append(FeedEntry(*args, **kwargs)) def __repr__(self): return '<%s %r (%d entries)>' % ( self.__class__.__name__, self.title, len(self.entries) ) def generate(self): """Return a generator that yields pieces of XML.""" # atom demands either an author element in every entry or a global one if not self.author: if False in map(lambda e: bool(e.author), self.entries): self.author = ({'name': u'unbekannter Autor'},) if not self.updated: dates = sorted([entry.updated for entry in self.entries]) self.updated = dates and dates[-1] or datetime.utcnow() yield u'<?xml version="1.0" encoding="utf-8"?>\n' yield u'<feed xmlns="http://www.w3.org/2005/Atom">\n' yield ' ' + _make_text_block('title', self.title, self.title_type) yield u' <id>%s</id>\n' % escape(self.id) yield u' <updated>%s</updated>\n' % format_iso8601(self.updated, self.timezone) if self.url: yield u' <link href="%s" />\n' % escape(self.url, True) if self.feed_url: yield u' <link href="%s" rel="self" />\n' % \ escape(self.feed_url, True) for link in self.links: yield u' <link %s/>\n' % ''.join('%s="%s" ' % \ (k, escape(link[k], True)) for k in link) for author in self.author: yield u' <author>\n' yield u' <name>%s</name>\n' % escape(author['name']) if 'uri' in author: yield u' <uri>%s</uri>\n' % escape(author['uri']) if 'email' in author: yield ' <email>%s</email>\n' % escape(author['email']) yield ' </author>\n' if self.subtitle: yield ' ' + _make_text_block('subtitle', 
self.subtitle, self.subtitle_type) if self.icon: yield u' <icon>%s</icon>\n' % escape(self.icon) if self.logo: yield u' <logo>%s</logo>\n' % escape(self.logo) if self.rights: yield ' ' + _make_text_block('rights', self.rights, self.rights_type) generator_name, generator_url, generator_version = self.generator if generator_name or generator_url or generator_version: tmp = [u' <generator'] if generator_url: tmp.append(u' uri="%s"' % escape(generator_url, True)) if generator_version: tmp.append(u' version="%s"' % escape(generator_version, True)) tmp.append(u'>%s</generator>\n' % escape(generator_name)) yield u''.join(tmp) for entry in self.entries: for line in entry.generate(): yield u' ' + line yield u'</feed>\n' def to_string(self): """Convert the feed into a string.""" return u''.join(self.generate()) def __unicode__(self): return self.to_string() def __str__(self): return self.to_string().encode('utf-8') class FeedEntry(object): """Represents a single entry in a feed. :param title: the title of the entry. Required. :param title_type: the type attribute for the title element. One of ``'html'``, ``'text'`` or ``'xhtml'``. :param content: the content of the entry. :param content_type: the type attribute for the content element. One of ``'html'``, ``'text'`` or ``'xhtml'``. :param summary: a summary of the entry's content. :param summary_type: the type attribute for the summary element. One of ``'html'``, ``'text'`` or ``'xhtml'``. :param url: the url for the entry. :param id: a globally unique id for the entry. Must be an URI. If not present the URL is used, but one of both is required. :param updated: the time the entry was modified the last time. Must be a :class:`datetime.datetime` object. Required. :param timezone: the timezone is based on utc. format the "+0900" :param author: the author of the feed. Must be either a string (the name) or a dict with name (required) and uri or email (both optional). 
Can be a list of (may be mixed, too) strings and dicts, too, if there are multiple authors. Required if not every entry has an author element. :param published: the time the entry was initially published. Must be a :class:`datetime.datetime` object. :param rights: copyright information for the entry. :param rights_type: the type attribute for the rights element. One of ``'html'``, ``'text'`` or ``'xhtml'``. Default is ``'text'``. :param links: additional links. Must be a list of dictionaries with href (required) and rel, type, hreflang, title, length (all optional) :param xml_base: The xml base (url) for this feed item. If not provided it will default to the item url. For more information on the elements see http://www.atomenabled.org/developers/syndication/ Everywhere where a list is demanded, any iterable can be used. """ def __init__(self, title=None, content=None, feed_url=None, **kwargs): self.title = title self.title_type = kwargs.get('title_type', 'text') self.content = content self.content_type = kwargs.get('content_type', 'html') self.url = kwargs.get('url') self.id = kwargs.get('id', self.url) self.updated = kwargs.get('updated') self.timezone = kwargs.get('timezone', 'Z') self.summary = kwargs.get('summary') self.summary_type = kwargs.get('summary_type', 'html') self.author = kwargs.get('author') self.published = kwargs.get('published') self.rights = kwargs.get('rights') self.links = kwargs.get('links', []) self.xml_base = kwargs.get('xml_base', feed_url) if not hasattr(self.author, '__iter__') \ or isinstance(self.author, (basestring, dict)): self.author = [self.author] for i, author in enumerate(self.author): if not isinstance(author, dict): self.author[i] = {'name': author} if not self.title: raise ValueError('title is required') if not self.id: raise ValueError('id is required') if not self.updated: raise ValueError('updated is required') def __repr__(self): return '<%s %r>' % ( self.__class__.__name__, self.title ) def generate(self): """Yields 
pieces of ATOM XML.""" base = '' if self.xml_base: base = ' xml:base="%s"' % escape(self.xml_base, True) yield u'<entry%s>\n' % base yield u' ' + _make_text_block('title', self.title, self.title_type) yield u' <id>%s</id>\n' % escape(self.id) yield u' <updated>%s</updated>\n' % format_iso8601(self.updated, self.timezone) if self.published: yield u' <published>%s</published>\n' % \ format_iso8601(self.published, self.timezone) if self.url: yield u' <link href="%s" />\n' % escape(self.url) for author in self.author: yield u' <author>\n' yield u' <name>%s</name>\n' % escape(author['name']) if 'uri' in author: yield u' <uri>%s</uri>\n' % escape(author['uri']) if 'email' in author: yield u' <email>%s</email>\n' % escape(author['email']) yield u' </author>\n' for link in self.links: yield u' <link %s/>\n' % ''.join('%s="%s" ' % \ (k, escape(link[k], True)) for k in link) if self.summary: yield u' ' + _make_text_block('summary', self.summary, self.summary_type) if self.content: yield u' ' + _make_text_block('content', self.content, self.content_type) yield u'</entry>\n' def to_string(self): """Convert the feed item into a unicode object.""" return u''.join(self.generate()) def __unicode__(self): return self.to_string() def __str__(self): return self.to_string().encode('utf-8')
gpl-3.0
ameerbadri/amazon-alexa-twilio-customer-service
Lambda Function/requests/packages/chardet/big5freq.py
3133
82594
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Big5 frequency table # by Taiwan's Mandarin Promotion Council # <http://www.edu.tw:81/mandr/> # # 128 --> 0.42261 # 256 --> 0.57851 # 512 --> 0.74851 # 1024 --> 0.89384 # 2048 --> 0.97583 # # Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 # Random Distribution Ration = 512/(5401-512)=0.105 # # Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 #Char to FreqOrder table BIG5_TABLE_SIZE = 5376 Big5CharToFreqOrder = ( 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 3682, 
3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 
214,1709,4521, 804, 35, 707, # 448 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 
749,1837, # 816 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 4555,2915,5114,4238,1726, 320,5115,3996,3046, 
788,2996,5116,2831,1774,1327,2873, # 1184 3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 
652,4272,4273, # 1904 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 
866, 740,1694,5264,2204,2843, # 2272 3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 1238,2586,3109,1259,5361, 
700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 
388,4363,5526,2491, 802, # 4064 5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 5594,2814,2032,1014,4121, 
213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 
636,4791,1856,3940, 760, # 5152 1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512 #Everything below is of no interest for detection purpose 2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392 2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408 5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424 5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440 5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456 5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472 5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488 
5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504 5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520 5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536 5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552 5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568 5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584 5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600 6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616 6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632 6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648 6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664 6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680 6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696 6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712 6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728 6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744 6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760 6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776 6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792 6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808 6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824 6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840 
6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856 6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872 6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888 6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904 6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920 6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936 6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952 6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968 6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984 6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000 6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016 6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032 6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048 6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064 6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080 6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096 6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112 6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128 6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144 6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160 6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176 6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192 
6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208 6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224 6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240 6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256 3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272 6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288 6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304 3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320 6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336 6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352 6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368 6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384 6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400 6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416 6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432 4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448 6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464 6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480 3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496 6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512 6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528 6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544 
6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560 6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576 6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592 6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608 6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624 6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640 6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656 6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672 7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688 7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704 7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720 7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736 7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752 7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768 7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784 7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800 7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816 7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832 7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848 7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864 7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880 7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896 
7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912 7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928 7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944 7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960 7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976 7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992 7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008 7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024 7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040 7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056 7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072 7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088 7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104 7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120 7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136 7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152 7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168 7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184 7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200 7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216 7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232 7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248 
7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264 7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280 7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296 7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312 7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328 7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344 7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360 7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376 7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392 7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408 7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424 7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440 3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456 7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472 7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488 7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504 7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520 4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536 7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552 7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568 7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584 7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600 
7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616 7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632 7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648 7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664 7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680 7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696 7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712 8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728 8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744 8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760 8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776 8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792 8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808 8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824 8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840 8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856 8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872 8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888 8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904 8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920 8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936 8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952 
8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968 8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984 8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000 8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016 8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032 8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048 8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064 8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080 8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096 8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112 8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128 8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144 8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160 8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176 8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192 8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208 8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224 8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240 8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256 8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272 8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288 8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304 
8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320 8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336 8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352 8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368 8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384 8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400 8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416 8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432 8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448 8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464 8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480 8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496 8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512 8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528 8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544 8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560 8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576 8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592 8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608 8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624 8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640 8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656 
8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672 8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688 4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704 8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720 8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736 8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752 8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768 9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784 9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800 9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816 9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832 9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848 9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864 9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880 9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896 9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912 9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928 9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944 9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960 9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976 9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992 9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008 
9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024 9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040 9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056 9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072 9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088 9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104 9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120 9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136 9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152 9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168 9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184 9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200 9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216 9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232 9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248 9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264 9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280 9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296 9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312 9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328 9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344 9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360 
9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376 3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392 9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408 9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424 9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440 4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456 9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472 9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488 9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504 9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520 9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536 9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552 9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568 9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584 9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600 9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616 9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632 9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648 9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664 9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680 9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696 9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712 
9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728 9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744 9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760 9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776 9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792 9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808 9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824 10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840 10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856 10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872 10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888 10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904 10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920 10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936 10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952 10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968 4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984 10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000 10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016 10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032 
10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048 10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064 10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080 10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096 10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112 4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128 10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144 10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160 10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176 10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192 10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208 10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224 10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240 10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256 10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272 10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288 10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304 10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320 10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336 
10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352 10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368 10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384 10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400 4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416 10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432 10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448 10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464 10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480 10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496 10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512 10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528 10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544 10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560 10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576 10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592 10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608 10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624 10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640 
10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656 10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672 10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688 10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704 10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720 10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736 10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752 10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768 10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784 10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800 10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816 10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832 10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848 10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864 10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880 10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896 11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912 11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928 11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944 
4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960 11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976 11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992 11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008 11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024 11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040 11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056 11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072 11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088 11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104 11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120 11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136 11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152 11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168 11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184 11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200 11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216 11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232 11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248 
11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264 11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280 11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296 11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312 11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328 11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344 11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360 11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376 11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392 11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408 11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424 11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440 11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456 11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472 4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488 11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504 11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520 11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536 11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552 
11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568 11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584 11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600 11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616 11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632 11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648 11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664 11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680 11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696 11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712 11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728 11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744 11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760 11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776 11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792 11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808 11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824 11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840 11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856 
11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872 11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888 11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904 11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920 11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936 12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952 12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968 12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984 12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000 12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016 12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032 12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048 12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064 12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080 12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096 12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112 12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128 12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144 12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160 
12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176 4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192 4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208 4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224 12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240 12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256 12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272 12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288 12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304 12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320 12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336 12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352 12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368 12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384 12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400 12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416 12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432 12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448 12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464 
12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480 12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496 12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512 12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528 12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544 12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560 12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576 12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592 12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608 12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624 12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640 12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656 12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672 12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688 12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704 12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720 12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736 12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752 12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768 
12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784 12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800 12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816 12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832 12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848 12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864 12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880 12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896 12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912 12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928 12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944 12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960 12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976 4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992 13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008 13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024 13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040 13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056 13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072 
13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088 13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104 4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120 13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136 13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152 13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168 13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184 13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200 13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216 13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232 13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248 13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264 13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280 13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296 13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312 13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328 13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344 13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360 5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376 
13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392 13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408 13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424 13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440 13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456 13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472 13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488 13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504 13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520 13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536 13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552 13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568 13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584 13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600 13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616 13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632 13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648 13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664 13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680 
13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696 13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712 13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728 13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744 13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760 13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776 13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792 13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808 13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824 13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840 13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856 13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872 13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888 13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904 13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920 13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936 13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952 13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968 13968,13969,13970,13971,13972) #13973 # flake8: noqa