repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
shreesundara/netsnmp
build/lib/pysmi/reader/base.py
6
1338
import os class AbstractReader(object): maxMibSize = 10000000 # MIBs can't be that large fuzzyMatching = True # try different file names while searching for MIB originalMatching = uppercaseMatching = lowcaseMatching = True exts = ['', os.path.extsep + 'txt', os.path.extsep + 'mib', os.path.extsep + 'my'] exts.extend([x.upper() for x in exts if x]) def setOptions(self, **kwargs): for k in kwargs: setattr(self, k, kwargs[k]) return self def getMibVariants(self, mibname): filenames = [] if self.originalMatching: filenames.append(mibname) if self.uppercaseMatching: filenames.append(mibname.upper()) if self.lowcaseMatching: filenames.append(mibname.lower()) if self.fuzzyMatching: part = filenames[-1].find('-mib') if part != -1: filenames.extend( [x[:part] for x in filenames] ) else: suffixed = mibname + '-mib' filenames.append(suffixed.upper()) filenames.append(suffixed.lower()) return ((x, x+y) for x in filenames for y in self.exts) def getData(self, filename): raise NotImplementedError()
bsd-2-clause
LS80/xbmc
addons/service.xbmc.versioncheck/lib/versions.py
25
7009
# -*- coding: utf-8 -*- # # Copyright (C) 2013 Team-XBMC # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from lib.common import log def compare_version(version_installed, versionlist): # Create separate version lists versionlist_stable = versionlist['releases']['stable'] versionlist_rc = versionlist['releases']['releasecandidate'] versionlist_beta = versionlist['releases']['beta'] versionlist_alpha = versionlist['releases']['alpha'] versionlist_prealpha = versionlist['releases']['prealpha'] log('Version installed %s' %version_installed) ### Check to upgrade to newest available stable version # check on smaller major version. 
Smaller version than available always notify oldversion = False version_available = '' # check if installed major version is smaller than available major stable # here we don't care if running non stable if version_installed['major'] < int(versionlist_stable[0]['major']): version_available = versionlist_stable[0] oldversion = 'stable' log('Version available %s' %versionlist_stable[0]) log('You are running an older version') # check if installed major version is equal than available major stable # however also check on minor version and still don't care about non stable elif version_installed['major'] == int(versionlist_stable[0]['major']): if version_installed['minor'] < int(versionlist_stable[0]['minor']): version_available = versionlist_stable[0] oldversion = 'stable' log('Version available %s' %versionlist_stable[0]) log('You are running an older minor version') # check for <= minor !stable elif version_installed['tag'] != 'stable' and version_installed['minor'] <= int(versionlist_stable[0]['minor']): version_available = versionlist_stable[0] oldversion = True log('Version available %s' %versionlist_stable[0]) log('You are running an older non stable minor version') else: log('Version available %s' %versionlist_stable[0]) log('There is no newer stable available') # Already skipped a possible newer stable build. Let's continue with non stable builds. 
# Check also 'oldversion' hasn't been set to 'stable' or true by previous checks because if so, # those part need to be skipped #check for RC builds if not oldversion and version_installed['tag'] in ['releasecandidate']: # check if you are using a RC build lower than current available RC # then check if you are using a beta/alpha lower than current available RC # 14.0rc3 is newer than: 14.0rc1, 14.0b9, 14.0a15 if version_installed['major'] <= int(versionlist_rc[0]['major']): if version_installed['minor'] <= int(versionlist_rc[0]['minor']): if version_installed.get('tagversion','') < versionlist_rc[0]['tagversion']: version_available = versionlist_rc[0] oldversion = True log('Version available %s' %versionlist_rc[0]) log('You are running an older RC version') # now check if installed !=rc elif not oldversion and version_installed['tag'] in ['beta','alpha','prealpha']: if version_installed['major'] <= int(versionlist_rc[0]['major']): if version_installed['minor'] <= int(versionlist_beta[0]['minor']): version_available = versionlist_rc[0] oldversion = True log('Version available %s' %versionlist_rc[0]) log('You are running an older non RC version') #check for beta builds if not oldversion and version_installed['tag'] == 'beta': # check if you are using a RC build lower than current available RC # then check if you are using a beta/alpha lower than current available RC # 14.0b3 is newer than: 14.0b1, 14.0a15 if version_installed['major'] <= int(versionlist_beta[0]['major']): if version_installed['minor'] <= int(versionlist_beta[0]['minor']): if version_installed.get('tagversion','') < versionlist_beta[0]['tagversion']: version_available = versionlist_beta[0] oldversion = True log('Version available %s' %versionlist_beta[0]) log('You are running an older beta version') # now check if installed !=beta elif not oldversion and version_installed['tag'] in ['alpha','prealpha']: if version_installed['major'] <= int(versionlist_beta[0]['major']): if version_installed['minor'] 
<= int(versionlist_beta[0]['minor']): version_available = versionlist_beta[0] oldversion = True log('Version available %s' %versionlist_beta[0]) log('You are running an older non beta version') #check for alpha builds and older if not oldversion and version_installed['tag'] == 'alpha': # check if you are using a RC build lower than current available RC # then check if you are using a beta/alpha lower than current available RC # 14.0a3 is newer than: 14.0a1 or pre-alpha if version_installed['major'] <= int(versionlist_alpha[0]['major']): if version_installed['minor'] <= int(versionlist_alpha[0]['minor']): if version_installed.get('tagversion','') < versionlist_alpha[0]['tagversion']: version_available = versionlist_alpha[0] oldversion = True log('Version available %s' %versionlist_alpha[0]) log('You are running an older alpha version') # now check if installed !=alpha elif not oldversion and version_installed['tag'] in ['prealpha']: if version_installed['major'] <= int(versionlist_alpha[0]['major']): if version_installed['minor'] <= int(versionlist_alpha[0]['minor']): version_available = versionlist_alpha[0] oldversion = True log('Version available %s' %versionlist_alpha[0]) log('You are running an older non alpha version') version_stable = versionlist_stable[0] return oldversion, version_installed, version_available, version_stable
gpl-2.0
saafir7/exynos7420
tools/perf/scripts/python/futex-contention.py
11261
1486
# futex contention # (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com> # Licensed under the terms of the GNU GPL License version 2 # # Translation of: # # http://sourceware.org/systemtap/wiki/WSFutexContention # # to perf python scripting. # # Measures futex contention import os, sys sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from Util import * process_names = {} thread_thislock = {} thread_blocktime = {} lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time process_names = {} # long-lived pid-to-execname mapping def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm, nr, uaddr, op, val, utime, uaddr2, val3): cmd = op & FUTEX_CMD_MASK if cmd != FUTEX_WAIT: return # we don't care about originators of WAKE events process_names[tid] = comm thread_thislock[tid] = uaddr thread_blocktime[tid] = nsecs(s, ns) def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm, nr, ret): if thread_blocktime.has_key(tid): elapsed = nsecs(s, ns) - thread_blocktime[tid] add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed) del thread_blocktime[tid] del thread_thislock[tid] def trace_begin(): print "Press control+C to stop and show the summary" def trace_end(): for (tid, lock) in lock_waits: min, max, avg, count = lock_waits[tid, lock] print "%s[%d] lock %x contended %d times, %d avg ns" % \ (process_names[tid], tid, lock, count, avg)
gpl-2.0
aymanim/rosdep
test/test_rosdep_gbpdistro_support.py
7
8987
# Copyright (c) 2012, Willow Garage, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the Willow Garage, Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import os try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen def get_test_dir(): return os.path.abspath(os.path.join(os.path.dirname(__file__), 'sources.list.d')) def test_url_constants(): from rosdep2.gbpdistro_support import FUERTE_GBPDISTRO_URL for url_name, url in [ ('FUERTE_GBPDISTRO_URL', FUERTE_GBPDISTRO_URL)]: try: f = urlopen(url) f.read() f.close() except: assert False, "URL [%s][%s] failed to download" % (url_name, url) def test_get_gbprepo_as_rosdep_data(): from rosdep2.rosdistrohelper import get_index from rosdep2.gbpdistro_support import get_gbprepo_as_rosdep_data distro = list(get_index().distributions.keys())[0] data = get_gbprepo_as_rosdep_data(distro) for k in ['ros', 'catkin', 'genmsg']: assert k in data, data assert data['ros']['ubuntu'] try: get_gbprepo_as_rosdep_data('fooNonExistantDistro') assert False, "should have raised" except RuntimeError: pass def test_download_gbpdistro_as_rosdep_data(): from rosdep2.gbpdistro_support import download_gbpdistro_as_rosdep_data from rosdep2.gbpdistro_support import FUERTE_GBPDISTRO_URL from rosdep2.rep3 import REP3_TARGETS_URL from rosdep2 import DownloadFailure data = download_gbpdistro_as_rosdep_data(FUERTE_GBPDISTRO_URL) # don't go beyond this, this test is just making sure the download # plumbing is correct, not the loader. 
for k in ['ros', 'catkin', 'genmsg']: assert k in data, data assert data['ros']['ubuntu'] # try with bad url to trigger exception handling try: # override targets URL with bad URL download_gbpdistro_as_rosdep_data(FUERTE_GBPDISTRO_URL, targets_url='http://bad.ros.org/foo.yaml') assert False, "should have raised" except DownloadFailure: pass try: # use targets URL, which should have a bad format download_gbpdistro_as_rosdep_data(REP3_TARGETS_URL) assert False, "should have raised" except DownloadFailure: pass def test_gbprepo_to_rosdep_data_on_bad_inputs(): from rosdep2.gbpdistro_support import gbprepo_to_rosdep_data from rosdep2 import InvalidData simple_gbpdistro = {'release-name': 'foorte', 'repositories': {}, 'type': 'gbp'} targets = {'foorte': ['lucid', 'oneiric']} # test bad data try: gbprepo_to_rosdep_data(simple_gbpdistro, [targets]) assert False, "should have raised" except InvalidData: pass try: gbprepo_to_rosdep_data({ 'targets': 1, 'repositories': [], 'type': 'gbp'}, targets) assert False, "should have raised" except InvalidData: pass try: gbprepo_to_rosdep_data([], targets) assert False, "should have raised" except InvalidData: pass # release-name must be in targets try: gbprepo_to_rosdep_data({ 'release-name': 'barte', 'repositories': [], 'type': 'gbp'}, targets) assert False, "should have raised" except InvalidData: pass # gbp-distros must be list of dicts try: gbprepo_to_rosdep_data({ 'release-name': 'foorte', 'repositories': [1], 'type': 'gbp'}, targets) assert False, "should have raised" except InvalidData: pass # gbp-distro target must be 'all' or a list of strings try: bad_example = {'name': 'common', 'target': [1], 'url': 'git://github.com/wg-debs/common_msgs.git'} gbprepo_to_rosdep_data({ 'release-name': 'foorte', 'repositories': [bad_example], 'type': 'gbp'}, targets) assert False, "should have raised" except InvalidData: pass def test_gbprepo_to_rosdep_data_on_ok_input(): from rosdep2.gbpdistro_support import gbprepo_to_rosdep_data 
simple_gbpdistro = {'release-name': 'foorte', 'repositories': {}, 'type': 'gbp'} targets = {'foorte': ['lucid', 'oneiric']} # make sure our sample files work for the above checks before # proceeding to real data rosdep_data = gbprepo_to_rosdep_data(simple_gbpdistro, targets) assert rosdep_data is not None assert {} == rosdep_data gbpdistro_data = {'release-name': 'foorte', 'repositories': { 'common_msgs': dict( target='all', url='git://github.com/wg-debs/common_msgs.git', packages={ 'foo': 'subdir/foo', 'bar': 'subdir/bar' }), 'gazebo': dict( target=['lucid', 'natty'], url='git://github.com/wg-debs/gazebo.git'), 'foo-bar': dict( target=['precise'], url='git://github.com/wg-debs/gazebo.git', packages={ 'foo-bar': None }), }, 'type': 'gbp', } rosdep_data = gbprepo_to_rosdep_data(gbpdistro_data, targets) for k in ['foo', 'bar', 'gazebo', 'foo-bar']: assert k in rosdep_data, k # all targets and name transform # These are from the 'common_msgs' repo above. pkgs = ['foo', 'bar'] v = 'ros-foorte-%s' for pkg in pkgs: for p in ['lucid', 'oneiric']: rule = rosdep_data[pkg]['ubuntu'][p] assert rule['apt']['packages'] == [v % pkg], rule['apt']['packages'] for p in ['maverick', 'natty']: assert p not in rosdep_data[k]['ubuntu'] # target overrides pkg = 'gazebo' v = 'ros-foorte-gazebo' for p in ['lucid', 'natty']: rule = rosdep_data[pkg]['ubuntu'][p] assert rule['apt']['packages'] == [v], rule['apt']['packages'] for p in ['oneiric', 'precise']: assert p not in rosdep_data[pkg]['ubuntu'] # target overrides # These are from the 'foo-bar' repo above. 
v = 'ros-foorte-foo-bar' for pkg in ['foo-bar']: for p in ['precise']: rule = rosdep_data[pkg]['ubuntu'][p] assert rule['apt']['packages'] == [v], rule['apt']['packages'] for p in ['oneiric', 'natty', 'lucid']: assert p not in rosdep_data[pkg]['ubuntu'] def test_get_owner_name_homebrew(): from rosdep2.gbpdistro_support import get_owner_name empty_url = '' assert get_owner_name(empty_url) == 'ros', 'url: ' + empty_url https_test_url = 'https://github.com/' \ + 'ros/rosdistro/raw/master/releases/fuerte.yaml' assert get_owner_name(https_test_url) == 'ros', 'url: ' + https_test_url user_test_url = 'https://github.com/' \ + 'zklapow/rosdistro/raw/master/releases/fuerte.yaml' assert get_owner_name(user_test_url) == 'zklapow', 'url: ' + user_test_url non_github_url = 'https://ros.org/files/releases/fuerte.yaml' assert get_owner_name(non_github_url) == 'ros', 'url: ' + non_github_url
bsd-3-clause
yohanko88/gem5-DC
src/mem/slicc/ast/DeclListAST.py
27
2210
# Copyright (c) 1999-2008 Mark D. Hill and David A. Wood # Copyright (c) 2009 The Hewlett-Packard Development Company # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from slicc.ast.AST import AST class DeclListAST(AST): def __init__(self, slicc, decls): super(DeclListAST, self).__init__(slicc) if not isinstance(decls, (list, tuple)): decls = [ decls ] self.decls = decls def __repr__(self): return "[DeclListAST: %s]" % (', '.join(repr(d) for d in self.decls)) def files(self, parent=None): s = set() for decl in self.decls: s |= decl.files(parent) return s def generate(self): for decl in self.decls: decl.findMachines() decl.generate()
bsd-3-clause
av8ramit/tensorflow
tensorflow/contrib/gan/python/namedtuples.py
17
6624
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Named tuples for TFGAN. TFGAN training occurs in four steps, and each step communicates with the next step via one of these named tuples. At each step, you can either use a TFGAN helper function in `train.py`, or you can manually construct a tuple. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections __all__ = [ 'GANModel', 'InfoGANModel', 'ACGANModel', 'CycleGANModel', 'GANLoss', 'CycleGANLoss', 'GANTrainOps', 'GANTrainSteps', ] class GANModel( collections.namedtuple('GANModel', ( 'generator_inputs', 'generated_data', 'generator_variables', 'generator_scope', 'generator_fn', 'real_data', 'discriminator_real_outputs', 'discriminator_gen_outputs', 'discriminator_variables', 'discriminator_scope', 'discriminator_fn', ))): """A GANModel contains all the pieces needed for GAN training. Generative Adversarial Networks (https://arxiv.org/abs/1406.2661) attempt to create an implicit generative model of data by solving a two agent game. The generator generates candidate examples that are supposed to match the data distribution, and the discriminator aims to tell the real examples apart from the generated samples. Args: generator_inputs: The random noise source that acts as input to the generator. 
generated_data: The generated output data of the GAN. generator_variables: A list of all generator variables. generator_scope: Variable scope all generator variables live in. generator_fn: The generator function. real_data: A tensor or real data. discriminator_real_outputs: The discriminator's output on real data. discriminator_gen_outputs: The discriminator's output on generated data. discriminator_variables: A list of all discriminator variables. discriminator_scope: Variable scope all discriminator variables live in. discriminator_fn: The discriminator function. """ # TODO(joelshor): Have this class inherit from `GANModel`. class InfoGANModel( collections.namedtuple('InfoGANModel', GANModel._fields + ( 'structured_generator_inputs', 'predicted_distributions', 'discriminator_and_aux_fn', ))): """An InfoGANModel contains all the pieces needed for InfoGAN training. See https://arxiv.org/abs/1606.03657 for more details. Args: structured_generator_inputs: A list of Tensors representing the random noise that must have high mutual information with the generator output. List length should match `predicted_distributions`. predicted_distributions: A list of tf.Distributions. Predicted by the recognizer, and used to evaluate the likelihood of the structured noise. List length should match `structured_generator_inputs`. discriminator_and_aux_fn: The original discriminator function that returns a tuple of (logits, `predicted_distributions`). """ class ACGANModel( collections.namedtuple('ACGANModel', GANModel._fields + ('one_hot_labels', 'discriminator_real_classification_logits', 'discriminator_gen_classification_logits',))): """An ACGANModel contains all the pieces needed for ACGAN training. See https://arxiv.org/abs/1610.09585 for more details. Args: one_hot_labels: A Tensor holding one-hot-labels for the batch. discriminator_real_classification_logits: Classification logits for real data. discriminator_gen_classification_logits: Classification logits for generated data. 
""" class CycleGANModel( collections.namedtuple( 'CycleGANModel', ('model_x2y', 'model_y2x', 'reconstructed_x', 'reconstructed_y'))): """An CycleGANModel contains all the pieces needed for CycleGAN training. The model `model_x2y` generator F maps data set X to Y, while the model `model_y2x` generator G maps data set Y to X. See https://arxiv.org/abs/1703.10593 for more details. Args: model_x2y: A `GANModel` namedtuple whose generator maps data set X to Y. model_y2x: A `GANModel` namedtuple whose generator maps data set Y to X. reconstructed_x: A `Tensor` of reconstructed data X which is G(F(X)). reconstructed_y: A `Tensor` of reconstructed data Y which is F(G(Y)). """ class GANLoss( collections.namedtuple('GANLoss', ( 'generator_loss', 'discriminator_loss' ))): """GANLoss contains the generator and discriminator losses. Args: generator_loss: A tensor for the generator loss. discriminator_loss: A tensor for the discriminator loss. """ class CycleGANLoss( collections.namedtuple('CycleGANLoss', ('loss_x2y', 'loss_y2x'))): """CycleGANLoss contains the losses for `CycleGANModel`. See https://arxiv.org/abs/1703.10593 for more details. Args: loss_x2y: A `GANLoss` namedtuple representing the loss of `model_x2y`. loss_y2x: A `GANLoss` namedtuple representing the loss of `model_y2x`. """ class GANTrainOps( collections.namedtuple('GANTrainOps', ( 'generator_train_op', 'discriminator_train_op', 'global_step_inc_op' ))): """GANTrainOps contains the training ops. Args: generator_train_op: Op that performs a generator update step. discriminator_train_op: Op that performs a discriminator update step. global_step_inc_op: Op that increments the shared global step. """ class GANTrainSteps( collections.namedtuple('GANTrainSteps', ( 'generator_train_steps', 'discriminator_train_steps' ))): """Contains configuration for the GAN Training. Args: generator_train_steps: Number of generator steps to take in each GAN step. 
discriminator_train_steps: Number of discriminator steps to take in each GAN step. """
apache-2.0
kenshay/ImageScripter
ProgramData/SystemFiles/Python/Lib/site-packages/elan/onHoldScripts/-00error__.py
2
1043
from ImageScripter import * from elan import * Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.security.Click() Configurator.system.Click() Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.media.Click() Configurator.security.Click() Configurator.system.Click() Configurator.security.Click() Configurator.system.Click() raise ValueError('asdasd')
gpl-3.0
pasiegel/SickGear
lib/hachoir_parser/audio/itunesdb.py
90
17106
""" iPod iTunesDB parser. Documentation: - http://ipodlinux.org/ITunesDB Author: Romain HERAULT Creation date: 19 august 2006 """ from lib.hachoir_parser import Parser from lib.hachoir_core.field import (FieldSet, UInt8, UInt16, UInt32, UInt64, TimestampMac32, String, Float32, NullBytes, Enum) from lib.hachoir_core.endian import LITTLE_ENDIAN from lib.hachoir_core.tools import humanDuration from lib.hachoir_core.text_handler import displayHandler, filesizeHandler list_order={ 1 : "playlist order (manual sort order)", 2 : "???", 3 : "songtitle", 4 : "album", 5 : "artist", 6 : "bitrate", 7 : "genre", 8 : "kind", 9 : "date modified", 10 : "track number", 11 : "size", 12 : "time", 13 : "year", 14 : "sample rate", 15 : "comment", 16 : "date added", 17 : "equalizer", 18 : "composer", 19 : "???", 20 : "play count", 21 : "last played", 22 : "disc number", 23 : "my rating", 24 : "release date", 25 : "BPM", 26 : "grouping", 27 : "category", 28 : "description", 29 : "show", 30 : "season", 31 : "episode number" } class DataObject(FieldSet): type_name={ 1:"Title", 2:"Location", 3:"Album", 4:"Artist", 5:"Genre", 6:"Filetype", 7:"EQ Setting", 8:"Comment", 9:"Category", 12:"Composer", 13:"Grouping", 14:"Description text", 15:"Podcast Enclosure URL", 16:"Podcast RSS URL", 17:"Chapter data", 18:"Subtitle", 19:"Show (for TV Shows only)", 20:"Episode", 21:"TV Network", 50:"Smart Playlist Data", 51:"Smart Playlist Rules", 52:"Library Playlist Index", 100:"Column info", } mhod52_sort_index_type_name={ 3:"Title", 4:"Album, then Disk/Tracknumber, then Title", 5:"Artist, then Album, then Disc/Tracknumber, then Title", 7:"Genre, then Artist, then Album, then Disc/Tracknumber, then Title", 8:"Composer, then Title" } def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) self._size = self["entry_length"].value *8 def createFields(self): yield String(self, "header_id", 4, "Data Object Header Markup (\"mhod\")", charset="ISO-8859-1") yield UInt32(self, "header_length", "Header 
Length") yield UInt32(self, "entry_length", "Entry Length") yield Enum(UInt32(self, "type", "type"),self.type_name) if(self["type"].value<15): yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "position", "Position") yield UInt32(self, "length", "String Length in bytes") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield String(self, "string", self["length"].value, "String Data", charset="UTF-16-LE") elif (self["type"].value<17): yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield String(self, "string", self._size/8-self["header_length"].value, "String Data", charset="UTF-8") elif (self["type"].value == 52): yield UInt32(self, "unknown[]", "unk1") yield UInt32(self, "unknown[]", "unk2") yield Enum(UInt32(self, "sort_index_type", "Sort Index Type"),self.mhod52_sort_index_type_name) yield UInt32(self, "entry_count", "Entry Count") indexes_size = self["entry_count"].value*4 padding_offset = self["entry_length"].value - indexes_size padding = self.seekByte(padding_offset, "header padding") if padding: yield padding for i in xrange(self["entry_count"].value): yield UInt32(self, "index["+str(i)+"]", "Index of the "+str(i)+"nth mhit") else: padding = self.seekByte(self["header_length"].value, "header padding") if padding: yield padding padding = self.seekBit(self._size, "entry padding") if padding: yield padding class TrackItem(FieldSet): x1_type_name={ 0:"AAC or CBR MP3", 1:"VBR MP3" } x2_type_name={ 0:"AAC", 1:"MP3" } media_type_name={ 0x00:"Audio/Video", 0x01:"Audio", 0x02:"Video", 0x04:"Podcast", 0x06:"Video Podcast", 0x08:"Audiobook", 0x20:"Music Video", 0x40:"TV Show", 0X60:"TV Show (Music lists)", } def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) self._size = self["entry_length"].value *8 def createFields(self): yield String(self, "header_id", 4, "Track Item Header Markup (\"mhit\")", charset="ISO-8859-1") yield UInt32(self, "header_length", "Header Length") yield 
UInt32(self, "entry_length", "Entry Length") yield UInt32(self, "string_number", "Number of Strings") yield UInt32(self, "unique_id", "Unique ID") yield UInt32(self, "visible_tag", "Visible Tag") yield String(self, "file_type", 4, "File Type") yield Enum(UInt8(self, "x1_type", "Extended Type 1"),self.x1_type_name) yield Enum(UInt8(self, "x2_type", "Extended type 2"),self.x2_type_name) yield UInt8(self, "compilation_flag", "Compilation Flag") yield UInt8(self, "rating", "Rating") yield TimestampMac32(self, "added_date", "Date when the item was added") yield filesizeHandler(UInt32(self, "size", "Track size in bytes")) yield displayHandler(UInt32(self, "length", "Track length in milliseconds"), humanDuration) yield UInt32(self, "track_number", "Number of this track") yield UInt32(self, "total_track", "Total number of tracks") yield UInt32(self, "year", "Year of the track") yield UInt32(self, "bitrate", "Bitrate") yield UInt32(self, "samplerate", "Sample Rate") yield UInt32(self, "volume", "volume") yield UInt32(self, "start_time", "Start playing at, in milliseconds") yield UInt32(self, "stop_time", "Stop playing at, in milliseconds") yield UInt32(self, "soundcheck", "SoundCheck preamp") yield UInt32(self, "playcount_1", "Play count of the track") yield UInt32(self, "playcount_2", "Play count of the track (identical to playcount_1)") yield UInt32(self, "last_played_time", "Time the song was last played") yield UInt32(self, "disc_number", "disc number in multi disc sets") yield UInt32(self, "total_discs", "Total number of discs in the disc set") yield UInt32(self, "userid", "User ID in the DRM scheme") yield TimestampMac32(self, "last_modified", "Time of the last modification of the track") yield UInt32(self, "bookmark_time", "Bookmark time for AudioBook") yield UInt64(self, "dbid", "Unique DataBase ID for the song (identical in mhit and in mhii)") yield UInt8(self, "checked", "song is checked") yield UInt8(self, "application_rating", "Last Rating before change") yield 
UInt16(self, "BPM", "BPM of the track") yield UInt16(self, "artwork_count", "number of artworks fo this item") yield UInt16(self, "unknown[]") yield UInt32(self, "artwork_size", "Total size of artworks in bytes") yield UInt32(self, "unknown[]") yield Float32(self, "sample_rate_2", "Sample Rate express in float") yield UInt32(self, "released_date", "Date of release in Music Store or in Podcast") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt8(self, "has_artwork", "0x01 for track with artwork, 0x02 otherwise") yield UInt8(self, "skip_wen_shuffling", "Skip that track when shuffling") yield UInt8(self, "remember_playback_position", "Remember playback position") yield UInt8(self, "flag4", "Flag 4") yield UInt64(self, "dbid2", "Unique DataBase ID for the song (identical as above)") yield UInt8(self, "lyrics_flag", "Lyrics Flag") yield UInt8(self, "movie_file_flag", "Movie File Flag") yield UInt8(self, "played_mark", "Track has been played") yield UInt8(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "sample_count", "Number of samples in the song (only for WAV and AAC files)") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield Enum(UInt32(self, "media_type", "Media Type for video iPod"),self.media_type_name) yield UInt32(self, "season_number", "Season Number") yield UInt32(self, "episode_number", "Episode Number") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") padding = self.seekByte(self["header_length"].value, "header padding") if padding: yield padding #while ((self.stream.readBytes(0, 4) == 'mhod') and ((self.current_size/8) < self["entry_length"].value)): for i in 
xrange(self["string_number"].value): yield DataObject(self, "data[]") padding = self.seekBit(self._size, "entry padding") if padding: yield padding class TrackList(FieldSet): def createFields(self): yield String(self, "header_id", 4, "Track List Header Markup (\"mhlt\")", charset="ISO-8859-1") yield UInt32(self, "header_length", "Header Length") yield UInt32(self, "track_number", "Number of Tracks") padding = self.seekByte(self["header_length"].value, "header padding") if padding: yield padding for i in xrange(self["track_number"].value): yield TrackItem(self, "track[]") class PlaylistItem(FieldSet): def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) self._size = self["entry_length"].value *8 def createFields(self): yield String(self, "header_id", 4, "Playlist Item Header Markup (\"mhip\")", charset="ISO-8859-1") yield UInt32(self, "header_length", "Header Length") yield UInt32(self, "entry_length", "Entry Length") yield UInt32(self, "data_object_child_count", "Number of Child Data Objects") yield UInt32(self, "podcast_grouping_flag", "Podcast Grouping Flag") yield UInt32(self, "group_id", "Group ID") yield UInt32(self, "track_id", "Track ID") yield TimestampMac32(self, "timestamp", "Song Timestamp") yield UInt32(self, "podcast_grouping_ref", "Podcast Grouping Reference") padding = self.seekByte(self["header_length"].value, "header padding") if padding: yield padding for i in xrange(self["data_object_child_count"].value): yield DataObject(self, "mhod[]") class Playlist(FieldSet): is_master_pl_name={ 0:"Regular playlist", 1:"Master playlist" } is_podcast_name={ 0:"Normal Playlist List", 1:"Podcast Playlist List" } list_sort_order_name={ 1:"Manual Sort Order", 2:"???", 3:"Song Title", 4:"Album", 5:"Artist", 6:"Bitrate", 7:"Genre", 8:"Kind", 9:"Date Modified", 10:"Track Number", 11:"Size", 12:"Time", 13:"Year", 14:"Sample Rate", 15:"Comment", 16:"Date Added", 17:"Equalizer", 18:"Composer", 19:"???", 20:"Play Count", 21:"Last Played", 22:"Disc 
Number", 23:"My Rating", 24:"Release Date", 25:"BPM", 26:"Grouping", 27:"Category", 28:"Description", 29:"Show", 30:"Season", 31:"Episode Number" } def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) self._size = self["entry_length"].value *8 def createFields(self): yield String(self, "header_id", 4, "Playlist List Header Markup (\"mhyp\")", charset="ISO-8859-1") yield UInt32(self, "header_length", "Header Length") yield UInt32(self, "entry_length", "Entry Length") yield UInt32(self, "data_object_child_count", "Number of Child Data Objects") yield UInt32(self, "playlist_count", "Number of Playlist Items") yield Enum(UInt8(self, "type", "Normal or master playlist?"), self.is_master_pl_name) yield UInt8(self, "XXX1", "XXX1") yield UInt8(self, "XXX2", "XXX2") yield UInt8(self, "XXX3", "XXX3") yield TimestampMac32(self, "creation_date", "Date when the playlist was created") yield UInt64(self, "playlistid", "Persistent Playlist ID") yield UInt32(self, "unk3", "unk3") yield UInt16(self, "string_mhod_count", "Number of string MHODs for this playlist") yield Enum(UInt16(self, "is_podcast", "Playlist or Podcast List?"), self.is_podcast_name) yield Enum(UInt32(self, "sort_order", "Playlist Sort Order"), self.list_sort_order_name) padding = self.seekByte(self["header_length"].value, "entry padding") if padding: yield padding for i in xrange(self["data_object_child_count"].value): yield DataObject(self, "mhod[]") for i in xrange(self["playlist_count"].value): yield PlaylistItem(self, "playlist_item[]") class PlaylistList(FieldSet): def createFields(self): yield String(self, "header_id", 4, "Playlist List Header Markup (\"mhlp\")", charset="ISO-8859-1") yield UInt32(self, "header_length", "Header Length") yield UInt32(self, "playlist_number", "Number of Playlists") padding = self.seekByte(self["header_length"].value, "header padding") if padding: yield padding for i in xrange(self["playlist_number"].value): yield Playlist(self, "playlist[]") class 
DataSet(FieldSet): type_name={ 1:"Track List", 2:"Play List", 3:"Podcast List" } def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) self._size = self["entry_length"].value *8 def createFields(self): yield String(self, "header_id", 4, "DataSet Header Markup (\"mhsd\")", charset="ISO-8859-1") yield UInt32(self, "header_length", "Header Length") yield UInt32(self, "entry_length", "Entry Length") yield Enum(UInt32(self, "type", "type"),self.type_name) padding = self.seekByte(self["header_length"].value, "header_raw") if padding: yield padding if self["type"].value == 1: yield TrackList(self, "tracklist[]") if self["type"].value == 2: yield PlaylistList(self, "playlist_list[]"); if self["type"].value == 3: yield PlaylistList(self, "podcast_list[]"); padding = self.seekBit(self._size, "entry padding") if padding: yield padding class DataBase(FieldSet): def __init__(self, *args, **kw): FieldSet.__init__(self, *args, **kw) self._size = self["entry_length"].value *8 # def createFields(self): class ITunesDBFile(Parser): PARSER_TAGS = { "id": "itunesdb", "category": "audio", "min_size": 44*8, "magic": (('mhbd',0),), "description": "iPod iTunesDB file" } endian = LITTLE_ENDIAN def validate(self): return self.stream.readBytes(0, 4) == 'mhbd' def createFields(self): yield String(self, "header_id", 4, "DataBase Header Markup (\"mhbd\")", charset="ISO-8859-1") yield UInt32(self, "header_length", "Header Length") yield UInt32(self, "entry_length", "Entry Length") yield UInt32(self, "unknown[]") yield UInt32(self, "version_number", "Version Number") yield UInt32(self, "child_number", "Number of Children") yield UInt64(self, "id", "ID for this database") yield UInt32(self, "unknown[]") yield UInt64(self, "initial_dbid", "Initial DBID") size = self["header_length"].value-self.current_size/ 8 if size>0: yield NullBytes(self, "padding", size) for i in xrange(self["child_number"].value): yield DataSet(self, "dataset[]") padding = 
self.seekByte(self["entry_length"].value, "entry padding") if padding: yield padding def createContentSize(self): return self["entry_length"].value * 8
gpl-3.0
AlexHatesUnicorns/FDTD_Solver
world_lenses_v3/helpers/aspherical_lens.py
8
1352
# -*- coding: utf-8 -*-
import math

from .dimensions import px2mm, mm2px


def aspheric_curve_front(r, lam, lam_px):
    """Sag (depth along the optical axis, in px) of the front surface at radial height ``r`` (px).

    ``lam``/``lam_px`` define the px <-> mm conversion used by the simulation grid.
    """
    h = px2mm(r, lam, lam_px)
    R = 19.371  # base radius of curvature, mm
    # Even-order aspheric coefficients a1..a6 (terms h**2 .. h**12).
    coefficients = [
        0.027322,
        5.5360 * 10 ** -5,
        1.5599 * 10 ** -7,
        1.0454 * 10 ** -9,
        2.7424 * 10 ** -12,
        0,
    ]
    sag = h * h / R / (1 + math.sqrt(1 - (h / R) ** 2))
    for order, a in enumerate(coefficients, start=1):
        sag += a * h ** (2 * order)
    return mm2px(sag, lam, lam_px)


def aspheric_curve_back(r, lam, lam_px):
    """Sag (depth along the optical axis, in px) of the back surface at radial height ``r`` (px)."""
    h = px2mm(r, lam, lam_px)
    R = -73.7687  # base radius of curvature, mm (negative: concave toward the front)
    # Even-order aspheric coefficients a1..a6 (terms h**2 .. h**12).
    coefficients = [
        -0.012541,
        3.9795 * 10 ** -4,
        -5.1724 * 10 ** -6,
        4.2630 * 10 ** -8,
        -1.9919 * 10 ** -10,
        3.9967 * 10 ** -13,
    ]
    sag = h * h / R / (1 + math.sqrt(1 - (h / R) ** 2))
    for order, a in enumerate(coefficients, start=1):
        sag += a * (h ** (2 * order))
    return mm2px(sag, lam, lam_px)


def check_point(x, y, d, diameter, lam, lam_px):
    """Return True when the point (x, y) lies inside the aspheric lens.

    The point is given in Feder coordinates; ``d`` is the lens thickness along
    the axis and ``diameter`` its aperture, both in pixels.
    """
    inside_thickness = 0 <= x <= d
    inside_aperture = -diameter / 2 <= y <= diameter / 2
    if not (inside_thickness and inside_aperture):
        return False
    return (aspheric_curve_front(y, lam, lam_px) <= x
            and (x - d) <= aspheric_curve_back(y, lam, lam_px))
mit
michaelBenin/django-jinja
django_jinja/contrib/pipeline/templatetags/_pipeline.py
1
2411
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import

from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.loader import render_to_string

from pipeline.conf import settings
from pipeline.utils import guess_type
from pipeline.packager import Packager, PackageNotFound

from django_jinja import library

lib = library.Library()


@lib.global_function
def compressed_css(name):
    """Render the tags for the named pipeline CSS package.

    Returns an empty string when the package is not configured. With
    ``DEBUG`` off, a single tag for the packaged output file is rendered;
    with ``DEBUG`` on, one tag per compiled source file.
    """
    package = settings.PIPELINE_CSS.get(name, {})
    if package:
        package = {name: package}

    packager = Packager(css_packages=package, js_packages={})

    try:
        package = packager.package_for('css', name)
    except PackageNotFound:
        # Unknown package name: render nothing rather than fail the template.
        return ""

    def _render_css(path):
        template_name = package.template_name or "pipeline/css.jinja"
        context = package.extra_context
        context.update({
            'type': guess_type(path, 'text/css'),
            'url': staticfiles_storage.url(path)
        })
        return render_to_string(template_name, context)

    if not settings.DEBUG:
        return _render_css(package.output_filename)

    # DEBUG mode: compile and render each source file individually.
    paths = packager.compile(package.paths)
    tags = [_render_css(path) for path in paths]
    return '\n'.join(tags)


@lib.global_function
def compressed_js(name):
    """Render the tags for the named pipeline JS package.

    Returns an empty string when the package is not configured. With
    ``DEBUG`` off, a single tag for the packaged output file is rendered;
    with ``DEBUG`` on, one tag per compiled source file plus an inline
    script for any packed JS templates.
    """
    package = settings.PIPELINE_JS.get(name, {})
    if package:
        package = {name: package}

    packager = Packager(css_packages={}, js_packages=package)

    try:
        package = packager.package_for('js', name)
    except PackageNotFound:
        # Unknown package name: render nothing rather than fail the template.
        return ""

    def _render_js(path):
        template_name = package.template_name or "pipeline/js.jinja"
        context = package.extra_context
        context.update({
            'type': guess_type(path, 'text/javascript'),
            'url': staticfiles_storage.url(path),
        })
        return render_to_string(template_name, context)

    def _render_inline_js(js):
        context = package.extra_context
        context.update({
            'source': js
        })
        return render_to_string("pipeline/inline_js.jinja", context)

    if not settings.DEBUG:
        return _render_js(package.output_filename)

    # DEBUG mode: compile and render each source file individually.
    paths = packager.compile(package.paths)
    templates = packager.pack_templates(package)
    tags = [_render_js(js) for js in paths]
    if templates:
        # Bug fix: this previously called the undefined name `_render_inline`,
        # raising NameError whenever a package had JS templates in DEBUG mode.
        tags.append(_render_inline_js(templates))
    return '\n'.join(tags)
bsd-3-clause
pchmieli/h2o-3
py2/h2o_glm.py
20
29862
import h2o_cmd, h2o_util import h2o2 as h2o import re, random, math from h2o_test import check_sandbox_for_errors, dump_json, verboseprint import h2o_nodes from tabulate import tabulate # recursive walk an object check that it has valid numbers only (no "" or nan or inf def check_obj_has_good_numbers(obj, hierarchy="", curr_depth=0, max_depth=4, allowNaN=False): """Represent instance of a class as JSON. Arguments: obj -- any object Return: String that represent JSON-encoded object. """ def serialize(obj, hierarchy="", curr_depth=0): """Recursively walk object's hierarchy. Limit to max_depth""" if curr_depth>max_depth: return if isinstance(obj, (bool, int, long, float, basestring)): try: number = float(obj) print "Yay!", hierarchy, number except: if obj is None: print "Not Yay! how come you're giving me None for a coefficient? %s %s" % (hierarchy, obj) elif str(obj)=="": print "Not Yay! how come you're giving me an empty string for a coefficient? %s %s" % (hierarchy, obj) else: raise Exception("%s %s %s is not a valid float" % (hierarchy, obj, type(obj))) # hack for now number = 0.0 if not allowNaN and math.isnan(number): raise Exception("%s %s is a NaN" % (hierarchy, obj)) if not allowNaN and math.isinf(number): raise Exception("%s %s is a Inf" % (hierarchy, obj)) return number elif isinstance(obj, dict): obj = obj.copy() for key in obj: obj[key] = serialize(obj[key], hierarchy + ".%" % key, curr_depth+1) return obj elif isinstance(obj, (list, tuple)): return [serialize(item, hierarchy + "[%s]" % i, curr_depth+1) for (i, item) in enumerate(obj)] elif hasattr(obj, '__dict__'): return serialize(obj.__dict__, hierarchy, curr_depth+1) else: return repr(obj) # Don't know how to handle, convert to string return (serialize(obj, hierarchy, curr_depth+1)) #************************************************************88 # where do we get the CM? 
def simpleCheckGLM(self, model, parameters, labelList, labelListUsed, allowFailWarning=False, allowZeroCoeff=False, prettyPrint=False, noPrint=False, maxExpectedIterations=None, doNormalized=False, allowNaN=False): # FIX! the structure is all different return warnings = '' # binomial = model.binomial residual_deviance = model.training_metrics.residual_deviance threshold = model.training_metrics.threshold check_obj_has_good_numbers(threshold, 'threshold', allowNaN=allowNaN) auc = model.AUC # NaN if not logistic # check_obj_has_good_numbers(auc, 'model.AUC') best_lambda_idx = model.best_lambda_idx model_category = model.model_category name = model.name residual_degrees_of_freedom = model.residual_degrees_of_freedom # is this no longer used? coefficients_magnitude = model.coefficients_magnitude null_deviance = model.null_deviance check_obj_has_good_numbers(null_deviance, 'model.null_deviance', allowNaN=allowNaN) null_degrees_of_freedom = model.null_degrees_of_freedom check_obj_has_good_numbers(null_degrees_of_freedom, 'model.null_degrees_of_freedom', allowNaN=allowNaN) domains = model.domains # when is is this okay to be NaN? AIC = model.AIC check_obj_has_good_numbers(AIC, 'model.AIC', allowNaN=allowNaN) names = model.names coeffs_names = model.coefficients_table.data[0] # these are returned as quoted strings. Turn them into numbers temp = model.coefficients_table.data[1] assert len(coeffs_names)==len(temp), "%s %s" % (len(coeffs_names), len(temp)) # we need coefficients to be floats or empty check_obj_has_good_numbers(temp, 'model.coeffs', allowNaN=False) # print "temp", temp[0:10] # print "temp[5489:5500]", temp[5489:5500] # UPDATE: None (null json) is legal for coeffs coeffs = map(lambda x : float(x) if (x is not None and str(x) != "") else 0, temp) intercept = coeffs[-1] interceptName = coeffs_names[-1] assert interceptName == 'Intercept' assert len(coeffs) == len(coeffs_names), "%s %s" % (len(coeffs), len(coeffs_names)) # FIX! 
if a coeff is zeroed/ignored, it doesn't show up? # get rid of intercept in glm response # assert (len(coeffs)-1) == len(labelListUsed, \ # "%s %s %s %s" % (len(coeffs), len(labelListUsed), coeffs, labelListUsed) # labelList still has the response column? # ignored columns aren't in model.names, but output response is. # labelListUsed has the response col removed so add 1 # Hmm..dropped coefficients again? can't do this check? # assert len(model.names) == len(labelListUsed), \ # "%s %s %s %s" % (len(model.names), len(labelListUsed), model.names, labelList) # this is no longer true! # assert model.threshold!=0 print "len(coeffs)", len(coeffs) print "coeffs:", coeffs # last one is intercept if interceptName != "Intercept" or abs(intercept)<1e-26: raise Exception("'Intercept' should be last in coeffs_names %s %s" % (interceptName, intercept)) y = parameters['response_column'] cString = "\n" for i,c in enumerate(coeffs_names): cString += "%s: %.5e " % (coeffs_names[i], coeffs[i]) print cString print "\nH2O intercept:\t\t%.5e" % intercept print "\nTotal # of coeffs:", len(coeffs_names) # intercept is buried in there too absIntercept = abs(float(intercept)) self.assertGreater(absIntercept, 1e-26, ( "abs. value of GLM coeffs['Intercept'] is " + str(absIntercept) + ", not >= 1e-26 for Intercept" + "\n" + "parameters:" + dump_json(parameters) )) if (not allowZeroCoeff) and (len(coeffs)>1): s = 0.0 for c in coeffs: s += abs(float(c)) self.assertGreater(s, 1e-26, ( "sum of abs. 
value of GLM coeffs/intercept is " + str(s) + ", not >= 1e-26\n" + "parameters:" + dump_json(parameters) )) # shouldn't have any errors check_sandbox_for_errors() return (warnings, coeffs, intercept) #************************************************************88 def pickRandGlmParams(paramDict, params): colX = 0 randomGroupSize = random.randint(1,len(paramDict)) for i in range(randomGroupSize): randomKey = random.choice(paramDict.keys()) randomV = paramDict[randomKey] randomValue = random.choice(randomV) params[randomKey] = randomValue if (randomKey=='x'): colX = randomValue # Only identity, log and inverse links are allowed for family=gaussian. # force legal family/ink combos if 'family' not in params: # defaults to gaussian if 'link' in params and params['link'] not in ('identity', 'log', 'inverse', 'familyDefault'): params['link'] = None elif params['family'] is not None and 'link' in params and params['link'] is not None: # only log/identity is legal? if params['family'] == 'poisson': if params['link'] not in ('identity', 'log', 'familyDefault'): params['link'] = None # only tweedie/tweedie is legal? 
elif params['family'] == 'tweedie': if params['link'] not in ('tweedie'): params['link'] = None elif params['family'] == 'binomial': # only logit and log if params['link'] not in ('logit', 'log', 'familyDefault'): params['link'] = None elif params['family'] == 'gaussian': if params['link'] not in ('identity', 'log', 'inverse', 'familyDefault'): params['link'] = None elif params['family'] is None: # defaults to gaussian if 'link' in params and params['link'] not in ('identity', 'log', 'inverse', 'familyDefault'): params['link'] = None if 'lambda_search' in params and params['lambda_search']==1: if 'nlambdas' in params and params['nlambdas']<=1: params['nlambdas'] = 2 return colX def simpleCheckGLMScore(self, glmScore, family='gaussian', allowFailWarning=False, **kwargs): warnings = None if 'warnings' in glmScore: warnings = glmScore['warnings'] # stop on failed x = re.compile("failed", re.IGNORECASE) # don't stop if fail to converge c = re.compile("converge", re.IGNORECASE) for w in warnings: print "\nwarning:", w if re.search(x,w) and not allowFailWarning: if re.search(c,w): # ignore the fail to converge warning now pass else: # stop on other 'fail' warnings (are there any? fail to solve? raise Exception(w) validation = glmScore['validation'] validation['err'] = h2o_util.cleanseInfNan(validation['err']) validation['nullDev'] = h2o_util.cleanseInfNan(validation['nullDev']) validation['resDev'] = h2o_util.cleanseInfNan(validation['resDev']) print "%15s %s" % ("err:\t", validation['err']) print "%15s %s" % ("nullDev:\t", validation['nullDev']) print "%15s %s" % ("resDev:\t", validation['resDev']) # threshold only there if binomial? 
# auc only for binomial if family=="binomial": print "%15s %s" % ("AUC:\t", validation['AUC']) print "%15s %s" % ("threshold:\t", validation['threshold']) err = False if family=="poisson" or family=="gaussian": if 'AIC' not in validation: print "AIC is missing from the glm json response" err = True if not allowNaN and math.isnan(validation['err']): print "Why is this err = 'nan'?? %6s %s" % ("err:\t", validation['err']) err = True if not allowNaN and math.isnan(validation['resDev']): print "Why is this resDev = 'nan'?? %6s %s" % ("resDev:\t", validation['resDev']) err = True if err: raise Exception ("How am I supposed to tell that any of these errors should be ignored?") # legal? if not allowNaN and math.isnan(validation['nullDev']): ## emsg = "Why is this nullDev = 'nan'?? %6s %s" % ("nullDev:\t", validation['nullDev']) ## raise Exception(emsg) pass def oldSimpleCheckGLM(self, glm, colX, allowFailWarning=False, allowZeroCoeff=False, prettyPrint=False, noPrint=False, maxExpectedIterations=None, doNormalized=False, **kwargs): # if we hit the max_iter, that means it probably didn't converge. should be 1-maxExpectedIter # h2o GLM will verboseprint the result and print errors. # so don't have to do that # different when cross validation is used? No trainingErrorDetails? GLMModel = glm['glm_model'] if not GLMModel: raise Exception("GLMModel didn't exist in the glm response? %s" % dump_json(glm)) warnings = None if 'warnings' in GLMModel and GLMModel['warnings']: warnings = GLMModel['warnings'] # stop on failed x = re.compile("failed", re.IGNORECASE) # don't stop if fail to converge c = re.compile("converge", re.IGNORECASE) for w in warnings: print "\nwarning:", w if re.search(x,w) and not allowFailWarning: if re.search(c,w): # ignore the fail to converge warning now pass else: # stop on other 'fail' warnings (are there any? fail to solve? raise Exception(w) # for key, value in glm.iteritems(): print key # not in GLMGrid? # FIX! don't get GLMParams if it can't solve? 
GLMParams = GLMModel['glm'] family = GLMParams["family"] # number of submodels = number of lambda # min of 2. lambda_max is first submodels = GLMModel['submodels'] # since all our tests?? only use one lambda, the best_lamda_idx should = 1 best_lambda_idx = GLMModel['best_lambda_idx'] print "best_lambda_idx:", best_lambda_idx lambda_max = GLMModel['lambda_max'] print "lambda_max:", lambda_max # currently lambda_max is not set by tomas. ..i.e.not valid if 1==0 and (lambda_max <= submodels[best_lambda_idx].lambda_value): raise Exception("lambda_max %s should always be > the lambda result %s we're checking" % (lambda_max, submodels[best_lambda_idx].lambda_value)) # submodels0 = submodels[0] # submodels1 = submodels[-1] # hackery to make it work when there's just one if (best_lambda_idx >= len(submodels)) or (best_lambda_idx < 0): raise Exception("best_lambda_idx: %s should point to one of lambdas (which has len %s)" % (best_lambda_idx, len(submodels))) if (best_lambda_idx >= len(submodels)) or (best_lambda_idx < 0): raise Exception("best_lambda_idx: %s should point to one of submodels (which has len %s)" % (best_lambda_idx, len(submodels))) submodels1 = submodels[best_lambda_idx] # hackery to make it work when there's just one iterations = submodels1['iteration'] print "GLMModel/iterations:", iterations # if we hit the max_iter, that means it probably didn't converge. should be 1-maxExpectedIter if maxExpectedIterations is not None and iterations > maxExpectedIterations: raise Exception("Convergence issue? GLM did iterations: %d which is greater than expected: %d" % (iterations, maxExpectedIterations) ) if 'validation' not in submodels1: raise Exception("Should be a 'validation' key in submodels1: %s" % dump_json(submodels1)) validationsList = submodels1['validation'] validations = validationsList # xval. compare what we asked for and what we got. 
n_folds = kwargs.setdefault('n_folds', None) print "GLMModel/validations" validations['null_deviance'] = h2o_util.cleanseInfNan(validations['null_deviance']) validations['residual_deviance'] = h2o_util.cleanseInfNan(validations['residual_deviance']) print "%15s %s" % ("null_deviance:\t", validations['null_deviance']) print "%15s %s" % ("residual_deviance:\t", validations['residual_deviance']) # threshold only there if binomial? # auc only for binomial if family=="binomial": print "%15s %s" % ("auc:\t", validations['auc']) best_threshold = validations['best_threshold'] thresholds = validations['thresholds'] print "%15s %s" % ("best_threshold:\t", best_threshold) # have to look up the index for the cm, from the thresholds list best_index = None for i,t in enumerate(thresholds): if t >= best_threshold: # ends up using next one if not present best_index = i break assert best_index!=None, "%s %s" % (best_threshold, thresholds) print "Now printing the right 'best_threshold' %s from '_cms" % best_threshold # cm = glm['glm_model']['submodels'][0]['validation']['_cms'][-1] submodels = glm['glm_model']['submodels'] # FIX! this isn't right if we have multiple lambdas? different submodels? cms = submodels[0]['validation']['_cms'] self.assertEqual(len(thresholds), len(cms), msg="thresholds %s and cm %s should be lists of the same size. %s" % (len(thresholds), len(cms), thresholds)) # FIX! best_threshold isn't necessarily in the list. jump out if >= assert best_index<len(cms), "%s %s" % (best_index, len(cms)) # if we want 0.5..rounds to int # mid = len(cms)/2 # cm = cms[mid] cm = cms[best_index] print "cm:", dump_json(cm['_arr']) predErr = cm['_predErr'] classErr = cm['_classErr'] # compare to predErr # pctWrong = h2o_gbm.pp_cm_summary(cm['_arr']); # FIX! 
pctWrong = 0 print "predErr:", predErr print "calculated pctWrong from cm:", pctWrong print "classErr:", classErr # self.assertLess(pctWrong, 9,"Should see less than 9% error (class = 4)") print "\nTrain\n==========\n" # print h2o_gbm.pp_cm(cm['_arr']) if family=="poisson" or family=="gaussian": print "%15s %s" % ("AIC:\t", validations['AIC']) coefficients_names = GLMModel['coefficients_names'] # print "coefficients_names:", coefficients_names idxs = submodels1['idxs'] print "idxs:", idxs coefficients_names = coefficients_names # always check both normalized and normal coefficients norm_beta = submodels1['norm_beta'] # if norm_beta and len(coefficients_names)!=len(norm_beta): # print len(coefficients_names), len(norm_beta) # raise Exception("coefficients_names and normalized_norm_beta from h2o json not same length. coefficients_names: %s normalized_norm_beta: %s" % (coefficients_names, norm_beta)) # beta = submodels1['beta'] # print "beta:", beta # if len(coefficients_names)!=len(beta): # print len(coefficients_names), len(beta) # raise Exception("coefficients_names and beta from h2o json not same length. coefficients_names: %s beta: %s" % (coefficients_names, beta)) # test wants to use normalized? 
if doNormalized: beta_used = norm_beta else: beta_used = beta coefficients = {} # create a dictionary with name, beta (including intercept) just like v1 for i,b in zip(idxs, beta_used[:-1]): name = coefficients_names[i] coefficients[name] = b print "len(idxs)", len(idxs), "len(beta_used)", len(beta_used) print "coefficients:", coefficients print "beta:", beta print "norm_beta:", norm_beta coefficients['Intercept'] = beta_used[-1] print "len(coefficients_names)", len(coefficients_names) print "len(idxs)", len(idxs) print "idxs[-1]", idxs[-1] print "intercept demapping info:", \ "coefficients_names[-i]:", coefficients_names[-1], \ "idxs[-1]:", idxs[-1], \ "coefficients_names[idxs[-1]]:", coefficients_names[idxs[-1]], \ "beta_used[-1]:", beta_used[-1], \ "coefficients['Intercept']", coefficients['Intercept'] # last one is intercept interceptName = coefficients_names[idxs[-1]] if interceptName != "Intercept" or abs(beta_used[-1])<1e-26: raise Exception("'Intercept' should be last in coefficients_names and beta %s %s %s" %\ (idxs[-1], beta_used[-1], "-"+interceptName+"-")) # idxs has the order for non-zero coefficients, it's shorter than beta_used and coefficients_names # new 5/28/14. glm can point to zero coefficients # for i in idxs: # if beta_used[i]==0.0: ## raise Exception("idxs shouldn't point to any 0 coefficients i: %s %s:" % (i, beta_used[i])) if len(idxs) > len(beta_used): raise Exception("idxs shouldn't be longer than beta_used %s %s" % (len(idxs), len(beta_used))) intercept = coefficients.pop('Intercept', None) # intercept demapping info: idxs[-1]: 54 coefficients_names[[idxs[-1]]: Intercept beta_used[-1]: -6.6866753099 # the last one shoudl be 'Intercept' ? coefficients_names.pop() # have to skip the output col! get it from kwargs # better always be there! y = kwargs['response'] # the dict keys are column headers if they exist...how to order those? 
new: use the 'coefficients_names' # from the response # Tomas created 'coefficients_names which is the coefficient list in order. # Just use it to index coefficients! works for header or no-header cases # I guess now we won't print the "None" cases for dropped columns (constant columns!) # Because Tomas doesn't get everything in 'coefficients_names' if dropped by GLMQuery before # he gets it? def add_to_coefficient_list_and_string(c, cList, cString): if c in coefficients: cValue = coefficients[c] cValueString = "%s: %.5e " % (c, cValue) else: print "Warning: didn't see '" + c + "' in json coefficient response.",\ "Inserting 'None' with assumption it was dropped due to constant column)" cValue = None cValueString = "%s: %s " % (c, cValue) cList.append(cValue) # we put each on newline for easy comparison to R..otherwise keep condensed if prettyPrint: cValueString = "H2O coefficient " + cValueString + "\n" # not mutable? return cString + cValueString # creating both a string for printing and a list of values cString = "" cList = [] # print in order using col_names # coefficients_names is input only now..same for header or no header, or expanded enums for c in coefficients_names: cString = add_to_coefficient_list_and_string(c, cList, cString) if prettyPrint: print "\nH2O intercept:\t\t%.5e" % intercept print cString else: if not noPrint: print "\nintercept:", intercept, cString print "\nTotal # of coefficients:", len(coefficients_names) # pick out the coefficent for the column we enabled for enhanced checking. Can be None. # FIX! temporary hack to deal with disappearing/renaming columns in GLM if (not allowZeroCoeff) and (colX is not None): absXCoeff = abs(float(coefficients[str(colX)])) # add kwargs to help debug without looking at console log self.assertGreater(absXCoeff, 1e-26, ( "abs. 
value of GLM coefficients['" + str(colX) + "'] is " + str(absXCoeff) + ", not >= 1e-26 for X=" + str(colX) + "\n" + "kwargs:" + dump_json(kwargs) )) # intercept is buried in there too absIntercept = abs(float(intercept)) self.assertGreater(absIntercept, 1e-26, ( "abs. value of GLM coefficients['Intercept'] is " + str(absIntercept) + ", not >= 1e-26 for Intercept" + "\n" + "kwargs:" + dump_json(kwargs) )) # this is good if we just want min or max # maxCoeff = max(coefficients, key=coefficients.get) # for more, just invert the dictionary and ... if (len(coefficients)>0): maxKey = max([(abs(coefficients[x]),x) for x in coefficients])[1] print "H2O Largest abs. coefficient value:", maxKey, coefficients[maxKey] minKey = min([(abs(coefficients[x]),x) for x in coefficients])[1] print "H2O Smallest abs. coefficient value:", minKey, coefficients[minKey] else: print "Warning, no coefficients returned. Must be intercept only?" # many of the GLM tests aren't single column though. # quick and dirty check: if all the coefficients are zero, # something is broken # intercept is in there too, but this will get it okay # just sum the abs value up..look for greater than 0 # skip this test if there is just one coefficient. Maybe pointing to a non-important coeff? if (not allowZeroCoeff) and (len(coefficients)>1): s = 0.0 for c in coefficients: v = coefficients[c] s += abs(float(v)) self.assertGreater(s, 1e-26, ( "sum of abs. value of GLM coefficients/intercept is " + str(s) + ", not >= 1e-26\n" + "kwargs:" + dump_json(kwargs) )) print "submodels1, run_time (milliseconds):", submodels1['run_time'] # shouldn't have any errors check_sandbox_for_errors() return (warnings, cList, intercept) # compare this glm to last one. since the files are concatenations, # the results should be similar? 
10% of first is allowed delta def compareToFirstGlm(self, key, glm, firstglm): # if isinstance(firstglm[key], list): # in case it's not a list allready (err is a list) verboseprint("compareToFirstGlm key:", key) verboseprint("compareToFirstGlm glm[key]:", glm[key]) # key could be a list or not. if a list, don't want to create list of that list # so use extend on an empty list. covers all cases? if type(glm[key]) is list: kList = glm[key] firstkList = firstglm[key] elif type(glm[key]) is dict: raise Exception("compareToFirstGLm: Not expecting dict for " + key) else: kList = [glm[key]] firstkList = [firstglm[key]] print "kbn:", kList, firstkList for k, firstk in zip(kList, firstkList): # delta must be a positive number ? delta = .1 * abs(float(firstk)) msg = "Too large a delta (" + str(delta) + ") comparing current and first for: " + key self.assertAlmostEqual(float(k), float(firstk), delta=delta, msg=msg) self.assertGreaterEqual(abs(float(k)), 0.0, str(k) + " abs not >= 0.0 in current") def simpleCheckGLMGrid(self, glmGridResult, colX=None, allowFailWarning=False, **kwargs): # "grid": { # "destination_keys": [ # "GLMGridResults__8222a49156af52532a34fb3ce4304308_0", # "GLMGridResults__8222a49156af52532a34fb3ce4304308_1", # "GLMGridResults__8222a49156af52532a34fb3ce4304308_2" # ] # }, destination_key = glmGridResult['grid']['destination_keys'][0] inspectGG = h2o_nodes.nodes[0].glm_view(destination_key) models = inspectGG['glm_model']['submodels'] verboseprint("GLMGrid inspect GLMGrid model 0(best):", dump_json(models[0])) g = simpleCheckGLM(self, inspectGG, colX, allowFailWarning=allowFailWarning, **kwargs) # just to get some save_model testing for i,m in enumerate(glmGridResult['grid']['destination_keys']): print "Saving model", m, "to model"+str(i) h2o_nodes.nodes[0].save_model(model=m, path='model'+str(i), force=1) return g # This gives me a comma separated x string, for all the columns, with cols with # missing values, enums, and optionally matching a pattern, 
removed. useful for GLM # since it removes rows with any col with NA # get input from this. # (missingValuesDict, constantValuesDict, enumSizeDict, colTypeDict, colNameDict) = \ # h2o_cmd.columnInfoFromInspect(parseResult['destination_key', # exceptionOnMissingValues=False, timeoutSecs=300) def goodXFromColumnInfo(y, num_cols=None, missingValuesDict=None, constantValuesDict=None, enumSizeDict=None, colTypeDict=None, colNameDict=None, keepPattern=None, key=None, timeoutSecs=120, returnIgnoreX=False, noPrint=False, returnStringX=True): y = str(y) # if we pass a key, means we want to get the info ourselves here if key is not None: (missingValuesDict, constantValuesDict, enumSizeDict, colTypeDict, colNameDict) = \ h2o_cmd.columnInfoFromInspect(key, exceptionOnMissingValues=False, max_column_display=99999999, timeoutSecs=timeoutSecs) num_cols = len(colNameDict) # now remove any whose names don't match the required keepPattern if keepPattern is not None: keepX = re.compile(keepPattern) else: keepX = None x = range(num_cols) # need to walk over a copy, cause we change x xOrig = x[:] ignore_x = [] # for use by RF for k in xOrig: name = colNameDict[k] # remove it if it has the same name as the y output if str(k)== y: # if they pass the col index as y if not noPrint: print "Removing %d because name: %s matches output %s" % (k, str(k), y) x.remove(k) # rf doesn't want it in ignore list # ignore_x.append(k) elif name == y: # if they pass the name as y if not noPrint: print "Removing %d because name: %s matches output %s" % (k, name, y) x.remove(k) # rf doesn't want it in ignore list # ignore_x.append(k) elif keepX is not None and not keepX.match(name): if not noPrint: print "Removing %d because name: %s doesn't match desired keepPattern %s" % (k, name, keepPattern) x.remove(k) ignore_x.append(k) # missing values reports as constant also. so do missing first. 
# remove all cols with missing values # could change it against num_rows for a ratio elif k in missingValuesDict: value = missingValuesDict[k] if not noPrint: print "Removing %d with name: %s because it has %d missing values" % (k, name, value) x.remove(k) ignore_x.append(k) elif k in constantValuesDict: value = constantValuesDict[k] if not noPrint: print "Removing %d with name: %s because it has constant value: %s " % (k, name, str(value)) x.remove(k) ignore_x.append(k) # this is extra pruning.. # remove all cols with enums, if not already removed elif k in enumSizeDict: value = enumSizeDict[k] if not noPrint: print "Removing %d %s because it has enums of size: %d" % (k, name, value) x.remove(k) ignore_x.append(k) if not noPrint: print "x has", len(x), "cols" print "ignore_x has", len(ignore_x), "cols" # this is probably used in 'cols" in v2, which can take numbers if returnStringX: x = ",".join(map(str, x)) ignore_x = ",".join(map(lambda x: "C" + str(x+1), ignore_x)) if not noPrint: print "\nx:", x print "\nignore_x:", ignore_x if returnIgnoreX: return ignore_x else: return x
apache-2.0
JamesGuthrie/libcloud
libcloud/loadbalancer/drivers/gogrid.py
58
8425
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import time from libcloud.utils.py3 import httplib try: import simplejson as json except ImportError: import json from libcloud.utils.misc import reverse_dict from libcloud.common.types import LibcloudError from libcloud.common.gogrid import GoGridConnection, GoGridResponse,\ BaseGoGridDriver from libcloud.loadbalancer.base import LoadBalancer, Member, Driver, Algorithm from libcloud.loadbalancer.base import DEFAULT_ALGORITHM from libcloud.loadbalancer.types import State, LibcloudLBImmutableError class GoGridLBResponse(GoGridResponse): def success(self): if self.status == httplib.INTERNAL_SERVER_ERROR: # Hack, but at least this error message is more useful than # "unexpected server error" body = json.loads(self.body) if body['method'] == '/grid/loadbalancer/add' and \ len(body['list']) >= 1 and \ body['list'][0]['message'].find( 'unexpected server error') != -1: raise LibcloudError( value='You mostly likely tried to add a member with an IP' ' address not assigned to your account', driver=self) return super(GoGridLBResponse, self).success() class GoGridLBConnection(GoGridConnection): """ Connection class for the GoGrid load-balancer driver. 
""" responseCls = GoGridLBResponse class GoGridLBDriver(BaseGoGridDriver, Driver): connectionCls = GoGridLBConnection api_name = 'gogrid_lb' name = 'GoGrid LB' website = 'http://www.gogrid.com/' LB_STATE_MAP = {'On': State.RUNNING, 'Unknown': State.UNKNOWN} _VALUE_TO_ALGORITHM_MAP = { 'round robin': Algorithm.ROUND_ROBIN, 'least connect': Algorithm.LEAST_CONNECTIONS } _ALGORITHM_TO_VALUE_MAP = reverse_dict(_VALUE_TO_ALGORITHM_MAP) def __init__(self, *args, **kwargs): """ @inherits: :class:`Driver.__init__` """ super(GoGridLBDriver, self).__init__(*args, **kwargs) def list_protocols(self): # GoGrid only supports http return ['http'] def list_balancers(self): return self._to_balancers( self.connection.request('/api/grid/loadbalancer/list').object) def ex_create_balancer_nowait(self, name, members, protocol='http', port=80, algorithm=DEFAULT_ALGORITHM): """ @inherits: :class:`Driver.create_balancer` """ algorithm = self._algorithm_to_value(algorithm) params = {'name': name, 'loadbalancer.type': algorithm, 'virtualip.ip': self._get_first_ip(), 'virtualip.port': port} params.update(self._members_to_params(members)) resp = self.connection.request('/api/grid/loadbalancer/add', method='GET', params=params) return self._to_balancers(resp.object)[0] def create_balancer(self, name, members, protocol='http', port=80, algorithm=DEFAULT_ALGORITHM): balancer = self.ex_create_balancer_nowait(name, members, protocol, port, algorithm) timeout = 60 * 20 waittime = 0 interval = 2 * 15 if balancer.id is not None: return balancer else: while waittime < timeout: balancers = self.list_balancers() for i in balancers: if i.name == balancer.name and i.id is not None: return i waittime += interval time.sleep(interval) raise Exception('Failed to get id') def destroy_balancer(self, balancer): try: resp = self.connection.request( '/api/grid/loadbalancer/delete', method='POST', params={'id': balancer.id}) except Exception: e = sys.exc_info()[1] if "Update request for LoadBalancer" in str(e): 
raise LibcloudLBImmutableError( "Cannot delete immutable object", GoGridLBDriver) else: raise return resp.status == 200 def get_balancer(self, **kwargs): params = {} try: params['name'] = kwargs['ex_balancer_name'] except KeyError: balancer_id = kwargs['balancer_id'] params['id'] = balancer_id resp = self.connection.request('/api/grid/loadbalancer/get', params=params) return self._to_balancers(resp.object)[0] def balancer_attach_member(self, balancer, member): members = self.balancer_list_members(balancer) members.append(member) params = {"id": balancer.id} params.update(self._members_to_params(members)) resp = self._update_balancer(params) return [m for m in self._to_members(resp.object["list"][0]["realiplist"], balancer) if m.ip == member.ip][0] def balancer_detach_member(self, balancer, member): members = self.balancer_list_members(balancer) remaining_members = [n for n in members if n.id != member.id] params = {"id": balancer.id} params.update(self._members_to_params(remaining_members)) resp = self._update_balancer(params) return resp.status == 200 def balancer_list_members(self, balancer): resp = self.connection.request('/api/grid/loadbalancer/get', params={'id': balancer.id}) return self._to_members(resp.object["list"][0]["realiplist"], balancer) def _update_balancer(self, params): try: return self.connection.request('/api/grid/loadbalancer/edit', method='POST', params=params) except Exception: e = sys.exc_info()[1] if "Update already pending" in str(e): raise LibcloudLBImmutableError( "Balancer is immutable", GoGridLBDriver) raise LibcloudError(value='Exception: %s' % str(e), driver=self) def _members_to_params(self, members): """ Helper method to convert list of :class:`Member` objects to GET params. 
""" params = {} i = 0 for member in members: params["realiplist.%s.ip" % i] = member.ip params["realiplist.%s.port" % i] = member.port i += 1 return params def _to_balancers(self, object): return [self._to_balancer(el) for el in object["list"]] def _to_balancer(self, el): lb = LoadBalancer(id=el.get("id"), name=el["name"], state=self.LB_STATE_MAP.get( el["state"]["name"], State.UNKNOWN), ip=el["virtualip"]["ip"]["ip"], port=el["virtualip"]["port"], driver=self.connection.driver) return lb def _to_members(self, object, balancer=None): return [self._to_member(el, balancer) for el in object] def _to_member(self, el, balancer=None): member = Member(id=el["ip"]["id"], ip=el["ip"]["ip"], port=el["port"], balancer=balancer) return member
apache-2.0
itzmesayooj/foursquared.eclair
util/oget.py
262
3416
#!/usr/bin/python """ Pull a oAuth protected page from foursquare. Expects ~/.oget to contain (one on each line): CONSUMER_KEY CONSUMER_KEY_SECRET USERNAME PASSWORD Don't forget to chmod 600 the file! """ import httplib import os import re import sys import urllib import urllib2 import urlparse import user from xml.dom import pulldom from xml.dom import minidom import oauth """From: http://groups.google.com/group/foursquare-api/web/oauth @consumer = OAuth::Consumer.new("consumer_token","consumer_secret", { :site => "http://foursquare.com", :scheme => :header, :http_method => :post, :request_token_path => "/oauth/request_token", :access_token_path => "/oauth/access_token", :authorize_path => "/oauth/authorize" }) """ SERVER = 'api.foursquare.com:80' CONTENT_TYPE_HEADER = {'Content-Type' :'application/x-www-form-urlencoded'} SIGNATURE_METHOD = oauth.OAuthSignatureMethod_HMAC_SHA1() AUTHEXCHANGE_URL = 'http://api.foursquare.com/v1/authexchange' def parse_auth_response(auth_response): return ( re.search('<oauth_token>(.*)</oauth_token>', auth_response).groups()[0], re.search('<oauth_token_secret>(.*)</oauth_token_secret>', auth_response).groups()[0] ) def create_signed_oauth_request(username, password, consumer): oauth_request = oauth.OAuthRequest.from_consumer_and_token( consumer, http_method='POST', http_url=AUTHEXCHANGE_URL, parameters=dict(fs_username=username, fs_password=password)) oauth_request.sign_request(SIGNATURE_METHOD, consumer, None) return oauth_request def main(): url = urlparse.urlparse(sys.argv[1]) # Nevermind that the query can have repeated keys. 
parameters = dict(urlparse.parse_qsl(url.query)) password_file = open(os.path.join(user.home, '.oget')) lines = [line.strip() for line in password_file.readlines()] if len(lines) == 4: cons_key, cons_key_secret, username, password = lines access_token = None else: cons_key, cons_key_secret, username, password, token, secret = lines access_token = oauth.OAuthToken(token, secret) consumer = oauth.OAuthConsumer(cons_key, cons_key_secret) if not access_token: oauth_request = create_signed_oauth_request(username, password, consumer) connection = httplib.HTTPConnection(SERVER) headers = {'Content-Type' :'application/x-www-form-urlencoded'} connection.request(oauth_request.http_method, AUTHEXCHANGE_URL, body=oauth_request.to_postdata(), headers=headers) auth_response = connection.getresponse().read() token = parse_auth_response(auth_response) access_token = oauth.OAuthToken(*token) open(os.path.join(user.home, '.oget'), 'w').write('\n'.join(( cons_key, cons_key_secret, username, password, token[0], token[1]))) oauth_request = oauth.OAuthRequest.from_consumer_and_token(consumer, access_token, http_method='POST', http_url=url.geturl(), parameters=parameters) oauth_request.sign_request(SIGNATURE_METHOD, consumer, access_token) connection = httplib.HTTPConnection(SERVER) connection.request(oauth_request.http_method, oauth_request.to_url(), body=oauth_request.to_postdata(), headers=CONTENT_TYPE_HEADER) print connection.getresponse().read() #print minidom.parse(connection.getresponse()).toprettyxml(indent=' ') if __name__ == '__main__': main()
apache-2.0
mbernasocchi/QGIS
python/plugins/processing/modeler/DeleteModelAction.py
30
2782
# -*- coding: utf-8 -*-

"""
***************************************************************************
    DeleteModelAction.py
    ---------------------
    Date                 : August 2012
    Copyright            : (C) 2012 by Victor Olaya
    Email                : volayaf at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'

import os

from qgis.core import (QgsApplication,
                       QgsProcessingAlgorithm,
                       QgsProject)
from qgis.PyQt.QtWidgets import QMessageBox
from qgis.PyQt.QtCore import QCoreApplication

from processing.gui.ContextAction import ContextAction
from processing.modeler.ProjectProvider import PROJECT_PROVIDER_ID


class DeleteModelAction(ContextAction):
    """Toolbox context-menu action that deletes a model, either from disk
    (file-based "model" provider) or from the current project (project
    provider)."""

    def __init__(self):
        super().__init__()
        self.name = QCoreApplication.translate('DeleteModelAction', 'Delete Model…')

    def isEnabled(self):
        """Enable only for model algorithms from the file or project provider.

        Uses PROJECT_PROVIDER_ID instead of a second hard-coded "project"
        literal so this check stays consistent with execute() below.
        """
        return isinstance(self.itemData, QgsProcessingAlgorithm) and \
            self.itemData.provider().id() in ("model", PROJECT_PROVIDER_ID)

    def execute(self):
        """Confirm with the user, then delete the model and refresh providers."""
        model = self.itemData
        if model is None:
            return  # shouldn't happen, but let's be safe

        # Project-stored models are removed from the project; file-based
        # models are deleted from disk.
        project_provider = model.provider().id() == PROJECT_PROVIDER_ID

        if project_provider:
            msg = self.tr('Are you sure you want to delete this model from the current project?',
                          'DeleteModelAction')
        else:
            msg = self.tr('Are you sure you want to delete this model?', 'DeleteModelAction')

        reply = QMessageBox.question(
            None,
            self.tr('Delete Model', 'DeleteModelAction'),
            msg,
            QMessageBox.Yes | QMessageBox.No,
            QMessageBox.No)
        if reply == QMessageBox.Yes:
            if project_provider:
                provider = QgsApplication.processingRegistry().providerById(PROJECT_PROVIDER_ID)
                provider.remove_model(model)
                # Mark the project dirty so the removal gets saved.
                QgsProject.instance().setDirty(True)
            else:
                os.remove(model.sourceFilePath())
                QgsApplication.processingRegistry().providerById('model').refreshAlgorithms()
gpl-2.0
dinnozap/hebel
hebel/layers/softmax_layer.py
2
11562
# Copyright (C) 2013 Hannes Bretschneider # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. import numpy as np import cPickle from pycuda import gpuarray from pycuda import cumath from math import sqrt from .. import sampler, memory_pool from .top_layer import TopLayer from ..pycuda_ops import eps, linalg from ..pycuda_ops.elementwise import sign, nan_to_zeros, substract_matrix from ..pycuda_ops.reductions import matrix_sum_out_axis from ..pycuda_ops.matrix import add_vec_to_mat from ..pycuda_ops.softmax import softmax, cross_entropy class SoftmaxLayer(TopLayer): r""" A multiclass classification layer, using cross-entropy loss function and softmax activations. **Parameters:** n_in : integer Number of input units. n_out : integer Number of output units (classes). parameters : array_like of ``GPUArray`` Parameters used to initialize the layer. If this is omitted, then the weights are initalized randomly using *Bengio's rule* (uniform distribution with scale :math:`4 \cdot \sqrt{6 / (\mathtt{n\_in} + \mathtt{n\_out})}`) and the biases are initialized to zero. If ``parameters`` is given, then is must be in the form ``[weights, biases]``, where the shape of weights is ``(n_in, n_out)`` and the shape of ``biases`` is ``(n_out,)``. Both weights and biases must be ``GPUArray``. 
weights_scale : float, optional If ``parameters`` is omitted, then this factor is used as scale for initializing the weights instead of *Bengio's rule*. l1_penalty_weight : float, optional Weight used for L1 regularization of the weights. l2_penalty_weight : float, optional Weight used for L2 regularization of the weights. lr_multiplier : float, optional If this parameter is omitted, then the learning rate for the layer is scaled by :math:`2 / \sqrt{\mathtt{n\_in}}`. You may specify a different factor here. test_error_fct : {``class_error``, ``kl_error``, ``cross_entropy_error``}, optional Which error function to use on the test set. Default is ``class_error`` for classification error. Other choices are ``kl_error``, the Kullback-Leibler divergence, or ``cross_entropy_error``. **See also:** :class:`hebel.layers.LogisticLayer`, :class:`hebel.models.NeuralNet`, :class:`hebel.models.NeuralNetRegression`, :class:`hebel.layers.LinearRegressionLayer` **Examples**:: # Use the simple initializer and initialize with random weights softmax_layer = SoftmaxLayer(1000, 10) # Sample weights yourself, specify an L1 penalty, and don't # use learning rate scaling import numpy as np from pycuda import gpuarray n_in = 1000 n_out = 10 weights = gpuarray.to_gpu(.01 * np.random.randn(n_in, n_out)) biases = gpuarray.to_gpu(np.zeros((n_out,))) softmax_layer = SoftmaxLayer(n_in, n_out, parameters=(weights, biases), l1_penalty_weight=.1, lr_multiplier=1.) """ n_parameters = 2 def __init__(self, n_in, n_out, parameters=None, weights_scale=None, l1_penalty_weight=0., l2_penalty_weight=0., lr_multiplier=None, test_error_fct='class_error'): # Initialize weight using Bengio's rule self.weights_scale = 4 * sqrt(6. 
/ (n_in + n_out)) \ if weights_scale is None \ else weights_scale if parameters is not None: self.W, self.b = parameters else: self.W = gpuarray.empty((n_in, n_out), dtype=np.float32, allocator=memory_pool.allocate) sampler.fill_uniform(self.W) self.W = self.weights_scale * (self.W - .5) self.b = gpuarray.zeros((n_out,), dtype=np.float32) self.n_in = n_in self.n_out = n_out self.test_error_fct = test_error_fct self.l1_penalty_weight = l1_penalty_weight self.l2_penalty_weight = l2_penalty_weight self.lr_multiplier = 2 * [1. / np.sqrt(n_in, dtype=np.float32)] \ if lr_multiplier is None else lr_multiplier @property def architecture(self): return {'class': self.__class__, 'n_in': self.n_in, 'n_out': self.n_out} def feed_forward(self, input_data, prediction=False): """Propagate forward through the layer. **Parameters:** input_data : ``GPUArray`` Inpute data to compute activations for. prediction : bool, optional Whether to use prediction model. Only relevant when using dropout. If true, then weights are multiplied by 1 - dropout if the layer uses dropout. **Returns:** activations : ``GPUArray`` The activations of the output units. """ if input_data.shape[1] != self.W.shape[0]: raise ValueError('Number of outputs from previous layer (%d) ' 'does not match number of inputs to this layer (%d)' % (input_data.shape[1], self.W.shape[0])) lin_activations = linalg.dot(input_data, self.W) lin_activations = add_vec_to_mat(lin_activations, self.b, inplace=True) activations = softmax(lin_activations) return activations def backprop(self, input_data, targets, cache=None): """ Backpropagate through the logistic layer. **Parameters:** input_data : ``GPUArray`` Inpute data to compute activations for. targets : ``GPUArray`` The target values of the units. cache : list of ``GPUArray`` Cache obtained from forward pass. If the cache is provided, then the activations are not recalculated. 
**Returns:** gradients : tuple of ``GPUArray`` Gradients with respect to the weights and biases in the form ``(df_weights, df_biases)``. df_input : ``GPUArray`` Gradients with respect to the input. """ if cache is not None: activations = cache else: activations = self.feed_forward(input_data, prediction=False) if activations.shape != targets.shape: raise ValueError('Activations (shape = %s) and targets (shape = %s) are different sizes' % (activations.shape, targets.shape)) delta = substract_matrix(activations, targets) nan_to_zeros(delta, delta) # Gradient wrt weights df_W = linalg.dot(input_data, delta, transa='T') # Gradient wrt bias df_b = matrix_sum_out_axis(delta, 0) # Gradient wrt input df_input = linalg.dot(delta, self.W, transb='T') # L1 penalty if self.l1_penalty_weight: df_W += self.l1_penalty_weight * sign(self.W) # L2 penalty if self.l2_penalty_weight: df_W += self.l2_penalty_weight * self.W return (df_W, df_b), df_input def test_error(self, input_data, targets, average=True, cache=None, prediction=True): """Compute the test error function given some data and targets. Uses the error function defined in :class:`SoftmaxLayer.test_error_fct`, which may be different from the cross-entropy error function used for training'. Alternatively, the other test error functions may be called directly. **Parameters:** input_data : ``GPUArray`` Inpute data to compute the test error function for. targets : ``GPUArray`` The target values of the units. average : bool Whether to divide the value of the error function by the number of data points given. cache : list of ``GPUArray`` Cache obtained from forward pass. If the cache is provided, then the activations are not recalculated. prediction : bool, optional Whether to use prediction model. Only relevant when using dropout. If true, then weights are multiplied by 1 - dropout if the layer uses dropout. 
**Returns:** test_error : float """ if self.test_error_fct == 'class_error': test_error = self.class_error elif self.test_error_fct == 'kl_error': test_error = self.kl_error elif self.test_error_fct == 'cross_entropy_error': test_error = self.cross_entropy_error else: raise ValueError('unknown test error function "%s"' % self.test_error_fct) return test_error(input_data, targets, average, cache, prediction) def cross_entropy_error(self, input_data, targets, average=True, cache=None, prediction=False): """ Return the cross entropy error """ if cache is not None: activations = cache else: activations = \ self.feed_forward(input_data, prediction=prediction) loss = cross_entropy(activations, targets) if average: loss /= targets.shape[0] return loss.get() train_error = cross_entropy_error def class_error(self, input_data, targets, average=True, cache=None, prediction=False): """ Return the classification error rate """ if cache is not None: activations = cache else: activations = \ self.feed_forward(input_data, prediction=prediction) targets = targets.get().argmax(1) class_error = np.sum(activations.get().argmax(1) != targets) if average: class_error = float(class_error) / targets.shape[0] return class_error def kl_error(self, input_data, targets, average=True, cache=None, prediction=True): """ The KL divergence error """ if cache is not None: activations = cache else: activations = \ self.feed_forward(input_data, prediction=prediction) targets_non_nan = gpuarray.empty_like(targets) nan_to_zeros(targets, targets_non_nan) kl_error = gpuarray.sum(targets_non_nan * (cumath.log(targets_non_nan + eps) - cumath.log(activations + eps))) if average: kl_error /= targets.shape[0] return kl_error.get()
gpl-2.0
eharney/nova
nova/tests/compute/test_flavors.py
8
1426
# Copyright 2014 IBM Corp.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""Tests for flavor basic functions"""

from nova.compute import flavors
from nova import exception
from nova import test


class ExtraSpecTestCase(test.NoDBTestCase):
    """Unit tests for flavors.validate_extra_spec_keys.

    NOTE: the original setUp() only delegated to super().setUp() and added
    nothing, so it was dead code and has been removed; the inherited
    implementation still runs.
    """

    def _assert_invalid_spec_keys(self, key_name_list):
        # Helper: each of these key lists must be rejected with InvalidInput.
        self.assertRaises(exception.InvalidInput,
                          flavors.validate_extra_spec_keys,
                          key_name_list)

    def test_flavor_validate_extra_spec_keys_invalid_input(self):
        for invalid_keys in (['', ], ['*', ], ['+', ]):
            self._assert_invalid_spec_keys(invalid_keys)

    def test_flavor_validate_extra_spec_keys(self):
        # Valid key names may contain letters, spaces, '-', '_' and ':'.
        flavors.validate_extra_spec_keys(['abc', 'ab c', 'a-b-c', 'a_b-c', 'a:bc'])
apache-2.0
TEAM-Gummy/platform_external_chromium_org
build/util/lib/common/util.py
208
3815
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Generic utilities for all python scripts."""

import atexit
import httplib
import os
import signal
import socket
import stat
import subprocess
import sys
import tempfile
import urlparse


def GetPlatformName():
  """Return a string to be used in paths for the platform."""
  if IsWindows():
    return 'win'
  if IsMac():
    return 'mac'
  if IsLinux():
    return 'linux'
  raise NotImplementedError('Unknown platform "%s".' % sys.platform)


def IsWindows():
  """Return True when running on Windows (native or cygwin)."""
  return sys.platform == 'cygwin' or sys.platform.startswith('win')


def IsLinux():
  """Return True when running on Linux."""
  return sys.platform.startswith('linux')


def IsMac():
  """Return True when running on Mac OS."""
  return sys.platform.startswith('darwin')


def _DeleteDir(path):
  """Deletes a directory recursively, which must exist."""
  # Don't use shutil.rmtree because it can't delete read-only files on Win.
  for root, dirs, files in os.walk(path, topdown=False):
    for name in files:
      filename = os.path.join(root, name)
      os.chmod(filename, stat.S_IWRITE)
      os.remove(filename)
    for name in dirs:
      os.rmdir(os.path.join(root, name))
  os.rmdir(path)


def Delete(path):
  """Deletes the given file or directory (recursively), which must exist."""
  if os.path.isdir(path):
    _DeleteDir(path)
  else:
    os.remove(path)


def MaybeDelete(path):
  """Deletes the given file or directory (recursively), if it exists."""
  if os.path.exists(path):
    Delete(path)


def MakeTempDir(parent_dir=None):
  """Creates a temporary directory and returns an absolute path to it.

  The temporary directory is automatically deleted when the python interpreter
  exits normally.

  Args:
    parent_dir: the directory to create the temp dir in. If None, the system
                temp dir is used.

  Returns:
    The absolute path to the temporary directory.
  """
  path = tempfile.mkdtemp(dir=parent_dir)
  atexit.register(MaybeDelete, path)
  return path


def Unzip(zip_path, output_dir):
  """Unzips the given zip file using a system installed unzip tool.

  Args:
    zip_path: zip file to unzip.
    output_dir: directory to unzip the contents of the zip file. The directory
                must exist.

  Raises:
    RuntimeError if the unzip operation fails.
  """
  if IsWindows():
    unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
  else:
    unzip_cmd = ['unzip', '-o']
  unzip_cmd += [zip_path]
  if RunCommand(unzip_cmd, output_dir) != 0:
    raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))


def Kill(pid):
  """Terminate the given pid."""
  if IsWindows():
    subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
  else:
    os.kill(pid, signal.SIGTERM)


def RunCommand(cmd, cwd=None):
  """Runs the given command and returns the exit code.

  Args:
    cmd: list of command arguments.
    cwd: working directory to execute the command, or None if the current
         working directory should be used.

  Returns:
    The exit code of the command.
  """
  process = subprocess.Popen(cmd, cwd=cwd)
  process.wait()
  return process.returncode


def DoesUrlExist(url):
  """Determines whether a resource exists at the given URL.

  Args:
    url: URL to be verified.

  Returns:
    True if url exists, otherwise False.
  """
  parsed = urlparse.urlparse(url)
  # Create the connection outside the try block so the finally clause below
  # can never reference an unbound name.  Note: the module previously never
  # imported 'socket', so the except clause below raised a NameError on any
  # network failure instead of returning False; 'import socket' was added
  # at the top of this file to fix that.
  conn = httplib.HTTPConnection(parsed.netloc)
  try:
    conn.request('HEAD', parsed.path)
    response = conn.getresponse()
  except (socket.gaierror, socket.error):
    # DNS failure or connection error: treat the resource as missing.
    return False
  finally:
    conn.close()
  # Follow both permanent (301) and temporary (302) redirects.
  if response.status == 302 or response.status == 301:
    return DoesUrlExist(response.getheader('location'))
  return response.status == 200
bsd-3-clause
MagicStack/vmbench
servers/asyncio_http_server.py
1
5665
import argparse
import asyncio
import aiohttp
from aiohttp import web
import sys

import httptools
import uvloop

from socket import *  # provides IPPROTO_TCP / TCP_NODELAY used below


PRINT = 0

# Cache of pre-built payloads, keyed by payload size in bytes.
_RESP_CACHE = {}


class HttpRequest:
    """Minimal parsed-request record produced by HttpProtocol."""

    __slots__ = ('_protocol', '_url', '_headers', '_version')

    def __init__(self, protocol, url, headers, version):
        self._protocol = protocol
        self._url = url
        self._headers = headers
        self._version = version


class HttpResponse:
    """Writes a plain-text HTTP response directly to the transport."""

    __slots__ = ('_protocol', '_request', '_headers_sent')

    def __init__(self, protocol, request):
        self._protocol = protocol
        self._request = request
        self._headers_sent = False

    def write(self, data):
        # Mirror the request's HTTP version in the status line.
        self._protocol._transport.write(b''.join([
            'HTTP/{} 200 OK\r\n'.format(
                self._request._version).encode('latin-1'),
            b'Content-Type: text/plain\r\n',
            'Content-Length: {}\r\n'.format(len(data)).encode('latin-1'),
            b'\r\n',
            data
        ]))


class HttpProtocol(asyncio.Protocol):
    """Benchmark HTTP server protocol driven by httptools' parser callbacks."""

    __slots__ = ('_loop',
                 '_transport', '_current_request', '_current_parser',
                 '_current_url', '_current_headers')

    def __init__(self, *, loop=None):
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._transport = None
        self._current_request = None
        self._current_parser = None
        self._current_url = None
        self._current_headers = None

    def on_url(self, url):
        # httptools callback: raw request target (bytes).
        self._current_url = url

    def on_header(self, name, value):
        # httptools callback: one header per call.
        self._current_headers.append((name, value))

    def on_headers_complete(self):
        self._current_request = HttpRequest(
            self, self._current_url, self._current_headers,
            self._current_parser.get_http_version())

        self._loop.call_soon(
            self.handle, self._current_request,
            HttpResponse(self, self._current_request))

    ####

    def connection_made(self, transport):
        self._transport = transport
        sock = transport.get_extra_info('socket')
        try:
            sock.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1)
        except (OSError, NameError):
            # Unix sockets reject TCP options; NameError covers platforms
            # where the wildcard socket import lacks these constants.
            pass

    def connection_lost(self, exc):
        self._current_request = self._current_parser = None

    def data_received(self, data):
        # Lazily create a fresh parser per request/connection cycle.
        if self._current_parser is None:
            assert self._current_request is None
            self._current_headers = []
            self._current_parser = httptools.HttpRequestParser(self)

        self._current_parser.feed_data(data)

    def handle(self, request, response):
        # The request path encodes the payload size, e.g. "GET /4096".
        parsed_url = httptools.parse_url(self._current_url)
        payload_size = parsed_url.path.decode('ascii')[1:]
        if not payload_size:
            payload_size = 1024
        else:
            payload_size = int(payload_size)
        resp = _RESP_CACHE.get(payload_size)
        if resp is None:
            resp = b'X' * payload_size
            _RESP_CACHE[payload_size] = resp
        response.write(resp)
        if not self._current_parser.should_keep_alive():
            self._transport.close()
        self._current_parser = None
        self._current_request = None


def abort(msg):
    """Print *msg* to stderr and exit with a non-zero status."""
    print(msg, file=sys.stderr)
    sys.exit(1)


def aiohttp_server(loop, addr):
    """Return a coroutine that starts an aiohttp-based server on *addr*."""

    async def handle(request):
        payload_size = int(request.match_info.get('size', 1024))
        resp = _RESP_CACHE.get(payload_size)
        if resp is None:
            resp = b'X' * payload_size
            _RESP_CACHE[payload_size] = resp
        return web.Response(body=resp)

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/{size}', handle)
    app.router.add_route('GET', '/', handle)
    handler = app.make_handler()
    server = loop.create_server(handler, *addr)
    return server


def httptools_server(loop, addr):
    """Return a coroutine that starts the httptools-based server on *addr*."""
    return loop.create_server(lambda: HttpProtocol(loop=loop), *addr)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--type', default='asyncio+aiohttp', action='store')
    parser.add_argument('--addr', default='127.0.0.1:25000', type=str)
    args = parser.parse_args()

    # Initialize both names up front.  The original left loop_type unbound
    # when --type carried no '+' (e.g. "--type aiohttp"), which raised a
    # NameError at the "if not loop_type" check below.
    loop_type = None
    server_type = None

    if args.type:
        parts = args.type.split('+')
        if len(parts) > 1:
            loop_type = parts[0]
            server_type = parts[1]
        else:
            server_type = args.type

        if server_type in {'aiohttp', 'httptools'}:
            if not loop_type:
                loop_type = 'asyncio'
        else:
            loop_type = None

    if loop_type not in {'asyncio', 'uvloop'}:
        abort('unrecognized loop type: {}'.format(loop_type))

    if server_type not in {'aiohttp', 'httptools'}:
        abort('unrecognized server type: {}'.format(server_type))

    if loop_type:
        loop = globals()[loop_type].new_event_loop()
    else:
        loop = None

    print('using {} loop: {!r}'.format(loop_type, loop))
    print('using {} HTTP server'.format(server_type))

    if loop:
        asyncio.set_event_loop(loop)
        loop.set_debug(False)

    unix = False
    if args.addr.startswith('file:'):
        # NOTE(review): for unix sockets, addr becomes a plain path string,
        # yet the server factories splat it into create_server(*addr); unix
        # addresses appear unsupported as written (create_unix_server would
        # be needed) -- confirm before relying on the file: form.
        unix = True
        addr = args.addr[5:]
    else:
        addr = args.addr.split(':')
        addr[1] = int(addr[1])
        addr = tuple(addr)

    server_factory = globals()['{}_server'.format(server_type)]

    print('serving on: {}'.format(addr))

    if loop:
        server = loop.run_until_complete(server_factory(loop, addr))
        try:
            loop.run_forever()
        finally:
            server.close()
            loop.close()
mit
wcevans/grpc
src/python/grpcio_tests/tests/unit/beta/__init__.py
901
1528
# Copyright 2015, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
bsd-3-clause
angad/libjingle-mac
scons-2.2.0/build/lib/SCons/Tool/hpc++.py
14
2734
"""SCons.Tool.hpc++ Tool-specific initialization for c++ on HP/UX. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/hpc++.py issue-2856:2676:d23b7a2f45e8 2012/08/05 15:38:28 garyo" import os.path import SCons.Util cplusplus = __import__('c++', globals(), locals(), []) acc = None # search for the acc compiler and linker front end try: dirs = os.listdir('/opt') except (IOError, OSError): # Not being able to read the directory because it doesn't exist # (IOError) or isn't readable (OSError) is okay. 
dirs = [] for dir in dirs: cc = '/opt/' + dir + '/bin/aCC' if os.path.exists(cc): acc = cc break def generate(env): """Add Builders and construction variables for g++ to an Environment.""" cplusplus.generate(env) if acc: env['CXX'] = acc or 'aCC' env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z') # determine version of aCC line = os.popen(acc + ' -V 2>&1').readline().rstrip() if line.find('aCC: HP ANSI C++') == 0: env['CXXVERSION'] = line.split()[-1] if env['PLATFORM'] == 'cygwin': env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS') else: env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z') def exists(env): return acc # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
bsd-3-clause
CoherentLabs/depot_tools
third_party/boto/fps/response.py
69
6255
from decimal import Decimal def ResponseFactory(action): class FPSResponse(Response): _action = action _Result = globals().get(action + 'Result', ResponseElement) # due to nodes receiving their closing tags def endElement(self, name, value, connection): if name != action + 'Response': Response.endElement(self, name, value, connection) return FPSResponse class ResponseElement(object): def __init__(self, connection=None, name=None): if connection is not None: self._connection = connection self._name = name or self.__class__.__name__ @property def connection(self): return self._connection def __repr__(self): render = lambda pair: '{!s}: {!r}'.format(*pair) do_show = lambda pair: not pair[0].startswith('_') attrs = filter(do_show, self.__dict__.items()) return '{0}({1})'.format(self.__class__.__name__, ', '.join(map(render, attrs))) def startElement(self, name, attrs, connection): return None # due to nodes receiving their closing tags def endElement(self, name, value, connection): if name != self._name: setattr(self, name, value) class Response(ResponseElement): _action = 'Undefined' def startElement(self, name, attrs, connection): if name == 'ResponseMetadata': setattr(self, name, ResponseElement(name=name)) elif name == self._action + 'Result': setattr(self, name, self._Result(name=name)) else: return ResponseElement.startElement(self, name, attrs, connection) return getattr(self, name) class ComplexAmount(ResponseElement): def __repr__(self): return '{0} {1}'.format(self.CurrencyCode, self.Value) def __float__(self): return float(self.Value) def __str__(self): return str(self.Value) def startElement(self, name, attrs, connection): if name not in ('CurrencyCode', 'Value'): message = 'Unrecognized tag {0} in ComplexAmount'.format(name) raise AssertionError(message) return ResponseElement.startElement(self, name, attrs, connection) def endElement(self, name, value, connection): if name == 'Value': value = Decimal(value) ResponseElement.endElement(self, name, value, 
connection) class AmountCollection(ResponseElement): def startElement(self, name, attrs, connection): setattr(self, name, ComplexAmount(name=name)) return getattr(self, name) class AccountBalance(AmountCollection): def startElement(self, name, attrs, connection): if name == 'AvailableBalances': setattr(self, name, AmountCollection(name=name)) return getattr(self, name) return AmountCollection.startElement(self, name, attrs, connection) class GetAccountBalanceResult(ResponseElement): def startElement(self, name, attrs, connection): if name == 'AccountBalance': setattr(self, name, AccountBalance(name=name)) return getattr(self, name) return Response.startElement(self, name, attrs, connection) class GetTotalPrepaidLiabilityResult(ResponseElement): def startElement(self, name, attrs, connection): if name == 'OutstandingPrepaidLiability': setattr(self, name, AmountCollection(name=name)) return getattr(self, name) return Response.startElement(self, name, attrs, connection) class GetPrepaidBalanceResult(ResponseElement): def startElement(self, name, attrs, connection): if name == 'PrepaidBalance': setattr(self, name, AmountCollection(name=name)) return getattr(self, name) return Response.startElement(self, name, attrs, connection) class GetOutstandingDebtBalanceResult(ResponseElement): def startElement(self, name, attrs, connection): if name == 'OutstandingDebt': setattr(self, name, AmountCollection(name=name)) return getattr(self, name) return Response.startElement(self, name, attrs, connection) class TransactionPart(ResponseElement): def startElement(self, name, attrs, connection): if name == 'FeesPaid': setattr(self, name, ComplexAmount(name=name)) return getattr(self, name) return ResponseElement.startElement(self, name, attrs, connection) class Transaction(ResponseElement): def __init__(self, *args, **kw): self.TransactionPart = [] ResponseElement.__init__(self, *args, **kw) def startElement(self, name, attrs, connection): if name == 'TransactionPart': getattr(self, 
name).append(TransactionPart(name=name)) return getattr(self, name)[-1] if name in ('TransactionAmount', 'FPSFees', 'Balance'): setattr(self, name, ComplexAmount(name=name)) return getattr(self, name) return ResponseElement.startElement(self, name, attrs, connection) class GetAccountActivityResult(ResponseElement): def __init__(self, *args, **kw): self.Transaction = [] ResponseElement.__init__(self, *args, **kw) def startElement(self, name, attrs, connection): if name == 'Transaction': getattr(self, name).append(Transaction(name=name)) return getattr(self, name)[-1] return ResponseElement.startElement(self, name, attrs, connection) class GetTransactionResult(ResponseElement): def startElement(self, name, attrs, connection): if name == 'Transaction': setattr(self, name, Transaction(name=name)) return getattr(self, name) return ResponseElement.startElement(self, name, attrs, connection) class GetTokensResult(ResponseElement): def __init__(self, *args, **kw): self.Token = [] ResponseElement.__init__(self, *args, **kw) def startElement(self, name, attrs, connection): if name == 'Token': getattr(self, name).append(ResponseElement(name=name)) return getattr(self, name)[-1] return ResponseElement.startElement(self, name, attrs, connection)
bsd-3-clause
haniehrajabi/ryu
ryu/contrib/ovs/reconnect.py
54
23121
# Copyright (c) 2010, 2011, 2012 Nicira, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at: # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import ovs.vlog import ovs.util # Values returned by Reconnect.run() CONNECT = 'connect' DISCONNECT = 'disconnect' PROBE = 'probe' EOF = ovs.util.EOF vlog = ovs.vlog.Vlog("reconnect") class Reconnect(object): """A finite-state machine for connecting and reconnecting to a network resource with exponential backoff. It also provides optional support for detecting a connection on which the peer is no longer responding. The library does not implement anything networking related, only an FSM for networking code to use. Many Reconnect methods take a "now" argument. This makes testing easier since there is no hidden state. When not testing, just pass the return value of ovs.time.msec(). 
(Perhaps this design should be revisited later.)""" class Void(object): name = "VOID" is_connected = False @staticmethod def deadline(fsm): return None @staticmethod def run(fsm, now): return None class Listening(object): name = "LISTENING" is_connected = False @staticmethod def deadline(fsm): return None @staticmethod def run(fsm, now): return None class Backoff(object): name = "BACKOFF" is_connected = False @staticmethod def deadline(fsm): return fsm.state_entered + fsm.backoff @staticmethod def run(fsm, now): return CONNECT class ConnectInProgress(object): name = "CONNECTING" is_connected = False @staticmethod def deadline(fsm): return fsm.state_entered + max(1000, fsm.backoff) @staticmethod def run(fsm, now): return DISCONNECT class Active(object): name = "ACTIVE" is_connected = True @staticmethod def deadline(fsm): if fsm.probe_interval: base = max(fsm.last_activity, fsm.state_entered) return base + fsm.probe_interval return None @staticmethod def run(fsm, now): vlog.dbg("%s: idle %d ms, sending inactivity probe" % (fsm.name, now - max(fsm.last_activity, fsm.state_entered))) fsm._transition(now, Reconnect.Idle) return PROBE class Idle(object): name = "IDLE" is_connected = True @staticmethod def deadline(fsm): if fsm.probe_interval: return fsm.state_entered + fsm.probe_interval return None @staticmethod def run(fsm, now): vlog.err("%s: no response to inactivity probe after %.3g " "seconds, disconnecting" % (fsm.name, (now - fsm.state_entered) / 1000.0)) return DISCONNECT class Reconnect(object): name = "RECONNECT" is_connected = False @staticmethod def deadline(fsm): return fsm.state_entered @staticmethod def run(fsm, now): return DISCONNECT def __init__(self, now): """Creates and returns a new reconnect FSM with default settings. The FSM is initially disabled. 
The caller will likely want to call self.enable() and self.set_name() on the returned object.""" self.name = "void" self.min_backoff = 1000 self.max_backoff = 8000 self.probe_interval = 5000 self.passive = False self.info_level = vlog.info self.state = Reconnect.Void self.state_entered = now self.backoff = 0 self.last_activity = now self.last_connected = None self.last_disconnected = None self.max_tries = None self.creation_time = now self.n_attempted_connections = 0 self.n_successful_connections = 0 self.total_connected_duration = 0 self.seqno = 0 def set_quiet(self, quiet): """If 'quiet' is true, this object will log informational messages at debug level, by default keeping them out of log files. This is appropriate if the connection is one that is expected to be short-lived, so that the log messages are merely distracting. If 'quiet' is false, this object logs informational messages at info level. This is the default. This setting has no effect on the log level of debugging, warning, or error messages.""" if quiet: self.info_level = vlog.dbg else: self.info_level = vlog.info def get_name(self): return self.name def set_name(self, name): """Sets this object's name to 'name'. If 'name' is None, then "void" is used instead. The name is used in log messages.""" if name is None: self.name = "void" else: self.name = name def get_min_backoff(self): """Return the minimum number of milliseconds to back off between consecutive connection attempts. The default is 1000 ms.""" return self.min_backoff def get_max_backoff(self): """Return the maximum number of milliseconds to back off between consecutive connection attempts. The default is 8000 ms.""" return self.max_backoff def get_probe_interval(self): """Returns the "probe interval" in milliseconds. If this is zero, it disables the connection keepalive feature. If it is nonzero, then if the interval passes while the FSM is connected and without self.activity() being called, self.run() returns ovs.reconnect.PROBE. 
If the interval passes again without self.activity() being called, self.run() returns ovs.reconnect.DISCONNECT.""" return self.probe_interval def set_max_tries(self, max_tries): """Limits the maximum number of times that this object will ask the client to try to reconnect to 'max_tries'. None (the default) means an unlimited number of tries. After the number of tries has expired, the FSM will disable itself instead of backing off and retrying.""" self.max_tries = max_tries def get_max_tries(self): """Returns the current remaining number of connection attempts, None if the number is unlimited.""" return self.max_tries def set_backoff(self, min_backoff, max_backoff): """Configures the backoff parameters for this FSM. 'min_backoff' is the minimum number of milliseconds, and 'max_backoff' is the maximum, between connection attempts. 'min_backoff' must be at least 1000, and 'max_backoff' must be greater than or equal to 'min_backoff'.""" self.min_backoff = max(min_backoff, 1000) if self.max_backoff: self.max_backoff = max(max_backoff, 1000) else: self.max_backoff = 8000 if self.min_backoff > self.max_backoff: self.max_backoff = self.min_backoff if (self.state == Reconnect.Backoff and self.backoff > self.max_backoff): self.backoff = self.max_backoff def set_probe_interval(self, probe_interval): """Sets the "probe interval" to 'probe_interval', in milliseconds. If this is zero, it disables the connection keepalive feature. If it is nonzero, then if the interval passes while this FSM is connected and without self.activity() being called, self.run() returns ovs.reconnect.PROBE. If the interval passes again without self.activity() being called, self.run() returns ovs.reconnect.DISCONNECT. 
If 'probe_interval' is nonzero, then it will be forced to a value of at least 1000 ms.""" if probe_interval: self.probe_interval = max(1000, probe_interval) else: self.probe_interval = 0 def is_passive(self): """Returns true if 'fsm' is in passive mode, false if 'fsm' is in active mode (the default).""" return self.passive def set_passive(self, passive, now): """Configures this FSM for active or passive mode. In active mode (the default), the FSM is attempting to connect to a remote host. In passive mode, the FSM is listening for connections from a remote host.""" if self.passive != passive: self.passive = passive if ((passive and self.state in (Reconnect.ConnectInProgress, Reconnect.Reconnect)) or (not passive and self.state == Reconnect.Listening and self.__may_retry())): self._transition(now, Reconnect.Backoff) self.backoff = 0 def is_enabled(self): """Returns true if this FSM has been enabled with self.enable(). Calling another function that indicates a change in connection state, such as self.disconnected() or self.force_reconnect(), will also enable a reconnect FSM.""" return self.state != Reconnect.Void def enable(self, now): """If this FSM is disabled (the default for newly created FSMs), enables it, so that the next call to reconnect_run() for 'fsm' will return ovs.reconnect.CONNECT. If this FSM is not disabled, this function has no effect.""" if self.state == Reconnect.Void and self.__may_retry(): self._transition(now, Reconnect.Backoff) self.backoff = 0 def disable(self, now): """Disables this FSM. 
Until 'fsm' is enabled again, self.run() will always return 0.""" if self.state != Reconnect.Void: self._transition(now, Reconnect.Void) def force_reconnect(self, now): """If this FSM is enabled and currently connected (or attempting to connect), forces self.run() to return ovs.reconnect.DISCONNECT the next time it is called, which should cause the client to drop the connection (or attempt), back off, and then reconnect.""" if self.state in (Reconnect.ConnectInProgress, Reconnect.Active, Reconnect.Idle): self._transition(now, Reconnect.Reconnect) def disconnected(self, now, error): """Tell this FSM that the connection dropped or that a connection attempt failed. 'error' specifies the reason: a positive value represents an errno value, EOF indicates that the connection was closed by the peer (e.g. read() returned 0), and 0 indicates no specific error. The FSM will back off, then reconnect.""" if self.state not in (Reconnect.Backoff, Reconnect.Void): # Report what happened if self.state in (Reconnect.Active, Reconnect.Idle): if error > 0: vlog.warn("%s: connection dropped (%s)" % (self.name, os.strerror(error))) elif error == EOF: self.info_level("%s: connection closed by peer" % self.name) else: self.info_level("%s: connection dropped" % self.name) elif self.state == Reconnect.Listening: if error > 0: vlog.warn("%s: error listening for connections (%s)" % (self.name, os.strerror(error))) else: self.info_level("%s: error listening for connections" % self.name) else: if self.passive: type_ = "listen" else: type_ = "connection" if error > 0: vlog.warn("%s: %s attempt failed (%s)" % (self.name, type_, os.strerror(error))) else: self.info_level("%s: %s attempt timed out" % (self.name, type_)) if (self.state in (Reconnect.Active, Reconnect.Idle)): self.last_disconnected = now # Back off if (self.state in (Reconnect.Active, Reconnect.Idle) and (self.last_activity - self.last_connected >= self.backoff or self.passive)): if self.passive: self.backoff = 0 else: self.backoff = 
self.min_backoff else: if self.backoff < self.min_backoff: self.backoff = self.min_backoff elif self.backoff >= self.max_backoff / 2: self.backoff = self.max_backoff else: self.backoff *= 2 if self.passive: self.info_level("%s: waiting %.3g seconds before trying " "to listen again" % (self.name, self.backoff / 1000.0)) else: self.info_level("%s: waiting %.3g seconds before reconnect" % (self.name, self.backoff / 1000.0)) if self.__may_retry(): self._transition(now, Reconnect.Backoff) else: self._transition(now, Reconnect.Void) def connecting(self, now): """Tell this FSM that a connection or listening attempt is in progress. The FSM will start a timer, after which the connection or listening attempt will be aborted (by returning ovs.reconnect.DISCONNECT from self.run()).""" if self.state != Reconnect.ConnectInProgress: if self.passive: self.info_level("%s: listening..." % self.name) else: self.info_level("%s: connecting..." % self.name) self._transition(now, Reconnect.ConnectInProgress) def listening(self, now): """Tell this FSM that the client is listening for connection attempts. This state last indefinitely until the client reports some change. The natural progression from this state is for the client to report that a connection has been accepted or is in progress of being accepted, by calling self.connecting() or self.connected(). The client may also report that listening failed (e.g. accept() returned an unexpected error such as ENOMEM) by calling self.listen_error(), in which case the FSM will back off and eventually return ovs.reconnect.CONNECT from self.run() to tell the client to try listening again.""" if self.state != Reconnect.Listening: self.info_level("%s: listening..." % self.name) self._transition(now, Reconnect.Listening) def listen_error(self, now, error): """Tell this FSM that the client's attempt to accept a connection failed (e.g. accept() returned an unexpected error such as ENOMEM). 
If the FSM is currently listening (self.listening() was called), it will back off and eventually return ovs.reconnect.CONNECT from self.run() to tell the client to try listening again. If there is an active connection, this will be delayed until that connection drops.""" if self.state == Reconnect.Listening: self.disconnected(now, error) def connected(self, now): """Tell this FSM that the connection was successful. The FSM will start the probe interval timer, which is reset by self.activity(). If the timer expires, a probe will be sent (by returning ovs.reconnect.PROBE from self.run(). If the timer expires again without being reset, the connection will be aborted (by returning ovs.reconnect.DISCONNECT from self.run().""" if not self.state.is_connected: self.connecting(now) self.info_level("%s: connected" % self.name) self._transition(now, Reconnect.Active) self.last_connected = now def connect_failed(self, now, error): """Tell this FSM that the connection attempt failed. The FSM will back off and attempt to reconnect.""" self.connecting(now) self.disconnected(now, error) def activity(self, now): """Tell this FSM that some activity occurred on the connection. This resets the probe interval timer, so that the connection is known not to be idle.""" if self.state != Reconnect.Active: self._transition(now, Reconnect.Active) self.last_activity = now def _transition(self, now, state): if self.state == Reconnect.ConnectInProgress: self.n_attempted_connections += 1 if state == Reconnect.Active: self.n_successful_connections += 1 connected_before = self.state.is_connected connected_now = state.is_connected if connected_before != connected_now: if connected_before: self.total_connected_duration += now - self.last_connected self.seqno += 1 vlog.dbg("%s: entering %s" % (self.name, state.name)) self.state = state self.state_entered = now def run(self, now): """Assesses whether any action should be taken on this FSM. 
The return value is one of: - None: The client need not take any action. - Active client, ovs.reconnect.CONNECT: The client should start a connection attempt and indicate this by calling self.connecting(). If the connection attempt has definitely succeeded, it should call self.connected(). If the connection attempt has definitely failed, it should call self.connect_failed(). The FSM is smart enough to back off correctly after successful connections that quickly abort, so it is OK to call self.connected() after a low-level successful connection (e.g. connect()) even if the connection might soon abort due to a failure at a high-level (e.g. SSL negotiation failure). - Passive client, ovs.reconnect.CONNECT: The client should try to listen for a connection, if it is not already listening. It should call self.listening() if successful, otherwise self.connecting() or reconnected_connect_failed() if the attempt is in progress or definitely failed, respectively. A listening passive client should constantly attempt to accept a new connection and report an accepted connection with self.connected(). - ovs.reconnect.DISCONNECT: The client should abort the current connection or connection attempt or listen attempt and call self.disconnected() or self.connect_failed() to indicate it. - ovs.reconnect.PROBE: The client should send some kind of request to the peer that will elicit a response, to ensure that the connection is indeed in working order. 
(This will only be returned if the "probe interval" is nonzero--see self.set_probe_interval()).""" deadline = self.state.deadline(self) if deadline is not None and now >= deadline: return self.state.run(self, now) else: return None def wait(self, poller, now): """Causes the next call to poller.block() to wake up when self.run() should be called.""" timeout = self.timeout(now) if timeout >= 0: poller.timer_wait(timeout) def timeout(self, now): """Returns the number of milliseconds after which self.run() should be called if nothing else notable happens in the meantime, or None if this is currently unnecessary.""" deadline = self.state.deadline(self) if deadline is not None: remaining = deadline - now return max(0, remaining) else: return None def is_connected(self): """Returns True if this FSM is currently believed to be connected, that is, if self.connected() was called more recently than any call to self.connect_failed() or self.disconnected() or self.disable(), and False otherwise.""" return self.state.is_connected def get_last_connect_elapsed(self, now): """Returns the number of milliseconds since 'fsm' was last connected to its peer. Returns None if never connected.""" if self.last_connected: return now - self.last_connected else: return None def get_last_disconnect_elapsed(self, now): """Returns the number of milliseconds since 'fsm' was last disconnected from its peer. 
Returns None if never disconnected.""" if self.last_disconnected: return now - self.last_disconnected else: return None def get_stats(self, now): class Stats(object): pass stats = Stats() stats.creation_time = self.creation_time stats.last_connected = self.last_connected stats.last_disconnected = self.last_disconnected stats.last_activity = self.last_activity stats.backoff = self.backoff stats.seqno = self.seqno stats.is_connected = self.is_connected() stats.msec_since_connect = self.get_last_connect_elapsed(now) stats.msec_since_disconnect = self.get_last_disconnect_elapsed(now) stats.total_connected_duration = self.total_connected_duration if self.is_connected(): stats.total_connected_duration += ( self.get_last_connect_elapsed(now)) stats.n_attempted_connections = self.n_attempted_connections stats.n_successful_connections = self.n_successful_connections stats.state = self.state.name stats.state_elapsed = now - self.state_entered return stats def __may_retry(self): if self.max_tries is None: return True elif self.max_tries > 0: self.max_tries -= 1 return True else: return False
apache-2.0
ataylor32/django
django/db/migrations/topological_sort.py
538
1129
def topological_sort_as_sets(dependency_graph): """Variation of Kahn's algorithm (1962) that returns sets. Takes a dependency graph as a dictionary of node => dependencies. Yields sets of items in topological order, where the first set contains all nodes without dependencies, and each following set contains all nodes that depend on the nodes in the previously yielded sets. """ todo = dependency_graph.copy() while todo: current = {node for node, deps in todo.items() if len(deps) == 0} if not current: raise ValueError('Cyclic dependency in graph: {}'.format( ', '.join(repr(x) for x in todo.items()))) yield current # remove current from todo's nodes & dependencies todo = {node: (dependencies - current) for node, dependencies in todo.items() if node not in current} def stable_topological_sort(l, dependency_graph): result = [] for layer in topological_sort_as_sets(dependency_graph): for node in l: if node in layer: result.append(node) return result
bsd-3-clause
F5Networks/f5-ansible-modules
ansible_collections/f5networks/f5_modules/plugins/modules/bigip_monitor_oracle.py
1
27413
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright: (c) 2019, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type DOCUMENTATION = r''' --- module: bigip_monitor_oracle short_description: Manages BIG-IP Oracle monitors description: - Manages BIG-IP Oracle monitors. version_added: "1.3.0" options: name: description: - Monitor name. type: str required: True app_service: description: - The iApp service to be associated with this profile. When no service is specified, the default is None. type: str description: description: - Specifies descriptive text that identifies the monitor. type: str parent: description: - The parent template of this monitor template. Once this value has been set, it cannot be changed. - By default, this value is the C(oracle) parent on the C(Common) partition. type: str ip: description: - IP address part of the IP/port definition. If this parameter is not provided when creating a new monitor, the default value is '*'. type: str port: description: - Port address part of the IP/port definition. If this parameter is not provided when creating a new monitor, the default value is '*'. - If specifying an IP address, you must specify a value between 1 and 65535. type: str interval: description: - Specifies the frequency, in seconds, at which the system issues the monitor check when either the resource is down or the status of the resource is unknown. type: int timeout: description: - Specifies the number of seconds the target has in which to respond to the monitor request. - If the target responds within the set time period, it is considered 'up'. If the target does not respond within the set time period, it is considered 'down'. When this value is set to 0 (zero), the system uses the interval from the parent monitor. 
- Note that C(timeout) and C(time_until_up) combine to control when a resource is set to up. type: int time_until_up: description: - Specifies the number of seconds to wait after a resource first responds correctly to the monitor before setting the resource to 'up'. - During the interval, all responses from the resource must be correct. - When the interval expires, the resource is marked 'up'. - A value of 0 means the resource is marked up immediately upon receipt of the first correct response. type: int up_interval: description: - Specifies the interval for the system to use to perform the health check when a resource is up. - When C(0), specifies the system uses the interval in C(interval) to check the health of the resource. - When any other number, enables you to specify a different interval to use when checking the health of a resource that is up. type: int manual_resume: description: - Specifies whether the system automatically changes the status of a resource to B(enabled) at the next successful monitor check. - If you set this option to C(yes), you must manually re-enable the resource before the system can use it for load balancing connections. - When C(yes), specifies you must manually re-enable the resource after an unsuccessful monitor check. - When C(no), specifies the system automatically changes the status of a resource to B(enabled) at the next successful monitor check. type: bool recv: description: - Specifies the text string that the monitor looks for in the returned resource. - The most common receive expressions contain a text string that is included in a field in your database. - If you do not specify both C(send) and a C(recv) parameters, the monitor performs a simple service check and connect only. type: str recv_column: description: - Specifies the column in the database where the specified C(recv) string should be located. - This is an optional setting and is applicable only if you configure the C(send) and the C(recv) parameters. 
type: str recv_row: description: - Specifies the row in the database where the specified C(recv) string should be located. - This is an optional setting, and is applicable only if you configure the C(send) and the C(recv) parameters. type: str send: description: - Specifies the SQL query the monitor sends to the target object. - Since the string may have special characters, the system may require the string be enclosed with single quotation marks. If this value is C(none), a valid connection suffices to determine the service is up. In this case, the system does not need the recv, recv-row, and recv-column options and ignores them even if not C(none). type: str database: description: - Specifies the name of the database the monitor tries to access. type: str count: description: - Specifies the number of monitor probes after which the connection to the database will be terminated. - Count value of zero indicates that the connection will never be terminated. type: int target_username: description: - Specifies the user name, if the monitored target requires authentication. type: str target_password: description: - Specifies the password, if the monitored target requires authentication. type: str debug: description: - Specifies whether the monitor sends error messages and additional information to a log file created and labeled specifically for this monitor. type: bool update_password: description: - C(always) will update passwords if the C(target_password) is specified. - C(on_create) will only set the password for newly created monitors. type: str choices: - always - on_create default: always partition: description: - Device partition to manage resources on. type: str default: Common state: description: - When C(present), ensures the monitor exists. - When C(absent), ensures the monitor is removed. 
type: str choices: - present - absent default: present extends_documentation_fragment: f5networks.f5_modules.f5 author: - Wojciech Wypior (@wojtek0806) ''' EXAMPLES = r''' - name: Create an oracle monitor bigip_monitor_oracle: ip: 10.10.10.10 port: 10923 name: my_oracle_monitor send: "SELECT status FROM v$instance" recv: OPEN recv_column: 2 recv_row: 1 database: primary1 target_username: bigip target_password: secret update_password: on_create state: present provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost - name: Modify an oracle monitor bigip_monitor_oracle: name: my_oracle_monitor recv_column: 4 recv_row: 3 database: primary2 state: present provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost - name: Remove oracle monitor bigip_monitor_oracle: state: absent name: my_oracle_monitor provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost ''' RETURN = r''' app_service: description: The iApp service associated with this monitor. returned: changed type: str sample: /Common/good_service.app/good_service parent: description: The parent monitor. returned: changed type: str sample: /Common/foo_oracle description: description: The description of the monitor. returned: changed type: str sample: Important Monitor debug: description: - Whether the monitor sends error messages and additional information to a log file created and labeled specifically for this monitor. returned: changed type: bool sample: no ip: description: The new IP of IP/port definition. returned: changed type: str sample: 10.12.13.14 port: description: - Alias port or service for the monitor to check, on behalf of the pools or pool members with which the monitor is associated. returned: changed type: str sample: 80 interval: description: The new interval at which to run the monitor check. 
  returned: changed
  type: int
  sample: 2
up_interval:
  description: Interval for the system to use to perform the health check when a resource is up.
  returned: changed
  type: int
  sample: 0
timeout:
  description: The new timeout in which the remote system must respond to the monitor.
  returned: changed
  type: int
  sample: 10
manual_resume:
  description:
    - Specifies whether the system automatically changes the status of a resource
      to up at the next successful monitor check.
  returned: changed
  type: bool
  sample: yes
time_until_up:
  description: The new time in which to mark a system as up after first successful response.
  returned: changed
  type: int
  sample: 2
recv:
  description: The text string that the monitor looks for in the returned resource.
  returned: changed
  type: str
  sample: OPEN
send:
  description: The SQL query the monitor sends to the target object.
  returned: changed
  type: str
  sample: "SELECT status FROM v$instance"
database:
  description: The name of the database that the monitor tries to access.
  returned: changed
  type: str
  sample: primary1
target_username:
  description: The user name for the monitored target.
  returned: changed
  type: str
  sample: bigip
recv_column:
  description: The column in the database where the specified string should be located.
  returned: changed
  type: str
  sample: 2
recv_row:
  description: The row in the database where the specified string should be located.
returned: changed type: str sample: 1 ''' from datetime import datetime from ansible.module_utils.basic import ( AnsibleModule, env_fallback ) from ..module_utils.bigip import F5RestClient from ..module_utils.common import ( F5ModuleError, AnsibleF5Parameters, transform_name, f5_argument_spec, flatten_boolean, fq_name ) from ..module_utils.compare import cmp_str_with_none from ..module_utils.ipaddress import is_valid_ip from ..module_utils.icontrol import tmos_version from ..module_utils.teem import send_teem class Parameters(AnsibleF5Parameters): api_map = { 'appService': 'app_service', 'defaultsFrom': 'parent', 'timeUntilUp': 'time_until_up', 'manualResume': 'manual_resume', 'upInterval': 'up_interval', 'recvColumn': 'recv_column', 'recvRow': 'recv_row', 'username': 'target_username', 'password': 'target_password', } api_attributes = [ 'database', 'defaultsFrom', 'debug', 'description', 'destination', 'interval', 'manualResume', 'recv', 'recvColumn', 'recvRow', 'send', 'timeout', 'timeUntilUp', 'upInterval', 'username', 'password', 'count', ] returnables = [ 'app_service', 'parent', 'description', 'destination', 'debug', 'ip', 'port', 'interval', 'up_interval', 'timeout', 'manual_resume', 'time_until_up', 'recv_column', 'recv_row', 'count', 'send', 'recv', 'database', 'target_username', ] updatables = [ 'app_service', 'parent', 'description', 'destination', 'debug', 'ip', 'port', 'interval', 'up_interval', 'timeout', 'manual_resume', 'time_until_up', 'recv_column', 'recv_row', 'count', 'send', 'recv', 'database', 'target_username', 'target_password', ] class ApiParameters(Parameters): @property def ip(self): try: ip, port = self._values['destination'].split(':') except ValueError: # in version 15 wildcard changed this to have . 
instead of : as separator for wildcard try: ip, port = self._values['destination'].split('.') except ValueError as ex: raise F5ModuleError(str(ex)) return ip @property def port(self): try: ip, port = self._values['destination'].split(':') except ValueError: # in version 15 wildcard changed this to have . instead of : as separator for wildcard try: ip, port = self._values['destination'].split('.') except ValueError as ex: raise F5ModuleError(str(ex)) return port @property def description(self): if self._values['description'] in [None, 'none']: return None return self._values['description'] @property def count(self): if self._values['count'] is None: return None result = int(self._values['count']) return result class ModuleParameters(Parameters): @property def description(self): if self._values['description'] is None: return None elif self._values['description'] in ['none', '']: return '' return self._values['description'] @property def parent(self): if self._values['parent'] is None: return None result = fq_name(self.partition, self._values['parent']) return result @property def interval(self): if self._values['interval'] is None: return None if 1 > self._values['interval'] > 86400: raise F5ModuleError( "Interval value must be between 1 and 86400." ) return self._values['interval'] @property def timeout(self): if self._values['timeout'] is None: return None if self._values['timeout'] is None: return None if 1 > self._values['timeout'] > 86400: raise F5ModuleError( "Timeout value must be between 1 and 86400." ) return self._values['timeout'] @property def ip(self): if self._values['ip'] is None: return None if self._values['ip'] in ['*', '0.0.0.0']: return '*' elif is_valid_ip(self._values['ip']): return self._values['ip'] else: raise F5ModuleError( "The provided 'ip' parameter is not an IP address." 
) @property def destination(self): if self.ip is None and self.port is None: return None destination = '{0}:{1}'.format(self.ip, self.port) return destination @destination.setter def destination(self, value): ip, port = value.split(':') self._values['ip'] = ip self._values['port'] = port @property def time_until_up(self): if self._values['time_until_up'] is None: return None if self._values['time_until_up'] is None: return None if 0 > self._values['time_until_up'] > 86400: raise F5ModuleError( "Time_until_up value must be between 0 and 86400." ) return self._values['time_until_up'] @property def manual_resume(self): result = flatten_boolean(self._values['manual_resume']) if result == 'yes': return 'enabled' if result == 'no': return 'disabled' @property def debug(self): result = flatten_boolean(self._values['debug']) return result class Changes(Parameters): def to_return(self): result = {} try: for returnable in self.returnables: result[returnable] = getattr(self, returnable) result = self._filter_params(result) except Exception: raise return result class UsableChanges(Changes): @property def count(self): if self._values['count'] is None: return None result = str(self._values['count']) return result class ReportableChanges(Changes): @property def manual_resume(self): return flatten_boolean(self._values['manual_resume']) @property def count(self): if self._values['count'] is None: return None result = int(self._values['count']) return result class Difference(object): def __init__(self, want, have=None): self.want = want self.have = have def compare(self, param): try: result = getattr(self, param) return result except AttributeError: return self.__default(param) def __default(self, param): attr1 = getattr(self.want, param) try: attr2 = getattr(self.have, param) if attr1 != attr2: return attr1 except AttributeError: return attr1 @property def destination(self): if self.want.ip is None and self.want.port is None: return None if self.want.port is None: 
self.want.update({'port': self.have.port}) if self.want.ip is None: self.want.update({'ip': self.have.ip}) if self.want.port in [None, '*'] and self.want.ip != '*': raise F5ModuleError( "Specifying an IP address requires that a port number be specified." ) if self.want.destination != self.have.destination: return self.want.destination @property def description(self): return cmp_str_with_none(self.want.description, self.have.description) @property def interval(self): if self.want.timeout is not None and self.want.interval is not None: if self.want.interval >= self.want.timeout: raise F5ModuleError( "Parameter 'interval' must be less than 'timeout'." ) elif self.want.timeout is not None: if self.have.interval >= self.want.timeout: raise F5ModuleError( "Parameter 'interval' must be less than 'timeout'." ) elif self.want.interval is not None: if self.want.interval >= self.have.timeout: raise F5ModuleError( "Parameter 'interval' must be less than 'timeout'." ) if self.want.interval != self.have.interval: return self.want.interval @property def target_password(self): if self.want.target_password != self.have.target_password: if self.want.update_password == 'always': result = self.want.target_password return result class ModuleManager(object): def __init__(self, *args, **kwargs): self.module = kwargs.get('module', None) self.client = F5RestClient(**self.module.params) self.want = ModuleParameters(params=self.module.params) self.have = ApiParameters() self.changes = UsableChanges() def _set_changed_options(self): changed = {} for key in Parameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = UsableChanges(params=changed) def _update_changed_options(self): diff = Difference(self.want, self.have) updatables = Parameters.updatables changed = dict() for k in updatables: change = diff.compare(k) if change is None: continue else: if isinstance(change, dict): changed.update(change) else: changed[k] = change 
if changed: self.changes = UsableChanges(params=changed) return True return False def _announce_deprecations(self, result): warnings = result.pop('__warnings', []) for warning in warnings: self.client.module.deprecate( msg=warning['msg'], version=warning['version'] ) def exec_module(self): start = datetime.now().isoformat() version = tmos_version(self.client) changed = False result = dict() state = self.want.state if state == "present": changed = self.present() elif state == "absent": changed = self.absent() reportable = ReportableChanges(params=self.changes.to_return()) changes = reportable.to_return() result.update(**changes) result.update(dict(changed=changed)) self._announce_deprecations(result) send_teem(start, self.module, version) return result def present(self): if self.exists(): return self.update() else: return self.create() def absent(self): if self.exists(): return self.remove() return False def should_update(self): result = self._update_changed_options() if result: return True return False def update(self): self.have = self.read_current_from_device() if not self.should_update(): return False if self.module.check_mode: return True self.update_on_device() return True def remove(self): if self.module.check_mode: return True self.remove_from_device() if self.exists(): raise F5ModuleError("Failed to delete the resource.") return True def create(self): self._set_changed_options() if self.module.check_mode: return True self.create_on_device() return True def exists(self): uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/oracle/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if resp.status == 404 or 'code' in response and response['code'] == 404: return False if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]: return True errors = 
[401, 403, 409, 500, 501, 502, 503, 504] if resp.status in errors or 'code' in response and response['code'] in errors: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) def create_on_device(self): params = self.changes.api_params() params['name'] = self.want.name params['partition'] = self.want.partition uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/oracle/".format( self.client.provider['server'], self.client.provider['server_port'], ) resp = self.client.api.post(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]: return True raise F5ModuleError(resp.content) def update_on_device(self): params = self.changes.api_params() uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/oracle/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) resp = self.client.api.patch(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]: return True raise F5ModuleError(resp.content) def remove_from_device(self): uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/oracle/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) response = self.client.api.delete(uri) if response.status in [200, 201]: return True raise F5ModuleError(response.content) def read_current_from_device(self): uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/oracle/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if resp.status in [200, 201] or 'code' in response and 
response['code'] in [200, 201]: return ApiParameters(params=response) raise F5ModuleError(resp.content) class ArgumentSpec(object): def __init__(self): self.supports_check_mode = True argument_spec = dict( name=dict(required=True), app_service=dict(), parent=dict(), description=dict(), debug=dict(type='bool'), database=dict(), count=dict(type='int'), ip=dict(), port=dict(), interval=dict(type='int'), up_interval=dict(type='int'), timeout=dict(type='int'), manual_resume=dict(type='bool'), time_until_up=dict(type='int'), recv=dict(), recv_column=dict(), recv_row=dict(), send=dict(), target_username=dict(), target_password=dict(no_log=True), update_password=dict( default='always', choices=['always', 'on_create'] ), state=dict( default='present', choices=['present', 'absent'] ), partition=dict( default='Common', fallback=(env_fallback, ['F5_PARTITION']) ) ) self.argument_spec = {} self.argument_spec.update(f5_argument_spec) self.argument_spec.update(argument_spec) def main(): spec = ArgumentSpec() module = AnsibleModule( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode, ) try: mm = ModuleManager(module=module) results = mm.exec_module() module.exit_json(**results) except F5ModuleError as ex: module.fail_json(msg=str(ex)) if __name__ == '__main__': main()
mit
elemoine/papyrus
papyrus/__init__.py
1
3162
def add_papyrus_handler(self, route_name_prefix, base_url, handler):
    """ Add a Papyrus handler, i.e. a handler defining the MapFish
    HTTP interface.

    Example::

        import papyrus
        config.include(papyrus)
        config.add_papyrus_handler(
            'spots', '/spots', 'mypackage.handlers.SpotHandler')

    Arguments:

    ``route_name_prefix``
        The prefix used for the route names passed to
        ``config.add_handler``.

    ``base_url``
        The web service's base URL, e.g. ``/spots``. No trailing
        slash!

    ``handler``
        a dotted name or a reference to a handler class,
        e.g. ``'mypackage.handlers.MyHandler'``.
    """
    # (route name suffix, URL suffix, handler action, request method) for
    # each view of the MapFish protocol. "count" is registered before
    # "read_one" so that GET <base_url>/count is matched by the count
    # route and not captured by the more general <base_url>/{id} route
    # (Pyramid matches routes in registration order).
    actions = (
        ('_read_many', '', 'read_many', 'GET'),
        ('_count', '/count', 'count', 'GET'),
        ('_read_one', '/{id}', 'read_one', 'GET'),
        ('_create', '', 'create', 'POST'),
        ('_update', '/{id}', 'update', 'PUT'),
        ('_delete', '/{id}', 'delete', 'DELETE'),
    )
    for name_suffix, url_suffix, action, request_method in actions:
        self.add_handler(route_name_prefix + name_suffix,
                         base_url + url_suffix, handler,
                         action=action, request_method=request_method)


def add_papyrus_routes(self, route_name_prefix, base_url):
    """ A helper method that adds routes to view callables that,
    together, implement the MapFish HTTP interface.

    Example::

        import papyrus
        config.include(papyrus)
        config.add_papyrus_routes('spots', '/spots')
        config.scan()

    Arguments:

    ``route_name_prefix``
        The prefix used for the route names passed to
        ``config.add_route``.

    ``base_url``
        The web service's base URL, e.g. ``/spots``. No trailing
        slash!
    """
    # Same ordering rationale as add_papyrus_handler: the literal
    # "/count" route must be registered before the "/{id}" pattern,
    # otherwise GET <base_url>/count would be routed to read_one with
    # id == "count".
    routes = (
        ('_read_many', '', 'GET'),
        ('_count', '/count', 'GET'),
        ('_read_one', '/{id}', 'GET'),
        ('_create', '', 'POST'),
        ('_update', '/{id}', 'PUT'),
        ('_delete', '/{id}', 'DELETE'),
    )
    for name_suffix, url_suffix, request_method in routes:
        self.add_route(route_name_prefix + name_suffix,
                       base_url + url_suffix,
                       request_method=request_method)


def includeme(config):
    """ The function to pass to ``config.include``. Requires the
    ``pyramid_handlers`` module.
    """
    config.add_directive('add_papyrus_handler', add_papyrus_handler)
    config.add_directive('add_papyrus_routes', add_papyrus_routes)
bsd-2-clause
untitaker/werkzeug
werkzeug/formparser.py
162
21207
# -*- coding: utf-8 -*- """ werkzeug.formparser ~~~~~~~~~~~~~~~~~~~ This module implements the form parsing. It supports url-encoded forms as well as non-nested multipart uploads. :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import re import codecs from io import BytesIO from tempfile import TemporaryFile from itertools import chain, repeat, tee from functools import update_wrapper from werkzeug._compat import to_native, text_type from werkzeug.urls import url_decode_stream from werkzeug.wsgi import make_line_iter, \ get_input_stream, get_content_length from werkzeug.datastructures import Headers, FileStorage, MultiDict from werkzeug.http import parse_options_header #: an iterator that yields empty strings _empty_string_iter = repeat('') #: a regular expression for multipart boundaries _multipart_boundary_re = re.compile('^[ -~]{0,200}[!-~]$') #: supported http encodings that are also available in python we support #: for multipart messages. _supported_multipart_encodings = frozenset(['base64', 'quoted-printable']) def default_stream_factory(total_content_length, filename, content_type, content_length=None): """The stream factory that is used per default.""" if total_content_length > 1024 * 500: return TemporaryFile('wb+') return BytesIO() def parse_form_data(environ, stream_factory=None, charset='utf-8', errors='replace', max_form_memory_size=None, max_content_length=None, cls=None, silent=True): """Parse the form data in the environ and return it as tuple in the form ``(stream, form, files)``. You should only call this method if the transport method is `POST`, `PUT`, or `PATCH`. If the mimetype of the data transmitted is `multipart/form-data` the files multidict will be filled with `FileStorage` objects. If the mimetype is unknown the input stream is wrapped and returned as first argument, else the stream is empty. This is a shortcut for the common usage of :class:`FormDataParser`. 
Have a look at :ref:`dealing-with-request-data` for more details. .. versionadded:: 0.5 The `max_form_memory_size`, `max_content_length` and `cls` parameters were added. .. versionadded:: 0.5.1 The optional `silent` flag was added. :param environ: the WSGI environment to be used for parsing. :param stream_factory: An optional callable that returns a new read and writeable file descriptor. This callable works the same as :meth:`~BaseResponse._get_file_stream`. :param charset: The character set for URL and url encoded form data. :param errors: The encoding error behavior. :param max_form_memory_size: the maximum number of bytes to be accepted for in-memory stored form data. If the data exceeds the value specified an :exc:`~exceptions.RequestEntityTooLarge` exception is raised. :param max_content_length: If this is provided and the transmitted data is longer than this value an :exc:`~exceptions.RequestEntityTooLarge` exception is raised. :param cls: an optional dict class to use. If this is not specified or `None` the default :class:`MultiDict` is used. :param silent: If set to False parsing errors will not be caught. :return: A tuple in the form ``(stream, form, files)``. """ return FormDataParser(stream_factory, charset, errors, max_form_memory_size, max_content_length, cls, silent).parse_from_environ(environ) def exhaust_stream(f): """Helper decorator for methods that exhausts the stream on return.""" def wrapper(self, stream, *args, **kwargs): try: return f(self, stream, *args, **kwargs) finally: exhaust = getattr(stream, 'exhaust', None) if exhaust is not None: exhaust() else: while 1: chunk = stream.read(1024 * 64) if not chunk: break return update_wrapper(wrapper, f) class FormDataParser(object): """This class implements parsing of form data for Werkzeug. By itself it can parse multipart and url encoded form data. 
It can be subclassed and extended but for most mimetypes it is a better idea to use the untouched stream and expose it as separate attributes on a request object. .. versionadded:: 0.8 :param stream_factory: An optional callable that returns a new read and writeable file descriptor. This callable works the same as :meth:`~BaseResponse._get_file_stream`. :param charset: The character set for URL and url encoded form data. :param errors: The encoding error behavior. :param max_form_memory_size: the maximum number of bytes to be accepted for in-memory stored form data. If the data exceeds the value specified an :exc:`~exceptions.RequestEntityTooLarge` exception is raised. :param max_content_length: If this is provided and the transmitted data is longer than this value an :exc:`~exceptions.RequestEntityTooLarge` exception is raised. :param cls: an optional dict class to use. If this is not specified or `None` the default :class:`MultiDict` is used. :param silent: If set to False parsing errors will not be caught. """ def __init__(self, stream_factory=None, charset='utf-8', errors='replace', max_form_memory_size=None, max_content_length=None, cls=None, silent=True): if stream_factory is None: stream_factory = default_stream_factory self.stream_factory = stream_factory self.charset = charset self.errors = errors self.max_form_memory_size = max_form_memory_size self.max_content_length = max_content_length if cls is None: cls = MultiDict self.cls = cls self.silent = silent def get_parse_func(self, mimetype, options): return self.parse_functions.get(mimetype) def parse_from_environ(self, environ): """Parses the information from the environment as form data. :param environ: the WSGI environment to be used for parsing. :return: A tuple in the form ``(stream, form, files)``. 
""" content_type = environ.get('CONTENT_TYPE', '') content_length = get_content_length(environ) mimetype, options = parse_options_header(content_type) return self.parse(get_input_stream(environ), mimetype, content_length, options) def parse(self, stream, mimetype, content_length, options=None): """Parses the information from the given stream, mimetype, content length and mimetype parameters. :param stream: an input stream :param mimetype: the mimetype of the data :param content_length: the content length of the incoming data :param options: optional mimetype parameters (used for the multipart boundary for instance) :return: A tuple in the form ``(stream, form, files)``. """ if self.max_content_length is not None and \ content_length is not None and \ content_length > self.max_content_length: raise exceptions.RequestEntityTooLarge() if options is None: options = {} parse_func = self.get_parse_func(mimetype, options) if parse_func is not None: try: return parse_func(self, stream, mimetype, content_length, options) except ValueError: if not self.silent: raise return stream, self.cls(), self.cls() @exhaust_stream def _parse_multipart(self, stream, mimetype, content_length, options): parser = MultiPartParser(self.stream_factory, self.charset, self.errors, max_form_memory_size=self.max_form_memory_size, cls=self.cls) boundary = options.get('boundary') if boundary is None: raise ValueError('Missing boundary') if isinstance(boundary, text_type): boundary = boundary.encode('ascii') form, files = parser.parse(stream, boundary, content_length) return stream, form, files @exhaust_stream def _parse_urlencoded(self, stream, mimetype, content_length, options): if self.max_form_memory_size is not None and \ content_length is not None and \ content_length > self.max_form_memory_size: raise exceptions.RequestEntityTooLarge() form = url_decode_stream(stream, self.charset, errors=self.errors, cls=self.cls) return stream, form, self.cls() #: mapping of mimetypes to parsing functions 
parse_functions = { 'multipart/form-data': _parse_multipart, 'application/x-www-form-urlencoded': _parse_urlencoded, 'application/x-url-encoded': _parse_urlencoded } def is_valid_multipart_boundary(boundary): """Checks if the string given is a valid multipart boundary.""" return _multipart_boundary_re.match(boundary) is not None def _line_parse(line): """Removes line ending characters and returns a tuple (`stripped_line`, `is_terminated`). """ if line[-2:] in ['\r\n', b'\r\n']: return line[:-2], True elif line[-1:] in ['\r', '\n', b'\r', b'\n']: return line[:-1], True return line, False def parse_multipart_headers(iterable): """Parses multipart headers from an iterable that yields lines (including the trailing newline symbol). The iterable has to be newline terminated. The iterable will stop at the line where the headers ended so it can be further consumed. :param iterable: iterable of strings that are newline terminated """ result = [] for line in iterable: line = to_native(line) line, line_terminated = _line_parse(line) if not line_terminated: raise ValueError('unexpected end of line in multipart header') if not line: break elif line[0] in ' \t' and result: key, value = result[-1] result[-1] = (key, value + '\n ' + line[1:]) else: parts = line.split(':', 1) if len(parts) == 2: result.append((parts[0].strip(), parts[1].strip())) # we link the list to the headers, no need to create a copy, the # list was not shared anyways. 
return Headers(result) _begin_form = 'begin_form' _begin_file = 'begin_file' _cont = 'cont' _end = 'end' class MultiPartParser(object): def __init__(self, stream_factory=None, charset='utf-8', errors='replace', max_form_memory_size=None, cls=None, buffer_size=64 * 1024): self.stream_factory = stream_factory self.charset = charset self.errors = errors self.max_form_memory_size = max_form_memory_size if stream_factory is None: stream_factory = default_stream_factory if cls is None: cls = MultiDict self.cls = cls # make sure the buffer size is divisible by four so that we can base64 # decode chunk by chunk assert buffer_size % 4 == 0, 'buffer size has to be divisible by 4' # also the buffer size has to be at least 1024 bytes long or long headers # will freak out the system assert buffer_size >= 1024, 'buffer size has to be at least 1KB' self.buffer_size = buffer_size def _fix_ie_filename(self, filename): """Internet Explorer 6 transmits the full file name if a file is uploaded. This function strips the full path if it thinks the filename is Windows-like absolute. """ if filename[1:3] == ':\\' or filename[:2] == '\\\\': return filename.split('\\')[-1] return filename def _find_terminator(self, iterator): """The terminator might have some additional newlines before it. There is at least one application that sends additional newlines before headers (the python setuptools package). 
""" for line in iterator: if not line: break line = line.strip() if line: return line return b'' def fail(self, message): raise ValueError(message) def get_part_encoding(self, headers): transfer_encoding = headers.get('content-transfer-encoding') if transfer_encoding is not None and \ transfer_encoding in _supported_multipart_encodings: return transfer_encoding def get_part_charset(self, headers): # Figure out input charset for current part content_type = headers.get('content-type') if content_type: mimetype, ct_params = parse_options_header(content_type) return ct_params.get('charset', self.charset) return self.charset def start_file_streaming(self, filename, headers, total_content_length): if isinstance(filename, bytes): filename = filename.decode(self.charset, self.errors) filename = self._fix_ie_filename(filename) content_type = headers.get('content-type') try: content_length = int(headers['content-length']) except (KeyError, ValueError): content_length = 0 container = self.stream_factory(total_content_length, content_type, filename, content_length) return filename, container def in_memory_threshold_reached(self, bytes): raise exceptions.RequestEntityTooLarge() def validate_boundary(self, boundary): if not boundary: self.fail('Missing boundary') if not is_valid_multipart_boundary(boundary): self.fail('Invalid boundary: %s' % boundary) if len(boundary) > self.buffer_size: # pragma: no cover # this should never happen because we check for a minimum size # of 1024 and boundaries may not be longer than 200. The only # situation when this happens is for non debug builds where # the assert is skipped. 
self.fail('Boundary longer than buffer size') def parse_lines(self, file, boundary, content_length): """Generate parts of ``('begin_form', (headers, name))`` ``('begin_file', (headers, name, filename))`` ``('cont', bytestring)`` ``('end', None)`` Always obeys the grammar parts = ( begin_form cont* end | begin_file cont* end )* """ next_part = b'--' + boundary last_part = next_part + b'--' iterator = chain(make_line_iter(file, limit=content_length, buffer_size=self.buffer_size), _empty_string_iter) terminator = self._find_terminator(iterator) if terminator == last_part: return elif terminator != next_part: self.fail('Expected boundary at start of multipart data') while terminator != last_part: headers = parse_multipart_headers(iterator) disposition = headers.get('content-disposition') if disposition is None: self.fail('Missing Content-Disposition header') disposition, extra = parse_options_header(disposition) transfer_encoding = self.get_part_encoding(headers) name = extra.get('name') filename = extra.get('filename') # if no content type is given we stream into memory. A list is # used as a temporary container. if filename is None: yield _begin_form, (headers, name) # otherwise we parse the rest of the headers and ask the stream # factory for something we can write in. else: yield _begin_file, (headers, name, filename) buf = b'' for line in iterator: if not line: self.fail('unexpected end of stream') if line[:2] == b'--': terminator = line.rstrip() if terminator in (next_part, last_part): break if transfer_encoding is not None: if transfer_encoding == 'base64': transfer_encoding = 'base64_codec' try: line = codecs.decode(line, transfer_encoding) except Exception: self.fail('could not decode transfer encoded chunk') # we have something in the buffer from the last iteration. # this is usually a newline delimiter. if buf: yield _cont, buf buf = b'' # If the line ends with windows CRLF we write everything except # the last two bytes. 
In all other cases however we write # everything except the last byte. If it was a newline, that's # fine, otherwise it does not matter because we will write it # the next iteration. this ensures we do not write the # final newline into the stream. That way we do not have to # truncate the stream. However we do have to make sure that # if something else than a newline is in there we write it # out. if line[-2:] == b'\r\n': buf = b'\r\n' cutoff = -2 else: buf = line[-1:] cutoff = -1 yield _cont, line[:cutoff] else: # pragma: no cover raise ValueError('unexpected end of part') # if we have a leftover in the buffer that is not a newline # character we have to flush it, otherwise we will chop of # certain values. if buf not in (b'', b'\r', b'\n', b'\r\n'): yield _cont, buf yield _end, None def parse_parts(self, file, boundary, content_length): """Generate ``('file', (name, val))`` and ``('form', (name, val))`` parts. """ in_memory = 0 for ellt, ell in self.parse_lines(file, boundary, content_length): if ellt == _begin_file: headers, name, filename = ell is_file = True guard_memory = False filename, container = self.start_file_streaming( filename, headers, content_length) _write = container.write elif ellt == _begin_form: headers, name = ell is_file = False container = [] _write = container.append guard_memory = self.max_form_memory_size is not None elif ellt == _cont: _write(ell) # if we write into memory and there is a memory size limit we # count the number of bytes in memory and raise an exception if # there is too much data in memory. 
if guard_memory: in_memory += len(ell) if in_memory > self.max_form_memory_size: self.in_memory_threshold_reached(in_memory) elif ellt == _end: if is_file: container.seek(0) yield ('file', (name, FileStorage(container, filename, name, headers=headers))) else: part_charset = self.get_part_charset(headers) yield ('form', (name, b''.join(container).decode( part_charset, self.errors))) def parse(self, file, boundary, content_length): formstream, filestream = tee( self.parse_parts(file, boundary, content_length), 2) form = (p[1] for p in formstream if p[0] == 'form') files = (p[1] for p in filestream if p[0] == 'file') return self.cls(form), self.cls(files) from werkzeug import exceptions
bsd-3-clause
MiLk/ansible
lib/ansible/modules/cloud/amazon/sqs_queue.py
50
10558
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['stableinterface'],
                    'supported_by': 'curated'}


DOCUMENTATION = """
---
module: sqs_queue
short_description: Creates or deletes AWS SQS queues.
description:
  - Create or delete AWS SQS queues.
  - Update attributes on existing queues.
version_added: "2.0"
author:
  - Alan Loi (@loia)
  - Fernando Jose Pando (@nand0p)
  - Nadir Lloret (@nadirollo)
requirements:
  - "boto >= 2.33.0"
options:
  state:
    description:
      - Create or delete the queue
    required: false
    choices: ['present', 'absent']
    default: 'present'
  name:
    description:
      - Name of the queue.
    required: true
  default_visibility_timeout:
    description:
      - The default visibility timeout in seconds.
    required: false
    default: null
  message_retention_period:
    description:
      - The message retention period in seconds.
    required: false
    default: null
  maximum_message_size:
    description:
      - The maximum message size in bytes.
    required: false
    default: null
  delivery_delay:
    description:
      - The delivery delay in seconds.
    required: false
    default: null
  receive_message_wait_time:
    description:
      - The receive message wait time in seconds.
    required: false
    default: null
  policy:
    description:
      - The json dict policy to attach to queue
    required: false
    default: null
    version_added: "2.1"
  redrive_policy:
    description:
      - json dict with the redrive_policy (see example)
    required: false
    default: null
    version_added: "2.2"
extends_documentation_fragment:
    - aws
    - ec2
"""

RETURN = '''
default_visibility_timeout:
    description: The default visibility timeout in seconds.
    type: int
    returned: always
    sample: 30
delivery_delay:
    description: The delivery delay in seconds.
    type: int
    returned: always
    sample: 0
maximum_message_size:
    description: The maximum message size in bytes.
    type: int
    returned: always
    sample: 262144
message_retention_period:
    description: The message retention period in seconds.
    type: int
    returned: always
    sample: 345600
name:
    description: Name of the SQS Queue
    type: string
    returned: always
    sample: "queuename-987d2de0"
queue_arn:
    description: The queue's Amazon resource name (ARN).
    type: string
    returned: on successful creation or update of the queue
    sample: 'arn:aws:sqs:us-east-1:199999999999:queuename-987d2de0'
receive_message_wait_time:
    description: The receive message wait time in seconds.
    type: int
    returned: always
    sample: 0
region:
    description: Region that the queue was created within
    type: string
    returned: always
    sample: 'us-east-1'
'''

EXAMPLES = '''
# Create SQS queue with redrive policy
- sqs_queue:
    name: my-queue
    region: ap-southeast-2
    default_visibility_timeout: 120
    message_retention_period: 86400
    maximum_message_size: 1024
    delivery_delay: 30
    receive_message_wait_time: 20
    policy: "{{ json_dict }}"
    redrive_policy:
      maxReceiveCount: 5
      deadLetterTargetArn: arn:aws:sqs:eu-west-1:123456789012:my-dead-queue

# Delete SQS queue
- sqs_queue:
    name: my-queue
    region: ap-southeast-2
    state: absent
'''

import json
import traceback

try:
    import boto.sqs
    from boto.exception import BotoServerError, NoAuthHandlerFound
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, connect_to_aws, ec2_argument_spec, get_aws_connection_info


def create_or_update_sqs_queue(connection, module):
    """Create the queue if it does not exist, otherwise sync its attributes.

    Terminates the module run via ``exit_json``/``fail_json``.

    :param connection: boto SQS connection for the selected region.
    :param module: the AnsibleModule instance (provides params/check_mode).
    """
    queue_name = module.params.get('name')

    queue_attributes = dict(
        default_visibility_timeout=module.params.get('default_visibility_timeout'),
        message_retention_period=module.params.get('message_retention_period'),
        maximum_message_size=module.params.get('maximum_message_size'),
        delivery_delay=module.params.get('delivery_delay'),
        receive_message_wait_time=module.params.get('receive_message_wait_time'),
        policy=module.params.get('policy'),
        redrive_policy=module.params.get('redrive_policy')
    )

    result = dict(
        region=module.params.get('region'),
        name=queue_name,
    )
    result.update(queue_attributes)

    try:
        queue = connection.get_queue(queue_name)
        if queue:
            # Update existing queue in place.
            result['changed'] = update_sqs_queue(queue, check_mode=module.check_mode, **queue_attributes)
        else:
            # Create a new queue, then apply the requested attributes.
            if not module.check_mode:
                queue = connection.create_queue(queue_name)
                update_sqs_queue(queue, **queue_attributes)
            result['changed'] = True

        if not module.check_mode:
            # Report the authoritative values read back from AWS rather than
            # echoing the user-supplied inputs.
            result['queue_arn'] = queue.get_attributes('QueueArn')['QueueArn']
            result['default_visibility_timeout'] = queue.get_attributes('VisibilityTimeout')['VisibilityTimeout']
            result['message_retention_period'] = queue.get_attributes('MessageRetentionPeriod')['MessageRetentionPeriod']
            result['maximum_message_size'] = queue.get_attributes('MaximumMessageSize')['MaximumMessageSize']
            result['delivery_delay'] = queue.get_attributes('DelaySeconds')['DelaySeconds']
            result['receive_message_wait_time'] = queue.get_attributes('ReceiveMessageWaitTimeSeconds')['ReceiveMessageWaitTimeSeconds']

    except BotoServerError:
        result['msg'] = 'Failed to create/update sqs queue due to error: ' + traceback.format_exc()
        module.fail_json(**result)
    else:
        module.exit_json(**result)


def update_sqs_queue(queue,
                     check_mode=False,
                     default_visibility_timeout=None,
                     message_retention_period=None,
                     maximum_message_size=None,
                     delivery_delay=None,
                     receive_message_wait_time=None,
                     policy=None,
                     redrive_policy=None):
    """Apply every non-None attribute to *queue*; return True if any changed."""
    changed = False

    changed = set_queue_attribute(queue, 'VisibilityTimeout', default_visibility_timeout,
                                  check_mode=check_mode) or changed
    changed = set_queue_attribute(queue, 'MessageRetentionPeriod', message_retention_period,
                                  check_mode=check_mode) or changed
    changed = set_queue_attribute(queue, 'MaximumMessageSize', maximum_message_size,
                                  check_mode=check_mode) or changed
    changed = set_queue_attribute(queue, 'DelaySeconds', delivery_delay,
                                  check_mode=check_mode) or changed
    changed = set_queue_attribute(queue, 'ReceiveMessageWaitTimeSeconds', receive_message_wait_time,
                                  check_mode=check_mode) or changed
    changed = set_queue_attribute(queue, 'Policy', policy,
                                  check_mode=check_mode) or changed
    changed = set_queue_attribute(queue, 'RedrivePolicy', redrive_policy,
                                  check_mode=check_mode) or changed
    return changed


def set_queue_attribute(queue, attribute, value, check_mode=False):
    """Set one queue attribute if it differs from the current value.

    Returns True when the attribute was (or, in check mode, would be) changed.
    """
    # None means "not specified by the user".  Explicit falsy values such as
    # a delivery delay of 0 are valid and must still be applied; the previous
    # `if not value` check silently discarded them.
    if value is None:
        return False

    try:
        existing_value = queue.get_attributes(attributes=attribute)[attribute]
    except BotoServerError:
        # Attribute could not be read (e.g. not set yet) -- treat as empty so
        # any user-supplied value registers as a change.  Narrowed from a bare
        # `except:`, which also swallowed KeyboardInterrupt/SystemExit.
        existing_value = ''

    # convert dict attributes to JSON strings (sort keys for comparing)
    if attribute in ['Policy', 'RedrivePolicy']:
        value = json.dumps(value, sort_keys=True)
        if existing_value:
            existing_value = json.dumps(json.loads(existing_value), sort_keys=True)

    if str(value) != existing_value:
        if not check_mode:
            queue.set_attribute(attribute, value)
        return True

    return False


def delete_sqs_queue(connection, module):
    """Delete the named queue if it exists; exit via exit_json/fail_json."""
    queue_name = module.params.get('name')

    result = dict(
        region=module.params.get('region'),
        name=queue_name,
    )

    try:
        queue = connection.get_queue(queue_name)
        if queue:
            if not module.check_mode:
                connection.delete_queue(queue)
            result['changed'] = True
        else:
            result['changed'] = False

    except BotoServerError:
        result['msg'] = 'Failed to delete sqs queue due to error: ' + traceback.format_exc()
        module.fail_json(**result)
    else:
        module.exit_json(**result)


def main():
    """Module entry point: parse arguments, connect, dispatch on state."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        state=dict(default='present', choices=['present', 'absent']),
        name=dict(required=True, type='str'),
        default_visibility_timeout=dict(type='int'),
        message_retention_period=dict(type='int'),
        maximum_message_size=dict(type='int'),
        delivery_delay=dict(type='int'),
        receive_message_wait_time=dict(type='int'),
        policy=dict(type='dict', required=False),
        redrive_policy=dict(type='dict', required=False),
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if not region:
        module.fail_json(msg='region must be specified')

    try:
        connection = connect_to_aws(boto.sqs, region, **aws_connect_params)
    except (NoAuthHandlerFound, AnsibleAWSError) as e:
        module.fail_json(msg=str(e))

    state = module.params.get('state')
    if state == 'present':
        create_or_update_sqs_queue(connection, module)
    elif state == 'absent':
        delete_sqs_queue(connection, module)


if __name__ == '__main__':
    main()
gpl-3.0
andreif/django
django/utils/text.py
308
14923
from __future__ import unicode_literals

import re
import unicodedata
from gzip import GzipFile
from io import BytesIO

from django.utils import six
from django.utils.encoding import force_text
from django.utils.functional import SimpleLazyObject, allow_lazy
from django.utils.safestring import SafeText, mark_safe
from django.utils.six.moves import html_entities
from django.utils.translation import pgettext, ugettext as _, ugettext_lazy

if six.PY2:
    # Import force_unicode even though this module doesn't use it, because some
    # people rely on it being here.
    from django.utils.encoding import force_unicode  # NOQA


def capfirst(x):
    """Capitalize the first letter of a string (lazily evaluable)."""
    # Was a lambda assignment; a plain def is the idiomatic form (PEP 8 E731)
    # and gives the function a proper name in tracebacks.
    return x and force_text(x)[0].upper() + force_text(x)[1:]
capfirst = allow_lazy(capfirst, six.text_type)

# Set up regular expressions
re_words = re.compile(r'<.*?>|((?:\w[-\w]*|&.*?;)+)', re.U | re.S)
re_chars = re.compile(r'<.*?>|(.)', re.U | re.S)
re_tag = re.compile(r'<(/)?([^ ]+?)(?:(\s*/)| .*?)?>', re.S)
re_newlines = re.compile(r'\r\n|\r')  # Used in normalize_newlines
re_camel_case = re.compile(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))')


def wrap(text, width):
    """
    A word-wrap function that preserves existing line breaks. Expects that
    existing line breaks are posix newlines.

    All white space is preserved except added line breaks consume the space on
    which they break the line.

    Long words are not wrapped, so the output text may have lines longer than
    ``width``.
    """
    text = force_text(text)

    def _generator():
        for line in text.splitlines(True):  # True keeps trailing linebreaks
            max_width = min((line.endswith('\n') and width + 1 or width), width)
            while len(line) > max_width:
                space = line[:max_width + 1].rfind(' ') + 1
                if space == 0:
                    space = line.find(' ') + 1
                    if space == 0:
                        yield line
                        line = ''
                        break
                yield '%s\n' % line[:space - 1]
                line = line[space:]
                max_width = min((line.endswith('\n') and width + 1 or width), width)
            if line:
                yield line
    return ''.join(_generator())
wrap = allow_lazy(wrap, six.text_type)


class Truncator(SimpleLazyObject):
    """
    An object used to truncate text, either by characters or words.
    """
    def __init__(self, text):
        super(Truncator, self).__init__(lambda: force_text(text))

    def add_truncation_text(self, text, truncate=None):
        """Append the truncation marker (default '...') to *text*."""
        if truncate is None:
            truncate = pgettext(
                'String to return when truncating text',
                '%(truncated_text)s...')
        truncate = force_text(truncate)
        if '%(truncated_text)s' in truncate:
            return truncate % {'truncated_text': text}
        # The truncation text didn't contain the %(truncated_text)s string
        # replacement argument so just append it to the text.
        if text.endswith(truncate):
            # But don't append the truncation text if the current text already
            # ends in this.
            return text
        return '%s%s' % (text, truncate)

    def chars(self, num, truncate=None, html=False):
        """
        Returns the text truncated to be no longer than the specified number
        of characters.

        Takes an optional argument of what should be used to notify that the
        string has been truncated, defaulting to a translatable string of an
        ellipsis (...).
        """
        length = int(num)
        text = unicodedata.normalize('NFC', self._wrapped)

        # Calculate the length to truncate to (max length - end_text length)
        truncate_len = length
        for char in self.add_truncation_text('', truncate):
            if not unicodedata.combining(char):
                truncate_len -= 1
                if truncate_len == 0:
                    break
        if html:
            return self._truncate_html(length, truncate, text, truncate_len, False)
        return self._text_chars(length, truncate, text, truncate_len)
    chars = allow_lazy(chars)

    def _text_chars(self, length, truncate, text, truncate_len):
        """
        Truncates a string after a certain number of chars.
        """
        s_len = 0
        end_index = None
        for i, char in enumerate(text):
            if unicodedata.combining(char):
                # Don't consider combining characters
                # as adding to the string length
                continue
            s_len += 1
            if end_index is None and s_len > truncate_len:
                end_index = i
            if s_len > length:
                # Return the truncated string
                return self.add_truncation_text(text[:end_index or 0],
                                                truncate)

        # Return the original string since no truncation was necessary
        return text

    def words(self, num, truncate=None, html=False):
        """
        Truncates a string after a certain number of words. Takes an optional
        argument of what should be used to notify that the string has been
        truncated, defaulting to ellipsis (...).
        """
        length = int(num)
        if html:
            return self._truncate_html(length, truncate, self._wrapped, length, True)
        return self._text_words(length, truncate)
    words = allow_lazy(words)

    def _text_words(self, length, truncate):
        """
        Truncates a string after a certain number of words.

        Newlines in the string will be stripped.
        """
        words = self._wrapped.split()
        if len(words) > length:
            words = words[:length]
            return self.add_truncation_text(' '.join(words), truncate)
        return ' '.join(words)

    def _truncate_html(self, length, truncate, text, truncate_len, words):
        """
        Truncates HTML to a certain number of chars (not counting tags and
        comments), or, if words is True, then to a certain number of words.
        Closes opened tags if they were correctly closed in the given HTML.

        Newlines in the HTML are preserved.
        """
        if words and length <= 0:
            return ''

        html4_singlets = (
            'br', 'col', 'link', 'base', 'img',
            'param', 'area', 'hr', 'input'
        )

        # Count non-HTML chars/words and keep note of open tags
        pos = 0
        end_text_pos = 0
        current_len = 0
        open_tags = []

        regex = re_words if words else re_chars

        while current_len <= length:
            m = regex.search(text, pos)
            if not m:
                # Checked through whole string
                break
            pos = m.end(0)
            if m.group(1):
                # It's an actual non-HTML word or char
                current_len += 1
                if current_len == truncate_len:
                    end_text_pos = pos
                continue
            # Check for tag
            tag = re_tag.match(m.group(0))
            if not tag or current_len >= truncate_len:
                # Don't worry about non tags or tags after our truncate point
                continue
            closing_tag, tagname, self_closing = tag.groups()
            # Element names are always case-insensitive
            tagname = tagname.lower()
            if self_closing or tagname in html4_singlets:
                pass
            elif closing_tag:
                # Check for match in open tags list
                try:
                    i = open_tags.index(tagname)
                except ValueError:
                    pass
                else:
                    # SGML: An end tag closes, back to the matching start tag,
                    # all unclosed intervening start tags with omitted end tags
                    open_tags = open_tags[i + 1:]
            else:
                # Add it to the start of the open tags list
                open_tags.insert(0, tagname)

        if current_len <= length:
            return text
        out = text[:end_text_pos]
        truncate_text = self.add_truncation_text('', truncate)
        if truncate_text:
            out += truncate_text
        # Close any tags still open
        for tag in open_tags:
            out += '</%s>' % tag
        # Return string
        return out


def get_valid_filename(s):
    """
    Returns the given string converted to a string that can be used for a clean
    filename. Specifically, leading and trailing spaces are removed; other
    spaces are converted to underscores; and anything that is not a unicode
    alphanumeric, dash, underscore, or dot, is removed.
    >>> get_valid_filename("john's portrait in 2004.jpg")
    'johns_portrait_in_2004.jpg'
    """
    s = force_text(s).strip().replace(' ', '_')
    return re.sub(r'(?u)[^-\w.]', '', s)
get_valid_filename = allow_lazy(get_valid_filename, six.text_type)


def get_text_list(list_, last_word=ugettext_lazy('or')):
    """
    >>> get_text_list(['a', 'b', 'c', 'd'])
    'a, b, c or d'
    >>> get_text_list(['a', 'b', 'c'], 'and')
    'a, b and c'
    >>> get_text_list(['a', 'b'], 'and')
    'a and b'
    >>> get_text_list(['a'])
    'a'
    >>> get_text_list([])
    ''
    """
    if len(list_) == 0:
        return ''
    if len(list_) == 1:
        return force_text(list_[0])
    return '%s %s %s' % (
        # Translators: This string is used as a separator between list elements
        _(', ').join(force_text(i) for i in list_[:-1]),
        force_text(last_word), force_text(list_[-1]))
get_text_list = allow_lazy(get_text_list, six.text_type)


def normalize_newlines(text):
    """Normalizes CRLF and CR newlines to just LF."""
    text = force_text(text)
    return re_newlines.sub('\n', text)
normalize_newlines = allow_lazy(normalize_newlines, six.text_type)


def phone2numeric(phone):
    """Converts a phone number with letters into its numeric equivalent."""
    char2number = {'a': '2', 'b': '2', 'c': '2', 'd': '3', 'e': '3', 'f': '3',
                   'g': '4', 'h': '4', 'i': '4', 'j': '5', 'k': '5', 'l': '5',
                   'm': '6', 'n': '6', 'o': '6', 'p': '7', 'q': '7', 'r': '7',
                   's': '7', 't': '8', 'u': '8', 'v': '8', 'w': '9', 'x': '9',
                   'y': '9', 'z': '9'}
    return ''.join(char2number.get(c, c) for c in phone.lower())
phone2numeric = allow_lazy(phone2numeric)


# From http://www.xhaus.com/alan/python/httpcomp.html#gzip
# Used with permission.
def compress_string(s):
    """Gzip-compress the bytestring *s* and return the compressed bytes."""
    zbuf = BytesIO()
    zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
    zfile.write(s)
    zfile.close()
    return zbuf.getvalue()


class StreamingBuffer(object):
    """Write-then-drain buffer used by compress_sequence."""
    def __init__(self):
        self.vals = []

    def write(self, val):
        self.vals.append(val)

    def read(self):
        # Draining resets the buffer so each read yields only new data.
        if not self.vals:
            return b''
        ret = b''.join(self.vals)
        self.vals = []
        return ret

    def flush(self):
        return

    def close(self):
        return


# Like compress_string, but for iterators of strings.
def compress_sequence(sequence):
    """Lazily gzip an iterable of bytestrings, yielding compressed chunks."""
    buf = StreamingBuffer()
    zfile = GzipFile(mode='wb', compresslevel=6, fileobj=buf)
    # Output headers...
    yield buf.read()
    for item in sequence:
        zfile.write(item)
        data = buf.read()
        if data:
            yield data
    zfile.close()
    yield buf.read()

# Expression to match some_token and some_token="with spaces" (and similarly
# for single-quoted strings).
smart_split_re = re.compile(r"""
    ((?:
        [^\s'"]*
        (?:
            (?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*')
            [^\s'"]*
        )+
    ) | \S+)
""", re.VERBOSE)


def smart_split(text):
    r"""
    Generator that splits a string by spaces, leaving quoted phrases together.
    Supports both single and double quotes, and supports escaping quotes with
    backslashes. In the output, strings will keep their initial and trailing
    quote marks and escaped quotes will remain escaped (the results can then
    be further processed with unescape_string_literal()).

    >>> list(smart_split(r'This is "a person\'s" test.'))
    ['This', 'is', '"a person\\\'s"', 'test.']
    >>> list(smart_split(r"Another 'person\'s' test."))
    ['Another', "'person\\'s'", 'test.']
    >>> list(smart_split(r'A "\"funky\" style" test.'))
    ['A', '"\\"funky\\" style"', 'test.']
    """
    text = force_text(text)
    for bit in smart_split_re.finditer(text):
        yield bit.group(0)


def _replace_entity(match):
    """re.sub callback: decode one HTML entity to its unicode character."""
    text = match.group(1)
    if text[0] == '#':
        text = text[1:]
        try:
            if text[0] in 'xX':
                c = int(text[1:], 16)
            else:
                c = int(text)
            return six.unichr(c)
        except ValueError:
            return match.group(0)
    else:
        try:
            return six.unichr(html_entities.name2codepoint[text])
        except (ValueError, KeyError):
            return match.group(0)

_entity_re = re.compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));")


def unescape_entities(text):
    """Replace HTML entities in *text* with their unicode characters."""
    return _entity_re.sub(_replace_entity, text)
unescape_entities = allow_lazy(unescape_entities, six.text_type)


def unescape_string_literal(s):
    r"""
    Convert quoted string literals to unquoted strings with escaped quotes and
    backslashes unquoted::

        >>> unescape_string_literal('"abc"')
        'abc'
        >>> unescape_string_literal("'abc'")
        'abc'
        >>> unescape_string_literal('"a \"bc\""')
        'a "bc"'
        >>> unescape_string_literal("'\'ab\' c'")
        "'ab' c"
    """
    if s[0] not in "\"'" or s[-1] != s[0]:
        raise ValueError("Not a string literal: %r" % s)
    quote = s[0]
    return s[1:-1].replace(r'\%s' % quote, quote).replace(r'\\', '\\')
unescape_string_literal = allow_lazy(unescape_string_literal)


def slugify(value, allow_unicode=False):
    """
    Convert to ASCII if 'allow_unicode' is False. Convert spaces to hyphens.
    Remove characters that aren't alphanumerics, underscores, or hyphens.
    Convert to lowercase. Also strip leading and trailing whitespace.
    """
    value = force_text(value)
    # Patterns are raw strings: '\w'/'\s' in a plain literal are invalid
    # escape sequences (DeprecationWarning on modern Python).
    if allow_unicode:
        value = unicodedata.normalize('NFKC', value)
        value = re.sub(r'[^\w\s-]', '', value, flags=re.U).strip().lower()
        return mark_safe(re.sub(r'[-\s]+', '-', value, flags=re.U))
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
    value = re.sub(r'[^\w\s-]', '', value).strip().lower()
    return mark_safe(re.sub(r'[-\s]+', '-', value))
slugify = allow_lazy(slugify, six.text_type, SafeText)


def camel_case_to_spaces(value):
    """
    Splits CamelCase and converts to lower case. Also strips leading and
    trailing whitespace.
    """
    return re_camel_case.sub(r' \1', value).strip().lower()
bsd-3-clause
jpush/jbox
Server/venv/lib/python3.5/site-packages/pip/_vendor/distlib/resources.py
132
10620
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals

import bisect
import io
import logging
import os
import pkgutil
import shutil
import sys
import types
import zipimport

from . import DistlibException
from .util import cached_property, get_cache_base, path_to_cache_dir, Cache

logger = logging.getLogger(__name__)


# Module-level singleton ResourceCache, created lazily on first use
# (see Resource.file_path).
cache = None    # created when needed


class ResourceCache(Cache):
    """On-disk cache for resources that do not live on the file system
    (e.g. resources inside zip archives)."""
    def __init__(self, base=None):
        if base is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('resource-cache'))
        super(ResourceCache, self).__init__(base)

    def is_stale(self, resource, path):
        """
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        """
        # Cache invalidation is a hard problem :-)
        return True

    def get(self, resource):
        """
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            # Resource already lives on the file system; no caching needed.
            result = path
        else:
            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
            dirname = os.path.dirname(result)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            if not os.path.exists(result):
                stale = True
            else:
                stale = self.is_stale(resource, path)
            if stale:
                # write the bytes of the resource to the cache location
                with open(result, 'wb') as f:
                    f.write(resource.bytes)
        return result


class ResourceBase(object):
    """Common base holding the owning finder and the resource name."""
    def __init__(self, finder, name):
        self.finder = finder
        self.name = name


class Resource(ResourceBase):
    """
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    """
    is_container = False        # Backwards compatibility

    def as_stream(self):
        """
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        """
        return self.finder.get_stream(self)

    @cached_property
    def file_path(self):
        # Lazily create the module-level cache singleton on first access.
        global cache
        if cache is None:
            cache = ResourceCache()
        return cache.get(self)

    @cached_property
    def bytes(self):
        return self.finder.get_bytes(self)

    @cached_property
    def size(self):
        return self.finder.get_size(self)


class ResourceContainer(ResourceBase):
    """A resource that contains other resources (e.g. a directory)."""
    is_container = True     # Backwards compatibility

    @cached_property
    def resources(self):
        return self.finder.get_resources(self)


class ResourceFinder(object):
    """
    Resource finder for file system resources.
    """

    if sys.platform.startswith('java'):
        skipped_extensions = ('.pyc', '.pyo', '.class')
    else:
        skipped_extensions = ('.pyc', '.pyo')

    def __init__(self, module):
        self.module = module
        self.loader = getattr(module, '__loader__', None)
        self.base = os.path.dirname(getattr(module, '__file__', ''))

    def _adjust_path(self, path):
        return os.path.realpath(path)

    def _make_path(self, resource_name):
        # Issue #50: need to preserve type of path on Python 2.x
        # like os.path._get_sep
        if isinstance(resource_name, bytes):    # should only happen on 2.x
            sep = b'/'
        else:
            sep = '/'
        parts = resource_name.split(sep)
        parts.insert(0, self.base)
        result = os.path.join(*parts)
        return self._adjust_path(result)

    def _find(self, path):
        return os.path.exists(path)

    def get_cache_info(self, resource):
        # (None, path) tells ResourceCache.get the resource is directly
        # accessible on the file system and needs no cache copy.
        return None, resource.path

    def find(self, resource_name):
        """Return a Resource/ResourceContainer for *resource_name*, or None."""
        path = self._make_path(resource_name)
        if not self._find(path):
            result = None
        else:
            if self._is_directory(path):
                result = ResourceContainer(self, resource_name)
            else:
                result = Resource(self, resource_name)
            result.path = path
        return result

    def get_stream(self, resource):
        return open(resource.path, 'rb')

    def get_bytes(self, resource):
        with open(resource.path, 'rb') as f:
            return f.read()

    def get_size(self, resource):
        return os.path.getsize(resource.path)

    def get_resources(self, resource):
        # Skip bytecode artefacts; everything else in the directory counts.
        def allowed(f):
            return (f != '__pycache__' and not
                    f.endswith(self.skipped_extensions))
        return set([f for f in os.listdir(resource.path) if allowed(f)])

    def is_container(self, resource):
        return self._is_directory(resource.path)

    _is_directory = staticmethod(os.path.isdir)

    def iterator(self, resource_name):
        """Breadth-first iteration over *resource_name* and its descendants."""
        resource = self.find(resource_name)
        if resource is not None:
            todo = [resource]
            while todo:
                resource = todo.pop(0)
                yield resource
                if resource.is_container:
                    rname = resource.name
                    for name in resource.resources:
                        if not rname:
                            new_name = name
                        else:
                            new_name = '/'.join([rname, name])
                        child = self.find(new_name)
                        if child.is_container:
                            todo.append(child)
                        else:
                            yield child


class ZipResourceFinder(ResourceFinder):
    """
    Resource finder for resources in .zip files.
    """
    def __init__(self, module):
        super(ZipResourceFinder, self).__init__(module)
        archive = self.loader.archive
        # +1 so slicing with prefix_len also drops the path separator that
        # follows the archive name.
        self.prefix_len = 1 + len(archive)
        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
        if hasattr(self.loader, '_files'):
            self._files = self.loader._files
        else:
            self._files = zipimport._zip_directory_cache[archive]
        # Sorted key list enables bisect-based prefix searches below.
        self.index = sorted(self._files)

    def _adjust_path(self, path):
        return path

    def _find(self, path):
        path = path[self.prefix_len:]
        if path in self._files:
            result = True
        else:
            # Not a file entry: check for a directory prefix via bisect over
            # the sorted index.
            if path and path[-1] != os.sep:
                path = path + os.sep
            i = bisect.bisect(self.index, path)
            try:
                result = self.index[i].startswith(path)
            except IndexError:
                result = False
        if not result:
            logger.debug('_find failed: %r %r', path, self.loader.prefix)
        else:
            logger.debug('_find worked: %r %r', path, self.loader.prefix)
        return result

    def get_cache_info(self, resource):
        # Non-None prefix makes ResourceCache.get extract the resource
        # from the archive into the on-disk cache.
        prefix = self.loader.archive
        path = resource.path[1 + len(prefix):]
        return prefix, path

    def get_bytes(self, resource):
        return self.loader.get_data(resource.path)

    def get_stream(self, resource):
        return io.BytesIO(self.get_bytes(resource))

    def get_size(self, resource):
        path = resource.path[self.prefix_len:]
        # Index 3 of a zipimport directory entry holds the uncompressed size.
        return self._files[path][3]

    def get_resources(self, resource):
        path = resource.path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        plen = len(path)
        result = set()
        i = bisect.bisect(self.index, path)
        while i < len(self.index):
            if not self.index[i].startswith(path):
                break
            s = self.index[i][plen:]
            result.add(s.split(os.sep, 1)[0])   # only immediate children
            i += 1
        return result

    def _is_directory(self, path):
        path = path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        i = bisect.bisect(self.index, path)
        try:
            result = self.index[i].startswith(path)
        except IndexError:
            result = False
        return result

# Maps loader type -> finder factory; None covers modules loaded without an
# explicit loader (plain file system packages).
_finder_registry = {
    type(None): ResourceFinder,
    zipimport.zipimporter: ZipResourceFinder
}

try:
    import _frozen_importlib
    _finder_registry[_frozen_importlib.SourceFileLoader] = ResourceFinder
    _finder_registry[_frozen_importlib.FileFinder] = ResourceFinder
except (ImportError, AttributeError):
    pass


def register_finder(loader, finder_maker):
    """Register *finder_maker* as the finder factory for *loader*'s type."""
    _finder_registry[type(loader)] = finder_maker

# Memoizes finder() results per package name.
_finder_cache = {}


def finder(package):
    """
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    """
    if package in _finder_cache:
        result = _finder_cache[package]
    else:
        if package not in sys.modules:
            __import__(package)
        module = sys.modules[package]
        path = getattr(module, '__path__', None)
        if path is None:
            raise DistlibException('You cannot get a finder for a module, '
                                   'only for a package')
        loader = getattr(module, '__loader__', None)
        finder_maker = _finder_registry.get(type(loader))
        if finder_maker is None:
            raise DistlibException('Unable to locate finder for %r' % package)
        result = finder_maker(module)
        _finder_cache[package] = result
    return result


# Stand-in module object handed to finder factories in finder_for_path,
# where there is no real package module to pass.
_dummy_module = types.ModuleType(str('__dummy__'))


def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    """
    result = None
    # calls any path hooks, gets importer into cache
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    finder = _finder_registry.get(type(loader))
    if finder:
        module = _dummy_module
        module.__file__ = os.path.join(path, '')
        module.__loader__ = loader
        result = finder(module)
    return result
mit
mwarkentin/django-watchman
tests/test_management.py
1
1542
# -*- coding: utf-8 -*-

"""
test_django-watchman
------------

Tests for `django-watchman` management commands.
"""

from __future__ import unicode_literals

from io import StringIO

import unittest

from django.core.management import call_command


class TestWatchman(unittest.TestCase):
    """Exercise the ``watchman`` management command at various verbosities."""

    def test_successful_management_command_outputs_nothing(self):
        # At the default verbosity a fully passing run prints nothing.
        out = StringIO()
        call_command('watchman', stdout=out)
        # assertEquals is a deprecated alias of assertEqual (removed in
        # Python 3.12); use the canonical name.
        self.assertEqual('', out.getvalue())

    def test_successful_management_command_outputs_check_status_with_verbosity_2(self):
        out = StringIO()
        call_command('watchman', stdout=out, verbosity='2')
        self.assertIn('caches', out.getvalue())

    def test_successful_management_command_outputs_check_status_with_verbosity_3(self):
        out = StringIO()
        call_command('watchman', stdout=out, verbosity='3')
        self.assertIn('caches', out.getvalue())

    def test_successful_management_command_supports_check_list(self):
        # Restricting to a single check must exclude the others from output.
        out = StringIO()
        call_command('watchman', stdout=out, checks='watchman.checks.caches', verbosity='3')
        self.assertIn('caches', out.getvalue())
        self.assertNotIn('databases', out.getvalue())

    def test_successful_management_command_supports_skip_list(self):
        # A skipped check must not appear while the remaining checks still run.
        out = StringIO()
        call_command('watchman', stdout=out, skips='watchman.checks.email', verbosity='3')
        self.assertIn('caches', out.getvalue())
        self.assertIn('databases', out.getvalue())
        self.assertNotIn('email', out.getvalue())
bsd-3-clause
xwolf12/django
tests/test_runner/test_debug_sql.py
210
4048
import sys import unittest from django.db import connection from django.test import TestCase from django.test.runner import DiscoverRunner from django.utils import six from django.utils.encoding import force_text from .models import Person @unittest.skipUnless(connection.vendor == 'sqlite', 'Only run on sqlite so we can check output SQL.') class TestDebugSQL(unittest.TestCase): class PassingTest(TestCase): def runTest(self): Person.objects.filter(first_name='pass').count() class FailingTest(TestCase): def runTest(self): Person.objects.filter(first_name='fail').count() self.fail() class ErrorTest(TestCase): def runTest(self): Person.objects.filter(first_name='error').count() raise Exception def _test_output(self, verbosity): runner = DiscoverRunner(debug_sql=True, verbosity=0) suite = runner.test_suite() suite.addTest(self.FailingTest()) suite.addTest(self.ErrorTest()) suite.addTest(self.PassingTest()) old_config = runner.setup_databases() stream = six.StringIO() resultclass = runner.get_resultclass() runner.test_runner( verbosity=verbosity, stream=stream, resultclass=resultclass, ).run(suite) runner.teardown_databases(old_config) if six.PY2: stream.buflist = [force_text(x) for x in stream.buflist] return stream.getvalue() def test_output_normal(self): full_output = self._test_output(1) for output in self.expected_outputs: self.assertIn(output, full_output) for output in self.verbose_expected_outputs: self.assertNotIn(output, full_output) def test_output_verbose(self): full_output = self._test_output(2) for output in self.expected_outputs: self.assertIn(output, full_output) for output in self.verbose_expected_outputs: self.assertIn(output, full_output) if six.PY3: expected_outputs = [ ('''QUERY = 'SELECT COUNT(%s) AS "__count" ''' '''FROM "test_runner_person" WHERE ''' '''"test_runner_person"."first_name" = %s' ''' '''- PARAMS = ('*', 'error');'''), ('''QUERY = 'SELECT COUNT(%s) AS "__count" ''' '''FROM "test_runner_person" WHERE ''' 
'''"test_runner_person"."first_name" = %s' ''' '''- PARAMS = ('*', 'fail');'''), ] else: expected_outputs = [ ('''QUERY = u'SELECT COUNT(%s) AS "__count" ''' '''FROM "test_runner_person" WHERE ''' '''"test_runner_person"."first_name" = %s' ''' '''- PARAMS = (u'*', u'error');'''), ('''QUERY = u'SELECT COUNT(%s) AS "__count" ''' '''FROM "test_runner_person" WHERE ''' '''"test_runner_person"."first_name" = %s' ''' '''- PARAMS = (u'*', u'fail');'''), ] verbose_expected_outputs = [ # Output format changed in Python 3.5+ x.format('' if sys.version_info < (3, 5) else 'TestDebugSQL.') for x in [ 'runTest (test_runner.test_debug_sql.{}FailingTest) ... FAIL', 'runTest (test_runner.test_debug_sql.{}ErrorTest) ... ERROR', 'runTest (test_runner.test_debug_sql.{}PassingTest) ... ok', ] ] if six.PY3: verbose_expected_outputs += [ ('''QUERY = 'SELECT COUNT(%s) AS "__count" ''' '''FROM "test_runner_person" WHERE ''' '''"test_runner_person"."first_name" = %s' ''' '''- PARAMS = ('*', 'pass');'''), ] else: verbose_expected_outputs += [ ('''QUERY = u'SELECT COUNT(%s) AS "__count" ''' '''FROM "test_runner_person" WHERE ''' '''"test_runner_person"."first_name" = %s' ''' '''- PARAMS = (u'*', u'pass');'''), ]
bsd-3-clause
lmprice/ansible
lib/ansible/plugins/lookup/mongodb.py
34
8869
# (c) 2016, Marcos Diez <marcos@unitron.com.br> # https://github.com/marcosdiez/ # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) from ansible.module_utils.six import string_types, integer_types __metaclass__ = type DOCUMENTATION = ''' author: 'Marcos Diez <marcos (at) unitron.com.br>' lookup: mongodb version_added: "2.3" short_description: lookup info from MongoDB description: - 'The ``MongoDB`` lookup runs the *find()* command on a given *collection* on a given *MongoDB* server.' - 'The result is a list of jsons, so slightly different from what PyMongo returns. In particular, *timestamps* are converted to epoch integers.' options: connect_string: description: - Can be any valid MongoDB connection string, supporting authentication, replicasets, etc. 
- "More info at U(https://docs.mongodb.org/manual/reference/connection-string/)" default: "mongodb://localhost/" database: description: - Name of the database which the query will be made required: True collection: description: - Name of the collection which the query will be made required: True filter: description: - Criteria of the output type: 'dict' default: '{}' projection: description: - Fields you want returned type: dict default: "{}" skip: description: - How many results should be skept type: integer limit: description: - How many results should be shown type: integer sort: description: - Sorting rules. Please notice the constats are replaced by strings. type: list default: "[]" notes: - "Please check https://api.mongodb.org/python/current/api/pymongo/collection.html?highlight=find#pymongo.collection.Collection.find for more detais." requirements: - pymongo >= 2.4 (python library) ''' EXAMPLES = ''' - hosts: all gather_facts: false vars: mongodb_parameters: #mandatory parameters database: 'local' #optional collection: "startup_log" connection_string: "mongodb://localhost/" extra_connection_parameters: { "ssl" : True , "ssl_certfile": /etc/self_signed_certificate.pem" } #optional query parameters, we accept any parameter from the normal mongodb query. 
filter: { "hostname": "batman" } projection: { "pid": True , "_id" : False , "hostname" : True } skip: 0 limit: 1 sort: [ [ "startTime" , "ASCENDING" ] , [ "age", "DESCENDING" ] ] tasks: - debug: msg="Mongo has already started with the following PID [{{ item.pid }}]" with_mongodb: "{{mongodb_parameters}}" ''' import datetime try: from pymongo import ASCENDING, DESCENDING from pymongo.errors import ConnectionFailure from pymongo import MongoClient except ImportError: try: # for older PyMongo 2.2 from pymongo import Connection as MongoClient except ImportError: pymongo_found = False else: pymongo_found = True else: pymongo_found = True from ansible.errors import AnsibleError from ansible.plugins.lookup import LookupBase class LookupModule(LookupBase): def _fix_sort_parameter(self, sort_parameter): if sort_parameter is None: return sort_parameter if not isinstance(sort_parameter, list): raise AnsibleError(u"Error. Sort parameters must be a list, not [ {0} ]".format(sort_parameter)) for item in sort_parameter: self._convert_sort_string_to_constant(item) return sort_parameter def _convert_sort_string_to_constant(self, item): original_sort_order = item[1] sort_order = original_sort_order.upper() if sort_order == u"ASCENDING": item[1] = ASCENDING elif sort_order == u"DESCENDING": item[1] = DESCENDING # else the user knows what s/he is doing and we won't predict. PyMongo will return an error if necessary def convert_mongo_result_to_valid_json(self, result): if result is None: return result if isinstance(result, integer_types + (float, bool)): return result if isinstance(result, string_types): return result elif isinstance(result, list): new_list = [] for elem in result: new_list.append(self.convert_mongo_result_to_valid_json(elem)) return new_list elif isinstance(result, dict): new_dict = {} for key in result.keys(): value = result[key] # python2 and 3 compatible.... 
new_dict[key] = self.convert_mongo_result_to_valid_json(value) return new_dict elif isinstance(result, datetime.datetime): # epoch return (result - datetime.datetime(1970, 1, 1)). total_seconds() else: # failsafe return u"{0}".format(result) def run(self, terms, variables, **kwargs): ret = [] for term in terms: u''' Makes a MongoDB query and returns the output as a valid list of json. Timestamps are converted to epoch integers/longs. Here is a sample playbook that uses it: ------------------------------------------------------------------------------- - hosts: all gather_facts: false vars: mongodb_parameters: #optional parameter, default = "mongodb://localhost/" # connection_string: "mongodb://localhost/" #mandatory parameters database: 'local' collection: "startup_log" #optional query parameters #we accept any parameter from the normal mongodb query. # the official documentation is here # https://api.mongodb.org/python/current/api/pymongo/collection.html?highlight=find#pymongo.collection.Collection.find # filter: { "hostname": "batman" } # projection: { "pid": True , "_id" : False , "hostname" : True } # skip: 0 # limit: 1 # sort: [ [ "startTime" , "ASCENDING" ] , [ "age", "DESCENDING" ] ] # extra_connection_parameters = { } # dictionary with extra parameters like ssl, ssl_keyfile, maxPoolSize etc... # the full list is available here. 
It varies from PyMongo version # https://api.mongodb.org/python/current/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient tasks: - debug: msg="Mongo has already started with the following PID [{{ item.pid }}] - full_data {{ item }} " with_items: - "{{ lookup('mongodb', mongodb_parameters) }}" ------------------------------------------------------------------------------- ''' connection_string = term.get(u'connection_string', u"mongodb://localhost") database = term[u"database"] collection = term[u'collection'] extra_connection_parameters = term.get(u'extra_connection_parameters', {}) if u"extra_connection_parameters" in term: del term[u"extra_connection_parameters"] if u"connection_string" in term: del term[u"connection_string"] del term[u"database"] del term[u"collection"] if u"sort" in term: term[u"sort"] = self._fix_sort_parameter(term[u"sort"]) # all other parameters are sent to mongo, so we are future and past proof try: client = MongoClient(connection_string, **extra_connection_parameters) results = client[database][collection].find(**term) for result in results: result = self.convert_mongo_result_to_valid_json(result) ret.append(result) except ConnectionFailure as e: raise AnsibleError(u'unable to connect to database: %s' % str(e)) return ret
gpl-3.0
missionpinball/mpf
mpf/platforms/system11.py
1
20512
"""A generic system11 driver overlay. This is based on the Snux platform to generically support all kinds of System11 platforms. """ from typing import Any, Optional, Set, Tuple, Dict from mpf.core.machine import MachineController from mpf.core.platform import DriverPlatform, DriverConfig, SwitchSettings, DriverSettings, RepulseSettings, \ SwitchPlatform, SwitchConfig from mpf.platforms.interfaces.driver_platform_interface import DriverPlatformInterface, PulseSettings, HoldSettings from mpf.core.delays import DelayManager MYPY = False if MYPY: # pragma: no cover class SwitchDriverPlatform(DriverPlatform, SwitchPlatform): # noqa pass # pylint: disable-msg=too-many-instance-attributes class System11OverlayPlatform(DriverPlatform, SwitchPlatform): """Overlay platform to drive system11 machines using a WPC controller.""" __slots__ = ["delay", "platform", "system11_config", "a_side_queue", "c_side_queue", "a_side_done_time", "c_side_done_time", "drivers_holding_a_side", "drivers_holding_c_side", "a_side_enabled", "c_side_enabled", "ac_relay_in_transition", "prefer_a_side", "drivers"] def __init__(self, machine: MachineController) -> None: """Initialise the board.""" super().__init__(machine) self.delay = DelayManager(machine) self.platform = None # type: Optional[SwitchDriverPlatform] self.system11_config = None # type: Any self.a_side_queue = \ set() # type: Set[Tuple[DriverPlatformInterface, Optional[PulseSettings], Optional[HoldSettings]]] self.c_side_queue = \ set() # type: Set[Tuple[DriverPlatformInterface, Optional[PulseSettings], Optional[HoldSettings]]] self.a_side_done_time = 0 self.c_side_done_time = 0 self.drivers_holding_a_side = set() # type: Set[DriverPlatformInterface] self.drivers_holding_c_side = set() # type: Set[DriverPlatformInterface] self.a_side_enabled = True self.c_side_enabled = False self.drivers = {} # type: Dict[str, DriverPlatformInterface] self.ac_relay_in_transition = False # Specify whether the AC relay should favour the A or C side when 
at rest. # Typically during a game the 'C' side should be preferred, since that is # normally where the flashers are which need a quick response without having to wait on the relay. # At game over though, it should prefer the 'A' side so that the relay isn't permanently energised. self.prefer_a_side = True def stop(self): """Stop the overlay. Nothing to do here because stop is also called on parent platform.""" @property def a_side_busy(self): """Return if A side cannot be switches off right away.""" return self.drivers_holding_a_side or self.a_side_done_time > self.machine.clock.get_time() or self.a_side_queue @property def c_side_active(self): """Return if C side cannot be switches off right away.""" return self.drivers_holding_c_side or self.c_side_done_time > self.machine.clock.get_time() @property def c_side_busy(self): """Return if C side cannot be switches off right away.""" return self.drivers_holding_c_side or self.c_side_done_time > self.machine.clock.get_time() or self.c_side_queue @property def a_side_active(self): """Return if A side cannot be switches off right away.""" return self.drivers_holding_a_side or self.a_side_done_time > self.machine.clock.get_time() def _null_log_handler(self, *args, **kwargs): pass async def initialize(self): """Automatically called by the Platform class after all the core modules are loaded.""" # load coil platform self.platform = self.machine.get_platform_sections( "platform", getattr(self.machine.config.get('system11', {}), 'platform', None)) # we have to wait for coils to be initialized self.machine.events.add_handler("init_phase_1", self._initialize) def _initialize(self, **kwargs): del kwargs self._validate_config() self.configure_logging('Platform.System11', self.system11_config['console_log'], self.system11_config['file_log']) self.log.debug("Configuring A/C Select Relay for driver %s", self.system11_config['ac_relay_driver'].name) self.system11_config['ac_relay_driver'].get_and_verify_hold_power(1.0) 
self.log.debug("Configuring A/C Select Relay transition delay for " "%sms", self.system11_config['ac_relay_delay_ms']) self.machine.events.add_handler(self.system11_config['prefer_a_side_event'], self._prefer_a_side) self.log.info("Configuring System11 driver to prefer A side on event %s", self.system11_config['prefer_a_side_event']) self.machine.events.add_handler(self.system11_config['prefer_c_side_event'], self._prefer_c_side) self.log.info("Configuring System11 driver to prefer C side on event %s", self.system11_config['prefer_c_side_event']) def _prefer_a_side(self, **kwargs): del kwargs self.prefer_a_side = True self._enable_a_side() def _prefer_c_side(self, **kwargs): del kwargs self.prefer_a_side = False self._enable_c_side() def _validate_config(self): self.system11_config = self.machine.config_validator.validate_config( 'system11', self.machine.config.get('system11', {})) def tick(self): """System11 main loop. Called based on the timer_tick event. """ if self.prefer_a_side: if self.a_side_queue: self._service_a_side() elif self.c_side_queue: self._service_c_side() elif self.c_side_enabled and not self.c_side_active: self._enable_a_side() else: if self.c_side_queue: self._service_c_side() elif self.a_side_queue: self._service_a_side() elif self.a_side_enabled and not self.a_side_active: self._enable_c_side() def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict): """Configure switch on system11 overlay.""" return self.platform.configure_switch(number, config, platform_config) async def get_hw_switch_states(self): """Get initial hardware state.""" return await self.platform.get_hw_switch_states() def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict): """Configure a driver on the system11 overlay. Args: ---- config: Driver config dict number: Number of the driver. platform_settings: Platform specific config. 
""" assert self.platform is not None orig_number = number if number and (number.lower().endswith('a') or number.lower().endswith('c')): side = number[-1:].upper() number = number[:-1] # only configure driver once if number not in self.drivers: self.drivers[number] = self.platform.configure_driver(config, number, platform_settings) system11_driver = System11Driver(orig_number, self.drivers[number], self, side) return system11_driver return self.platform.configure_driver(config, number, platform_settings) @staticmethod def _check_if_driver_is_capable_for_rule(driver: DriverPlatformInterface): """Check if driver is capable for rule and bail out with an exception if not.""" number = driver.number if number and (number.lower().endswith('a') or number.lower().endswith('c')): raise AssertionError("Received a request to set a hardware rule for a System11 driver {}. " "This is not supported.".format(driver)) def set_pulse_on_hit_and_release_rule(self, enable_switch, coil): """Configure a rule for a driver on the system11 overlay. Will pass the call onto the parent platform if the driver is not on A/C relay. """ self._check_if_driver_is_capable_for_rule(coil.hw_driver) self.platform.set_pulse_on_hit_and_release_rule(enable_switch, coil) def set_pulse_on_hit_and_enable_and_release_rule(self, enable_switch, coil): """Configure a rule for a driver on the system11 overlay. Will pass the call onto the parent platform if the driver is not on A/C relay. """ self._check_if_driver_is_capable_for_rule(coil.hw_driver) self.platform.set_pulse_on_hit_and_enable_and_release_rule(enable_switch, coil) def set_pulse_on_hit_and_release_and_disable_rule(self, enable_switch: SwitchSettings, eos_switch: SwitchSettings, coil: DriverSettings, repulse_settings: Optional[RepulseSettings]): """Configure a rule for a driver on the system11 overlay. Will pass the call onto the parent platform if the driver is not on A/C relay. 
""" self._check_if_driver_is_capable_for_rule(coil.hw_driver) self.platform.set_pulse_on_hit_and_release_and_disable_rule(enable_switch, eos_switch, coil, repulse_settings) def set_pulse_on_hit_and_enable_and_release_and_disable_rule(self, enable_switch: SwitchSettings, eos_switch: SwitchSettings, coil: DriverSettings, repulse_settings: Optional[RepulseSettings]): """Configure a rule for a driver on the system11 overlay. Will pass the call onto the parent platform if the driver is not on A/C relay. """ self._check_if_driver_is_capable_for_rule(coil.hw_driver) self.platform.set_pulse_on_hit_and_enable_and_release_and_disable_rule(enable_switch, eos_switch, coil, repulse_settings) def set_pulse_on_hit_rule(self, enable_switch, coil): """Configure a rule on the system11 overlay. Will pass the call onto the parent platform if the driver is not on A/C relay. """ self._check_if_driver_is_capable_for_rule(coil.hw_driver) self.platform.set_pulse_on_hit_rule(enable_switch, coil) def clear_hw_rule(self, switch, coil): """Clear a rule for a driver on the system11 overlay.""" self._check_if_driver_is_capable_for_rule(coil.hw_driver) self.platform.clear_hw_rule(switch, coil) def driver_action(self, driver, pulse_settings: Optional[PulseSettings], hold_settings: Optional[HoldSettings], side: str): """Add a driver action for a switched driver to the queue (for either the A-side or C-side queue). Args: ---- driver: A reference to the original platform class Driver instance. pulse_settings: Settings for the pulse or None hold_settings:Settings for hold or None side: Whatever the driver is on A or C side. This action will be serviced immediately if it can, or ASAP otherwise. 
""" if self.prefer_a_side: if side == "A": self.a_side_queue.add((driver, pulse_settings, hold_settings)) self._service_a_side() elif side == "C": self.c_side_queue.add((driver, pulse_settings, hold_settings)) if not self.ac_relay_in_transition and not self.a_side_busy: self._service_c_side() else: raise AssertionError("Invalid side {}".format(side)) else: if side == "C": self.c_side_queue.add((driver, pulse_settings, hold_settings)) self._service_c_side() elif side == "A": self.a_side_queue.add((driver, pulse_settings, hold_settings)) if not self.ac_relay_in_transition and not self.c_side_busy: self._service_a_side() else: raise AssertionError("Invalid side {}".format(side)) def _enable_ac_relay(self): self.system11_config['ac_relay_driver'].enable() self.ac_relay_in_transition = True self.a_side_enabled = False self.c_side_enabled = False self.delay.add(ms=self.system11_config['ac_relay_delay_ms'], callback=self._c_side_enabled, name='enable_ac_relay') def _disable_ac_relay(self): self.system11_config['ac_relay_driver'].disable() self.ac_relay_in_transition = True self.a_side_enabled = False self.c_side_enabled = False self.delay.add(ms=self.system11_config['ac_relay_delay_ms'], callback=self._a_side_enabled, name='disable_ac_relay') # -------------------------------- A SIDE --------------------------------- def _enable_a_side(self): if self.prefer_a_side: if not self.a_side_enabled and not self.ac_relay_in_transition: if self.c_side_active: self._disable_all_c_side_drivers() self._disable_ac_relay() self.delay.add(ms=self.system11_config['ac_relay_delay_ms'], callback=self._enable_a_side, name='enable_a_side') return if self.c_side_enabled: self._disable_ac_relay() else: self._a_side_enabled() else: if (not self.ac_relay_in_transition and not self.a_side_enabled and not self.c_side_busy): self._disable_ac_relay() elif self.a_side_enabled and self.a_side_queue: self._service_a_side() def _a_side_enabled(self): self.ac_relay_in_transition = False if 
self.prefer_a_side: self.a_side_enabled = True self.c_side_enabled = False self._service_a_side() else: if self.c_side_queue: self._enable_c_side() return self.c_side_enabled = False self.a_side_enabled = True self._service_a_side() def _service_a_side(self): if not self.a_side_queue: return if not self.a_side_enabled: self._enable_a_side() return while self.a_side_queue: driver, pulse_settings, hold_settings = self.a_side_queue.pop() if hold_settings is None and pulse_settings: driver.pulse(pulse_settings) self.a_side_done_time = max(self.a_side_done_time, self.machine.clock.get_time() + (pulse_settings.duration / 1000.0)) elif hold_settings and pulse_settings: driver.enable(pulse_settings, hold_settings) self.drivers_holding_a_side.add(driver) else: # ms == 0 driver.disable() try: self.drivers_holding_a_side.remove(driver) except KeyError: pass # -------------------------------- C SIDE --------------------------------- def _enable_c_side(self): if self.prefer_a_side: if not self.c_side_enabled and not self.ac_relay_in_transition: if self.a_side_active: self._disable_all_a_side_drivers() self._enable_ac_relay() self.delay.add(ms=self.system11_config['ac_relay_delay_ms'], callback=self._enable_c_side, name='enable_c_side') return if self.a_side_enabled: self._enable_ac_relay() else: self._c_side_enabled() else: if (not self.ac_relay_in_transition and not self.c_side_enabled and not self.a_side_busy): self._enable_ac_relay() elif self.c_side_enabled and self.c_side_queue: self._service_c_side() def _c_side_enabled(self): self.ac_relay_in_transition = False if self.prefer_a_side: self.c_side_enabled = True self.a_side_enabled = False self._service_c_side() else: if self.a_side_queue: self._enable_a_side() return self.a_side_enabled = False self.c_side_enabled = True self._service_c_side() def _service_c_side(self): if not self.c_side_queue: return if self.ac_relay_in_transition or self.a_side_busy: return if not self.c_side_enabled: self._enable_c_side() return while 
self.c_side_queue: driver, pulse_settings, hold_settings = self.c_side_queue.pop() if hold_settings is None and pulse_settings: driver.pulse(pulse_settings) self.c_side_done_time = max(self.c_side_done_time, self.machine.clock.get_time() + (pulse_settings.duration / 1000.0)) elif hold_settings and pulse_settings: driver.enable(pulse_settings, hold_settings) self.drivers_holding_c_side.add(driver) else: driver.disable() try: self.drivers_holding_c_side.remove(driver) except KeyError: pass def _disable_all_c_side_drivers(self): if self.c_side_active: for driver in self.drivers_holding_c_side: driver.disable() self.drivers_holding_c_side = set() self.c_side_done_time = 0 self.c_side_enabled = False def _disable_all_a_side_drivers(self): if self.a_side_active: for driver in self.drivers_holding_a_side: driver.disable() self.drivers_holding_a_side = set() self.a_side_done_time = 0 self.a_side_enabled = False def validate_coil_section(self, driver, config): """Validate coil config for platform.""" return self.platform.validate_coil_section(driver, config) class System11Driver(DriverPlatformInterface): """Represent one driver on the system11 overlay. Two of those drivers may be created for one real driver. One for the A and one for the C side. """ def __init__(self, number, platform_driver: DriverPlatformInterface, overlay, side) -> None: """Initialize driver.""" super().__init__(platform_driver.config, number) self.number = number self.platform_driver = platform_driver self.overlay = overlay self.side = side def __repr__(self): """Pretty print.""" return "System11Driver.{}".format(self.number) def get_board_name(self): """Return name of driver board.""" return self.platform_driver.get_board_name() def pulse(self, pulse_settings: PulseSettings): """Pulse driver.""" self.overlay.driver_action(self.platform_driver, pulse_settings, None, self.side) # Usually pulse() returns the value (in ms) that the driver will pulse # for so we can update Driver.time_when_done. 
But with A/C switched # coils, we don't know when exactly that will be, so we return -1 return -1 def enable(self, pulse_settings: PulseSettings, hold_settings: HoldSettings): """Enable driver.""" self.overlay.driver_action(self.platform_driver, pulse_settings, hold_settings, self.side) def disable(self): """Disable driver.""" self.overlay.driver_action(self.platform_driver, None, None, self.side)
mit
usig/normalizador-amba
tests/NormalizadorDireccionesAMBAConCabaTestCase.py
1
4080
# coding: UTF-8 import unittest from usig_normalizador_amba.NormalizadorDireccionesAMBA import NormalizadorDireccionesAMBA from usig_normalizador_amba.Direccion import Direccion from usig_normalizador_amba.settings import CALLE_ALTURA from tests.test_commons import cargarCallejeroEstatico class NormalizadorDireccionesAMBAConCabaTestCase(unittest.TestCase): partidos = ['caba', 'lomas_de_zamora', 'jose_c_paz'] nd = NormalizadorDireccionesAMBA(include_list=partidos) for n in nd.normalizadores: if n.partido.codigo != 'caba': cargarCallejeroEstatico(n.c) def _checkDireccion(self, direccion, codigo_calle, nombre_calle, altura, codigo_partido, localidad): self.assertTrue(isinstance(direccion, Direccion)) self.assertEqual(direccion.tipo, CALLE_ALTURA) self.assertEqual(direccion.calle.codigo, codigo_calle) self.assertEqual(direccion.calle.nombre, nombre_calle) self.assertEqual(direccion.altura, altura) self.assertEqual(direccion.partido.codigo, codigo_partido) self.assertEqual(direccion.localidad, localidad) def testDireccionEnCabaYConurbano(self): res = self.nd.normalizar('Arenales 806') self.assertTrue(isinstance(res, list)) self.assertEqual(len(res), 3, 'Debería haber 3 matching/s. Hay {0}'.format(len(res))) self._checkDireccion(res[0], 1104, 'ARENALES', 806, 'caba', 'CABA') self._checkDireccion(res[1], 53565, 'Gral Arenales', 806, 'jose_c_paz', 'José C. Paz') self._checkDireccion(res[2], 360326, 'General Arenales', 806, 'lomas_de_zamora', 'Lomas de Zamora') def testDireccionSoloEnCaba(self): res = self.nd.normalizar('Callao 1536') self.assertTrue(isinstance(res, list)) self.assertEqual(len(res), 1, 'Debería haber 1 matching/s. Hay {0}'.format(len(res))) self._checkDireccion(res[0], 3030, 'CALLAO AV.', 1536, 'caba', 'CABA') def testDireccionSoloEnConurbano(self): res = self.nd.normalizar('Laprida 890') self.assertTrue(isinstance(res, list)) self.assertEqual(len(res), 2, 'Debería haber 2 matching/s. 
Hay {0}'.format(len(res))) self._checkDireccion(res[0], 156179, 'Laprida', 890, 'jose_c_paz', 'José C. Paz') self._checkDireccion(res[1], 18028, 'Laprida', 890, 'lomas_de_zamora', 'Lomas de Zamora') def testDireccionEnCabaYConurbanoConFiltroEnCaba(self): res = self.nd.normalizar('Arenales 806, caba') self.assertTrue(isinstance(res, list)) self.assertEqual(len(res), 1, 'Debería haber 1 matching/s. Hay {0}'.format(len(res))) self._checkDireccion(res[0], 1104, 'ARENALES', 806, 'caba', 'CABA') def testDireccionEnCabaYConurbanoConFiltroEnConurbano1(self): res = self.nd.normalizar('Arenales 806, lomas') self.assertTrue(isinstance(res, list)) self.assertEqual(len(res), 1, 'Debería haber 1 matching/s. Hay {0}'.format(len(res))) self._checkDireccion(res[0], 360326, 'General Arenales', 806, 'lomas_de_zamora', 'Lomas de Zamora') def testDireccionEnCabaYConurbanoConFiltroEnConurbano2(self): res = self.nd.normalizar('Arenales 806, Jose c Paz') self.assertTrue(isinstance(res, list)) self.assertEqual(len(res), 1, 'Debería haber 1 matching/s. Hay {0}'.format(len(res))) self._checkDireccion(res[0], 53565, 'Gral Arenales', 806, 'jose_c_paz', 'José C. Paz') def testNormalizador_normalizar_calles_como_av_o_pje(self): casos = [ 'Avenida Paraná 853, caba', 'Paraná avenida 853, caba', 'Paraná avda 853, caba', 'AV. Paraná 853, caba', 'Pasaje Paraná 853, caba', 'Psje. Paraná 853, caba', 'Paraná pje. 853, caba' ] for caso in casos: res = self.nd.normalizar(caso) self.assertTrue(isinstance(res, list)) self.assertEqual(len(res), 1, 'Debería haber 1 matching.') self._checkDireccion(res[0], 17018, 'PARANA', 853, 'caba', 'CABA')
mit
klmitch/nutjob
nutjob.py
1
4628
# Copyright 2013 Rackspace
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import redis


class NutJobStrictRedis(redis.StrictRedis):
    """
    A replacement for ``redis.StrictRedis`` which allows Turnstile to
    communicate properly with the nutcracker Redis proxy.  Turnstile's
    use of ``PUBLISH`` for error reporting is ignored, rather than
    being passed on to the proxy.  The Turnstile compactor daemon's
    use of LUA scripts is also enabled by redefining the
    ``redis.client.Script`` class to not attempt to use ``SCRIPT
    LOAD``, which is unsupported by nutcracker.

    Note that the Turnstile control daemon CANNOT use this client; you
    must set ``control.redis.redis_client`` to "redis" and set
    ``control.redis.server_version`` to nothing.  This also, of
    course, means that the control daemon CANNOT connect to
    nutcracker; it must connect directly to a Redis database.
    """

    def __init__(self, **kwargs):
        """
        Initialize the ``NutJobStrictRedis`` client.  Takes only
        keyword arguments; positional arguments are not recognized.
        Also adds one optional keyword argument, ``server_version``,
        which provides the minimum version of the Redis servers behind
        the nutcracker proxy; this defaults to "2.4" if not provided.
        """

        # Save the redis version, if one is provided; it is reported
        # back to callers via our synthetic info() below, since the
        # proxy cannot answer INFO itself.
        self.redis_version = kwargs.pop('server_version', '2.4')

        super(NutJobStrictRedis, self).__init__(**kwargs)

    def info(self):
        """
        Returns a dictionary containing information about the Redis
        server.  Since nutcracker does not support ``INFO`` (it
        doesn't know which server to pass the request on to), this
        implementation returns a dictionary of one
        key--"redis_version"--and bases the value of the key on the
        value passed in to the constructor (if any).
        """

        return {'redis_version': self.redis_version}

    def publish(self, channel, message):
        """
        Publish ``message`` on ``channel``.  Returns the number of
        subscribers the message was delivered to.  This variant does
        not call out to Redis, since nutcracker does not support
        ``PUBLISH``.

        :param channel: The channel the message would be published on
                        (ignored).
        :param message: The message that would be published (ignored).

        :returns: Always 0, as the message is never delivered.
        """

        return 0

    def register_script(self, script):
        """
        Register a LUA ``script`` specifying the ``keys`` it will
        touch.  Returns a Script object that is callable and hides the
        complexity of deal with scripts, keys, and shas.  This is the
        preferred way to work with LUA scripts.

        :param script: The text of the LUA script to register.

        :returns: A callable ``NutJobScript`` wrapper around the
                  script.
        """

        return NutJobScript(self, script)


class NutJobScript(object):
    """
    The nutcracker proxy supports ``EVAL`` and ``EVALSHA``, but oddly
    not ``SCRIPT LOAD``, which is an integral component of
    ``redis.StrictRedis``'s support for LUA scripts.  This variant of
    ``redis.client.Script`` avoids the use of ``SCRIPT LOAD`` and
    calls ``EVAL`` directly; this is less efficient, but allows the
    scripting feature of recent Redis to be used.
    """

    def __init__(self, registered_client, script):
        """
        Initialize a ``NutJobScript`` object.

        :param registered_client: The client the script is associated
                                  with.
        :param script: The text of the script.
        """

        self.registered_client = registered_client
        self.script = script

    def __call__(self, keys=(), args=(), client=None):
        """
        Call the script with the provided keys and arguments.

        :param keys: A sequence of the keys the script is to act upon.
        :param args: A sequence of arguments (besides keys) to pass to
                     the script.
        :param client: An alternate client to use for executing the
                       script.

        :returns: The results of calling the script on the server.
        """

        # Note: defaults are immutable tuples rather than the classic
        # mutable-default-argument [] anti-pattern; behavior is
        # unchanged since both are only read, never mutated.
        client = client or self.registered_client
        eval_args = tuple(keys) + tuple(args)

        # Call the script via EVAL, since nutcracker lacks SCRIPT LOAD
        return client.eval(self.script, len(keys), *eval_args)
apache-2.0
nvoron23/arangodb
3rdParty/V8-4.3.61/third_party/python_26/Lib/lib-tk/tkSimpleDialog.py
54
7590
#
# An Introduction to Tkinter
# tkSimpleDialog.py
#
# Copyright (c) 1997 by Fredrik Lundh
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# dialog base class
#
# NOTE: this is a Python 2 module (Tkinter/tkMessageBox imports,
# `print` statements, `dict.has_key`); do not run it under Python 3.

'''Dialog boxes

This module handles dialog boxes.

It contains the following public symbols:

Dialog -- a base class for dialogs

askinteger -- get an integer from the user

askfloat -- get a float from the user

askstring -- get a string from the user
'''

from Tkinter import *


class Dialog(Toplevel):

    '''Class to open dialogs.

    This class is intended as a base class for custom dialogs.
    Constructing an instance builds the dialog, grabs input, and
    blocks (via wait_window) until the dialog is dismissed; the
    outcome is then available in the ``result`` attribute.
    '''

    def __init__(self, parent, title = None):
        '''Initialize a dialog.

        Arguments:

            parent -- a parent window (the application window)

            title -- the dialog title

        Note: this constructor BLOCKS until the dialog is closed.
        '''
        Toplevel.__init__(self, parent)

        # If the master is not viewable, don't
        # make the child transient, or else it
        # would be opened withdrawn
        if parent.winfo_viewable():
            self.transient(parent)

        if title:
            self.title(title)

        self.parent = parent

        # Populated by apply()/subclasses; stays None if cancelled.
        self.result = None

        body = Frame(self)
        self.initial_focus = self.body(body)
        body.pack(padx=5, pady=5)

        self.buttonbox()

        self.wait_visibility() # window needs to be visible for the grab
        self.grab_set()

        # body() may return None; fall back to the dialog itself.
        if not self.initial_focus:
            self.initial_focus = self

        # Route the window-manager close button through cancel().
        self.protocol("WM_DELETE_WINDOW", self.cancel)

        if self.parent is not None:
            self.geometry("+%d+%d" % (parent.winfo_rootx()+50,
                                      parent.winfo_rooty()+50))

        self.initial_focus.focus_set()

        # Block here until the dialog window is destroyed.
        self.wait_window(self)

    def destroy(self):
        '''Destroy the window'''
        # Drop the widget reference to break the reference cycle.
        self.initial_focus = None
        Toplevel.destroy(self)

    #
    # construction hooks

    def body(self, master):
        '''create dialog body.

        return widget that should have initial focus.
        This method should be overridden, and is called
        by the __init__ method.
        '''
        pass

    def buttonbox(self):
        '''add standard button box.

        override if you do not want the standard buttons
        '''

        box = Frame(self)

        w = Button(box, text="OK", width=10, command=self.ok, default=ACTIVE)
        w.pack(side=LEFT, padx=5, pady=5)
        w = Button(box, text="Cancel", width=10, command=self.cancel)
        w.pack(side=LEFT, padx=5, pady=5)

        # Keyboard shortcuts mirroring the two buttons.
        self.bind("<Return>", self.ok)
        self.bind("<Escape>", self.cancel)

        box.pack()

    #
    # standard button semantics

    def ok(self, event=None):
        # OK handler: validate, then apply and always close via cancel().

        if not self.validate():
            self.initial_focus.focus_set() # put focus back
            return

        self.withdraw()
        self.update_idletasks()

        try:
            self.apply()
        finally:
            # Close the dialog even if apply() raised.
            self.cancel()

    def cancel(self, event=None):
        # Cancel handler: also used as the common tear-down path.

        # put focus back to the parent window
        if self.parent is not None:
            self.parent.focus_set()
        self.destroy()

    #
    # command hooks

    def validate(self):
        '''validate the data

        This method is called automatically to validate the data before the
        dialog is destroyed. By default, it always validates OK.
        '''

        return 1 # override

    def apply(self):
        '''process the data

        This method is called automatically to process the data, *after*
        the dialog is destroyed. By default, it does nothing.
        '''

        pass # override


# --------------------------------------------------------------------
# convenience dialogues

class _QueryDialog(Dialog):
    # Shared base for the ask*() dialogs: one prompt label, one entry
    # field, optional min/max range checking.  Subclasses supply
    # ``errormessage`` and ``getresult()``.

    def __init__(self, title, prompt,
                 initialvalue=None,
                 minvalue = None, maxvalue = None,
                 parent = None):

        if not parent:
            # Fall back to the application's default root window.
            import Tkinter
            parent = Tkinter._default_root

        self.prompt   = prompt
        self.minvalue = minvalue
        self.maxvalue = maxvalue

        self.initialvalue = initialvalue

        Dialog.__init__(self, parent, title)

    def destroy(self):
        # Drop the entry-widget reference before tearing down.
        self.entry = None
        Dialog.destroy(self)

    def body(self, master):
        # Build prompt label + entry; the entry gets initial focus.

        w = Label(master, text=self.prompt, justify=LEFT)
        w.grid(row=0, padx=5, sticky=W)

        self.entry = Entry(master, name="entry")
        self.entry.grid(row=1, padx=5, sticky=W+E)

        if self.initialvalue:
            self.entry.insert(0, self.initialvalue)
            self.entry.select_range(0, END)

        return self.entry

    def validate(self):
        # Parse the entry via getresult(); warn and keep the dialog
        # open on parse failure or out-of-range values.

        import tkMessageBox

        try:
            result = self.getresult()
        except ValueError:
            tkMessageBox.showwarning(
                "Illegal value",
                self.errormessage + "\nPlease try again",
                parent = self
            )
            return 0

        if self.minvalue is not None and result < self.minvalue:
            tkMessageBox.showwarning(
                "Too small",
                "The allowed minimum value is %s. "
                "Please try again." % self.minvalue,
                parent = self
            )
            return 0

        if self.maxvalue is not None and result > self.maxvalue:
            tkMessageBox.showwarning(
                "Too large",
                "The allowed maximum value is %s. "
                "Please try again." % self.maxvalue,
                parent = self
            )
            return 0

        self.result = result

        return 1


class _QueryInteger(_QueryDialog):
    errormessage = "Not an integer."
    def getresult(self):
        # May raise ValueError; caught by _QueryDialog.validate().
        return int(self.entry.get())

def askinteger(title, prompt, **kw):
    '''get an integer from the user

    Arguments:

        title -- the dialog title
        prompt -- the label text
        **kw -- see SimpleDialog class

    Return value is an integer
    '''
    d = _QueryInteger(title, prompt, **kw)
    return d.result

class _QueryFloat(_QueryDialog):
    errormessage = "Not a floating point value."
    def getresult(self):
        # May raise ValueError; caught by _QueryDialog.validate().
        return float(self.entry.get())

def askfloat(title, prompt, **kw):
    '''get a float from the user

    Arguments:

        title -- the dialog title
        prompt -- the label text
        **kw -- see SimpleDialog class

    Return value is a float
    '''
    d = _QueryFloat(title, prompt, **kw)
    return d.result

class _QueryString(_QueryDialog):
    def __init__(self, *args, **kw):
        # Optional "show" keyword masks the entry (e.g. show="*" for
        # passwords); it is stripped before reaching _QueryDialog.
        if kw.has_key("show"):
            self.__show = kw["show"]
            del kw["show"]
        else:
            self.__show = None
        _QueryDialog.__init__(self, *args, **kw)

    def body(self, master):
        entry = _QueryDialog.body(self, master)
        if self.__show is not None:
            entry.configure(show=self.__show)
        return entry

    def getresult(self):
        return self.entry.get()

def askstring(title, prompt, **kw):
    '''get a string from the user

    Arguments:

        title -- the dialog title
        prompt -- the label text
        **kw -- see SimpleDialog class

    Return value is a string
    '''
    d = _QueryString(title, prompt, **kw)
    return d.result

if __name__ == "__main__":
    # Simple interactive demo of the three ask*() helpers.

    root = Tk()
    root.update()

    print askinteger("Spam", "Egg count", initialvalue=12*12)
    print askfloat("Spam", "Egg weight\n(in tons)", minvalue=1, maxvalue=100)
    print askstring("Spam", "Egg label")
apache-2.0
bottompawn/kbengine
kbe/src/lib/python/Lib/encodings/cp862.py
272
33370
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP862.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_map) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_map)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp862', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Map decoding_map = codecs.make_identity_dict(range(256)) decoding_map.update({ 0x0080: 0x05d0, # HEBREW LETTER ALEF 0x0081: 0x05d1, # HEBREW LETTER BET 0x0082: 0x05d2, # HEBREW LETTER GIMEL 0x0083: 0x05d3, # HEBREW LETTER DALET 0x0084: 0x05d4, # HEBREW LETTER HE 0x0085: 0x05d5, # HEBREW LETTER VAV 0x0086: 0x05d6, # HEBREW LETTER ZAYIN 0x0087: 0x05d7, # HEBREW LETTER HET 0x0088: 0x05d8, # HEBREW LETTER TET 0x0089: 0x05d9, # HEBREW LETTER YOD 0x008a: 0x05da, # HEBREW LETTER FINAL KAF 0x008b: 0x05db, # HEBREW LETTER KAF 0x008c: 0x05dc, # HEBREW LETTER LAMED 0x008d: 0x05dd, # HEBREW LETTER FINAL MEM 0x008e: 0x05de, # HEBREW LETTER MEM 0x008f: 0x05df, # HEBREW LETTER FINAL NUN 0x0090: 0x05e0, # HEBREW LETTER NUN 0x0091: 0x05e1, # HEBREW LETTER SAMEKH 0x0092: 0x05e2, # HEBREW LETTER AYIN 0x0093: 0x05e3, # HEBREW LETTER FINAL PE 0x0094: 0x05e4, # HEBREW LETTER PE 0x0095: 0x05e5, # HEBREW LETTER FINAL TSADI 0x0096: 0x05e6, # HEBREW LETTER TSADI 
0x0097: 0x05e7, # HEBREW LETTER QOF 0x0098: 0x05e8, # HEBREW LETTER RESH 0x0099: 0x05e9, # HEBREW LETTER SHIN 0x009a: 0x05ea, # HEBREW LETTER TAV 0x009b: 0x00a2, # CENT SIGN 0x009c: 0x00a3, # POUND SIGN 0x009d: 0x00a5, # YEN SIGN 0x009e: 0x20a7, # PESETA SIGN 0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK 0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE 0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE 0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE 0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE 0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE 0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE 0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR 0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR 0x00a8: 0x00bf, # INVERTED QUESTION MARK 0x00a9: 0x2310, # REVERSED NOT SIGN 0x00aa: 0x00ac, # NOT SIGN 0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF 0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER 0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK 0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK 0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK 0x00b0: 0x2591, # LIGHT SHADE 0x00b1: 0x2592, # MEDIUM SHADE 0x00b2: 0x2593, # DARK SHADE 0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL 0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT 0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE 0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE 0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE 0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE 0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT 0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL 0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT 0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT 0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE 0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE 0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT 0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT 0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL 0x00c2: 0x252c, # BOX 
DRAWINGS LIGHT DOWN AND HORIZONTAL 0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT 0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL 0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL 0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE 0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT 0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT 0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL 0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL 0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT 0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL 0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL 0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE 0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE 0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE 0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE 0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE 0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE 0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE 0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE 0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE 0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE 0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT 0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT 0x00db: 0x2588, # FULL BLOCK 0x00dc: 0x2584, # LOWER HALF BLOCK 0x00dd: 0x258c, # LEFT HALF BLOCK 0x00de: 0x2590, # RIGHT HALF BLOCK 0x00df: 0x2580, # UPPER HALF BLOCK 0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA 0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN) 0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA 0x00e3: 0x03c0, # GREEK SMALL LETTER PI 0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA 0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA 0x00e6: 0x00b5, # MICRO SIGN 0x00e7: 0x03c4, # GREEK SMALL LETTER TAU 0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI 
0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA 0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA 0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA 0x00ec: 0x221e, # INFINITY 0x00ed: 0x03c6, # GREEK SMALL LETTER PHI 0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON 0x00ef: 0x2229, # INTERSECTION 0x00f0: 0x2261, # IDENTICAL TO 0x00f1: 0x00b1, # PLUS-MINUS SIGN 0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO 0x00f3: 0x2264, # LESS-THAN OR EQUAL TO 0x00f4: 0x2320, # TOP HALF INTEGRAL 0x00f5: 0x2321, # BOTTOM HALF INTEGRAL 0x00f6: 0x00f7, # DIVISION SIGN 0x00f7: 0x2248, # ALMOST EQUAL TO 0x00f8: 0x00b0, # DEGREE SIGN 0x00f9: 0x2219, # BULLET OPERATOR 0x00fa: 0x00b7, # MIDDLE DOT 0x00fb: 0x221a, # SQUARE ROOT 0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N 0x00fd: 0x00b2, # SUPERSCRIPT TWO 0x00fe: 0x25a0, # BLACK SQUARE 0x00ff: 0x00a0, # NO-BREAK SPACE }) ### Decoding Table decoding_table = ( '\x00' # 0x0000 -> NULL '\x01' # 0x0001 -> START OF HEADING '\x02' # 0x0002 -> START OF TEXT '\x03' # 0x0003 -> END OF TEXT '\x04' # 0x0004 -> END OF TRANSMISSION '\x05' # 0x0005 -> ENQUIRY '\x06' # 0x0006 -> ACKNOWLEDGE '\x07' # 0x0007 -> BELL '\x08' # 0x0008 -> BACKSPACE '\t' # 0x0009 -> HORIZONTAL TABULATION '\n' # 0x000a -> LINE FEED '\x0b' # 0x000b -> VERTICAL TABULATION '\x0c' # 0x000c -> FORM FEED '\r' # 0x000d -> CARRIAGE RETURN '\x0e' # 0x000e -> SHIFT OUT '\x0f' # 0x000f -> SHIFT IN '\x10' # 0x0010 -> DATA LINK ESCAPE '\x11' # 0x0011 -> DEVICE CONTROL ONE '\x12' # 0x0012 -> DEVICE CONTROL TWO '\x13' # 0x0013 -> DEVICE CONTROL THREE '\x14' # 0x0014 -> DEVICE CONTROL FOUR '\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE '\x16' # 0x0016 -> SYNCHRONOUS IDLE '\x17' # 0x0017 -> END OF TRANSMISSION BLOCK '\x18' # 0x0018 -> CANCEL '\x19' # 0x0019 -> END OF MEDIUM '\x1a' # 0x001a -> SUBSTITUTE '\x1b' # 0x001b -> ESCAPE '\x1c' # 0x001c -> FILE SEPARATOR '\x1d' # 0x001d -> GROUP SEPARATOR '\x1e' # 0x001e -> RECORD SEPARATOR '\x1f' # 0x001f -> UNIT SEPARATOR ' ' # 0x0020 -> SPACE '!' 
# 0x0021 -> EXCLAMATION MARK '"' # 0x0022 -> QUOTATION MARK '#' # 0x0023 -> NUMBER SIGN '$' # 0x0024 -> DOLLAR SIGN '%' # 0x0025 -> PERCENT SIGN '&' # 0x0026 -> AMPERSAND "'" # 0x0027 -> APOSTROPHE '(' # 0x0028 -> LEFT PARENTHESIS ')' # 0x0029 -> RIGHT PARENTHESIS '*' # 0x002a -> ASTERISK '+' # 0x002b -> PLUS SIGN ',' # 0x002c -> COMMA '-' # 0x002d -> HYPHEN-MINUS '.' # 0x002e -> FULL STOP '/' # 0x002f -> SOLIDUS '0' # 0x0030 -> DIGIT ZERO '1' # 0x0031 -> DIGIT ONE '2' # 0x0032 -> DIGIT TWO '3' # 0x0033 -> DIGIT THREE '4' # 0x0034 -> DIGIT FOUR '5' # 0x0035 -> DIGIT FIVE '6' # 0x0036 -> DIGIT SIX '7' # 0x0037 -> DIGIT SEVEN '8' # 0x0038 -> DIGIT EIGHT '9' # 0x0039 -> DIGIT NINE ':' # 0x003a -> COLON ';' # 0x003b -> SEMICOLON '<' # 0x003c -> LESS-THAN SIGN '=' # 0x003d -> EQUALS SIGN '>' # 0x003e -> GREATER-THAN SIGN '?' # 0x003f -> QUESTION MARK '@' # 0x0040 -> COMMERCIAL AT 'A' # 0x0041 -> LATIN CAPITAL LETTER A 'B' # 0x0042 -> LATIN CAPITAL LETTER B 'C' # 0x0043 -> LATIN CAPITAL LETTER C 'D' # 0x0044 -> LATIN CAPITAL LETTER D 'E' # 0x0045 -> LATIN CAPITAL LETTER E 'F' # 0x0046 -> LATIN CAPITAL LETTER F 'G' # 0x0047 -> LATIN CAPITAL LETTER G 'H' # 0x0048 -> LATIN CAPITAL LETTER H 'I' # 0x0049 -> LATIN CAPITAL LETTER I 'J' # 0x004a -> LATIN CAPITAL LETTER J 'K' # 0x004b -> LATIN CAPITAL LETTER K 'L' # 0x004c -> LATIN CAPITAL LETTER L 'M' # 0x004d -> LATIN CAPITAL LETTER M 'N' # 0x004e -> LATIN CAPITAL LETTER N 'O' # 0x004f -> LATIN CAPITAL LETTER O 'P' # 0x0050 -> LATIN CAPITAL LETTER P 'Q' # 0x0051 -> LATIN CAPITAL LETTER Q 'R' # 0x0052 -> LATIN CAPITAL LETTER R 'S' # 0x0053 -> LATIN CAPITAL LETTER S 'T' # 0x0054 -> LATIN CAPITAL LETTER T 'U' # 0x0055 -> LATIN CAPITAL LETTER U 'V' # 0x0056 -> LATIN CAPITAL LETTER V 'W' # 0x0057 -> LATIN CAPITAL LETTER W 'X' # 0x0058 -> LATIN CAPITAL LETTER X 'Y' # 0x0059 -> LATIN CAPITAL LETTER Y 'Z' # 0x005a -> LATIN CAPITAL LETTER Z '[' # 0x005b -> LEFT SQUARE BRACKET '\\' # 0x005c -> REVERSE SOLIDUS ']' # 0x005d -> RIGHT SQUARE 
BRACKET '^' # 0x005e -> CIRCUMFLEX ACCENT '_' # 0x005f -> LOW LINE '`' # 0x0060 -> GRAVE ACCENT 'a' # 0x0061 -> LATIN SMALL LETTER A 'b' # 0x0062 -> LATIN SMALL LETTER B 'c' # 0x0063 -> LATIN SMALL LETTER C 'd' # 0x0064 -> LATIN SMALL LETTER D 'e' # 0x0065 -> LATIN SMALL LETTER E 'f' # 0x0066 -> LATIN SMALL LETTER F 'g' # 0x0067 -> LATIN SMALL LETTER G 'h' # 0x0068 -> LATIN SMALL LETTER H 'i' # 0x0069 -> LATIN SMALL LETTER I 'j' # 0x006a -> LATIN SMALL LETTER J 'k' # 0x006b -> LATIN SMALL LETTER K 'l' # 0x006c -> LATIN SMALL LETTER L 'm' # 0x006d -> LATIN SMALL LETTER M 'n' # 0x006e -> LATIN SMALL LETTER N 'o' # 0x006f -> LATIN SMALL LETTER O 'p' # 0x0070 -> LATIN SMALL LETTER P 'q' # 0x0071 -> LATIN SMALL LETTER Q 'r' # 0x0072 -> LATIN SMALL LETTER R 's' # 0x0073 -> LATIN SMALL LETTER S 't' # 0x0074 -> LATIN SMALL LETTER T 'u' # 0x0075 -> LATIN SMALL LETTER U 'v' # 0x0076 -> LATIN SMALL LETTER V 'w' # 0x0077 -> LATIN SMALL LETTER W 'x' # 0x0078 -> LATIN SMALL LETTER X 'y' # 0x0079 -> LATIN SMALL LETTER Y 'z' # 0x007a -> LATIN SMALL LETTER Z '{' # 0x007b -> LEFT CURLY BRACKET '|' # 0x007c -> VERTICAL LINE '}' # 0x007d -> RIGHT CURLY BRACKET '~' # 0x007e -> TILDE '\x7f' # 0x007f -> DELETE '\u05d0' # 0x0080 -> HEBREW LETTER ALEF '\u05d1' # 0x0081 -> HEBREW LETTER BET '\u05d2' # 0x0082 -> HEBREW LETTER GIMEL '\u05d3' # 0x0083 -> HEBREW LETTER DALET '\u05d4' # 0x0084 -> HEBREW LETTER HE '\u05d5' # 0x0085 -> HEBREW LETTER VAV '\u05d6' # 0x0086 -> HEBREW LETTER ZAYIN '\u05d7' # 0x0087 -> HEBREW LETTER HET '\u05d8' # 0x0088 -> HEBREW LETTER TET '\u05d9' # 0x0089 -> HEBREW LETTER YOD '\u05da' # 0x008a -> HEBREW LETTER FINAL KAF '\u05db' # 0x008b -> HEBREW LETTER KAF '\u05dc' # 0x008c -> HEBREW LETTER LAMED '\u05dd' # 0x008d -> HEBREW LETTER FINAL MEM '\u05de' # 0x008e -> HEBREW LETTER MEM '\u05df' # 0x008f -> HEBREW LETTER FINAL NUN '\u05e0' # 0x0090 -> HEBREW LETTER NUN '\u05e1' # 0x0091 -> HEBREW LETTER SAMEKH '\u05e2' # 0x0092 -> HEBREW LETTER AYIN '\u05e3' # 0x0093 -> 
HEBREW LETTER FINAL PE '\u05e4' # 0x0094 -> HEBREW LETTER PE '\u05e5' # 0x0095 -> HEBREW LETTER FINAL TSADI '\u05e6' # 0x0096 -> HEBREW LETTER TSADI '\u05e7' # 0x0097 -> HEBREW LETTER QOF '\u05e8' # 0x0098 -> HEBREW LETTER RESH '\u05e9' # 0x0099 -> HEBREW LETTER SHIN '\u05ea' # 0x009a -> HEBREW LETTER TAV '\xa2' # 0x009b -> CENT SIGN '\xa3' # 0x009c -> POUND SIGN '\xa5' # 0x009d -> YEN SIGN '\u20a7' # 0x009e -> PESETA SIGN '\u0192' # 0x009f -> LATIN SMALL LETTER F WITH HOOK '\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE '\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE '\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE '\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE '\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE '\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE '\xaa' # 0x00a6 -> FEMININE ORDINAL INDICATOR '\xba' # 0x00a7 -> MASCULINE ORDINAL INDICATOR '\xbf' # 0x00a8 -> INVERTED QUESTION MARK '\u2310' # 0x00a9 -> REVERSED NOT SIGN '\xac' # 0x00aa -> NOT SIGN '\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF '\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER '\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK '\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK '\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK '\u2591' # 0x00b0 -> LIGHT SHADE '\u2592' # 0x00b1 -> MEDIUM SHADE '\u2593' # 0x00b2 -> DARK SHADE '\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL '\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT '\u2561' # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE '\u2562' # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE '\u2556' # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE '\u2555' # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE '\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT '\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL '\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT '\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT '\u255c' # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE '\u255b' # 
0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE '\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT '\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT '\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL '\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL '\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT '\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL '\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL '\u255e' # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE '\u255f' # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE '\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT '\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT '\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL '\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL '\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT '\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL '\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL '\u2567' # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE '\u2568' # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE '\u2564' # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE '\u2565' # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE '\u2559' # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE '\u2558' # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE '\u2552' # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE '\u2553' # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE '\u256b' # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE '\u256a' # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE '\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT '\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT '\u2588' # 0x00db -> FULL BLOCK '\u2584' # 0x00dc -> LOWER HALF BLOCK '\u258c' # 0x00dd -> LEFT HALF BLOCK '\u2590' # 0x00de -> RIGHT HALF BLOCK '\u2580' # 0x00df -> UPPER HALF BLOCK '\u03b1' # 0x00e0 -> GREEK SMALL LETTER ALPHA '\xdf' # 
0x00e1 -> LATIN SMALL LETTER SHARP S (GERMAN) '\u0393' # 0x00e2 -> GREEK CAPITAL LETTER GAMMA '\u03c0' # 0x00e3 -> GREEK SMALL LETTER PI '\u03a3' # 0x00e4 -> GREEK CAPITAL LETTER SIGMA '\u03c3' # 0x00e5 -> GREEK SMALL LETTER SIGMA '\xb5' # 0x00e6 -> MICRO SIGN '\u03c4' # 0x00e7 -> GREEK SMALL LETTER TAU '\u03a6' # 0x00e8 -> GREEK CAPITAL LETTER PHI '\u0398' # 0x00e9 -> GREEK CAPITAL LETTER THETA '\u03a9' # 0x00ea -> GREEK CAPITAL LETTER OMEGA '\u03b4' # 0x00eb -> GREEK SMALL LETTER DELTA '\u221e' # 0x00ec -> INFINITY '\u03c6' # 0x00ed -> GREEK SMALL LETTER PHI '\u03b5' # 0x00ee -> GREEK SMALL LETTER EPSILON '\u2229' # 0x00ef -> INTERSECTION '\u2261' # 0x00f0 -> IDENTICAL TO '\xb1' # 0x00f1 -> PLUS-MINUS SIGN '\u2265' # 0x00f2 -> GREATER-THAN OR EQUAL TO '\u2264' # 0x00f3 -> LESS-THAN OR EQUAL TO '\u2320' # 0x00f4 -> TOP HALF INTEGRAL '\u2321' # 0x00f5 -> BOTTOM HALF INTEGRAL '\xf7' # 0x00f6 -> DIVISION SIGN '\u2248' # 0x00f7 -> ALMOST EQUAL TO '\xb0' # 0x00f8 -> DEGREE SIGN '\u2219' # 0x00f9 -> BULLET OPERATOR '\xb7' # 0x00fa -> MIDDLE DOT '\u221a' # 0x00fb -> SQUARE ROOT '\u207f' # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N '\xb2' # 0x00fd -> SUPERSCRIPT TWO '\u25a0' # 0x00fe -> BLACK SQUARE '\xa0' # 0x00ff -> NO-BREAK SPACE ) ### Encoding Map encoding_map = { 0x0000: 0x0000, # NULL 0x0001: 0x0001, # START OF HEADING 0x0002: 0x0002, # START OF TEXT 0x0003: 0x0003, # END OF TEXT 0x0004: 0x0004, # END OF TRANSMISSION 0x0005: 0x0005, # ENQUIRY 0x0006: 0x0006, # ACKNOWLEDGE 0x0007: 0x0007, # BELL 0x0008: 0x0008, # BACKSPACE 0x0009: 0x0009, # HORIZONTAL TABULATION 0x000a: 0x000a, # LINE FEED 0x000b: 0x000b, # VERTICAL TABULATION 0x000c: 0x000c, # FORM FEED 0x000d: 0x000d, # CARRIAGE RETURN 0x000e: 0x000e, # SHIFT OUT 0x000f: 0x000f, # SHIFT IN 0x0010: 0x0010, # DATA LINK ESCAPE 0x0011: 0x0011, # DEVICE CONTROL ONE 0x0012: 0x0012, # DEVICE CONTROL TWO 0x0013: 0x0013, # DEVICE CONTROL THREE 0x0014: 0x0014, # DEVICE CONTROL FOUR 0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE 
0x0016: 0x0016, # SYNCHRONOUS IDLE 0x0017: 0x0017, # END OF TRANSMISSION BLOCK 0x0018: 0x0018, # CANCEL 0x0019: 0x0019, # END OF MEDIUM 0x001a: 0x001a, # SUBSTITUTE 0x001b: 0x001b, # ESCAPE 0x001c: 0x001c, # FILE SEPARATOR 0x001d: 0x001d, # GROUP SEPARATOR 0x001e: 0x001e, # RECORD SEPARATOR 0x001f: 0x001f, # UNIT SEPARATOR 0x0020: 0x0020, # SPACE 0x0021: 0x0021, # EXCLAMATION MARK 0x0022: 0x0022, # QUOTATION MARK 0x0023: 0x0023, # NUMBER SIGN 0x0024: 0x0024, # DOLLAR SIGN 0x0025: 0x0025, # PERCENT SIGN 0x0026: 0x0026, # AMPERSAND 0x0027: 0x0027, # APOSTROPHE 0x0028: 0x0028, # LEFT PARENTHESIS 0x0029: 0x0029, # RIGHT PARENTHESIS 0x002a: 0x002a, # ASTERISK 0x002b: 0x002b, # PLUS SIGN 0x002c: 0x002c, # COMMA 0x002d: 0x002d, # HYPHEN-MINUS 0x002e: 0x002e, # FULL STOP 0x002f: 0x002f, # SOLIDUS 0x0030: 0x0030, # DIGIT ZERO 0x0031: 0x0031, # DIGIT ONE 0x0032: 0x0032, # DIGIT TWO 0x0033: 0x0033, # DIGIT THREE 0x0034: 0x0034, # DIGIT FOUR 0x0035: 0x0035, # DIGIT FIVE 0x0036: 0x0036, # DIGIT SIX 0x0037: 0x0037, # DIGIT SEVEN 0x0038: 0x0038, # DIGIT EIGHT 0x0039: 0x0039, # DIGIT NINE 0x003a: 0x003a, # COLON 0x003b: 0x003b, # SEMICOLON 0x003c: 0x003c, # LESS-THAN SIGN 0x003d: 0x003d, # EQUALS SIGN 0x003e: 0x003e, # GREATER-THAN SIGN 0x003f: 0x003f, # QUESTION MARK 0x0040: 0x0040, # COMMERCIAL AT 0x0041: 0x0041, # LATIN CAPITAL LETTER A 0x0042: 0x0042, # LATIN CAPITAL LETTER B 0x0043: 0x0043, # LATIN CAPITAL LETTER C 0x0044: 0x0044, # LATIN CAPITAL LETTER D 0x0045: 0x0045, # LATIN CAPITAL LETTER E 0x0046: 0x0046, # LATIN CAPITAL LETTER F 0x0047: 0x0047, # LATIN CAPITAL LETTER G 0x0048: 0x0048, # LATIN CAPITAL LETTER H 0x0049: 0x0049, # LATIN CAPITAL LETTER I 0x004a: 0x004a, # LATIN CAPITAL LETTER J 0x004b: 0x004b, # LATIN CAPITAL LETTER K 0x004c: 0x004c, # LATIN CAPITAL LETTER L 0x004d: 0x004d, # LATIN CAPITAL LETTER M 0x004e: 0x004e, # LATIN CAPITAL LETTER N 0x004f: 0x004f, # LATIN CAPITAL LETTER O 0x0050: 0x0050, # LATIN CAPITAL LETTER P 0x0051: 0x0051, # LATIN CAPITAL LETTER 
Q 0x0052: 0x0052, # LATIN CAPITAL LETTER R 0x0053: 0x0053, # LATIN CAPITAL LETTER S 0x0054: 0x0054, # LATIN CAPITAL LETTER T 0x0055: 0x0055, # LATIN CAPITAL LETTER U 0x0056: 0x0056, # LATIN CAPITAL LETTER V 0x0057: 0x0057, # LATIN CAPITAL LETTER W 0x0058: 0x0058, # LATIN CAPITAL LETTER X 0x0059: 0x0059, # LATIN CAPITAL LETTER Y 0x005a: 0x005a, # LATIN CAPITAL LETTER Z 0x005b: 0x005b, # LEFT SQUARE BRACKET 0x005c: 0x005c, # REVERSE SOLIDUS 0x005d: 0x005d, # RIGHT SQUARE BRACKET 0x005e: 0x005e, # CIRCUMFLEX ACCENT 0x005f: 0x005f, # LOW LINE 0x0060: 0x0060, # GRAVE ACCENT 0x0061: 0x0061, # LATIN SMALL LETTER A 0x0062: 0x0062, # LATIN SMALL LETTER B 0x0063: 0x0063, # LATIN SMALL LETTER C 0x0064: 0x0064, # LATIN SMALL LETTER D 0x0065: 0x0065, # LATIN SMALL LETTER E 0x0066: 0x0066, # LATIN SMALL LETTER F 0x0067: 0x0067, # LATIN SMALL LETTER G 0x0068: 0x0068, # LATIN SMALL LETTER H 0x0069: 0x0069, # LATIN SMALL LETTER I 0x006a: 0x006a, # LATIN SMALL LETTER J 0x006b: 0x006b, # LATIN SMALL LETTER K 0x006c: 0x006c, # LATIN SMALL LETTER L 0x006d: 0x006d, # LATIN SMALL LETTER M 0x006e: 0x006e, # LATIN SMALL LETTER N 0x006f: 0x006f, # LATIN SMALL LETTER O 0x0070: 0x0070, # LATIN SMALL LETTER P 0x0071: 0x0071, # LATIN SMALL LETTER Q 0x0072: 0x0072, # LATIN SMALL LETTER R 0x0073: 0x0073, # LATIN SMALL LETTER S 0x0074: 0x0074, # LATIN SMALL LETTER T 0x0075: 0x0075, # LATIN SMALL LETTER U 0x0076: 0x0076, # LATIN SMALL LETTER V 0x0077: 0x0077, # LATIN SMALL LETTER W 0x0078: 0x0078, # LATIN SMALL LETTER X 0x0079: 0x0079, # LATIN SMALL LETTER Y 0x007a: 0x007a, # LATIN SMALL LETTER Z 0x007b: 0x007b, # LEFT CURLY BRACKET 0x007c: 0x007c, # VERTICAL LINE 0x007d: 0x007d, # RIGHT CURLY BRACKET 0x007e: 0x007e, # TILDE 0x007f: 0x007f, # DELETE 0x00a0: 0x00ff, # NO-BREAK SPACE 0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK 0x00a2: 0x009b, # CENT SIGN 0x00a3: 0x009c, # POUND SIGN 0x00a5: 0x009d, # YEN SIGN 0x00aa: 0x00a6, # FEMININE ORDINAL INDICATOR 0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE 
QUOTATION MARK 0x00ac: 0x00aa, # NOT SIGN 0x00b0: 0x00f8, # DEGREE SIGN 0x00b1: 0x00f1, # PLUS-MINUS SIGN 0x00b2: 0x00fd, # SUPERSCRIPT TWO 0x00b5: 0x00e6, # MICRO SIGN 0x00b7: 0x00fa, # MIDDLE DOT 0x00ba: 0x00a7, # MASCULINE ORDINAL INDICATOR 0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK 0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER 0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF 0x00bf: 0x00a8, # INVERTED QUESTION MARK 0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE 0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S (GERMAN) 0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE 0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE 0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE 0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE 0x00f7: 0x00f6, # DIVISION SIGN 0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE 0x0192: 0x009f, # LATIN SMALL LETTER F WITH HOOK 0x0393: 0x00e2, # GREEK CAPITAL LETTER GAMMA 0x0398: 0x00e9, # GREEK CAPITAL LETTER THETA 0x03a3: 0x00e4, # GREEK CAPITAL LETTER SIGMA 0x03a6: 0x00e8, # GREEK CAPITAL LETTER PHI 0x03a9: 0x00ea, # GREEK CAPITAL LETTER OMEGA 0x03b1: 0x00e0, # GREEK SMALL LETTER ALPHA 0x03b4: 0x00eb, # GREEK SMALL LETTER DELTA 0x03b5: 0x00ee, # GREEK SMALL LETTER EPSILON 0x03c0: 0x00e3, # GREEK SMALL LETTER PI 0x03c3: 0x00e5, # GREEK SMALL LETTER SIGMA 0x03c4: 0x00e7, # GREEK SMALL LETTER TAU 0x03c6: 0x00ed, # GREEK SMALL LETTER PHI 0x05d0: 0x0080, # HEBREW LETTER ALEF 0x05d1: 0x0081, # HEBREW LETTER BET 0x05d2: 0x0082, # HEBREW LETTER GIMEL 0x05d3: 0x0083, # HEBREW LETTER DALET 0x05d4: 0x0084, # HEBREW LETTER HE 0x05d5: 0x0085, # HEBREW LETTER VAV 0x05d6: 0x0086, # HEBREW LETTER ZAYIN 0x05d7: 0x0087, # HEBREW LETTER HET 0x05d8: 0x0088, # HEBREW LETTER TET 0x05d9: 0x0089, # HEBREW LETTER YOD 0x05da: 0x008a, # HEBREW LETTER FINAL KAF 0x05db: 0x008b, # HEBREW LETTER KAF 0x05dc: 0x008c, # HEBREW LETTER LAMED 0x05dd: 0x008d, # HEBREW LETTER FINAL MEM 0x05de: 0x008e, # HEBREW LETTER MEM 0x05df: 0x008f, # HEBREW LETTER FINAL NUN 
0x05e0: 0x0090, # HEBREW LETTER NUN 0x05e1: 0x0091, # HEBREW LETTER SAMEKH 0x05e2: 0x0092, # HEBREW LETTER AYIN 0x05e3: 0x0093, # HEBREW LETTER FINAL PE 0x05e4: 0x0094, # HEBREW LETTER PE 0x05e5: 0x0095, # HEBREW LETTER FINAL TSADI 0x05e6: 0x0096, # HEBREW LETTER TSADI 0x05e7: 0x0097, # HEBREW LETTER QOF 0x05e8: 0x0098, # HEBREW LETTER RESH 0x05e9: 0x0099, # HEBREW LETTER SHIN 0x05ea: 0x009a, # HEBREW LETTER TAV 0x207f: 0x00fc, # SUPERSCRIPT LATIN SMALL LETTER N 0x20a7: 0x009e, # PESETA SIGN 0x2219: 0x00f9, # BULLET OPERATOR 0x221a: 0x00fb, # SQUARE ROOT 0x221e: 0x00ec, # INFINITY 0x2229: 0x00ef, # INTERSECTION 0x2248: 0x00f7, # ALMOST EQUAL TO 0x2261: 0x00f0, # IDENTICAL TO 0x2264: 0x00f3, # LESS-THAN OR EQUAL TO 0x2265: 0x00f2, # GREATER-THAN OR EQUAL TO 0x2310: 0x00a9, # REVERSED NOT SIGN 0x2320: 0x00f4, # TOP HALF INTEGRAL 0x2321: 0x00f5, # BOTTOM HALF INTEGRAL 0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL 0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL 0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT 0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT 0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT 0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT 0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT 0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT 0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL 0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL 0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL 0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL 0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL 0x2552: 0x00d5, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE 0x2553: 0x00d6, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE 0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT 0x2555: 0x00b8, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE 0x2556: 0x00b7, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE 0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT 0x2558: 0x00d4, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE 0x2559: 0x00d3, # BOX DRAWINGS UP DOUBLE AND 
RIGHT SINGLE 0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT 0x255b: 0x00be, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE 0x255c: 0x00bd, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE 0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT 0x255e: 0x00c6, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 0x255f: 0x00c7, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE 0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT 0x2561: 0x00b5, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE 0x2562: 0x00b6, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE 0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT 0x2564: 0x00d1, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE 0x2565: 0x00d2, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE 0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL 0x2567: 0x00cf, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE 0x2568: 0x00d0, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE 0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL 0x256a: 0x00d8, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE 0x256b: 0x00d7, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE 0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL 0x2580: 0x00df, # UPPER HALF BLOCK 0x2584: 0x00dc, # LOWER HALF BLOCK 0x2588: 0x00db, # FULL BLOCK 0x258c: 0x00dd, # LEFT HALF BLOCK 0x2590: 0x00de, # RIGHT HALF BLOCK 0x2591: 0x00b0, # LIGHT SHADE 0x2592: 0x00b1, # MEDIUM SHADE 0x2593: 0x00b2, # DARK SHADE 0x25a0: 0x00fe, # BLACK SQUARE }
lgpl-3.0
bmazin/SDR
DataReadout/ReadoutControls/lib/PulseServer.py
1
4912
#!/usr/bin/env python """ A simple server that ckecks IP throughput """ import socket, struct import sys import time import random import string import os from subprocess import Popen,PIPE,call def mysendall(client,data): size = len(data) totalsent = 0 while totalsent < 32768: sent = client.send(data[totalsent:]) if sent == 0: raise RuntimeError("sent = 0") totalsent = totalsent + sent #data = random.getrandbits(8192) #N = 1024 #data = ''.join(random.choice(string.printable) for x in range(N)) host = '' port = 50000 backlog = 1 s = None # location of ioregs in file structure, assuming pid is passed as argument to function #if sys.argv[1] == '': # print 'Need pid as command line argument' # sys.exit(1) firmware = 'timestamper.bof' if sys.argv[1] != '': firmware = sys.argv[1] # get the process id automatically process = Popen(['ps', '-eo' ,'pid,args'], stdout=PIPE, stderr=PIPE) stdout, notused = process.communicate() procid = 0 for line in stdout.splitlines(): tokens = line.split(' ',3) for i,token in enumerate(tokens): if(token == '/boffiles/'+firmware): procid = tokens[i-1] if procid == 0: print 'Firmware process ' + firmware + ' is not running' sys.exit(1) #procid=sys.argv[1] print 'Process ',procid ptrfile = '/proc/' + procid + '/hw/ioreg/pulses_addr' triggerfile = '/proc/' + procid + '/hw/ioreg/startBuffer' datafile0 = '/proc/' + procid + '/hw/ioreg/pulses_bram0' datafile1 = '/proc/' + procid + '/hw/ioreg/pulses_bram1' try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,1) s.bind((host,port)) s.listen(backlog) except socket.error, (code,message): if s: s.close() print "Could not open socket: " + message sys.exit(1) while 1: # wait for the client to connect print 'Waiting for connection.' 
sys.stdout.flush() client, address = s.accept() # set registers to tell ROACH to start taking data f = open(triggerfile,'w') f.write('\x00\x00\x00\x01') f.close() # open file/mem location with address pointer and other relevant files f = open(ptrfile,'rb') d0 = open(datafile0,'rb') d1 = open(datafile1,'rb') # don't take data until address begins to increment #time.sleep(1) print 'Starting new observation at: ',time.time() f.seek(0) start = f.read() print start.encode('hex') print type(start) start = struct.unpack('>l', start)[0] print start print type(start) while 1: f.seek(0) ptr = f.read() ptr = struct.unpack('>l', ptr)[0] if ptr > start: break print 'Adr began incrementing at: ',time.time() t0=0 lost_count = 0 packno=0 # loop to send data last = 1 while 1: # read pointer location f.seek(0) ptr = f.read() print 'ptr=',ptr.encode('hex') ptr = struct.unpack('>l', ptr)[0] print 'ptr_dec=',ptr # once 32768 bytes of data is ready send it out if ptr < 8192: if last == 0: print 'Data Lost: ',time.time() lost_count = lost_count + 1 last = 0 while ptr < 8500: time.sleep(0.001) f.seek(0) ptr = f.read() ptr = struct.unpack('>l', ptr)[0] try: t = time.time() d0.seek(0) tmp = d0.read(32768) client.sendall(tmp) d1.seek(0) tmp2 = d1.read(32768) if t0 == 0: t0 = t client.sendall(tmp2) t2=time.time() print "packet ",packno," pixel ",tmp2[0].encode('hex')," at ",(t-t0)," took",(t2-t) packno = packno+1 except: break elif ptr >= 8192 and ptr < 16384: if last == 1: print 'Data Lost! 
:',time.time() lost_count = lost_count + 1 last = 1 while ptr >= 8192 or ptr < 300: time.sleep(0.001) f.seek(0) ptr = f.read() ptr = struct.unpack('>l', ptr)[0] try: t = time.time() d0.seek(32768) tmp = d0.read(32768) client.sendall(tmp) d1.seek(32768) tmp2 = d1.read(32768) client.sendall(tmp2) t2=time.time() print "packet ",packno," pixel ",tmp2[0].encode('hex')," at ",(t-t0)," took",(t2-t) packno = packno+1 except: break # done with data taking print 'Cleaning up: ',time.time() print "Lost Count = ",lost_count try: client.close() except: pass f.close() d0.close() d1.close() f = open(triggerfile,'w') f.write('\x00\x00\x00\x00') f.close()
gpl-2.0
robmcdan/python-goose
goose/outputformatters.py
13
4704
# -*- coding: utf-8 -*- """\ This is a python port of "Goose" orignialy licensed to Gravity.com under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. Python port was written by Xavier Grangier for Recrutae Gravity.com licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from HTMLParser import HTMLParser from goose.text import innerTrim class OutputFormatter(object): def __init__(self, config, article): # config self.config = config # article self.article = article # parser self.parser = self.config.get_parser() # stopwords class self.stopwords_class = config.stopwords_class # top node self.top_node = None def get_language(self): """\ Returns the language is by the article or the configuration language """ # we don't want to force the target language # so we use the article.meta_lang if self.config.use_meta_language: if self.article.meta_lang: return self.article.meta_lang[:2] return self.config.target_language def get_top_node(self): return self.top_node def get_formatted_text(self): self.top_node = self.article.top_node self.remove_negativescores_nodes() self.links_to_text() self.add_newline_to_br() self.replace_with_text() self.remove_fewwords_paragraphs() return self.convert_to_text() def convert_to_text(self): txts = [] for node in list(self.get_top_node()): txt = self.parser.getText(node) if txt: txt = HTMLParser().unescape(txt) txt_lis = innerTrim(txt).split(r'\n') 
txts.extend(txt_lis) return '\n\n'.join(txts) def add_newline_to_br(self): for e in self.parser.getElementsByTag(self.top_node, tag='br'): e.text = r'\n' def links_to_text(self): """\ cleans up and converts any nodes that should be considered text into text """ self.parser.stripTags(self.get_top_node(), 'a') def remove_negativescores_nodes(self): """\ if there are elements inside our top node that have a negative gravity score, let's give em the boot """ gravity_items = self.parser.css_select(self.top_node, "*[gravityScore]") for item in gravity_items: score = self.parser.getAttribute(item, 'gravityScore') score = int(score, 0) if score < 1: item.getparent().remove(item) def replace_with_text(self): """\ replace common tags with just text so we don't have any crazy formatting issues so replace <br>, <i>, <strong>, etc.... with whatever text is inside them code : http://lxml.de/api/lxml.etree-module.html#strip_tags """ self.parser.stripTags(self.get_top_node(), 'b', 'strong', 'i', 'br', 'sup') def remove_fewwords_paragraphs(self): """\ remove paragraphs that have less than x number of words, would indicate that it's some sort of link """ all_nodes = self.parser.getElementsByTags(self.get_top_node(), ['*']) all_nodes.reverse() for el in all_nodes: tag = self.parser.getTag(el) text = self.parser.getText(el) stop_words = self.stopwords_class(language=self.get_language()).get_stopword_count(text) if (tag != 'br' or text != '\\r') and stop_words.get_stopword_count() < 3 \ and len(self.parser.getElementsByTag(el, tag='object')) == 0 \ and len(self.parser.getElementsByTag(el, tag='embed')) == 0: self.parser.remove(el) # TODO # check if it is in the right place else: trimmed = self.parser.getText(el) if trimmed.startswith("(") and trimmed.endswith(")"): self.parser.remove(el) class StandardOutputFormatter(OutputFormatter): pass
apache-2.0
jgoclawski/django
django/contrib/admin/views/main.py
327
16684
import sys from collections import OrderedDict from django.contrib.admin import FieldListFilter from django.contrib.admin.exceptions import ( DisallowedModelAdminLookup, DisallowedModelAdminToField, ) from django.contrib.admin.options import ( IS_POPUP_VAR, TO_FIELD_VAR, IncorrectLookupParameters, ) from django.contrib.admin.utils import ( get_fields_from_path, lookup_needs_distinct, prepare_lookup_value, quote, ) from django.core.exceptions import ( FieldDoesNotExist, ImproperlyConfigured, SuspiciousOperation, ) from django.core.paginator import InvalidPage from django.core.urlresolvers import reverse from django.db import models from django.utils import six from django.utils.encoding import force_text from django.utils.http import urlencode from django.utils.translation import ugettext # Changelist settings ALL_VAR = 'all' ORDER_VAR = 'o' ORDER_TYPE_VAR = 'ot' PAGE_VAR = 'p' SEARCH_VAR = 'q' ERROR_FLAG = 'e' IGNORED_PARAMS = ( ALL_VAR, ORDER_VAR, ORDER_TYPE_VAR, SEARCH_VAR, IS_POPUP_VAR, TO_FIELD_VAR) class ChangeList(object): def __init__(self, request, model, list_display, list_display_links, list_filter, date_hierarchy, search_fields, list_select_related, list_per_page, list_max_show_all, list_editable, model_admin): self.model = model self.opts = model._meta self.lookup_opts = self.opts self.root_queryset = model_admin.get_queryset(request) self.list_display = list_display self.list_display_links = list_display_links self.list_filter = list_filter self.date_hierarchy = date_hierarchy self.search_fields = search_fields self.list_select_related = list_select_related self.list_per_page = list_per_page self.list_max_show_all = list_max_show_all self.model_admin = model_admin self.preserved_filters = model_admin.get_preserved_filters(request) # Get search parameters from the query string. 
try: self.page_num = int(request.GET.get(PAGE_VAR, 0)) except ValueError: self.page_num = 0 self.show_all = ALL_VAR in request.GET self.is_popup = IS_POPUP_VAR in request.GET to_field = request.GET.get(TO_FIELD_VAR) if to_field and not model_admin.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field) self.to_field = to_field self.params = dict(request.GET.items()) if PAGE_VAR in self.params: del self.params[PAGE_VAR] if ERROR_FLAG in self.params: del self.params[ERROR_FLAG] if self.is_popup: self.list_editable = () else: self.list_editable = list_editable self.query = request.GET.get(SEARCH_VAR, '') self.queryset = self.get_queryset(request) self.get_results(request) if self.is_popup: title = ugettext('Select %s') else: title = ugettext('Select %s to change') self.title = title % force_text(self.opts.verbose_name) self.pk_attname = self.lookup_opts.pk.attname def get_filters_params(self, params=None): """ Returns all params except IGNORED_PARAMS """ if not params: params = self.params lookup_params = params.copy() # a dictionary of the query string # Remove all the parameters that are globally and systematically # ignored. for ignored in IGNORED_PARAMS: if ignored in lookup_params: del lookup_params[ignored] return lookup_params def get_filters(self, request): lookup_params = self.get_filters_params() use_distinct = False for key, value in lookup_params.items(): if not self.model_admin.lookup_allowed(key, value): raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key) filter_specs = [] if self.list_filter: for list_filter in self.list_filter: if callable(list_filter): # This is simply a custom list filter class. spec = list_filter(request, lookup_params, self.model, self.model_admin) else: field_path = None if isinstance(list_filter, (tuple, list)): # This is a custom FieldListFilter class for a given field. 
field, field_list_filter_class = list_filter else: # This is simply a field name, so use the default # FieldListFilter class that has been registered for # the type of the given field. field, field_list_filter_class = list_filter, FieldListFilter.create if not isinstance(field, models.Field): field_path = field field = get_fields_from_path(self.model, field_path)[-1] spec = field_list_filter_class(field, request, lookup_params, self.model, self.model_admin, field_path=field_path) # Check if we need to use distinct() use_distinct = (use_distinct or lookup_needs_distinct(self.lookup_opts, field_path)) if spec and spec.has_output(): filter_specs.append(spec) # At this point, all the parameters used by the various ListFilters # have been removed from lookup_params, which now only contains other # parameters passed via the query string. We now loop through the # remaining parameters both to ensure that all the parameters are valid # fields and to determine if at least one of them needs distinct(). If # the lookup parameters aren't real fields, then bail out. try: for key, value in lookup_params.items(): lookup_params[key] = prepare_lookup_value(key, value) use_distinct = (use_distinct or lookup_needs_distinct(self.lookup_opts, key)) return filter_specs, bool(filter_specs), lookup_params, use_distinct except FieldDoesNotExist as e: six.reraise(IncorrectLookupParameters, IncorrectLookupParameters(e), sys.exc_info()[2]) def get_query_string(self, new_params=None, remove=None): if new_params is None: new_params = {} if remove is None: remove = [] p = self.params.copy() for r in remove: for k in list(p): if k.startswith(r): del p[k] for k, v in new_params.items(): if v is None: if k in p: del p[k] else: p[k] = v return '?%s' % urlencode(sorted(p.items())) def get_results(self, request): paginator = self.model_admin.get_paginator(request, self.queryset, self.list_per_page) # Get the number of objects, with admin filters applied. 
result_count = paginator.count # Get the total number of objects, with no admin filters applied. # Perform a slight optimization: # full_result_count is equal to paginator.count if no filters # were applied if self.model_admin.show_full_result_count: if self.get_filters_params() or self.params.get(SEARCH_VAR): full_result_count = self.root_queryset.count() else: full_result_count = result_count else: full_result_count = None can_show_all = result_count <= self.list_max_show_all multi_page = result_count > self.list_per_page # Get the list of objects to display on this page. if (self.show_all and can_show_all) or not multi_page: result_list = self.queryset._clone() else: try: result_list = paginator.page(self.page_num + 1).object_list except InvalidPage: raise IncorrectLookupParameters self.result_count = result_count self.show_full_result_count = self.model_admin.show_full_result_count # Admin actions are shown if there is at least one entry # or if entries are not counted because show_full_result_count is disabled self.show_admin_actions = not self.show_full_result_count or bool(full_result_count) self.full_result_count = full_result_count self.result_list = result_list self.can_show_all = can_show_all self.multi_page = multi_page self.paginator = paginator def _get_default_ordering(self): ordering = [] if self.model_admin.ordering: ordering = self.model_admin.ordering elif self.lookup_opts.ordering: ordering = self.lookup_opts.ordering return ordering def get_ordering_field(self, field_name): """ Returns the proper model field name corresponding to the given field_name to use for ordering. field_name may either be the name of a proper model field or the name of a method (on the admin or model) or a callable with the 'admin_order_field' attribute. Returns None if no proper model field name can be matched. 
""" try: field = self.lookup_opts.get_field(field_name) return field.name except FieldDoesNotExist: # See whether field_name is a name of a non-field # that allows sorting. if callable(field_name): attr = field_name elif hasattr(self.model_admin, field_name): attr = getattr(self.model_admin, field_name) else: attr = getattr(self.model, field_name) return getattr(attr, 'admin_order_field', None) def get_ordering(self, request, queryset): """ Returns the list of ordering fields for the change list. First we check the get_ordering() method in model admin, then we check the object's default ordering. Then, any manually-specified ordering from the query string overrides anything. Finally, a deterministic order is guaranteed by ensuring the primary key is used as the last ordering field. """ params = self.params ordering = list(self.model_admin.get_ordering(request) or self._get_default_ordering()) if ORDER_VAR in params: # Clear ordering and used params ordering = [] order_params = params[ORDER_VAR].split('.') for p in order_params: try: none, pfx, idx = p.rpartition('-') field_name = self.list_display[int(idx)] order_field = self.get_ordering_field(field_name) if not order_field: continue # No 'admin_order_field', skip it # reverse order if order_field has already "-" as prefix if order_field.startswith('-') and pfx == "-": ordering.append(order_field[1:]) else: ordering.append(pfx + order_field) except (IndexError, ValueError): continue # Invalid ordering specified, skip it. # Add the given query's ordering fields, if any. ordering.extend(queryset.query.order_by) # Ensure that the primary key is systematically present in the list of # ordering fields so we can guarantee a deterministic order across all # database backends. pk_name = self.lookup_opts.pk.name if not (set(ordering) & {'pk', '-pk', pk_name, '-' + pk_name}): # The two sets do not intersect, meaning the pk isn't present. So # we add it. 
ordering.append('-pk') return ordering def get_ordering_field_columns(self): """ Returns an OrderedDict of ordering field column numbers and asc/desc """ # We must cope with more than one column having the same underlying sort # field, so we base things on column numbers. ordering = self._get_default_ordering() ordering_fields = OrderedDict() if ORDER_VAR not in self.params: # for ordering specified on ModelAdmin or model Meta, we don't know # the right column numbers absolutely, because there might be more # than one column associated with that ordering, so we guess. for field in ordering: if field.startswith('-'): field = field[1:] order_type = 'desc' else: order_type = 'asc' for index, attr in enumerate(self.list_display): if self.get_ordering_field(attr) == field: ordering_fields[index] = order_type break else: for p in self.params[ORDER_VAR].split('.'): none, pfx, idx = p.rpartition('-') try: idx = int(idx) except ValueError: continue # skip it ordering_fields[idx] = 'desc' if pfx == '-' else 'asc' return ordering_fields def get_queryset(self, request): # First, we collect all the declared list filters. (self.filter_specs, self.has_filters, remaining_lookup_params, filters_use_distinct) = self.get_filters(request) # Then, we let every list filter modify the queryset to its liking. qs = self.root_queryset for filter_spec in self.filter_specs: new_qs = filter_spec.queryset(request, qs) if new_qs is not None: qs = new_qs try: # Finally, we apply the remaining lookup parameters from the query # string (i.e. those that haven't already been processed by the # filters). qs = qs.filter(**remaining_lookup_params) except (SuspiciousOperation, ImproperlyConfigured): # Allow certain types of errors to be re-raised as-is so that the # caller can treat them in a special way. raise except Exception as e: # Every other error is caught with a naked except, because we don't # have any other way of validating lookup parameters. 
They might be # invalid if the keyword arguments are incorrect, or if the values # are not in the correct type, so we might get FieldError, # ValueError, ValidationError, or ?. raise IncorrectLookupParameters(e) if not qs.query.select_related: qs = self.apply_select_related(qs) # Set ordering. ordering = self.get_ordering(request, qs) qs = qs.order_by(*ordering) # Apply search results qs, search_use_distinct = self.model_admin.get_search_results( request, qs, self.query) # Remove duplicates from results, if necessary if filters_use_distinct | search_use_distinct: return qs.distinct() else: return qs def apply_select_related(self, qs): if self.list_select_related is True: return qs.select_related() if self.list_select_related is False: if self.has_related_field_in_list_display(): return qs.select_related() if self.list_select_related: return qs.select_related(*self.list_select_related) return qs def has_related_field_in_list_display(self): for field_name in self.list_display: try: field = self.lookup_opts.get_field(field_name) except FieldDoesNotExist: pass else: if isinstance(field.remote_field, models.ManyToOneRel): return True return False def url_for_result(self, result): pk = getattr(result, self.pk_attname) return reverse('admin:%s_%s_change' % (self.opts.app_label, self.opts.model_name), args=(quote(pk),), current_app=self.model_admin.admin_site.name)
bsd-3-clause
mohanprasath/Course-Work
data_analysis/uh_data_analysis_with_python/hy-data-analysis-with-python-spring-2020/part05-e08_bicycle_timeseries/test/test_bicycle_timeseries.py
1
2279
#!/usr/bin/env python3 import unittest from unittest.mock import patch, MagicMock import pandas as pd import numpy as np from tmc import points from tmc.utils import load, get_out, patch_helper module_name="src.bicycle_timeseries" bicycle_timeseries = load(module_name, "bicycle_timeseries") main = load(module_name, "main") ph = patch_helper(module_name) @points('p05-08.1') class BicycleTimeseries(unittest.TestCase): # @classmethod # def setUpClass(cls): # cls.df = bicycle_timeseries() def setUp(self): self.df = bicycle_timeseries() def test_shape(self): self.assertEqual(self.df.shape, (37128, 20), msg="Incorrect shape!") def test_columns(self): cols = ['Auroransilta', 'Eteläesplanadi', 'Huopalahti (asema)', 'Kaisaniemi/Eläintarhanlahti', 'Kaivokatu', 'Kulosaaren silta et.', 'Kulosaaren silta po. ', 'Kuusisaarentie', 'Käpylä, Pohjoisbaana', 'Lauttasaaren silta eteläpuoli', 'Merikannontie', 'Munkkiniemen silta eteläpuoli', 'Munkkiniemi silta pohjoispuoli', 'Heperian puisto/Ooppera', 'Pitkäsilta itäpuoli', 'Pitkäsilta länsipuoli', 'Lauttasaaren silta pohjoispuoli', 'Ratapihantie', 'Viikintie', 'Baana'] np.testing.assert_array_equal(self.df.columns, cols, err_msg="Incorrect columns!") def test_index(self): self.assertIsInstance(self.df.index[0], pd.Timestamp, msg="Expected index to have type timestamp!") self.assertEqual(self.df.index[0], pd.to_datetime("2014-1-1 00:00"), msg="Incorrect first index!") self.assertEqual(self.df.index[1], pd.to_datetime("2014-1-1 01:00"), msg="Incorrect second index!") def test_calls(self): with patch(ph("bicycle_timeseries"), wraps=bicycle_timeseries) as pbts,\ patch(ph("pd.read_csv"), wraps=pd.read_csv) as prc,\ patch(ph("pd.to_datetime"), wraps=pd.to_datetime) as pdatetime: main() pbts.assert_called_once() prc.assert_called_once() pdatetime.assert_called() if __name__ == '__main__': unittest.main()
gpl-3.0
angelblue05/plugin.video.emby
libraries/emby/core/http.py
3
8440
# -*- coding: utf-8 -*- ################################################################################################# import json import logging import time import requests from exceptions import HTTPException ################################################################################################# LOG = logging.getLogger('Emby.'+__name__) ################################################################################################# class HTTP(object): session = None keep_alive = False def __init__(self, client): self.client = client self.config = client['config'] def __shortcuts__(self, key): if key == "request": return self.request return def start_session(self): self.session = requests.Session() """ max_retries = self.config['http.max_retries'] self.session.mount("http://", requests.adapters.HTTPAdapter(max_retries=max_retries)) self.session.mount("https://", requests.adapters.HTTPAdapter(max_retries=max_retries)) """ def stop_session(self): if self.session is None: return try: LOG.warn("--<[ session/%s ]", id(self.session)) self.session.close() except Exception as error: LOG.warn("The requests session could not be terminated: %s", error) def _replace_user_info(self, string): if '{server}' in string: if self.config['auth.server']: string = string.decode('utf-8').replace("{server}", self.config['auth.server']) else: raise Exception("Server address not set.") if '{UserId}'in string: if self.config['auth.user_id']: string = string.decode('utf-8').replace("{UserId}", self.config['auth.user_id']) else: raise Exception("UserId is not set.") return string def request(self, data, session=None): ''' Give a chance to retry the connection. Emby sometimes can be slow to answer back data dictionary can contain: type: GET, POST, etc. 
url: (optional) handler: not considered when url is provided (optional) params: request parameters (optional) json: request body (optional) headers: (optional), verify: ssl certificate, True (verify using device built-in library) or False ''' if not data: raise AttributeError("Request cannot be empty") data = self._request(data) LOG.debug("--->[ http ] %s", json.dumps(data, indent=4)) retry = data.pop('retry', 5) def _retry(current): if current: current -= 1 time.sleep(1) return current while True: try: r = self._requests(session or self.session or requests, data.pop('type', "GET"), **data) r.content # release the connection if not self.keep_alive and self.session is not None: self.stop_session() r.raise_for_status() except requests.exceptions.ConnectionError as error: retry = _retry(retry) if retry: continue LOG.error(error) self.client['callback']("ServerUnreachable", {'ServerId': self.config['auth.server-id']}) raise HTTPException("ServerUnreachable", error) except requests.exceptions.ReadTimeout as error: retry = _retry(retry) if retry: continue LOG.error(error) raise HTTPException("ReadTimeout", error) except requests.exceptions.HTTPError as error: LOG.error(error) if r.status_code == 401: if 'X-Application-Error-Code' in r.headers: self.client['callback']("AccessRestricted", {'ServerId': self.config['auth.server-id']}) raise HTTPException("AccessRestricted", error) else: self.client['callback']("Unauthorized", {'ServerId': self.config['auth.server-id']}) self.client['auth/revoke-token'] raise HTTPException("Unauthorized", error) elif r.status_code == 500: # log and ignore. 
LOG.error("--[ 500 response ] %s", error) return elif r.status_code == 502: retry = _retry(retry) if retry: continue raise HTTPException(r.status_code, error) except requests.exceptions.MissingSchema as error: raise HTTPException("MissingSchema", {'ServerId': self.config['auth.server']}) except Exception as error: raise else: elapsed = int(r.elapsed.total_seconds() * 1000) LOG.debug("---<[ http ][%s ms]", elapsed) try: self.config['server-time'] = r.headers['Date'] if r.status_code == 204: # return, because there is no response return response = r.json() try: LOG.debug(json.dumps(response, indent=4)) except Exception: LOG.debug(response) return response except ValueError: return def _request(self, data): if 'url' not in data: data['url'] = "%s/emby/%s" % (self.config['auth.server'], data.pop('handler', "")) self._get_header(data) data['timeout'] = data.get('timeout') or self.config['http.timeout'] data['url'] = self._replace_user_info(data['url']) if data.get('verify') is None: if self.config['auth.ssl'] is None: data['verify'] = data['url'].startswith('https') else: data['verify'] = self.config['auth.ssl'] self._process_params(data.get('params') or {}) self._process_params(data.get('json') or {}) return data def _process_params(self, params): for key in params: value = params[key] if isinstance(value, dict): self._process_params(value) if isinstance(value, str): params[key] = self._replace_user_info(value) def _get_header(self, data): data['headers'] = data.setdefault('headers', {}) if not data['headers']: data['headers'].update({ 'Content-type': "application/json", 'Accept-Charset': "UTF-8,*", 'Accept-encoding': "gzip", 'User-Agent': self.config['http.user_agent'] or "%s/%s" % (self.config['app.name'], self.config['app.version']) }) if 'Authorization' not in data['headers']: self._authorization(data) return data def _authorization(self, data): if not self.config['app.device_name']: raise KeyError("Device name cannot be null") auth = "MediaBrowser " auth += 
"Client=%s, " % self.config['app.name'].encode('utf-8') auth += "Device=%s, " % self.config['app.device_name'].encode('utf-8') auth += "DeviceId=%s, " % self.config['app.device_id'].encode('utf-8') auth += "Version=%s" % self.config['app.version'].encode('utf-8') data['headers'].update({'Authorization': auth}) if self.config['auth.token'] and self.config['auth.user_id']: auth += ', UserId=%s' % self.config['auth.user_id'].encode('utf-8') data['headers'].update({'Authorization': auth, 'X-MediaBrowser-Token': self.config['auth.token'].encode('utf-8')}) return data def _requests(self, session, action, **kwargs): if action == "GET": return session.get(**kwargs) elif action == "POST": return session.post(**kwargs) elif action == "HEAD": return session.head(**kwargs) elif action == "DELETE": return session.delete(**kwargs)
gpl-3.0
Rudloff/youtube-dl
youtube_dl/postprocessor/xattrpp.py
19
7229
from __future__ import unicode_literals import os import subprocess import sys import errno from .common import PostProcessor from ..compat import compat_os_name from ..utils import ( check_executable, hyphenate_date, version_tuple, PostProcessingError, encodeArgument, encodeFilename, ) class XAttrMetadataError(PostProcessingError): def __init__(self, code=None, msg='Unknown error'): super(XAttrMetadataError, self).__init__(msg) self.code = code # Parsing code and msg if (self.code in (errno.ENOSPC, errno.EDQUOT) or 'No space left' in self.msg or 'Disk quota excedded' in self.msg): self.reason = 'NO_SPACE' elif self.code == errno.E2BIG or 'Argument list too long' in self.msg: self.reason = 'VALUE_TOO_LONG' else: self.reason = 'NOT_SUPPORTED' class XAttrMetadataPP(PostProcessor): # # More info about extended attributes for media: # http://freedesktop.org/wiki/CommonExtendedAttributes/ # http://www.freedesktop.org/wiki/PhreedomDraft/ # http://dublincore.org/documents/usageguide/elements.shtml # # TODO: # * capture youtube keywords and put them in 'user.dublincore.subject' (comma-separated) # * figure out which xattrs can be used for 'duration', 'thumbnail', 'resolution' # def run(self, info): """ Set extended attributes on downloaded file (if xattr support is found). """ # This mess below finds the best xattr tool for the job and creates a # "write_xattr" function. try: # try the pyxattr module... import xattr # Unicode arguments are not supported in python-pyxattr until # version 0.5.0 # See https://github.com/rg3/youtube-dl/issues/5498 pyxattr_required_version = '0.5.0' if version_tuple(xattr.__version__) < version_tuple(pyxattr_required_version): self._downloader.report_warning( 'python-pyxattr is detected but is too old. ' 'youtube-dl requires %s or above while your version is %s. 
' 'Falling back to other xattr implementations' % ( pyxattr_required_version, xattr.__version__)) raise ImportError def write_xattr(path, key, value): try: xattr.set(path, key, value) except EnvironmentError as e: raise XAttrMetadataError(e.errno, e.strerror) except ImportError: if compat_os_name == 'nt': # Write xattrs to NTFS Alternate Data Streams: # http://en.wikipedia.org/wiki/NTFS#Alternate_data_streams_.28ADS.29 def write_xattr(path, key, value): assert ':' not in key assert os.path.exists(path) ads_fn = path + ':' + key try: with open(ads_fn, 'wb') as f: f.write(value) except EnvironmentError as e: raise XAttrMetadataError(e.errno, e.strerror) else: user_has_setfattr = check_executable('setfattr', ['--version']) user_has_xattr = check_executable('xattr', ['-h']) if user_has_setfattr or user_has_xattr: def write_xattr(path, key, value): value = value.decode('utf-8') if user_has_setfattr: executable = 'setfattr' opts = ['-n', key, '-v', value] elif user_has_xattr: executable = 'xattr' opts = ['-w', key, value] cmd = ([encodeFilename(executable, True)] + [encodeArgument(o) for o in opts] + [encodeFilename(path, True)]) try: p = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) except EnvironmentError as e: raise XAttrMetadataError(e.errno, e.strerror) stdout, stderr = p.communicate() stderr = stderr.decode('utf-8', 'replace') if p.returncode != 0: raise XAttrMetadataError(p.returncode, stderr) else: # On Unix, and can't find pyxattr, setfattr, or xattr. if sys.platform.startswith('linux'): self._downloader.report_error( "Couldn't find a tool to set the xattrs. " "Install either the python 'pyxattr' or 'xattr' " "modules, or the GNU 'attr' package " "(which contains the 'setfattr' tool).") else: self._downloader.report_error( "Couldn't find a tool to set the xattrs. 
" "Install either the python 'xattr' module, " "or the 'xattr' binary.") # Write the metadata to the file's xattrs self._downloader.to_screen('[metadata] Writing metadata to file\'s xattrs') filename = info['filepath'] try: xattr_mapping = { 'user.xdg.referrer.url': 'webpage_url', # 'user.xdg.comment': 'description', 'user.dublincore.title': 'title', 'user.dublincore.date': 'upload_date', 'user.dublincore.description': 'description', 'user.dublincore.contributor': 'uploader', 'user.dublincore.format': 'format', } for xattrname, infoname in xattr_mapping.items(): value = info.get(infoname) if value: if infoname == 'upload_date': value = hyphenate_date(value) byte_value = value.encode('utf-8') write_xattr(filename, xattrname, byte_value) return [], info except XAttrMetadataError as e: if e.reason == 'NO_SPACE': self._downloader.report_warning( 'There\'s no disk space left or disk quota exceeded. ' + 'Extended attributes are not written.') elif e.reason == 'VALUE_TOO_LONG': self._downloader.report_warning( 'Unable to write extended attributes due to too long values.') else: msg = 'This filesystem doesn\'t support extended attributes. ' if compat_os_name == 'nt': msg += 'You need to use NTFS.' else: msg += '(You may have to enable them in your /etc/fstab)' self._downloader.report_error(msg) return [], info
unlicense
adit-chandra/tensorflow
tensorflow/python/data/experimental/kernel_tests/serialization/matching_files_dataset_serialization_test.py
15
2563
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for the MatchingFilesDataset serialization.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import shutil import tempfile from absl.testing import parameterized from tensorflow.python.data.experimental.kernel_tests.serialization import dataset_serialization_test_base from tensorflow.python.data.experimental.ops import matching_files from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.framework import combinations from tensorflow.python.platform import test class MatchingFilesDatasetSerializationTest( dataset_serialization_test_base.DatasetSerializationTestBase, parameterized.TestCase): def _build_iterator_graph(self, test_patterns): return matching_files.MatchingFilesDataset(test_patterns) @combinations.generate(test_base.default_test_combinations()) def testMatchingFilesCore(self): tmp_dir = tempfile.mkdtemp() width = 16 depth = 8 for i in range(width): for j in range(depth): new_base = os.path.join(tmp_dir, str(i), *[str(dir_name) for dir_name in range(j)]) if not os.path.exists(new_base): os.makedirs(new_base) child_files = ['a.py', 'b.pyc'] if j < depth - 1 else ['c.txt', 'd.log'] for f in child_files: filename = os.path.join(new_base, f) open(filename, 'w').close() patterns = [ 
os.path.join(tmp_dir, os.path.join(*['**' for _ in range(depth)]), suffix) for suffix in ['*.txt', '*.log'] ] num_outputs = width * len(patterns) self.run_core_tests(lambda: self._build_iterator_graph(patterns), num_outputs) shutil.rmtree(tmp_dir, ignore_errors=True) if __name__ == '__main__': test.main()
apache-2.0
jayceyxc/hue
desktop/core/ext-py/ply-3.9/example/closurecalc/calc.py
10
3063
# ----------------------------------------------------------------------------- # calc.py # # A calculator parser that makes use of closures. The function make_calculator() # returns a function that accepts an input string and returns a result. All # lexing rules, parsing rules, and internal state are held inside the function. # ----------------------------------------------------------------------------- import sys sys.path.insert(0, "../..") if sys.version_info[0] >= 3: raw_input = input # Make a calculator function def make_calculator(): import ply.lex as lex import ply.yacc as yacc # ------- Internal calculator state variables = {} # Dictionary of stored variables # ------- Calculator tokenizing rules tokens = ( 'NAME', 'NUMBER', ) literals = ['=', '+', '-', '*', '/', '(', ')'] t_ignore = " \t" t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*' def t_NUMBER(t): r'\d+' t.value = int(t.value) return t def t_newline(t): r'\n+' t.lexer.lineno += t.value.count("\n") def t_error(t): print("Illegal character '%s'" % t.value[0]) t.lexer.skip(1) # Build the lexer lexer = lex.lex() # ------- Calculator parsing rules precedence = ( ('left', '+', '-'), ('left', '*', '/'), ('right', 'UMINUS'), ) def p_statement_assign(p): 'statement : NAME "=" expression' variables[p[1]] = p[3] p[0] = None def p_statement_expr(p): 'statement : expression' p[0] = p[1] def p_expression_binop(p): '''expression : expression '+' expression | expression '-' expression | expression '*' expression | expression '/' expression''' if p[2] == '+': p[0] = p[1] + p[3] elif p[2] == '-': p[0] = p[1] - p[3] elif p[2] == '*': p[0] = p[1] * p[3] elif p[2] == '/': p[0] = p[1] / p[3] def p_expression_uminus(p): "expression : '-' expression %prec UMINUS" p[0] = -p[2] def p_expression_group(p): "expression : '(' expression ')'" p[0] = p[2] def p_expression_number(p): "expression : NUMBER" p[0] = p[1] def p_expression_name(p): "expression : NAME" try: p[0] = variables[p[1]] except LookupError: print("Undefined name '%s'" % p[1]) 
p[0] = 0 def p_error(p): if p: print("Syntax error at '%s'" % p.value) else: print("Syntax error at EOF") # Build the parser parser = yacc.yacc() # ------- Input function def input(text): result = parser.parse(text, lexer=lexer) return result return input # Make a calculator object and use it calc = make_calculator() while True: try: s = raw_input("calc > ") except EOFError: break r = calc(s) if r: print(r)
apache-2.0
afandria/mojo
third_party/cython/src/Cython/Compiler/TreeFragment.py
96
8733
# # TreeFragments - parsing of strings to trees # import re from StringIO import StringIO from Scanning import PyrexScanner, StringSourceDescriptor from Symtab import ModuleScope import PyrexTypes from Visitor import VisitorTransform from Nodes import Node, StatListNode from ExprNodes import NameNode import Parsing import Main import UtilNodes """ Support for parsing strings into code trees. """ class StringParseContext(Main.Context): def __init__(self, name, include_directories=None): if include_directories is None: include_directories = [] Main.Context.__init__(self, include_directories, {}, create_testscope=False) self.module_name = name def find_module(self, module_name, relative_to = None, pos = None, need_pxd = 1): if module_name not in (self.module_name, 'cython'): raise AssertionError("Not yet supporting any cimports/includes from string code snippets") return ModuleScope(module_name, parent_module = None, context = self) def parse_from_strings(name, code, pxds={}, level=None, initial_pos=None, context=None, allow_struct_enum_decorator=False): """ Utility method to parse a (unicode) string of code. This is mostly used for internal Cython compiler purposes (creating code snippets that transforms should emit, as well as unit testing). code - a unicode string containing Cython (module-level) code name - a descriptive name for the code source (to use in error messages etc.) RETURNS The tree, i.e. a ModuleNode. The ModuleNode's scope attribute is set to the scope used when parsing. """ if context is None: context = StringParseContext(name) # Since source files carry an encoding, it makes sense in this context # to use a unicode string so that code fragments don't have to bother # with encoding. This means that test code passed in should not have an # encoding header. 
assert isinstance(code, unicode), "unicode code snippets only please" encoding = "UTF-8" module_name = name if initial_pos is None: initial_pos = (name, 1, 0) code_source = StringSourceDescriptor(name, code) scope = context.find_module(module_name, pos = initial_pos, need_pxd = 0) buf = StringIO(code) scanner = PyrexScanner(buf, code_source, source_encoding = encoding, scope = scope, context = context, initial_pos = initial_pos) ctx = Parsing.Ctx(allow_struct_enum_decorator=allow_struct_enum_decorator) if level is None: tree = Parsing.p_module(scanner, 0, module_name, ctx=ctx) tree.scope = scope tree.is_pxd = False else: tree = Parsing.p_code(scanner, level=level, ctx=ctx) tree.scope = scope return tree class TreeCopier(VisitorTransform): def visit_Node(self, node): if node is None: return node else: c = node.clone_node() self.visitchildren(c) return c class ApplyPositionAndCopy(TreeCopier): def __init__(self, pos): super(ApplyPositionAndCopy, self).__init__() self.pos = pos def visit_Node(self, node): copy = super(ApplyPositionAndCopy, self).visit_Node(node) copy.pos = self.pos return copy class TemplateTransform(VisitorTransform): """ Makes a copy of a template tree while doing substitutions. A dictionary "substitutions" should be passed in when calling the transform; mapping names to replacement nodes. Then replacement happens like this: - If an ExprStatNode contains a single NameNode, whose name is a key in the substitutions dictionary, the ExprStatNode is replaced with a copy of the tree given in the dictionary. It is the responsibility of the caller that the replacement node is a valid statement. - If a single NameNode is otherwise encountered, it is replaced if its name is listed in the substitutions dictionary in the same way. It is the responsibility of the caller to make sure that the replacement nodes is a valid expression. Also a list "temps" should be passed. Any names listed will be transformed into anonymous, temporary names. 
Currently supported for tempnames is: NameNode (various function and class definition nodes etc. should be added to this) Each replacement node gets the position of the substituted node recursively applied to every member node. """ temp_name_counter = 0 def __call__(self, node, substitutions, temps, pos): self.substitutions = substitutions self.pos = pos tempmap = {} temphandles = [] for temp in temps: TemplateTransform.temp_name_counter += 1 handle = UtilNodes.TempHandle(PyrexTypes.py_object_type) tempmap[temp] = handle temphandles.append(handle) self.tempmap = tempmap result = super(TemplateTransform, self).__call__(node) if temps: result = UtilNodes.TempsBlockNode(self.get_pos(node), temps=temphandles, body=result) return result def get_pos(self, node): if self.pos: return self.pos else: return node.pos def visit_Node(self, node): if node is None: return None else: c = node.clone_node() if self.pos is not None: c.pos = self.pos self.visitchildren(c) return c def try_substitution(self, node, key): sub = self.substitutions.get(key) if sub is not None: pos = self.pos if pos is None: pos = node.pos return ApplyPositionAndCopy(pos)(sub) else: return self.visit_Node(node) # make copy as usual def visit_NameNode(self, node): temphandle = self.tempmap.get(node.name) if temphandle: # Replace name with temporary return temphandle.ref(self.get_pos(node)) else: return self.try_substitution(node, node.name) def visit_ExprStatNode(self, node): # If an expression-as-statement consists of only a replaceable # NameNode, we replace the entire statement, not only the NameNode if isinstance(node.expr, NameNode): return self.try_substitution(node, node.expr.name) else: return self.visit_Node(node) def copy_code_tree(node): return TreeCopier()(node) INDENT_RE = re.compile(ur"^ *") def strip_common_indent(lines): "Strips empty lines and common indentation from the list of strings given in lines" # TODO: Facilitate textwrap.indent instead lines = [x for x in lines if x.strip() != u""] 
minindent = min([len(INDENT_RE.match(x).group(0)) for x in lines]) lines = [x[minindent:] for x in lines] return lines class TreeFragment(object): def __init__(self, code, name="(tree fragment)", pxds={}, temps=[], pipeline=[], level=None, initial_pos=None): if isinstance(code, unicode): def fmt(x): return u"\n".join(strip_common_indent(x.split(u"\n"))) fmt_code = fmt(code) fmt_pxds = {} for key, value in pxds.iteritems(): fmt_pxds[key] = fmt(value) mod = t = parse_from_strings(name, fmt_code, fmt_pxds, level=level, initial_pos=initial_pos) if level is None: t = t.body # Make sure a StatListNode is at the top if not isinstance(t, StatListNode): t = StatListNode(pos=mod.pos, stats=[t]) for transform in pipeline: if transform is None: continue t = transform(t) self.root = t elif isinstance(code, Node): if pxds != {}: raise NotImplementedError() self.root = code else: raise ValueError("Unrecognized code format (accepts unicode and Node)") self.temps = temps def copy(self): return copy_code_tree(self.root) def substitute(self, nodes={}, temps=[], pos = None): return TemplateTransform()(self.root, substitutions = nodes, temps = self.temps + temps, pos = pos) class SetPosTransform(VisitorTransform): def __init__(self, pos): super(SetPosTransform, self).__init__() self.pos = pos def visit_Node(self, node): node.pos = self.pos self.visitchildren(node) return node
bsd-3-clause
anisku11/sublimeku
Packages/pygments/all/pygments/lexers/tcl.py
47
5398
# -*- coding: utf-8 -*- """ pygments.lexers.tcl ~~~~~~~~~~~~~~~~~~~ Lexers for Tcl and related languages. :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pygments.lexer import RegexLexer, include, words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number from pygments.util import shebang_matches __all__ = ['TclLexer'] class TclLexer(RegexLexer): """ For Tcl source code. .. versionadded:: 0.10 """ keyword_cmds_re = words(( 'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error', 'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return', 'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable', 'vwait', 'while'), prefix=r'\b', suffix=r'\b') builtin_cmds_re = words(( 'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict', 'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file', 'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp', 'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk', 'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc', 'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex', 'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan', 'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string', 'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b') name = 'Tcl' aliases = ['tcl'] filenames = ['*.tcl', '*.rvt'] mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl'] def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""): return [ (keyword_cmds_re, Keyword, 'params' + context), (builtin_cmds_re, Name.Builtin, 'params' + context), (r'([\w.-]+)', Name.Variable, 'params' + context), (r'#', 
Comment, 'comment'), ] tokens = { 'root': [ include('command'), include('basic'), include('data'), (r'\}', Keyword), # HACK: somehow we miscounted our braces ], 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re), 'command-in-brace': _gen_command_rules(keyword_cmds_re, builtin_cmds_re, "-in-brace"), 'command-in-bracket': _gen_command_rules(keyword_cmds_re, builtin_cmds_re, "-in-bracket"), 'command-in-paren': _gen_command_rules(keyword_cmds_re, builtin_cmds_re, "-in-paren"), 'basic': [ (r'\(', Keyword, 'paren'), (r'\[', Keyword, 'bracket'), (r'\{', Keyword, 'brace'), (r'"', String.Double, 'string'), (r'(eq|ne|in|ni)\b', Operator.Word), (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator), ], 'data': [ (r'\s+', Text), (r'0x[a-fA-F0-9]+', Number.Hex), (r'0[0-7]+', Number.Oct), (r'\d+\.\d+', Number.Float), (r'\d+', Number.Integer), (r'\$([\w.:-]+)', Name.Variable), (r'([\w.:-]+)', Text), ], 'params': [ (r';', Keyword, '#pop'), (r'\n', Text, '#pop'), (r'(else|elseif|then)\b', Keyword), include('basic'), include('data'), ], 'params-in-brace': [ (r'\}', Keyword, ('#pop', '#pop')), include('params') ], 'params-in-paren': [ (r'\)', Keyword, ('#pop', '#pop')), include('params') ], 'params-in-bracket': [ (r'\]', Keyword, ('#pop', '#pop')), include('params') ], 'string': [ (r'\[', String.Double, 'string-square'), (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double), (r'"', String.Double, '#pop') ], 'string-square': [ (r'\[', String.Double, 'string-square'), (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double), (r'\]', String.Double, '#pop') ], 'brace': [ (r'\}', Keyword, '#pop'), include('command-in-brace'), include('basic'), include('data'), ], 'paren': [ (r'\)', Keyword, '#pop'), include('command-in-paren'), include('basic'), include('data'), ], 'bracket': [ (r'\]', Keyword, '#pop'), include('command-in-bracket'), include('basic'), include('data'), ], 'comment': [ (r'.*[^\\]\n', Comment, '#pop'), (r'.*\\\n', Comment), ], } def analyse_text(text): 
return shebang_matches(text, r'(tcl)')
mit
tbadgu/Barcamp-Bangalore-Android-App
gcm_flask/werkzeug/contrib/testtools.py
92
2449
# -*- coding: utf-8 -*- """ werkzeug.contrib.testtools ~~~~~~~~~~~~~~~~~~~~~~~~~~ This module implements extended wrappers for simplified testing. `TestResponse` A response wrapper which adds various cached attributes for simplified assertions on various content types. :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from werkzeug.utils import cached_property, import_string from werkzeug.wrappers import Response from warnings import warn warn(DeprecationWarning('werkzeug.contrib.testtools is deprecated and ' 'will be removed with Werkzeug 1.0')) class ContentAccessors(object): """ A mixin class for response objects that provides a couple of useful accessors for unittesting. """ def xml(self): """Get an etree if possible.""" if 'xml' not in self.mimetype: raise AttributeError( 'Not a XML response (Content-Type: %s)' % self.mimetype) for module in ['xml.etree.ElementTree', 'ElementTree', 'elementtree.ElementTree']: etree = import_string(module, silent=True) if etree is not None: return etree.XML(self.body) raise RuntimeError('You must have ElementTree installed ' 'to use TestResponse.xml') xml = cached_property(xml) def lxml(self): """Get an lxml etree if possible.""" if ('html' not in self.mimetype and 'xml' not in self.mimetype): raise AttributeError('Not an HTML/XML response') from lxml import etree try: from lxml.html import fromstring except ImportError: fromstring = etree.HTML if self.mimetype=='text/html': return fromstring(self.data) return etree.XML(self.data) lxml = cached_property(lxml) def json(self): """Get the result of simplejson.loads if possible.""" if 'json' not in self.mimetype: raise AttributeError('Not a JSON response') try: from simplejson import loads except ImportError: from json import loads return loads(self.data) json = cached_property(json) class TestResponse(Response, ContentAccessors): """Pass this to `werkzeug.test.Client` for easier unittesting."""
apache-2.0
tuxfux-hlp-notes/python-batches
archieves/batch-64/09-modules/myenv/lib/python2.7/site-packages/django/db/migrations/questioner.py
119
7690
from __future__ import print_function, unicode_literals import importlib import os import sys from django.apps import apps from django.db.models.fields import NOT_PROVIDED from django.utils import datetime_safe, six, timezone from django.utils.six.moves import input from .loader import MIGRATIONS_MODULE_NAME class MigrationQuestioner(object): """ Gives the autodetector responses to questions it might have. This base class has a built-in noninteractive mode, but the interactive subclass is what the command-line arguments will use. """ def __init__(self, defaults=None, specified_apps=None, dry_run=None): self.defaults = defaults or {} self.specified_apps = specified_apps or set() self.dry_run = dry_run def ask_initial(self, app_label): "Should we create an initial migration for the app?" # If it was specified on the command line, definitely true if app_label in self.specified_apps: return True # Otherwise, we look to see if it has a migrations module # without any Python files in it, apart from __init__.py. # Apps from the new app template will have these; the python # file check will ensure we skip South ones. try: app_config = apps.get_app_config(app_label) except LookupError: # It's a fake app. 
return self.defaults.get("ask_initial", False) migrations_import_path = "%s.%s" % (app_config.name, MIGRATIONS_MODULE_NAME) try: migrations_module = importlib.import_module(migrations_import_path) except ImportError: return self.defaults.get("ask_initial", False) else: if hasattr(migrations_module, "__file__"): filenames = os.listdir(os.path.dirname(migrations_module.__file__)) elif hasattr(migrations_module, "__path__"): if len(migrations_module.__path__) > 1: return False filenames = os.listdir(list(migrations_module.__path__)[0]) return not any(x.endswith(".py") for x in filenames if x != "__init__.py") def ask_not_null_addition(self, field_name, model_name): "Adding a NOT NULL field to a model" # None means quit return None def ask_not_null_alteration(self, field_name, model_name): "Changing a NULL field to NOT NULL" # None means quit return None def ask_rename(self, model_name, old_name, new_name, field_instance): "Was this field really renamed?" return self.defaults.get("ask_rename", False) def ask_rename_model(self, old_model_state, new_model_state): "Was this model really renamed?" return self.defaults.get("ask_rename_model", False) def ask_merge(self, app_label): "Do you really want to merge these migrations?" 
return self.defaults.get("ask_merge", False) class InteractiveMigrationQuestioner(MigrationQuestioner): def _boolean_input(self, question, default=None): result = input("%s " % question) if not result and default is not None: return default while len(result) < 1 or result[0].lower() not in "yn": result = input("Please answer yes or no: ") return result[0].lower() == "y" def _choice_input(self, question, choices): print(question) for i, choice in enumerate(choices): print(" %s) %s" % (i + 1, choice)) result = input("Select an option: ") while True: try: value = int(result) if 0 < value <= len(choices): return value except ValueError: pass result = input("Please select a valid option: ") def _ask_default(self): print("Please enter the default value now, as valid Python") print("The datetime and django.utils.timezone modules are available, so you can do e.g. timezone.now()") while True: if six.PY3: # Six does not correctly abstract over the fact that # py3 input returns a unicode string, while py2 raw_input # returns a bytestring. 
code = input(">>> ") else: code = input(">>> ").decode(sys.stdin.encoding) if not code: print("Please enter some code, or 'exit' (with no quotes) to exit.") elif code == "exit": sys.exit(1) else: try: return eval(code, {}, {"datetime": datetime_safe, "timezone": timezone}) except (SyntaxError, NameError) as e: print("Invalid input: %s" % e) def ask_not_null_addition(self, field_name, model_name): "Adding a NOT NULL field to a model" if not self.dry_run: choice = self._choice_input( "You are trying to add a non-nullable field '%s' to %s without a default; " "we can't do that (the database needs something to populate existing rows).\n" "Please select a fix:" % (field_name, model_name), [ "Provide a one-off default now (will be set on all existing rows)", "Quit, and let me add a default in models.py", ] ) if choice == 2: sys.exit(3) else: return self._ask_default() return None def ask_not_null_alteration(self, field_name, model_name): "Changing a NULL field to NOT NULL" if not self.dry_run: choice = self._choice_input( "You are trying to change the nullable field '%s' on %s to non-nullable " "without a default; we can't do that (the database needs something to " "populate existing rows).\n" "Please select a fix:" % (field_name, model_name), [ "Provide a one-off default now (will be set on all existing rows)", ("Ignore for now, and let me handle existing rows with NULL myself " "(e.g. adding a RunPython or RunSQL operation in the new migration " "file before the AlterField operation)"), "Quit, and let me add a default in models.py", ] ) if choice == 2: return NOT_PROVIDED elif choice == 3: sys.exit(3) else: return self._ask_default() return None def ask_rename(self, model_name, old_name, new_name, field_instance): "Was this field really renamed?" msg = "Did you rename %s.%s to %s.%s (a %s)? 
[y/N]" return self._boolean_input(msg % (model_name, old_name, model_name, new_name, field_instance.__class__.__name__), False) def ask_rename_model(self, old_model_state, new_model_state): "Was this model really renamed?" msg = "Did you rename the %s.%s model to %s? [y/N]" return self._boolean_input(msg % (old_model_state.app_label, old_model_state.name, new_model_state.name), False) def ask_merge(self, app_label): return self._boolean_input( "\nMerging will only work if the operations printed above do not conflict\n" + "with each other (working on different fields or models)\n" + "Do you want to merge these migration branches? [y/N]", False, )
gpl-3.0
tigerking/pyvision
src/pyvision/gui/__init__.py
8
1646
# PyVision License # # Copyright (c) 2006-2008 David S. Bolme # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither name of copyright holders nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' This package is for GUI tools that are of general use to visior researchers. '''
bsd-3-clause
40223226/2015cd_midterm2
static/Brython3.1.1-20150328-091302/Lib/jqueryui/__init__.py
603
3671
"""Wrapper around the jQuery UI library Exposes a single object, jq, to manipulate the widgets designed in the library This object supports : - subscription : js[elt_id] returns an object matching the element with the specified id - a method get(**kw). The only keyword currently supported is "selector". The method returns a list of instances of the class Element, each instance wraps the elements matching the CSS selector passed jq(selector="button") : returns instances of Element for all button tags The value can be a list or tuple of CSS selector strings : js(selector=("input[type=submit]","a")) : instances of Element for all "input" tags with attribute "type" set to "submit" + "a" tags (anchors) Instances of Element have the same interface as the selections made by the jQuery function $, with the additional methods provided by jQuery UI. For instance, to turn an element into a dialog : jq[elt_id].dialog() When jQuery UI methods expect a Javascript object, they can be passed as key/value pairs : jq['tags'].autocomplete(source=availableTags) """ from browser import html, document, window import javascript _path = __file__[:__file__.rfind('/')]+'/' document <= html.LINK(rel="stylesheet", href=_path+'css/smoothness/jquery-ui.css') # The scripts must be loaded in blocking mode, by using the function # load(script_url[, names]) in module javascript # If we just add them to the document with script tags, eg : # # document <= html.SCRIPT(sciprt_url) # _jqui = window.jQuery.noConflict(True) # # the name "jQuery" is not in the Javascript namespace until the script is # fully loaded in the page, so "window.jQuery" raises an exception # Load jQuery and put name 'jQuery' in the global Javascript namespace javascript.load(_path+'jquery-1.11.2.js', ['jQuery']) javascript.load(_path+'jquery-ui.js') _jqui = window.jQuery.noConflict(True) _events = ['abort', 'beforeinput', 'blur', 'click', 'compositionstart', 'compositionupdate', 'compositionend', 'dblclick', 'error', 'focus', 
'focusin', 'focusout', 'input', 'keydown', 'keyup', 'load', 'mousedown', 'mouseenter', 'mouseleave', 'mousemove', 'mouseout', 'mouseover', 'mouseup', 'resize', 'scroll', 'select', 'unload'] class JQFunction: def __init__(self, func): self.func = func def __call__(self, *args, **kw): if kw: # keyword arguments are passed as a single Javascript object return self.func(*args, kw) else: return self.func(*args) class Element: """Wrapper around the objects returned by jQuery selections""" def __init__(self, item): self.item = item def bind(self, event, callback): """Binds an event on the element to function callback""" getattr(self.item, event)(callback) def __getattr__(self, attr): res = getattr(self.item, attr) if attr in _events: # elt.click(f) is handled like elt.bind('click', f) return lambda f:self.bind(attr, f) if callable(res): res = JQFunction(res) return res class jq: @staticmethod def get(**selectors): items = [] for k,v in selectors.items(): if k=='selector': if isinstance(v,[list, tuple]): values = v else: values = [v] for value in values: items.append(Element(_jqui(value))) elif k=='element': items = Element(_jqui(v)) return items @staticmethod def __getitem__(element_id): return jq.get(selector='#'+element_id)[0]
gpl-3.0
mozilla/verbatim
local_apps/pootle_app/management/commands/update_from_templates.py
5
1083
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009 Zuza Software Foundation
#
# This file is part of Pootle.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.

import os

# The settings module must be configured before any Django-dependent
# import below is executed.
os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'

from pootle_app.management.commands import PootleCommand


class Command(PootleCommand):
    """Management command that mass-updates translation projects from
    their template files."""

    help = "mass update from templates."

    def handle_translation_project(self, translation_project, **options):
        """Refresh one translation project from its templates.

        Invoked once per translation project by the PootleCommand
        driver; delegates the actual work to the project object.
        """
        translation_project.update_from_templates()
gpl-2.0
cylc/cylc
tests/unit/test_job_file.py
1
19323
# THIS FILE IS PART OF THE CYLC SUITE ENGINE. # Copyright (C) NIWA & British Crown (Met Office) & Contributors. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Tests for functions contained in cylc.flow.job_file. # TODO remove the unittest dependency - it should not be necessary. import io import os import pytest from tempfile import TemporaryFile, NamedTemporaryFile from unittest import mock from cylc.flow import __version__ import cylc.flow.flags from cylc.flow.job_file import JobFileWriter from cylc.flow.platforms import platform_from_name @pytest.mark.parametrize( 'in_value, out_value', [('~foo/bar bar', '~foo/"bar bar"'), ('~/bar bar', '~/"bar bar"'), ('~/a', '~/"a"'), ('test', '"test"'), ('~', '~'), ('~a', '~a'), ('foo%s', '"foo%s"'), ('foo%(i)d', '"foo3"')] ) def test_get_variable_value_definition(in_value, out_value): """Test the value for single/tilde variables are correctly quoted, and parameter environment templates are handled""" param_dict = {'i': 3} res = JobFileWriter._get_variable_value_definition(in_value, param_dict) assert(out_value == res) @pytest.fixture def fixture_get_platform(): """ Allows pytest to cache default platform dictionary. Args: custom_settings (dict): settings that you wish to override. Returns: platforms dictionary. 
""" def inner_func(custom_settings=None): platform = platform_from_name() if custom_settings is not None: platform.update(custom_settings) return platform yield inner_func def test_write_prelude_invalid_cylc_command(): job_conf = { "platform": { "batch system": "background", "hosts": ["localhost"], "owner": "me", "cylc executable": "sl -a" } } with pytest.raises(ValueError) as ex: with TemporaryFile(mode="w+") as handle: JobFileWriter()._write_prelude(handle, job_conf) assert("bad cylc executable" in str(ex)) @mock.patch.dict( "os.environ", {'CYLC_SUITE_DEF_PATH': 'cylc/suite/def/path'}) @mock.patch("cylc.flow.job_file.get_remote_suite_run_dir") def test_write(mocked_get_remote_suite_run_dir, fixture_get_platform): """Test write function outputs jobscript file correctly.""" with NamedTemporaryFile() as local_job_file_path: local_job_file_path = local_job_file_path.name platform = fixture_get_platform( { "batch submit command template": "woof", "owner": "me" } ) job_conf = { "platform": platform, "task_id": "baa", "suite_name": "farm_noises", "work_d": "farm_noises/work_d", "remote_suite_d": "remote/suite/dir", "uuid_str": "neigh", 'environment': {'cow': '~/moo', 'sheep': '~baa/baa', 'duck': '~quack'}, "job_d": "1/baa/01", "try_num": 1, "flow_label": "aZ", # "batch_system_name": "background", "param_var": {"duck": "quack", "mouse": "squeak"}, "execution_time_limit": "moo", "namespace_hierarchy": ["root", "baa", "moo"], "dependencies": ['moo', 'neigh', 'quack'], "init-script": "This is the init script", "env-script": "This is the env script", "err-script": "This is the err script", "pre-script": "This is the pre script", "script": "This is the script", "post-script": "This is the post script", "exit-script": "This is the exit script", } mocked_get_remote_suite_run_dir.return_value = "run/dir" JobFileWriter().write(local_job_file_path, job_conf) assert (os.path.exists(local_job_file_path)) size_of_file = os.stat(local_job_file_path).st_size # This test only needs to 
check that the file is created and is # non-empty as each section is covered by individual unit tests. assert(size_of_file > 10) """Test the header is correctly written""" expected = ('#!/bin/bash -l\n#\n# ++++ THIS IS A CYLC TASK JOB SCRIPT ' '++++\n# Suite: farm_noises\n# Task: baa\n# Job ' 'log directory: 1/baa/01\n# Job submit method: ' 'background\n# Job submit command template: woof\n#' ' Execution time limit: moo') platform = fixture_get_platform( {"batch submit command template": "woof"} ) job_conf = { "platform": platform, "batch system": "background", "execution_time_limit": "moo", "suite_name": "farm_noises", "task_id": "baa", "job_d": "1/baa/01" } with io.StringIO() as fake_file: JobFileWriter()._write_header(fake_file, job_conf) assert(fake_file.getvalue() == expected) @pytest.mark.parametrize( 'job_conf,expected', [ ( # basic { "platform": { "batch system": "loadleveler", "batch submit command template": "test_suite", }, "directives": {"moo": "foo", "cluck": "bar"}, "suite_name": "farm_noises", "task_id": "baa", "job_d": "1/test_task_id/01", "job_file_path": "directory/job", "execution_time_limit": 60 }, ('\n\n# DIRECTIVES:\n# @ job_name = farm_noises.baa' '\n# @ output = directory/job.out\n# @ error = directory/' 'job.err\n# @ wall_clock_limit = 120,60\n# @ moo = foo' '\n# @ cluck = bar\n# @ queue') ), ( # Check no directives is correctly written { "platform": { "batch system": "slurm", "batch submit command template": "test_suite" }, "directives": {}, "suite_name": "farm_noises", "task_id": "baa", "job_d": "1/test_task_id/01", "job_file_path": "directory/job", "execution_time_limit": 60 }, ('\n\n# DIRECTIVES:\n#SBATCH --job-name=baa.farm_noises\n#SBATCH ' '--output=directory/job.out\n#SBATCH --error=directory/' 'job.err\n#SBATCH --time=1:00') ), ( # Check pbs max job name length { "platform": { "batch system": "pbs", "batch submit command template": "test_suite", "job name length maximum": 15 }, "directives": {}, "suite_name": "farm_noises", 
"task_id": "baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "job_d": "1/test_task_id/01", "job_file_path": "directory/job", "execution_time_limit": 60 }, ('\n\n# DIRECTIVES:\n#PBS -N baaaaaaaaaaaaaa\n#PBS -o ' 'directory/job.out\n#PBS -e directory/job.err\n#PBS -l ' 'walltime=60') ), ( # Check sge directives are correctly written { "platform": { "batch system": "sge", "batch submit command template": "test_suite", }, "directives": {"-V": "", "-q": "queuename", "-l": "s_vmem=1G,s_cpu=60"}, "suite_name": "farm_noises", "task_id": "baa", "job_d": "1/test_task_id/01", "job_file_path": "$HOME/directory/job", "execution_time_limit": 1000 }, ('\n\n# DIRECTIVES:\n#$ -N farm_noises.baa\n#$ -o directory/' 'job.out\n#$ -e directory/job.err\n#$ -l h_rt=0:16:40\n#$ -V\n#' '$ -q queuename\n#$ -l s_vmem=1G,s_cpu=60' ) ) ], ids=["1", "2", "3", "4"]) def test_write_directives(fixture_get_platform, job_conf: dict, expected: str): """"Test the directives section of job script file is correctly written""" with io.StringIO() as fake_file: JobFileWriter()._write_directives(fake_file, job_conf) assert(fake_file.getvalue() == expected) @pytest.mark.parametrize( "batch_sys", ["at", "background", "loadleveler", "pbs", "sge", "slurm"]) def test_traps_for_each_batch_system(batch_sys: str): """Test traps for each batch system""" platform = platform_from_name() platform.update({ "batch system": f"{batch_sys}", "owner": "me" }) job_conf = { "platform": platform, "directives": {} } with io.StringIO() as fake_file: JobFileWriter()._write_prelude(fake_file, job_conf) output = fake_file.getvalue() if batch_sys == "slurm": assert( "CYLC_FAIL_SIGNALS='EXIT ERR XCPU" in output) else: assert( "CYLC_FAIL_SIGNALS='EXIT ERR TERM XCPU" in output) def test_write_prelude(monkeypatch, fixture_get_platform): """Test the prelude section of job script file is correctly written. 
""" cylc.flow.flags.debug = True expected = ('\nCYLC_FAIL_SIGNALS=\'EXIT ERR TERM XCPU\'\n' 'CYLC_VACATION_SIGNALS=\'USR1\'\nexport PATH=moo/baa:$PATH' '\nexport CYLC_DEBUG=true' '\nexport CYLC_VERSION=\'%s\'\nexport ' % __version__ + 'CYLC_SUITE_INITIAL_CYCLE_POINT=\'20200101T0000Z\'') job_conf = { "platform": fixture_get_platform({ "batch system": "loadleveler", "batch submit command template": "test_suite", "host": "localhost", "owner": "me", "copyable environment variables": [ "CYLC_SUITE_INITIAL_CYCLE_POINT" ], "cylc executable": "moo/baa/cylc" }), "directives": {"restart": "yes"}, } monkeypatch.setenv("CYLC_SUITE_INITIAL_CYCLE_POINT", "20200101T0000Z") with io.StringIO() as fake_file: # copyable environment variables JobFileWriter()._write_prelude(fake_file, job_conf) assert(fake_file.getvalue() == expected) def test_write_suite_environment(fixture_get_platform, monkeypatch): """Test suite environment is correctly written in jobscript""" # set some suite environment conditions monkeypatch.setattr( cylc.flow.job_file, "get_remote_suite_work_dir", lambda a, b: "work/dir" ) monkeypatch.setenv('CYLC_SUITE_DEF_PATH', 'cylc/suite/def/path') cylc.flow.flags.debug = True cylc.flow.flags.verbose = True suite_env = {'CYLC_UTC': 'True', 'CYLC_CYCLING_MODE': 'integer'} job_file_writer = JobFileWriter() job_file_writer.set_suite_env(suite_env) # suite env not correctly setting...check this expected = ('\n\ncylc__job__inst__cylc_env() {\n # CYLC SUITE ' 'ENVIRONMENT:\n export CYLC_CYCLING_MODE="integer"\n ' ' export CYLC_UTC="True"\n export TZ="UTC"\n\n ' ' export CYLC_SUITE_RUN_DIR="cylc-run/farm_noises"\n ' ' export CYLC_SUITE_WORK_DIR_ROOT="work/dir"\n ' ' export CYLC_SUITE_DEF_PATH="remote/suite/dir"\n expor' 't CYLC_SUITE_DEF_PATH_ON_SUITE_HOST="cylc/suite/def/path"' '\n export CYLC_SUITE_UUID="neigh"') job_conf = { "platform": fixture_get_platform({ "host": "localhost", "owner": "me", }), "suite_name": "farm_noises", "remote_suite_d": "remote/suite/dir", "uuid_str": 
"neigh" } rund = "cylc-run/farm_noises" with io.StringIO() as fake_file: job_file_writer._write_suite_environment(fake_file, job_conf, rund) assert(fake_file.getvalue() == expected) def test_write_suite_environment_no_remote_suite_d( fixture_get_platform, monkeypatch ): """Test suite environment is correctly written in jobscript""" monkeypatch.setenv('CYLC_SUITE_DEF_PATH', 'cylc/suite/def/path') monkeypatch.setattr( cylc.flow.job_file, "get_remote_suite_work_dir", lambda a, b: "work/dir" ) cylc.flow.flags.debug = True cylc.flow.flags.verbose = True suite_env = {'CYLC_UTC': 'True', 'CYLC_CYCLING_MODE': 'integer'} job_file_writer = JobFileWriter() job_file_writer.set_suite_env(suite_env) expected = ('\n\ncylc__job__inst__cylc_env() {\n # CYLC SUITE ' 'ENVIRONMENT:\n export CYLC_CYCLING_MODE="integer"\n ' 'export CYLC_UTC="True"\n export TZ="UTC"\n\n export ' 'CYLC_SUITE_RUN_DIR="cylc-run/farm_noises"\n ' 'export CYLC_SUITE' '_WORK_DIR_ROOT="work/dir"\n export CYLC_SUITE_DEF_PATH=' '"cylc/suite/def/path"\n export ' 'CYLC_SUITE_DEF_PATH_ON_SUITE_HOST="cylc/suite/def/path"\n' ' export CYLC_SUITE_UUID="neigh"') job_conf = { "platform": fixture_get_platform({ "host": "localhost", "owner": "me", }), "suite_name": "farm_noises", "uuid_str": "neigh", "remote_suite_d": "" } rund = "cylc-run/farm_noises" with io.StringIO() as fake_file: job_file_writer._write_suite_environment(fake_file, job_conf, rund) blah = fake_file.getvalue() print(blah) assert(fake_file.getvalue() == expected) def test_write_script(): """Test script is correctly written in jobscript""" expected = ( "\n\ncylc__job__inst__init_script() {\n# INIT-SCRIPT:\n" "This is the init script\n}\n\ncylc__job__inst__env_script()" " {\n# ENV-SCRIPT:\nThis is the env script\n}\n\n" "cylc__job__inst__err_script() {\n# ERR-SCRIPT:\nThis is the err " "script\n}\n\ncylc__job__inst__pre_script() {\n# PRE-SCRIPT:\n" "This is the pre script\n}\n\ncylc__job__inst__script() {\n" "# SCRIPT:\nThis is the 
script\n}\n\ncylc__job__inst__post_script" "() {\n# POST-SCRIPT:\nThis is the post script\n}\n\n" "cylc__job__inst__exit_script() {\n# EXIT-SCRIPT:\n" "This is the exit script\n}") job_conf = { "init-script": "This is the init script", "env-script": "This is the env script", "err-script": "This is the err script", "pre-script": "This is the pre script", "script": "This is the script", "post-script": "This is the post script", "exit-script": "This is the exit script", } with io.StringIO() as fake_file: JobFileWriter()._write_script(fake_file, job_conf) assert(fake_file.getvalue() == expected) def test_no_script_section_with_comment_only_script(): """Test jobfilewriter does not generate script section when script is comment only""" expected = ("") job_conf = { "init-script": "", "env-script": "", "err-script": "", "pre-script": "#This is the pre script/n #moo /n#baa", "script": "", "post-script": "", "exit-script": "" } with io.StringIO() as fake_file: JobFileWriter()._write_script(fake_file, job_conf) blah = fake_file.getvalue() print(blah) assert(fake_file.getvalue() == expected) def test_write_task_environment(): """Test task environment is correctly written in jobscript""" # set some task environment conditions expected = ('\n\n # CYLC TASK ENVIRONMENT:\n ' 'export CYLC_TASK_JOB="1/moo/01"\n export ' 'CYLC_TASK_NAMESPACE_HIERARCHY="baa moo"\n export ' 'CYLC_TASK_DEPENDENCIES="moo neigh quack"\n export ' 'CYLC_TASK_TRY_NUMBER=1\n export ' 'CYLC_TASK_FLOW_LABEL=aZ\n export ' 'CYLC_TASK_PARAM_duck="quack"\n export ' 'CYLC_TASK_PARAM_mouse="squeak"\n ' 'CYLC_TASK_WORK_DIR_BASE=\'farm_noises/work_d\'\n}') job_conf = { "job_d": "1/moo/01", "namespace_hierarchy": ["baa", "moo"], "dependencies": ['moo', 'neigh', 'quack'], "try_num": 1, "flow_label": "aZ", "param_var": {"duck": "quack", "mouse": "squeak"}, "work_d": "farm_noises/work_d" } with io.StringIO() as fake_file: JobFileWriter()._write_task_environment(fake_file, job_conf) assert(fake_file.getvalue() == expected) 
def test_write_runtime_environment(): """Test runtime environment is correctly written in jobscript""" expected = ( '\n\ncylc__job__inst__user_env() {\n # TASK RUNTIME ' 'ENVIRONMENT:\n export cow sheep duck\n' ' cow=~/"moo"\n sheep=~baa/"baa"\n ' 'duck=~quack\n}') job_conf = { 'environment': {'cow': '~/moo', 'sheep': '~baa/baa', 'duck': '~quack'} } with io.StringIO() as fake_file: JobFileWriter()._write_runtime_environment(fake_file, job_conf) assert(fake_file.getvalue() == expected) def test_write_epilogue(): """Test epilogue is correctly written in jobscript""" expected = ('\n\n. \"cylc-run/farm_noises/.service/etc/job.sh\"\n' 'cylc__job__main\n\n#EOF: 1/moo/01\n') job_conf = {'job_d': "1/moo/01"} run_d = "cylc-run/farm_noises" with io.StringIO() as fake_file: JobFileWriter()._write_epilogue(fake_file, job_conf, run_d) assert(fake_file.getvalue() == expected) def test_write_global_init_scripts(fixture_get_platform): """Test global init script is correctly written in jobscript""" job_conf = { "platform": fixture_get_platform({ "global init-script": ( 'global init-script = \n' 'export COW=moo\n' 'export PIG=oink\n' 'export DONKEY=HEEHAW\n' ) }) } expected = ('\n\ncylc__job__inst__global_init_script() {\n' '# GLOBAL-INIT-SCRIPT:\nglobal init-script = \nexport ' 'COW=moo\nexport PIG=oink\nexport DONKEY=HEEHAW\n\n}') with io.StringIO() as fake_file: JobFileWriter()._write_global_init_script(fake_file, job_conf) assert(fake_file.getvalue() == expected)
gpl-3.0
damorim/compilers-cin
antigo/ap3/antlr4-python3-runtime-4.7.2/src/antlr4/Recognizer.py
5
5808
#
# Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#
from antlr4.RuleContext import RuleContext
from antlr4.Token import Token
from antlr4.error.ErrorListener import ProxyErrorListener, ConsoleErrorListener

# need forward declaration
RecognitionException = None


class Recognizer(object):
    """Common base class for generated lexers and parsers."""

    # Class-level caches shared by every recognizer instance.
    tokenTypeMapCache = dict()
    ruleIndexMapCache = dict()

    def __init__(self):
        self._listeners = [ConsoleErrorListener.INSTANCE]
        self._interp = None
        self._stateNumber = -1

    def extractVersion(self, version):
        """Split a version string into (major, minor) string parts.

        Handles "1.2", "1.2.3" and "1.2-snapshot" style versions.
        """
        pos = version.find(".")
        major = version[0:pos]
        version = version[pos + 1:]
        pos = version.find(".")
        if pos == -1:
            pos = version.find("-")
        if pos == -1:
            pos = len(version)
        minor = version[0:pos]
        return major, minor

    def checkVersion(self, toolVersion):
        """Warn when the generated code and runtime versions disagree."""
        runtimeVersion = "4.7.2"
        rvmajor, rvminor = self.extractVersion(runtimeVersion)
        tvmajor, tvminor = self.extractVersion(toolVersion)
        if rvmajor != tvmajor or rvminor != tvminor:
            print("ANTLR runtime and generated code versions disagree: " + runtimeVersion + "!=" + toolVersion)

    def addErrorListener(self, listener):
        self._listeners.append(listener)

    def removeErrorListener(self, listener):
        self._listeners.remove(listener)

    def removeErrorListeners(self):
        self._listeners = []

    def getTokenTypeMap(self):
        """Return a map from token names to token types.

        Used for XPath and tree pattern compilation.
        """
        tokenNames = self.getTokenNames()
        if tokenNames is None:
            from antlr4.error.Errors import UnsupportedOperationException
            raise UnsupportedOperationException("The current recognizer does not provide a list of token names.")
        # The cache key must be hashable; getTokenNames() may return a
        # list, which would raise TypeError on dict lookup.
        cacheKey = tuple(tokenNames)
        result = self.tokenTypeMapCache.get(cacheKey, None)
        if result is None:
            # BUG FIX: in Python 3 zip() returns an iterator, not a
            # mapping; the original then failed on the item assignment
            # below with TypeError.  Materialize a real dict.
            result = dict(zip(tokenNames, range(0, len(tokenNames))))
            result["EOF"] = Token.EOF
            self.tokenTypeMapCache[cacheKey] = result
        return result

    # Get a map from rule names to rule indexes.
    #
    # <p>Used for XPath and tree pattern compilation.</p>
    #
    def getRuleIndexMap(self):
        """Return a map from rule names to rule indexes."""
        ruleNames = self.getRuleNames()
        if ruleNames is None:
            from antlr4.error.Errors import UnsupportedOperationException
            raise UnsupportedOperationException("The current recognizer does not provide a list of rule names.")
        cacheKey = tuple(ruleNames)
        result = self.ruleIndexMapCache.get(cacheKey, None)
        if result is None:
            # BUG FIX: same zip() iterator issue as in getTokenTypeMap();
            # build a dict so the cached value is actually a mapping.
            result = dict(zip(ruleNames, range(0, len(ruleNames))))
            self.ruleIndexMapCache[cacheKey] = result
        return result

    def getTokenType(self, tokenName:str):
        ttype = self.getTokenTypeMap().get(tokenName, None)
        if ttype is not None:
            return ttype
        else:
            return Token.INVALID_TYPE

    # What is the error header, normally line/character position information?#
    def getErrorHeader(self, e:RecognitionException):
        line = e.getOffendingToken().line
        column = e.getOffendingToken().column
        # BUG FIX: line and column are ints; concatenating them directly
        # onto a str raised TypeError.  Convert explicitly.
        return "line " + str(line) + ":" + str(column)

    # How should a token be displayed in an error message? The default
    # is to display just the text, but during development you might
    # want to have a lot of information spit out.  Override in that case
    # to use t.toString() (which, for CommonToken, dumps everything about
    # the token). This is better than forcing you to override a method in
    # your token objects because you don't have to go modify your lexer
    # so that it creates a new Java type.
    #
    # @deprecated This method is not called by the ANTLR 4 Runtime. Specific
    # implementations of {@link ANTLRErrorStrategy} may provide a similar
    # feature when necessary. For example, see
    # {@link DefaultErrorStrategy#getTokenErrorDisplay}.
    #
    def getTokenErrorDisplay(self, t:Token):
        if t is None:
            return "<no token>"
        s = t.text
        if s is None:
            if t.type == Token.EOF:
                s = "<EOF>"
            else:
                s = "<" + str(t.type) + ">"
        # Escape whitespace so the token renders on one line.
        s = s.replace("\n", "\\n")
        s = s.replace("\r", "\\r")
        s = s.replace("\t", "\\t")
        return "'" + s + "'"

    def getErrorListenerDispatch(self):
        return ProxyErrorListener(self._listeners)

    # subclass needs to override these if there are sempreds or actions
    # that the ATN interp needs to execute
    def sempred(self, localctx:RuleContext, ruleIndex:int, actionIndex:int):
        return True

    def precpred(self, localctx:RuleContext, precedence:int):
        return True

    @property
    def state(self):
        return self._stateNumber

    # Indicate that the recognizer has changed internal state that is
    # consistent with the ATN state passed in.  This way we always know
    # where we are in the ATN as the parser goes along.  The rule
    # context objects form a stack that lets us see the stack of
    # invoking rules.  Combine this and we have complete ATN
    # configuration information.
    @state.setter
    def state(self, atnState:int):
        self._stateNumber = atnState


del RecognitionException

import unittest


class Test(unittest.TestCase):

    def testVersion(self):
        major, minor = Recognizer().extractVersion("1.2")
        self.assertEqual("1", major)
        self.assertEqual("2", minor)
        major, minor = Recognizer().extractVersion("1.2.3")
        self.assertEqual("1", major)
        self.assertEqual("2", minor)
        major, minor = Recognizer().extractVersion("1.2-snapshot")
        self.assertEqual("1", major)
        self.assertEqual("2", minor)
mit
Ansahmadiba/bazel
tools/build_defs/docker/rewrite_json.py
14
8084
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This package manipulates Docker image layer metadata."""
# NOTE: this module is Python 2 code (iteritems, gflags) -- keep py2-compatible.

from collections import namedtuple
import copy
import json
import os
import os.path
import sys
import tarfile

from third_party.py import gflags

gflags.DEFINE_string(
    'name', None, 'The name of the current layer')

gflags.DEFINE_string(
    'base', None, 'The parent image')

gflags.DEFINE_string(
    'output', None, 'The output file to generate')

gflags.DEFINE_string(
    'layer', None, 'The current layer tar')

gflags.DEFINE_list(
    'entrypoint', None,
    'Override the "Entrypoint" of the previous layer')

gflags.DEFINE_list(
    'command', None,
    'Override the "Cmd" of the previous layer')

gflags.DEFINE_list(
    'ports', None,
    'Augment the "ExposedPorts" of the previous layer')

gflags.DEFINE_list(
    'volumes', None,
    'Augment the "Volumes" of the previous layer')

gflags.DEFINE_list(
    'env', None,
    'Augment the "Env" of the previous layer')

FLAGS = gflags.FLAGS

_MetadataOptionsT = namedtuple(
    'MetadataOptionsT',
    ['name', 'parent', 'size', 'entrypoint', 'cmd', 'env', 'ports', 'volumes'])


class MetadataOptions(_MetadataOptionsT):
  """Docker image layer metadata options."""

  def __new__(cls, name=None, parent=None, size=None,
              entrypoint=None, cmd=None, env=None,
              ports=None, volumes=None):
    """Constructor."""
    return super(MetadataOptions, cls).__new__(
        cls, name=name, parent=parent, size=size,
        entrypoint=entrypoint, cmd=cmd, env=env,
        ports=ports, volumes=volumes)


_DOCKER_VERSION = '1.5.0'

_PROCESSOR_ARCHITECTURE = 'amd64'

_OPERATING_SYSTEM = 'linux'


def Resolve(value, environment):
  """Resolves environment variables embedded in the given value."""
  outer_env = os.environ
  try:
    # os.path.expandvars reads os.environ, so temporarily swap in the
    # layer's environment; always restore the real one afterwards.
    os.environ = environment
    return os.path.expandvars(value)
  finally:
    os.environ = outer_env


def DeepCopySkipNull(data):
  """Do a deep copy, skipping null entry."""
  # IDIOM FIX: use isinstance() instead of comparing type(data) to
  # type(dict()); behaves identically here and is the supported check.
  if isinstance(data, dict):
    return dict((DeepCopySkipNull(k), DeepCopySkipNull(v))
                for k, v in data.iteritems() if v is not None)
  return copy.deepcopy(data)


def RewriteMetadata(data, options):
  """Rewrite and return a copy of the input data according to options.

  Args:
    data: The dict of Docker image layer metadata we're copying and rewriting.
    options: The changes this layer makes to the overall image's metadata,
             which first appears in this layer's version of the metadata

  Returns:
    A deep copy of data, which has been updated to reflect the metadata
    additions of this layer.

  Raises:
    Exception: a required option was missing.
  """
  output = DeepCopySkipNull(data)

  if not options.name:
    raise Exception('Missing required option: name')
  output['id'] = options.name

  if options.parent:
    output['parent'] = options.parent
  elif data:
    raise Exception('Expected empty input object when parent is omitted')

  if options.size:
    output['Size'] = options.size
  elif 'Size' in output:
    del output['Size']

  if 'config' not in output:
    output['config'] = {}

  if options.entrypoint:
    output['config']['Entrypoint'] = options.entrypoint
  if options.cmd:
    output['config']['Cmd'] = options.cmd

  output['docker_version'] = _DOCKER_VERSION
  output['architecture'] = _PROCESSOR_ARCHITECTURE
  output['os'] = _OPERATING_SYSTEM

  if options.env:
    environ_dict = {}
    # Build a dictionary of existing environment variables (used by Resolve).
    for kv in output['config'].get('Env', []):
      (k, v) = kv.split('=', 1)
      environ_dict[k] = v
    # Merge in new environment variables, resolving references.
    for kv in options.env:
      (k, v) = kv.split('=', 1)
      # Resolve handles scenarios like "PATH=$PATH:...".
      v = Resolve(v, environ_dict)
      environ_dict[k] = v
    output['config']['Env'] = [
        '%s=%s' % (k, environ_dict[k]) for k in sorted(environ_dict.keys())]

  if options.ports:
    if 'ExposedPorts' not in output['config']:
      output['config']['ExposedPorts'] = {}
    for p in options.ports:
      if '/' in p:
        # The port spec has the form 80/tcp, 1234/udp
        # so we simply use it as the key.
        output['config']['ExposedPorts'][p] = {}
      else:
        # Assume tcp
        output['config']['ExposedPorts'][p + '/tcp'] = {}

  if options.volumes:
    if 'Volumes' not in output['config']:
      output['config']['Volumes'] = {}
    for p in options.volumes:
      output['config']['Volumes'][p] = {}

  # TODO(mattmoor): comment, created, container_config

  # container_config contains information about the container
  # that was used to create this layer, so it shouldn't
  # propagate from the parent to child.  This is where we would
  # annotate information that can be extract by tools like Blubber
  # or Quay.io's UI to gain insight into the source that generated
  # the layer.  A Dockerfile might produce something like:
  #   # (nop) /bin/sh -c "apt-get update"
  # We might consider encoding the fully-qualified bazel build target:
  #   //tools/build_defs/docker:image
  # However, we should be sensitive to leaking data through this field.
  if 'container_config' in output:
    del output['container_config']

  return output


def GetTarFile(f, name):
  """Return the content of a file inside a tar file.

  This method looks for ./f, /f and f file entry in a tar file and if found,
  return its content.  This allows to read file with various path prefix.

  Args:
    f: The tar file to read.
    name: The name of the file inside the tar file.

  Returns:
    The content of the file, or None if not found.
  """
  with tarfile.open(f, 'r') as tar:
    members = [tarinfo.name for tarinfo in tar.getmembers()]
    for i in ['', './', '/']:
      if i + name in members:
        return tar.extractfile(i + name).read()
  return None


def GetParentIdentifier(f):
  """Try to look at the parent identifier from a docker image.

  The identifier is expected to be in the 'top' file for our rule so we
  look at it first ('./top', 'top').  If it's not found, then we use the
  'repositories' file and tries to parse it to get the first declared
  repository (so we can actually parse a file generated by 'docker save').

  Args:
    f: the input tar file.

  Returns:
    The identifier of the docker image, or None if no identifier was found.
  """
  # TODO(dmarting): Maybe we could drop the 'top' file all together?
  top = GetTarFile(f, 'top')
  if top:
    return top
  repositories = GetTarFile(f, 'repositories')
  if repositories:
    data = json.loads(repositories)
    for k1 in data:
      for k2 in data[k1]:
        # Returns the first found key
        return data[k1][k2]
  return None


def main(unused_argv):
  parent = ''
  base_json = '{}'
  if FLAGS.base:
    parent = GetParentIdentifier(FLAGS.base)
    if parent:
      base_json = GetTarFile(FLAGS.base, '%s/json' % parent)
  data = json.loads(base_json)

  name = FLAGS.name
  if name.startswith('@'):
    # An '@'-prefixed name is an indirection: read the real name from
    # the referenced file.
    with open(name[1:], 'r') as f:
      name = f.read()

  output = RewriteMetadata(data, MetadataOptions(
      name=name,
      parent=parent,
      size=os.path.getsize(FLAGS.layer),
      entrypoint=FLAGS.entrypoint,
      cmd=FLAGS.command,
      env=FLAGS.env,
      ports=FLAGS.ports,
      volumes=FLAGS.volumes))

  with open(FLAGS.output, 'w') as fp:
    json.dump(output, fp, sort_keys=True)
    fp.write('\n')


if __name__ == '__main__':
  main(FLAGS(sys.argv))
apache-2.0
lunafeng/django
django/core/files/storage.py
281
13339
import errno
import os
import warnings
from datetime import datetime

from django.conf import settings
from django.core.exceptions import SuspiciousFileOperation
from django.core.files import File, locks
from django.core.files.move import file_move_safe
from django.utils._os import abspathu, safe_join
from django.utils.crypto import get_random_string
from django.utils.deconstruct import deconstructible
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import filepath_to_uri, force_text
from django.utils.functional import LazyObject
from django.utils.inspect import func_supports_parameter
from django.utils.module_loading import import_string
from django.utils.six.moves.urllib.parse import urljoin
from django.utils.text import get_valid_filename

__all__ = ('Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage')


class Storage(object):
    """
    A base storage class, providing some default behaviors that all other
    storage systems can inherit or override, as necessary.
    """

    # The following methods represent a public interface to private methods.
    # These shouldn't be overridden by subclasses unless absolutely necessary.

    def open(self, name, mode='rb'):
        """
        Retrieves the specified file from storage.
        """
        return self._open(name, mode)

    def save(self, name, content, max_length=None):
        """
        Saves new content to the file specified by name. The content should be
        a proper File object or any python file-like object, ready to be read
        from the beginning.
        """
        # Get the proper name for the file, as it will actually be saved.
        if name is None:
            name = content.name

        # Wrap raw file-like objects so ``content.chunks()`` is available to
        # backends (``_save`` implementations iterate over chunks).
        if not hasattr(content, 'chunks'):
            content = File(content)

        # Third-party backends written before the ``max_length`` argument was
        # introduced may define get_available_name(name) only; fall back to
        # the old signature with a deprecation warning.
        if func_supports_parameter(self.get_available_name, 'max_length'):
            name = self.get_available_name(name, max_length=max_length)
        else:
            warnings.warn(
                'Backwards compatibility for storage backends without '
                'support for the `max_length` argument in '
                'Storage.get_available_name() will be removed in Django 1.10.',
                RemovedInDjango110Warning, stacklevel=2
            )
            name = self.get_available_name(name)

        name = self._save(name, content)

        # Store filenames with forward slashes, even on Windows
        return force_text(name.replace('\\', '/'))

    # These methods are part of the public API, with default implementations.

    def get_valid_name(self, name):
        """
        Returns a filename, based on the provided filename, that's suitable
        for use in the target storage system.
        """
        return get_valid_filename(name)

    def get_available_name(self, name, max_length=None):
        """
        Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        dir_name, file_name = os.path.split(name)
        file_root, file_ext = os.path.splitext(file_name)
        # If the filename already exists, add an underscore and a random 7
        # character alphanumeric string (before the file extension, if one
        # exists) to the filename until the generated filename doesn't exist.
        # Truncate original name if required, so the new filename does not
        # exceed the max_length.
        while self.exists(name) or (max_length and len(name) > max_length):
            # file_ext includes the dot.
            name = os.path.join(dir_name, "%s_%s%s" % (file_root, get_random_string(7), file_ext))
            if max_length is None:
                continue
            # Truncate file_root if max_length exceeded.
            truncation = len(name) - max_length
            if truncation > 0:
                file_root = file_root[:-truncation]
                # Entire file_root was truncated in attempt to find an available filename.
                if not file_root:
                    raise SuspiciousFileOperation(
                        'Storage can not find an available filename for "%s". '
                        'Please make sure that the corresponding file field '
                        'allows sufficient "max_length".' % name
                    )
                name = os.path.join(dir_name, "%s_%s%s" % (file_root, get_random_string(7), file_ext))
        return name

    def path(self, name):
        """
        Returns a local filesystem path where the file can be retrieved using
        Python's built-in open() function. Storage systems that can't be
        accessed using open() should *not* implement this method.
        """
        raise NotImplementedError("This backend doesn't support absolute paths.")

    # The following methods form the public API for storage systems, but with
    # no default implementations. Subclasses must implement *all* of these.

    def delete(self, name):
        """
        Deletes the specified file from the storage system.
        """
        raise NotImplementedError('subclasses of Storage must provide a delete() method')

    def exists(self, name):
        """
        Returns True if a file referenced by the given name already exists in the
        storage system, or False if the name is available for a new file.
        """
        raise NotImplementedError('subclasses of Storage must provide an exists() method')

    def listdir(self, path):
        """
        Lists the contents of the specified path, returning a 2-tuple of lists;
        the first item being directories, the second item being files.
        """
        raise NotImplementedError('subclasses of Storage must provide a listdir() method')

    def size(self, name):
        """
        Returns the total size, in bytes, of the file specified by name.
        """
        raise NotImplementedError('subclasses of Storage must provide a size() method')

    def url(self, name):
        """
        Returns an absolute URL where the file's contents can be accessed
        directly by a Web browser.
        """
        raise NotImplementedError('subclasses of Storage must provide a url() method')

    def accessed_time(self, name):
        """
        Returns the last accessed time (as datetime object) of the file
        specified by name.
        """
        raise NotImplementedError('subclasses of Storage must provide an accessed_time() method')

    def created_time(self, name):
        """
        Returns the creation time (as datetime object) of the file
        specified by name.
        """
        raise NotImplementedError('subclasses of Storage must provide a created_time() method')

    def modified_time(self, name):
        """
        Returns the last modified time (as datetime object) of the file
        specified by name.
        """
        raise NotImplementedError('subclasses of Storage must provide a modified_time() method')


@deconstructible
class FileSystemStorage(Storage):
    """
    Standard filesystem storage
    """

    def __init__(self, location=None, base_url=None, file_permissions_mode=None,
            directory_permissions_mode=None):
        if location is None:
            location = settings.MEDIA_ROOT
        self.base_location = location
        self.location = abspathu(self.base_location)
        if base_url is None:
            base_url = settings.MEDIA_URL
        elif not base_url.endswith('/'):
            base_url += '/'
        self.base_url = base_url
        self.file_permissions_mode = (
            file_permissions_mode if file_permissions_mode is not None
            else settings.FILE_UPLOAD_PERMISSIONS
        )
        self.directory_permissions_mode = (
            directory_permissions_mode if directory_permissions_mode is not None
            else settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS
        )

    def _open(self, name, mode='rb'):
        # Wrap the raw file handle in Django's File abstraction.
        return File(open(self.path(name), mode))

    def _save(self, name, content):
        # Write *content* under *name*, returning the name actually used
        # (which may differ if a concurrent writer claimed the file first).
        full_path = self.path(name)

        # Create any intermediate directories that do not exist.
        # Note that there is a race between os.path.exists and os.makedirs:
        # if os.makedirs fails with EEXIST, the directory was created
        # concurrently, and we can continue normally. Refs #16082.
        directory = os.path.dirname(full_path)
        if not os.path.exists(directory):
            try:
                if self.directory_permissions_mode is not None:
                    # os.makedirs applies the global umask, so we reset it,
                    # for consistency with file_permissions_mode behavior.
                    old_umask = os.umask(0)
                    try:
                        os.makedirs(directory, self.directory_permissions_mode)
                    finally:
                        os.umask(old_umask)
                else:
                    os.makedirs(directory)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        if not os.path.isdir(directory):
            raise IOError("%s exists and is not a directory." % directory)

        # There's a potential race condition between get_available_name and
        # saving the file; it's possible that two threads might return the
        # same name, at which point all sorts of fun happens. So we need to
        # try to create the file, but if it already exists we have to go back
        # to get_available_name() and try again.

        while True:
            try:
                # This file has a file path that we can move.
                if hasattr(content, 'temporary_file_path'):
                    file_move_safe(content.temporary_file_path(), full_path)

                # This is a normal uploadedfile that we can stream.
                else:
                    # This fun binary flag incantation makes os.open throw an
                    # OSError if the file already exists before we open it.
                    flags = (os.O_WRONLY | os.O_CREAT | os.O_EXCL |
                             getattr(os, 'O_BINARY', 0))
                    # The current umask value is masked out by os.open!
                    fd = os.open(full_path, flags, 0o666)
                    _file = None
                    try:
                        locks.lock(fd, locks.LOCK_EX)
                        for chunk in content.chunks():
                            if _file is None:
                                # Pick text/binary mode based on the first
                                # chunk's type; all chunks must match it.
                                mode = 'wb' if isinstance(chunk, bytes) else 'wt'
                                _file = os.fdopen(fd, mode)
                            _file.write(chunk)
                    finally:
                        locks.unlock(fd)
                        if _file is not None:
                            _file.close()
                        else:
                            os.close(fd)
            except OSError as e:
                if e.errno == errno.EEXIST:
                    # Ooops, the file exists. We need a new file name.
                    name = self.get_available_name(name)
                    full_path = self.path(name)
                else:
                    raise
            else:
                # OK, the file save worked. Break out of the loop.
                break

        if self.file_permissions_mode is not None:
            os.chmod(full_path, self.file_permissions_mode)

        return name

    def delete(self, name):
        assert name, "The name argument is not allowed to be empty."
        name = self.path(name)
        # If the file exists, delete it from the filesystem.
        # Note that there is a race between os.path.exists and os.remove:
        # if os.remove fails with ENOENT, the file was removed
        # concurrently, and we can continue normally.
        if os.path.exists(name):
            try:
                os.remove(name)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

    def exists(self, name):
        return os.path.exists(self.path(name))

    def listdir(self, path):
        path = self.path(path)
        directories, files = [], []
        for entry in os.listdir(path):
            if os.path.isdir(os.path.join(path, entry)):
                directories.append(entry)
            else:
                files.append(entry)
        return directories, files

    def path(self, name):
        # safe_join refuses paths that escape self.location (traversal guard).
        return safe_join(self.location, name)

    def size(self, name):
        return os.path.getsize(self.path(name))

    def url(self, name):
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urljoin(self.base_url, filepath_to_uri(name))

    def accessed_time(self, name):
        return datetime.fromtimestamp(os.path.getatime(self.path(name)))

    def created_time(self, name):
        return datetime.fromtimestamp(os.path.getctime(self.path(name)))

    def modified_time(self, name):
        return datetime.fromtimestamp(os.path.getmtime(self.path(name)))


def get_storage_class(import_path=None):
    # Resolve the dotted path (default: settings.DEFAULT_FILE_STORAGE) to a
    # storage class object.
    return import_string(import_path or settings.DEFAULT_FILE_STORAGE)


class DefaultStorage(LazyObject):
    # Lazily instantiates the configured default storage on first access, so
    # importing this module does not require settings to be configured.
    def _setup(self):
        self._wrapped = get_storage_class()()

default_storage = DefaultStorage()
bsd-3-clause
ennoborg/gramps
gramps/gen/utils/resourcepath.py
6
3959
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2013 John Ralls <jralls@ceridwen.us>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import sys
import os
import logging

LOG = logging.getLogger("ResourcePath")
_hdlr = logging.StreamHandler()
_hdlr.setFormatter(logging.Formatter(fmt="%(name)s.%(levelname)s: %(message)s"))
LOG.addHandler(_hdlr)

from ..constfunc import get_env_var


class ResourcePath:
    """
    ResourcePath is a singleton, meaning that only one of them is ever
    created.  At startup it finds the paths to Gramps's resource files and
    caches them for future use.

    It should be called only by const.py; other code should retrieve the
    paths from there.
    """
    instance = None

    def __new__(cls):
        # Classic singleton: create the single instance on first call and
        # hand the same object back on every subsequent call.
        if not cls.instance:
            cls.instance = super(ResourcePath, cls).__new__(cls)
            cls.instance.initialized = False

        return cls.instance

    def __init__(self):
        # __init__ runs on every ResourcePath() call; bail out after the
        # first successful initialization.
        if self.initialized:
            return
        # A 'resource-path' file is written next to this module by the
        # installer; its presence distinguishes an installed Gramps from a
        # source checkout.
        resource_file = os.path.join(os.path.abspath(os.path.dirname(
            __file__)), 'resource-path')
        installed = os.path.exists(resource_file)
        if installed:
            test_path = os.path.join("gramps", "authors.xml")
        else:
            test_path = os.path.join("data", "authors.xml")
        resource_path = None
        tmp_path = get_env_var('GRAMPS_RESOURCES')
        if (tmp_path and os.path.exists(os.path.join(tmp_path, test_path))):
            resource_path = tmp_path
        elif installed:
            try:
                with open(resource_file, encoding='utf-8',
                          errors='strict') as fp:
                    # strip(): the file may end with a newline, which would
                    # otherwise be embedded in every joined path and make the
                    # existence check below fail spuriously.
                    resource_path = fp.readline().strip()
            except UnicodeError as err:
                # NOTE: logging treats extra positional args as %-format
                # arguments, so the message must carry a placeholder.
                LOG.exception("Encoding error while parsing resource path: %s",
                              err)
                sys.exit(1)
            except IOError as err:
                LOG.exception("Failed to open resource file: %s", err)
                sys.exit(1)
            if not os.path.exists(os.path.join(resource_path, test_path)):
                LOG.error("Resource Path %s is invalid", resource_path)
                sys.exit(1)
        else:
            # Let's try to run from source without env['GRAMPS_RESOURCES']:
            resource_path = os.path.join(os.path.abspath(os.path.dirname(
                __file__)), '..', "..", "..")
            test_path = os.path.join("data", "authors.xml")
            if (not os.path.exists(os.path.join(resource_path, test_path))):
                LOG.error("Unable to determine resource path")
                sys.exit(1)

        resource_path = os.path.abspath(resource_path)
        if installed:
            self.locale_dir = os.path.join(resource_path, 'locale')
            self.data_dir = os.path.join(resource_path, 'gramps')
            self.image_dir = os.path.join(resource_path, 'gramps', 'images')
            self.doc_dir = os.path.join(resource_path, 'doc', 'gramps')
        else:
            self.locale_dir = os.path.join(resource_path, 'build', 'mo')
            self.image_dir = os.path.join(resource_path, 'images')
            self.data_dir = os.path.join(resource_path, 'data')
            self.doc_dir = os.path.join(resource_path, 'build', 'data')

        self.initialized = True
gpl-2.0
ssarangi/numba
numba/tests/test_cffi.py
3
3140
from __future__ import print_function, division, absolute_import from numba import unittest_support as unittest from numba import jit, cffi_support, types from numba.compiler import compile_isolated, Flags from numba.tests.support import TestCase from numba.tests.cffi_usecases import * import numpy as np enable_pyobj_flags = Flags() enable_pyobj_flags.set("enable_pyobject") no_pyobj_flags = Flags() @unittest.skipUnless(cffi_support.SUPPORTED, "CFFI not supported") class TestCFFI(TestCase): def test_type_map(self): signature = cffi_support.map_type(ffi.typeof(cffi_sin)) self.assertEqual(len(signature.args), 1) self.assertEqual(signature.args[0], types.double) def _test_function(self, pyfunc, flags=enable_pyobj_flags): cres = compile_isolated(pyfunc, [types.double], flags=flags) cfunc = cres.entry_point for x in [-1.2, -1, 0, 0.1, 3.14]: self.assertPreciseEqual(pyfunc(x), cfunc(x)) def test_sin_function(self): self._test_function(use_cffi_sin) def test_sin_function_npm(self): self._test_function(use_cffi_sin, flags=no_pyobj_flags) def test_sin_function_ool(self, flags=enable_pyobj_flags): self._test_function(use_cffi_sin_ool) def test_sin_function_npm_ool(self): self._test_function(use_cffi_sin_ool, flags=no_pyobj_flags) def test_two_funcs(self): # Check that two constant functions don't get mixed up. 
self._test_function(use_two_funcs) def test_two_funcs_ool(self): self._test_function(use_two_funcs_ool) def test_function_pointer(self): pyfunc = use_func_pointer cfunc = jit(nopython=True)(pyfunc) for (fa, fb, x) in [ (cffi_sin, cffi_cos, 1.0), (cffi_sin, cffi_cos, -1.0), (cffi_cos, cffi_sin, 1.0), (cffi_cos, cffi_sin, -1.0), (cffi_sin_ool, cffi_cos_ool, 1.0), (cffi_sin_ool, cffi_cos_ool, -1.0), (cffi_cos_ool, cffi_sin_ool, 1.0), (cffi_cos_ool, cffi_sin_ool, -1.0), (cffi_sin, cffi_cos_ool, 1.0), (cffi_sin, cffi_cos_ool, -1.0), (cffi_cos, cffi_sin_ool, 1.0), (cffi_cos, cffi_sin_ool, -1.0)]: expected = pyfunc(fa, fb, x) got = cfunc(fa, fb, x) self.assertEqual(got, expected) # A single specialization was compiled for all calls self.assertEqual(len(cfunc.overloads), 1, cfunc.overloads) def test_user_defined_sybols(self): pyfunc = use_user_defined_symbols cfunc = jit(nopython=True)(pyfunc) self.assertEqual(pyfunc(), cfunc()) def _test_pass_numpy_array(self, pyfunc, dtype): x = np.arange(10).astype(dtype) cfunc = jit(nopython=True)(pyfunc) np.testing.assert_equal(pyfunc(x), cfunc(x)) def test_pass_numpy_array_float32(self): self._test_pass_numpy_array(vector_sin_float32, np.float32) def test_pass_numpy_array_float64(self): self._test_pass_numpy_array(vector_sin_float64, np.float64) if __name__ == '__main__': unittest.main()
bsd-2-clause
ray-project/ray
ci/travis/build-docker-images.py
1
17503
import argparse
import datetime
import json
import functools
import glob
import os
import re
import shutil
import subprocess
import sys
from typing import List, Tuple

import docker

# All progress output goes to stderr, unbuffered, so CI logs stay ordered.
print = functools.partial(print, file=sys.stderr, flush=True)

DOCKER_USERNAME = "raytravisbot"
DOCKER_CLIENT = None
PYTHON_WHL_VERSION = "cp3"

DOCKER_HUB_DESCRIPTION = {
    "base-deps": ("Internal Image, refer to "
                  "https://hub.docker.com/r/rayproject/ray"),
    "ray-deps": ("Internal Image, refer to "
                 "https://hub.docker.com/r/rayproject/ray"),
    "ray": "Official Docker Images for Ray, the distributed computing API.",
    "ray-ml": "Developer ready Docker Image for Ray.",
    "autoscaler": (
        "Deprecated image, please use: "
        "https://hub.docker.com/repository/docker/rayproject/ray-ml")
}

# Maps image-tag suffix (e.g. "-py37") to the full interpreter version built.
PY_MATRIX = {"-py36": "3.6.12", "-py37": "3.7.7", "-py38": "3.8.5"}


def _get_branch():
    """Return the current CI branch name (Travis or Buildkite), or None."""
    branch = (os.environ.get("TRAVIS_BRANCH")
              or os.environ.get("BUILDKITE_BRANCH"))
    if not branch:
        print("Branch not found!")
        print(os.environ)
        print("Environment is above ^^")
    return branch


def _release_build():
    """True when building from a `releases/...` branch."""
    branch = _get_branch()
    if branch is None:
        return False
    return branch != "master" and branch.startswith("releases")


def _valid_branch():
    """True for branches whose images may be pushed (master or releases)."""
    branch = _get_branch()
    if branch is None:
        return False
    return branch == "master" or _release_build()


def _get_curr_dir():
    return os.path.dirname(os.path.realpath(__file__))


def _get_root_dir():
    return os.path.join(_get_curr_dir(), "../../")


def _get_commit_sha():
    """Return the first 6 chars of the CI commit SHA, or "ERROR"."""
    sha = (os.environ.get("TRAVIS_COMMIT")
           or os.environ.get("BUILDKITE_COMMIT") or "")
    if len(sha) < 6:
        print("INVALID SHA FOUND")
        return "ERROR"
    return sha[:6]


def _configure_human_version():
    """Interactively override branch/SHA detection for a manual (HUMAN) build."""
    global _get_branch
    global _get_commit_sha
    fake_branch_name = input("Provide a 'branch name'. For releases, it "
                             "should be `releases/x.x.x`")
    _get_branch = lambda: fake_branch_name  # noqa: E731
    fake_sha = input("Provide a SHA (used for tag value)")
    _get_commit_sha = lambda: fake_sha  # noqa: E731


def _get_wheel_name(minor_version_number):
    """Return the wheel filename for one Python minor version, or all wheels
    (as a list) when *minor_version_number* is falsy."""
    if minor_version_number:
        matches = glob.glob(f"{_get_root_dir()}/.whl/*{PYTHON_WHL_VERSION}"
                            f"{minor_version_number}*-manylinux*")
        assert len(matches) == 1, (
            f"Found ({len(matches)}) matches for '*{PYTHON_WHL_VERSION}"
            f"{minor_version_number}*-manylinux*' instead of 1")
        return os.path.basename(matches[0])
    else:
        matches = glob.glob(
            f"{_get_root_dir()}/.whl/*{PYTHON_WHL_VERSION}*-manylinux*")
        return [os.path.basename(i) for i in matches]


def _check_if_docker_files_modified():
    """Ask determine_tests_to_run.py whether this change affects Docker."""
    stdout = subprocess.check_output([
        sys.executable, f"{_get_curr_dir()}/determine_tests_to_run.py",
        "--output=json"
    ])
    affected_env_var_list = json.loads(stdout)
    affected = ("RAY_CI_DOCKER_AFFECTED" in affected_env_var_list or
                "RAY_CI_PYTHON_DEPENDENCIES_AFFECTED" in affected_env_var_list)
    print(f"Docker affected: {affected}")
    return affected


def _build_cpu_gpu_images(image_name, no_cache=True) -> List[str]:
    """Build the cpu & gpu variants of *image_name* for every Python version
    in PY_MATRIX, retrying each build once. Returns the tags built."""
    built_images = []
    for gpu in ["-cpu", "-gpu"]:
        for py_name, py_version in PY_MATRIX.items():
            build_args = {}
            build_args["PYTHON_VERSION"] = py_version
            # I.e. "-py36"[-1] == 6
            build_args["PYTHON_MINOR_VERSION"] = py_name[-1]

            if image_name == "base-deps":
                build_args["BASE_IMAGE"] = (
                    "nvidia/cuda:11.2.0-cudnn8-devel-ubuntu18.04"
                    if gpu == "-gpu" else "ubuntu:focal")
            else:
                # NOTE(ilr) This is a bit of an abuse of the name "GPU"
                build_args["GPU"] = f"{py_name}{gpu}"

            if image_name in ["ray", "ray-deps", "ray-worker-container"]:
                wheel = _get_wheel_name(build_args["PYTHON_MINOR_VERSION"])
                build_args["WHEEL_PATH"] = f".whl/{wheel}"

            tagged_name = f"rayproject/{image_name}:nightly{py_name}{gpu}"
            for i in range(2):
                cleanup = DOCKER_CLIENT.containers.prune().get(
                    "SpaceReclaimed")
                if cleanup is not None:
                    print(f"Cleaned up {cleanup / (2**20)}MB")
                output = DOCKER_CLIENT.api.build(
                    path=os.path.join(_get_root_dir(), "docker", image_name),
                    tag=tagged_name,
                    nocache=no_cache,
                    buildargs=build_args)

                full_output = ""
                try:
                    start = datetime.datetime.now()
                    current_iter = start
                    for line in output:
                        # Emit a heartbeat every 5 minutes so CI doesn't
                        # assume the build hung.
                        if datetime.datetime.now(
                        ) - current_iter >= datetime.timedelta(minutes=5):
                            current_iter = datetime.datetime.now()
                            elapsed = datetime.datetime.now() - start
                            print(f"Still building {tagged_name} after "
                                  f"{elapsed.seconds} seconds")
                        full_output += line.decode("utf-8")
                except Exception as e:
                    print(f"FAILURE with error {e}")

                if len(DOCKER_CLIENT.api.images(tagged_name)) == 0:
                    print(f"ERROR building: {tagged_name} & error below:")
                    print(full_output)
                    if (i == 1):
                        raise Exception("FAILED TO BUILD IMAGE")
                    print("TRYING AGAIN")
                else:
                    break

            print("BUILT: ", tagged_name)
            built_images.append(tagged_name)
    return built_images


def copy_wheels(human_build):
    """Copy the built wheels into each Dockerfile context directory."""
    if human_build:
        print("Please download images using:\n"
              "`pip download --python-version <py_version> ray==<ray_version>")
    root_dir = _get_root_dir()
    wheels = _get_wheel_name(None)
    for wheel in wheels:
        source = os.path.join(root_dir, ".whl", wheel)
        ray_dst = os.path.join(root_dir, "docker/ray/.whl/")
        ray_dep_dst = os.path.join(root_dir, "docker/ray-deps/.whl/")
        ray_worker_container_dst = os.path.join(
            root_dir, "docker/ray-worker-container/.whl/")
        os.makedirs(ray_dst, exist_ok=True)
        shutil.copy(source, ray_dst)
        os.makedirs(ray_dep_dst, exist_ok=True)
        shutil.copy(source, ray_dep_dst)
        os.makedirs(ray_worker_container_dst, exist_ok=True)
        shutil.copy(source, ray_worker_container_dst)


def build_or_pull_base_images(rebuild_base_images: bool = True) -> bool:
    """Build base-deps/ray-deps when requested, stale (>14 days) or on a
    release; otherwise just pull them for caching.

    Returns True when the base images were (re)built.
    (FIX: annotation corrected from List[str] — the function returns a bool,
    which callers consume as the `push_base_images` flag.)
    """
    DOCKER_CLIENT.api.pull(repository="rayproject/base-deps", tag="nightly")

    age = DOCKER_CLIENT.api.inspect_image("rayproject/base-deps:nightly")[
        "Created"]
    short_date = datetime.datetime.strptime(age.split("T")[0], "%Y-%m-%d")
    is_stale = (
        datetime.datetime.now() - short_date) > datetime.timedelta(days=14)

    print("Pulling images for caching")

    DOCKER_CLIENT.api.pull(
        repository="rayproject/base-deps", tag="nightly-cpu")
    DOCKER_CLIENT.api.pull(
        repository="rayproject/base-deps", tag="nightly-gpu")

    DOCKER_CLIENT.api.pull(repository="rayproject/ray-deps", tag="nightly-gpu")
    DOCKER_CLIENT.api.pull(repository="rayproject/ray-deps", tag="nightly-cpu")

    # TODO(ilr) See if any caching happens
    if (rebuild_base_images or is_stale or _release_build()):
        for image in ["base-deps", "ray-deps"]:
            _build_cpu_gpu_images(image, no_cache=False)
        return True
    else:
        print("Just pulling images!")
        return False


def build_ray():
    return _build_cpu_gpu_images("ray")


def build_ray_ml():
    """Build ray-ml images (requirements files copied into context) and also
    tag each one as the deprecated `autoscaler` image."""
    root_dir = _get_root_dir()
    requirement_files = glob.glob(
        f"{_get_root_dir()}/python/**/requirements*.txt", recursive=True)
    for fl in requirement_files:
        shutil.copy(fl, os.path.join(root_dir, "docker/ray-ml/"))
    ray_ml_images = _build_cpu_gpu_images("ray-ml")
    for img in ray_ml_images:
        tag = img.split(":")[-1]
        DOCKER_CLIENT.api.tag(
            image=img, repository="rayproject/autoscaler", tag=tag)


def _get_docker_creds() -> Tuple[str, str]:
    docker_password = os.environ.get("DOCKER_PASSWORD")
    assert docker_password, "DOCKER_PASSWORD not set."
    return DOCKER_USERNAME, docker_password


def build_ray_worker_container():
    return _build_cpu_gpu_images("ray-worker-container")


# For non-release builds, push "nightly" & "sha"
# For release builds, push "nightly" & "latest" & "x.x.x"
def push_and_tag_images(push_base_images: bool, merge_build: bool = False):
    def docker_push(image, tag):
        # Do not tag release builds because they are no longer up to
        # date after the branch cut.
        if "nightly" in tag and _release_build():
            return
        if merge_build:
            print(f"PUSHING: {image}:{tag}, result:")
            # This docker API is janky. Without "stream=True" it returns a
            # massive string filled with every progress bar update, which can
            # cause CI to back up.
            #
            # With stream=True, it's a line-at-a-time generator of the same
            # info. So we can slow it down by printing every couple hundred
            # lines
            i = 0
            for progress_line in DOCKER_CLIENT.api.push(
                    image, tag=tag, stream=True):
                if i % 100 == 0:
                    print(progress_line)
        else:
            print(
                "This is a PR Build! On a merge build, we would normally push "
                f"to: {image}:{tag}")

    def get_new_tag(old_tag, new_tag):
        return old_tag.replace("nightly", new_tag)

    date_tag = datetime.datetime.now().strftime("%Y-%m-%d")
    sha_tag = _get_commit_sha()

    if _release_build():
        # FIX: raw string — "\." in a plain string is an invalid escape
        # sequence (DeprecationWarning, SyntaxError in future Pythons).
        release_name = re.search(r"[0-9]\.[0-9]\.[0-9].*",
                                 _get_branch()).group(0)
        date_tag = release_name
        sha_tag = release_name

    image_list = ["ray", "ray-ml", "autoscaler"]
    if push_base_images:
        image_list.extend(["base-deps", "ray-deps"])

    for image in image_list:
        for py_version in PY_MATRIX.keys():
            full_image = f"rayproject/{image}"

            # Tag "nightly-py3x" from "nightly-py3x-cpu"
            DOCKER_CLIENT.api.tag(
                image=f"{full_image}:nightly{py_version}-cpu",
                repository=full_image,
                tag=f"nightly{py_version}")

            for arch_tag in ["-cpu", "-gpu", ""]:
                full_arch_tag = f"nightly{py_version}{arch_tag}"
                # Tag and push rayproject/<image>:nightly<py_tag><arch_tag>
                docker_push(full_image, full_arch_tag)

                # Ex: specific_tag == "1.0.1" or "<sha>" or "<date>"
                specific_tag = get_new_tag(
                    full_arch_tag, date_tag if "-deps" in image else sha_tag)

                # Tag and push rayproject/<image>:<sha/date><py_tag><arch_tag>
                DOCKER_CLIENT.api.tag(
                    image=f"{full_image}:{full_arch_tag}",
                    repository=full_image,
                    tag=specific_tag)
                docker_push(full_image, specific_tag)

                if "-py37" in py_version:
                    non_python_specific_tag = specific_tag.replace("-py37", "")
                    DOCKER_CLIENT.api.tag(
                        image=f"{full_image}:{full_arch_tag}",
                        repository=full_image,
                        tag=non_python_specific_tag)
                    # Tag and push rayproject/<image>:<sha/date><arch_tag>
                    docker_push(full_image, non_python_specific_tag)

                    non_python_nightly_tag = full_arch_tag.replace("-py37", "")
                    DOCKER_CLIENT.api.tag(
                        image=f"{full_image}:{full_arch_tag}",
                        repository=full_image,
                        tag=non_python_nightly_tag)
                    # Tag and push rayproject/<image>:nightly<arch_tag>
                    docker_push(full_image, non_python_nightly_tag)


# Push infra here:
# https://github.com/christian-korneck/docker-pushrm/blob/master/README-containers.md#push-a-readme-file-to-dockerhub # noqa
def push_readmes(merge_build: bool):
    if not merge_build:
        print("Not pushing README because this is a PR build.")
        return
    username, password = _get_docker_creds()
    for image, tag_line in DOCKER_HUB_DESCRIPTION.items():
        environment = {
            "DOCKER_USER": username,
            "DOCKER_PASS": password,
            "PUSHRM_FILE": f"/myvol/docker/{image}/README.md",
            "PUSHRM_DEBUG": 1,
            "PUSHRM_SHORT": tag_line
        }
        cmd_string = (f"rayproject/{image}")

        print(
            DOCKER_CLIENT.containers.run(
                "chko/docker-pushrm:1",
                command=cmd_string,
                volumes={
                    os.path.abspath(_get_root_dir()): {
                        "bind": "/myvol",
                        "mode": "rw",
                    }
                },
                environment=environment,
                remove=True,
                detach=False,
                stderr=True,
                stdout=True,
                tty=False))


# Build base-deps/ray-deps only on file change, 2 weeks, per release
# Build ray, ray-ml, autoscaler every time
# build-docker-images.py --py-versions PY37 --build-type PR --rebuild-all
MERGE = "MERGE"
HUMAN = "HUMAN"
PR = "PR"
BUILDKITE = "BUILDKITE"
BUILD_TYPES = [MERGE, HUMAN, PR, BUILDKITE]

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--py-versions",
        choices=["PY36", "PY37", "PY38"],
        default="PY37",
        nargs="*",
        help="Which python versions to build. Must be in (PY36, PY37, PY38)")
    parser.add_argument(
        "--build-type",
        choices=BUILD_TYPES,
        required=True,
        help="Whether to bypass checking if docker is affected")
    parser.add_argument(
        "--build-base",
        dest="base",
        action="store_true",
        help="Whether to build base-deps & ray-deps")
    parser.add_argument("--no-build-base", dest="base", action="store_false")
    parser.set_defaults(base=True)
    parser.add_argument(
        "--only-build-worker-container",
        dest="only_build_worker_container",
        action="store_true",
        help="Whether only to build ray-worker-container")
    parser.set_defaults(only_build_worker_container=False)

    args = parser.parse_args()
    py_versions = args.py_versions
    py_versions = py_versions if isinstance(py_versions,
                                            list) else [py_versions]
    # Drop the Python versions that were not requested from the build matrix.
    for key in set(PY_MATRIX.keys()):
        if key[1:].upper() not in py_versions:
            PY_MATRIX.pop(key)
    assert len(PY_MATRIX) == len(
        py_versions
    ), f"Length of PY_MATRIX != args {PY_MATRIX} : {args.py_versions}"

    print("Building the following python versions: ", PY_MATRIX)
    print("Building base images: ", args.base)

    build_type = args.build_type
    is_buildkite = build_type == BUILDKITE

    if build_type == BUILDKITE:
        if os.environ.get("BUILDKITE_PULL_REQUEST", "") == "false":
            build_type = MERGE
        else:
            build_type = PR

    if build_type == HUMAN:
        _configure_human_version()
    if (build_type in {HUMAN, MERGE} or is_buildkite
            or _check_if_docker_files_modified()):
        DOCKER_CLIENT = docker.from_env()
        is_merge = build_type == MERGE
        # Buildkite is authenticated in the background.
        if is_merge and not is_buildkite:
            # We do this here because we want to be authenticated for
            # Docker pulls as well as pushes (to avoid rate-limits).
            username, password = _get_docker_creds()
            DOCKER_CLIENT.api.login(username=username, password=password)
        copy_wheels(build_type == HUMAN)
        base_images_built = build_or_pull_base_images(args.base)
        if args.only_build_worker_container:
            build_ray_worker_container()
            # TODO Currently don't push ray_worker_container
        else:
            build_ray()
            build_ray_ml()
            # FIX: this push block used to appear twice in a row, pushing and
            # re-tagging every image twice per run; it now runs once.
            if build_type in {MERGE, PR}:
                valid_branch = _valid_branch()
                if (not valid_branch) and is_merge:
                    print(f"Invalid Branch found: {_get_branch()}")
                push_and_tag_images(base_images_built, valid_branch
                                    and is_merge)

        # TODO(ilr) Re-Enable Push READMEs by using a normal password
        # (not auth token :/)
        # push_readmes(build_type is MERGE)
apache-2.0
wolfier/incubator-airflow
airflow/migrations/versions/13eb55f81627_for_compatibility.py
9
1162
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """maintain history for compatibility with earlier migrations Revision ID: 13eb55f81627 Revises: 1507a7289a2f Create Date: 2015-08-23 05:12:49.732174 """ # revision identifiers, used by Alembic. revision = '13eb55f81627' down_revision = '1507a7289a2f' branch_labels = None depends_on = None def upgrade(): pass def downgrade(): pass
apache-2.0
2014cdbg7/2014cdbg7
wsgi/static/Brython2.1.0-20140419-113919/Lib/xml/dom/minidom.py
727
66854
"""Simple implementation of the Level 1 DOM. Namespaces and other minor Level 2 features are also supported. parse("foo.xml") parseString("<foo><bar/></foo>") Todo: ===== * convenience methods for getting elements and text. * more testing * bring some of the writer and linearizer code into conformance with this interface * SAX 2 namespaces """ import io import xml.dom from xml.dom import EMPTY_NAMESPACE, EMPTY_PREFIX, XMLNS_NAMESPACE, domreg from xml.dom.minicompat import * from xml.dom.xmlbuilder import DOMImplementationLS, DocumentLS # This is used by the ID-cache invalidation checks; the list isn't # actually complete, since the nodes being checked will never be the # DOCUMENT_NODE or DOCUMENT_FRAGMENT_NODE. (The node being checked is # the node being added or removed, not the node being modified.) # _nodeTypes_with_children = (xml.dom.Node.ELEMENT_NODE, xml.dom.Node.ENTITY_REFERENCE_NODE) class Node(xml.dom.Node): namespaceURI = None # this is non-null only for elements and attributes parentNode = None ownerDocument = None nextSibling = None previousSibling = None prefix = EMPTY_PREFIX # non-null only for NS elements and attributes def __bool__(self): return True def toxml(self, encoding=None): return self.toprettyxml("", "", encoding) def toprettyxml(self, indent="\t", newl="\n", encoding=None): if encoding is None: writer = io.StringIO() else: writer = io.TextIOWrapper(io.BytesIO(), encoding=encoding, errors="xmlcharrefreplace", newline='\n') if self.nodeType == Node.DOCUMENT_NODE: # Can pass encoding only to document, to put it into XML header self.writexml(writer, "", indent, newl, encoding) else: self.writexml(writer, "", indent, newl) if encoding is None: return writer.getvalue() else: return writer.detach().getvalue() def hasChildNodes(self): return bool(self.childNodes) def _get_childNodes(self): return self.childNodes def _get_firstChild(self): if self.childNodes: return self.childNodes[0] def _get_lastChild(self): if self.childNodes: return 
self.childNodes[-1] def insertBefore(self, newChild, refChild): if newChild.nodeType == self.DOCUMENT_FRAGMENT_NODE: for c in tuple(newChild.childNodes): self.insertBefore(c, refChild) ### The DOM does not clearly specify what to return in this case return newChild if newChild.nodeType not in self._child_node_types: raise xml.dom.HierarchyRequestErr( "%s cannot be child of %s" % (repr(newChild), repr(self))) if newChild.parentNode is not None: newChild.parentNode.removeChild(newChild) if refChild is None: self.appendChild(newChild) else: try: index = self.childNodes.index(refChild) except ValueError: raise xml.dom.NotFoundErr() if newChild.nodeType in _nodeTypes_with_children: _clear_id_cache(self) self.childNodes.insert(index, newChild) newChild.nextSibling = refChild refChild.previousSibling = newChild if index: node = self.childNodes[index-1] node.nextSibling = newChild newChild.previousSibling = node else: newChild.previousSibling = None newChild.parentNode = self return newChild def appendChild(self, node): if node.nodeType == self.DOCUMENT_FRAGMENT_NODE: for c in tuple(node.childNodes): self.appendChild(c) ### The DOM does not clearly specify what to return in this case return node if node.nodeType not in self._child_node_types: raise xml.dom.HierarchyRequestErr( "%s cannot be child of %s" % (repr(node), repr(self))) elif node.nodeType in _nodeTypes_with_children: _clear_id_cache(self) if node.parentNode is not None: node.parentNode.removeChild(node) _append_child(self, node) node.nextSibling = None return node def replaceChild(self, newChild, oldChild): if newChild.nodeType == self.DOCUMENT_FRAGMENT_NODE: refChild = oldChild.nextSibling self.removeChild(oldChild) return self.insertBefore(newChild, refChild) if newChild.nodeType not in self._child_node_types: raise xml.dom.HierarchyRequestErr( "%s cannot be child of %s" % (repr(newChild), repr(self))) if newChild is oldChild: return if newChild.parentNode is not None: newChild.parentNode.removeChild(newChild) 
try: index = self.childNodes.index(oldChild) except ValueError: raise xml.dom.NotFoundErr() self.childNodes[index] = newChild newChild.parentNode = self oldChild.parentNode = None if (newChild.nodeType in _nodeTypes_with_children or oldChild.nodeType in _nodeTypes_with_children): _clear_id_cache(self) newChild.nextSibling = oldChild.nextSibling newChild.previousSibling = oldChild.previousSibling oldChild.nextSibling = None oldChild.previousSibling = None if newChild.previousSibling: newChild.previousSibling.nextSibling = newChild if newChild.nextSibling: newChild.nextSibling.previousSibling = newChild return oldChild def removeChild(self, oldChild): try: self.childNodes.remove(oldChild) except ValueError: raise xml.dom.NotFoundErr() if oldChild.nextSibling is not None: oldChild.nextSibling.previousSibling = oldChild.previousSibling if oldChild.previousSibling is not None: oldChild.previousSibling.nextSibling = oldChild.nextSibling oldChild.nextSibling = oldChild.previousSibling = None if oldChild.nodeType in _nodeTypes_with_children: _clear_id_cache(self) oldChild.parentNode = None return oldChild def normalize(self): L = [] for child in self.childNodes: if child.nodeType == Node.TEXT_NODE: if not child.data: # empty text node; discard if L: L[-1].nextSibling = child.nextSibling if child.nextSibling: child.nextSibling.previousSibling = child.previousSibling child.unlink() elif L and L[-1].nodeType == child.nodeType: # collapse text node node = L[-1] node.data = node.data + child.data node.nextSibling = child.nextSibling if child.nextSibling: child.nextSibling.previousSibling = node child.unlink() else: L.append(child) else: L.append(child) if child.nodeType == Node.ELEMENT_NODE: child.normalize() self.childNodes[:] = L def cloneNode(self, deep): return _clone_node(self, deep, self.ownerDocument or self) def isSupported(self, feature, version): return self.ownerDocument.implementation.hasFeature(feature, version) def _get_localName(self): # Overridden in Element and 
Attr where localName can be Non-Null return None # Node interfaces from Level 3 (WD 9 April 2002) def isSameNode(self, other): return self is other def getInterface(self, feature): if self.isSupported(feature, None): return self else: return None # The "user data" functions use a dictionary that is only present # if some user data has been set, so be careful not to assume it # exists. def getUserData(self, key): try: return self._user_data[key][0] except (AttributeError, KeyError): return None def setUserData(self, key, data, handler): old = None try: d = self._user_data except AttributeError: d = {} self._user_data = d if key in d: old = d[key][0] if data is None: # ignore handlers passed for None handler = None if old is not None: del d[key] else: d[key] = (data, handler) return old def _call_user_data_handler(self, operation, src, dst): if hasattr(self, "_user_data"): for key, (data, handler) in list(self._user_data.items()): if handler is not None: handler.handle(operation, key, data, src, dst) # minidom-specific API: def unlink(self): self.parentNode = self.ownerDocument = None if self.childNodes: for child in self.childNodes: child.unlink() self.childNodes = NodeList() self.previousSibling = None self.nextSibling = None # A Node is its own context manager, to ensure that an unlink() call occurs. # This is similar to how a file object works. 
def __enter__(self): return self def __exit__(self, et, ev, tb): self.unlink() defproperty(Node, "firstChild", doc="First child node, or None.") defproperty(Node, "lastChild", doc="Last child node, or None.") defproperty(Node, "localName", doc="Namespace-local name of this node.") def _append_child(self, node): # fast path with less checks; usable by DOM builders if careful childNodes = self.childNodes if childNodes: last = childNodes[-1] node.previousSibling = last last.nextSibling = node childNodes.append(node) node.parentNode = self def _in_document(node): # return True iff node is part of a document tree while node is not None: if node.nodeType == Node.DOCUMENT_NODE: return True node = node.parentNode return False def _write_data(writer, data): "Writes datachars to writer." if data: data = data.replace("&", "&amp;").replace("<", "&lt;"). \ replace("\"", "&quot;").replace(">", "&gt;") writer.write(data) def _get_elements_by_tagName_helper(parent, name, rc): for node in parent.childNodes: if node.nodeType == Node.ELEMENT_NODE and \ (name == "*" or node.tagName == name): rc.append(node) _get_elements_by_tagName_helper(node, name, rc) return rc def _get_elements_by_tagName_ns_helper(parent, nsURI, localName, rc): for node in parent.childNodes: if node.nodeType == Node.ELEMENT_NODE: if ((localName == "*" or node.localName == localName) and (nsURI == "*" or node.namespaceURI == nsURI)): rc.append(node) _get_elements_by_tagName_ns_helper(node, nsURI, localName, rc) return rc class DocumentFragment(Node): nodeType = Node.DOCUMENT_FRAGMENT_NODE nodeName = "#document-fragment" nodeValue = None attributes = None parentNode = None _child_node_types = (Node.ELEMENT_NODE, Node.TEXT_NODE, Node.CDATA_SECTION_NODE, Node.ENTITY_REFERENCE_NODE, Node.PROCESSING_INSTRUCTION_NODE, Node.COMMENT_NODE, Node.NOTATION_NODE) def __init__(self): self.childNodes = NodeList() class Attr(Node): __slots__=('_name', '_value', 'namespaceURI', '_prefix', 'childNodes', '_localName', 
'ownerDocument', 'ownerElement') nodeType = Node.ATTRIBUTE_NODE attributes = None specified = False _is_id = False _child_node_types = (Node.TEXT_NODE, Node.ENTITY_REFERENCE_NODE) def __init__(self, qName, namespaceURI=EMPTY_NAMESPACE, localName=None, prefix=None): self.ownerElement = None self._name = qName self.namespaceURI = namespaceURI self._prefix = prefix self.childNodes = NodeList() # Add the single child node that represents the value of the attr self.childNodes.append(Text()) # nodeValue and value are set elsewhere def _get_localName(self): try: return self._localName except AttributeError: return self.nodeName.split(":", 1)[-1] def _get_name(self): return self.name def _get_specified(self): return self.specified def _get_name(self): return self._name def _set_name(self, value): self._name = value if self.ownerElement is not None: _clear_id_cache(self.ownerElement) nodeName = name = property(_get_name, _set_name) def _get_value(self): return self._value def _set_value(self, value): self._value = value self.childNodes[0].data = value if self.ownerElement is not None: _clear_id_cache(self.ownerElement) self.childNodes[0].data = value nodeValue = value = property(_get_value, _set_value) def _get_prefix(self): return self._prefix def _set_prefix(self, prefix): nsuri = self.namespaceURI if prefix == "xmlns": if nsuri and nsuri != XMLNS_NAMESPACE: raise xml.dom.NamespaceErr( "illegal use of 'xmlns' prefix for the wrong namespace") self._prefix = prefix if prefix is None: newName = self.localName else: newName = "%s:%s" % (prefix, self.localName) if self.ownerElement: _clear_id_cache(self.ownerElement) self.name = newName prefix = property(_get_prefix, _set_prefix) def unlink(self): # This implementation does not call the base implementation # since most of that is not needed, and the expense of the # method call is not warranted. We duplicate the removal of # children, but that's all we needed from the base class. 
elem = self.ownerElement if elem is not None: del elem._attrs[self.nodeName] del elem._attrsNS[(self.namespaceURI, self.localName)] if self._is_id: self._is_id = False elem._magic_id_nodes -= 1 self.ownerDocument._magic_id_count -= 1 for child in self.childNodes: child.unlink() del self.childNodes[:] def _get_isId(self): if self._is_id: return True doc = self.ownerDocument elem = self.ownerElement if doc is None or elem is None: return False info = doc._get_elem_info(elem) if info is None: return False if self.namespaceURI: return info.isIdNS(self.namespaceURI, self.localName) else: return info.isId(self.nodeName) def _get_schemaType(self): doc = self.ownerDocument elem = self.ownerElement if doc is None or elem is None: return _no_type info = doc._get_elem_info(elem) if info is None: return _no_type if self.namespaceURI: return info.getAttributeTypeNS(self.namespaceURI, self.localName) else: return info.getAttributeType(self.nodeName) defproperty(Attr, "isId", doc="True if this attribute is an ID.") defproperty(Attr, "localName", doc="Namespace-local name of this attribute.") defproperty(Attr, "schemaType", doc="Schema type for this attribute.") class NamedNodeMap(object): """The attribute list is a transient interface to the underlying dictionaries. Mutations here will change the underlying element's dictionary. Ordering is imposed artificially and does not reflect the order of attributes as found in an input document. 
""" __slots__ = ('_attrs', '_attrsNS', '_ownerElement') def __init__(self, attrs, attrsNS, ownerElement): self._attrs = attrs self._attrsNS = attrsNS self._ownerElement = ownerElement def _get_length(self): return len(self._attrs) def item(self, index): try: return self[list(self._attrs.keys())[index]] except IndexError: return None def items(self): L = [] for node in self._attrs.values(): L.append((node.nodeName, node.value)) return L def itemsNS(self): L = [] for node in self._attrs.values(): L.append(((node.namespaceURI, node.localName), node.value)) return L def __contains__(self, key): if isinstance(key, str): return key in self._attrs else: return key in self._attrsNS def keys(self): return self._attrs.keys() def keysNS(self): return self._attrsNS.keys() def values(self): return self._attrs.values() def get(self, name, value=None): return self._attrs.get(name, value) __len__ = _get_length def _cmp(self, other): if self._attrs is getattr(other, "_attrs", None): return 0 else: return (id(self) > id(other)) - (id(self) < id(other)) def __eq__(self, other): return self._cmp(other) == 0 def __ge__(self, other): return self._cmp(other) >= 0 def __gt__(self, other): return self._cmp(other) > 0 def __le__(self, other): return self._cmp(other) <= 0 def __lt__(self, other): return self._cmp(other) < 0 def __ne__(self, other): return self._cmp(other) != 0 def __getitem__(self, attname_or_tuple): if isinstance(attname_or_tuple, tuple): return self._attrsNS[attname_or_tuple] else: return self._attrs[attname_or_tuple] # same as set def __setitem__(self, attname, value): if isinstance(value, str): try: node = self._attrs[attname] except KeyError: node = Attr(attname) node.ownerDocument = self._ownerElement.ownerDocument self.setNamedItem(node) node.value = value else: if not isinstance(value, Attr): raise TypeError("value must be a string or Attr object") node = value self.setNamedItem(node) def getNamedItem(self, name): try: return self._attrs[name] except KeyError: return 
None def getNamedItemNS(self, namespaceURI, localName): try: return self._attrsNS[(namespaceURI, localName)] except KeyError: return None def removeNamedItem(self, name): n = self.getNamedItem(name) if n is not None: _clear_id_cache(self._ownerElement) del self._attrs[n.nodeName] del self._attrsNS[(n.namespaceURI, n.localName)] if hasattr(n, 'ownerElement'): n.ownerElement = None return n else: raise xml.dom.NotFoundErr() def removeNamedItemNS(self, namespaceURI, localName): n = self.getNamedItemNS(namespaceURI, localName) if n is not None: _clear_id_cache(self._ownerElement) del self._attrsNS[(n.namespaceURI, n.localName)] del self._attrs[n.nodeName] if hasattr(n, 'ownerElement'): n.ownerElement = None return n else: raise xml.dom.NotFoundErr() def setNamedItem(self, node): if not isinstance(node, Attr): raise xml.dom.HierarchyRequestErr( "%s cannot be child of %s" % (repr(node), repr(self))) old = self._attrs.get(node.name) if old: old.unlink() self._attrs[node.name] = node self._attrsNS[(node.namespaceURI, node.localName)] = node node.ownerElement = self._ownerElement _clear_id_cache(node.ownerElement) return old def setNamedItemNS(self, node): return self.setNamedItem(node) def __delitem__(self, attname_or_tuple): node = self[attname_or_tuple] _clear_id_cache(node.ownerElement) node.unlink() def __getstate__(self): return self._attrs, self._attrsNS, self._ownerElement def __setstate__(self, state): self._attrs, self._attrsNS, self._ownerElement = state defproperty(NamedNodeMap, "length", doc="Number of nodes in the NamedNodeMap.") AttributeList = NamedNodeMap class TypeInfo(object): __slots__ = 'namespace', 'name' def __init__(self, namespace, name): self.namespace = namespace self.name = name def __repr__(self): if self.namespace: return "<TypeInfo %r (from %r)>" % (self.name, self.namespace) else: return "<TypeInfo %r>" % self.name def _get_name(self): return self.name def _get_namespace(self): return self.namespace _no_type = TypeInfo(None, None) class 
Element(Node): __slots__=('ownerDocument', 'parentNode', 'tagName', 'nodeName', 'prefix', 'namespaceURI', '_localName', 'childNodes', '_attrs', '_attrsNS', 'nextSibling', 'previousSibling') nodeType = Node.ELEMENT_NODE nodeValue = None schemaType = _no_type _magic_id_nodes = 0 _child_node_types = (Node.ELEMENT_NODE, Node.PROCESSING_INSTRUCTION_NODE, Node.COMMENT_NODE, Node.TEXT_NODE, Node.CDATA_SECTION_NODE, Node.ENTITY_REFERENCE_NODE) def __init__(self, tagName, namespaceURI=EMPTY_NAMESPACE, prefix=None, localName=None): self.parentNode = None self.tagName = self.nodeName = tagName self.prefix = prefix self.namespaceURI = namespaceURI self.childNodes = NodeList() self.nextSibling = self.previousSibling = None # Attribute dictionaries are lazily created # attributes are double-indexed: # tagName -> Attribute # URI,localName -> Attribute # in the future: consider lazy generation # of attribute objects this is too tricky # for now because of headaches with # namespaces. self._attrs = None self._attrsNS = None def _ensure_attributes(self): if self._attrs is None: self._attrs = {} self._attrsNS = {} def _get_localName(self): try: return self._localName except AttributeError: return self.tagName.split(":", 1)[-1] def _get_tagName(self): return self.tagName def unlink(self): if self._attrs is not None: for attr in list(self._attrs.values()): attr.unlink() self._attrs = None self._attrsNS = None Node.unlink(self) def getAttribute(self, attname): if self._attrs is None: return "" try: return self._attrs[attname].value except KeyError: return "" def getAttributeNS(self, namespaceURI, localName): if self._attrsNS is None: return "" try: return self._attrsNS[(namespaceURI, localName)].value except KeyError: return "" def setAttribute(self, attname, value): attr = self.getAttributeNode(attname) if attr is None: attr = Attr(attname) attr.value = value # also sets nodeValue attr.ownerDocument = self.ownerDocument self.setAttributeNode(attr) elif value != attr.value: attr.value = 
value if attr.isId: _clear_id_cache(self) def setAttributeNS(self, namespaceURI, qualifiedName, value): prefix, localname = _nssplit(qualifiedName) attr = self.getAttributeNodeNS(namespaceURI, localname) if attr is None: attr = Attr(qualifiedName, namespaceURI, localname, prefix) attr.value = value attr.ownerDocument = self.ownerDocument self.setAttributeNode(attr) else: if value != attr.value: attr.value = value if attr.isId: _clear_id_cache(self) if attr.prefix != prefix: attr.prefix = prefix attr.nodeName = qualifiedName def getAttributeNode(self, attrname): if self._attrs is None: return None return self._attrs.get(attrname) def getAttributeNodeNS(self, namespaceURI, localName): if self._attrsNS is None: return None return self._attrsNS.get((namespaceURI, localName)) def setAttributeNode(self, attr): if attr.ownerElement not in (None, self): raise xml.dom.InuseAttributeErr("attribute node already owned") self._ensure_attributes() old1 = self._attrs.get(attr.name, None) if old1 is not None: self.removeAttributeNode(old1) old2 = self._attrsNS.get((attr.namespaceURI, attr.localName), None) if old2 is not None and old2 is not old1: self.removeAttributeNode(old2) _set_attribute_node(self, attr) if old1 is not attr: # It might have already been part of this node, in which case # it doesn't represent a change, and should not be returned. 
return old1 if old2 is not attr: return old2 setAttributeNodeNS = setAttributeNode def removeAttribute(self, name): if self._attrsNS is None: raise xml.dom.NotFoundErr() try: attr = self._attrs[name] except KeyError: raise xml.dom.NotFoundErr() self.removeAttributeNode(attr) def removeAttributeNS(self, namespaceURI, localName): if self._attrsNS is None: raise xml.dom.NotFoundErr() try: attr = self._attrsNS[(namespaceURI, localName)] except KeyError: raise xml.dom.NotFoundErr() self.removeAttributeNode(attr) def removeAttributeNode(self, node): if node is None: raise xml.dom.NotFoundErr() try: self._attrs[node.name] except KeyError: raise xml.dom.NotFoundErr() _clear_id_cache(self) node.unlink() # Restore this since the node is still useful and otherwise # unlinked node.ownerDocument = self.ownerDocument removeAttributeNodeNS = removeAttributeNode def hasAttribute(self, name): if self._attrs is None: return False return name in self._attrs def hasAttributeNS(self, namespaceURI, localName): if self._attrsNS is None: return False return (namespaceURI, localName) in self._attrsNS def getElementsByTagName(self, name): return _get_elements_by_tagName_helper(self, name, NodeList()) def getElementsByTagNameNS(self, namespaceURI, localName): return _get_elements_by_tagName_ns_helper( self, namespaceURI, localName, NodeList()) def __repr__(self): return "<DOM Element: %s at %#x>" % (self.tagName, id(self)) def writexml(self, writer, indent="", addindent="", newl=""): # indent = current indentation # addindent = indentation to add to higher levels # newl = newline string writer.write(indent+"<" + self.tagName) attrs = self._get_attributes() a_names = sorted(attrs.keys()) for a_name in a_names: writer.write(" %s=\"" % a_name) _write_data(writer, attrs[a_name].value) writer.write("\"") if self.childNodes: writer.write(">") if (len(self.childNodes) == 1 and self.childNodes[0].nodeType == Node.TEXT_NODE): self.childNodes[0].writexml(writer, '', '', '') else: writer.write(newl) 
for node in self.childNodes: node.writexml(writer, indent+addindent, addindent, newl) writer.write(indent) writer.write("</%s>%s" % (self.tagName, newl)) else: writer.write("/>%s"%(newl)) def _get_attributes(self): self._ensure_attributes() return NamedNodeMap(self._attrs, self._attrsNS, self) def hasAttributes(self): if self._attrs: return True else: return False # DOM Level 3 attributes, based on the 22 Oct 2002 draft def setIdAttribute(self, name): idAttr = self.getAttributeNode(name) self.setIdAttributeNode(idAttr) def setIdAttributeNS(self, namespaceURI, localName): idAttr = self.getAttributeNodeNS(namespaceURI, localName) self.setIdAttributeNode(idAttr) def setIdAttributeNode(self, idAttr): if idAttr is None or not self.isSameNode(idAttr.ownerElement): raise xml.dom.NotFoundErr() if _get_containing_entref(self) is not None: raise xml.dom.NoModificationAllowedErr() if not idAttr._is_id: idAttr._is_id = True self._magic_id_nodes += 1 self.ownerDocument._magic_id_count += 1 _clear_id_cache(self) defproperty(Element, "attributes", doc="NamedNodeMap of attributes on the element.") defproperty(Element, "localName", doc="Namespace-local name of this element.") def _set_attribute_node(element, attr): _clear_id_cache(element) element._ensure_attributes() element._attrs[attr.name] = attr element._attrsNS[(attr.namespaceURI, attr.localName)] = attr # This creates a circular reference, but Element.unlink() # breaks the cycle since the references to the attribute # dictionaries are tossed. attr.ownerElement = element class Childless: """Mixin that makes childless-ness easy to implement and avoids the complexity of the Node methods that deal with children. 
""" __slots__ = () attributes = None childNodes = EmptyNodeList() firstChild = None lastChild = None def _get_firstChild(self): return None def _get_lastChild(self): return None def appendChild(self, node): raise xml.dom.HierarchyRequestErr( self.nodeName + " nodes cannot have children") def hasChildNodes(self): return False def insertBefore(self, newChild, refChild): raise xml.dom.HierarchyRequestErr( self.nodeName + " nodes do not have children") def removeChild(self, oldChild): raise xml.dom.NotFoundErr( self.nodeName + " nodes do not have children") def normalize(self): # For childless nodes, normalize() has nothing to do. pass def replaceChild(self, newChild, oldChild): raise xml.dom.HierarchyRequestErr( self.nodeName + " nodes do not have children") class ProcessingInstruction(Childless, Node): nodeType = Node.PROCESSING_INSTRUCTION_NODE __slots__ = ('target', 'data') def __init__(self, target, data): self.target = target self.data = data # nodeValue is an alias for data def _get_nodeValue(self): return self.data def _set_nodeValue(self, value): self.data = data nodeValue = property(_get_nodeValue, _set_nodeValue) # nodeName is an alias for target def _get_nodeName(self): return self.target def _set_nodeName(self, value): self.target = value nodeName = property(_get_nodeName, _set_nodeName) def writexml(self, writer, indent="", addindent="", newl=""): writer.write("%s<?%s %s?>%s" % (indent,self.target, self.data, newl)) class CharacterData(Childless, Node): __slots__=('_data', 'ownerDocument','parentNode', 'previousSibling', 'nextSibling') def __init__(self): self.ownerDocument = self.parentNode = None self.previousSibling = self.nextSibling = None self._data = '' Node.__init__(self) def _get_length(self): return len(self.data) __len__ = _get_length def _get_data(self): return self._data def _set_data(self, data): self._data = data data = nodeValue = property(_get_data, _set_data) def __repr__(self): data = self.data if len(data) > 10: dotdotdot = "..." 
else: dotdotdot = "" return '<DOM %s node "%r%s">' % ( self.__class__.__name__, data[0:10], dotdotdot) def substringData(self, offset, count): if offset < 0: raise xml.dom.IndexSizeErr("offset cannot be negative") if offset >= len(self.data): raise xml.dom.IndexSizeErr("offset cannot be beyond end of data") if count < 0: raise xml.dom.IndexSizeErr("count cannot be negative") return self.data[offset:offset+count] def appendData(self, arg): self.data = self.data + arg def insertData(self, offset, arg): if offset < 0: raise xml.dom.IndexSizeErr("offset cannot be negative") if offset >= len(self.data): raise xml.dom.IndexSizeErr("offset cannot be beyond end of data") if arg: self.data = "%s%s%s" % ( self.data[:offset], arg, self.data[offset:]) def deleteData(self, offset, count): if offset < 0: raise xml.dom.IndexSizeErr("offset cannot be negative") if offset >= len(self.data): raise xml.dom.IndexSizeErr("offset cannot be beyond end of data") if count < 0: raise xml.dom.IndexSizeErr("count cannot be negative") if count: self.data = self.data[:offset] + self.data[offset+count:] def replaceData(self, offset, count, arg): if offset < 0: raise xml.dom.IndexSizeErr("offset cannot be negative") if offset >= len(self.data): raise xml.dom.IndexSizeErr("offset cannot be beyond end of data") if count < 0: raise xml.dom.IndexSizeErr("count cannot be negative") if count: self.data = "%s%s%s" % ( self.data[:offset], arg, self.data[offset+count:]) defproperty(CharacterData, "length", doc="Length of the string data.") class Text(CharacterData): __slots__ = () nodeType = Node.TEXT_NODE nodeName = "#text" attributes = None def splitText(self, offset): if offset < 0 or offset > len(self.data): raise xml.dom.IndexSizeErr("illegal offset value") newText = self.__class__() newText.data = self.data[offset:] newText.ownerDocument = self.ownerDocument next = self.nextSibling if self.parentNode and self in self.parentNode.childNodes: if next is None: self.parentNode.appendChild(newText) else: 
self.parentNode.insertBefore(newText, next) self.data = self.data[:offset] return newText def writexml(self, writer, indent="", addindent="", newl=""): _write_data(writer, "%s%s%s" % (indent, self.data, newl)) # DOM Level 3 (WD 9 April 2002) def _get_wholeText(self): L = [self.data] n = self.previousSibling while n is not None: if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE): L.insert(0, n.data) n = n.previousSibling else: break n = self.nextSibling while n is not None: if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE): L.append(n.data) n = n.nextSibling else: break return ''.join(L) def replaceWholeText(self, content): # XXX This needs to be seriously changed if minidom ever # supports EntityReference nodes. parent = self.parentNode n = self.previousSibling while n is not None: if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE): next = n.previousSibling parent.removeChild(n) n = next else: break n = self.nextSibling if not content: parent.removeChild(self) while n is not None: if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE): next = n.nextSibling parent.removeChild(n) n = next else: break if content: self.data = content return self else: return None def _get_isWhitespaceInElementContent(self): if self.data.strip(): return False elem = _get_containing_element(self) if elem is None: return False info = self.ownerDocument._get_elem_info(elem) if info is None: return False else: return info.isElementContent() defproperty(Text, "isWhitespaceInElementContent", doc="True iff this text node contains only whitespace" " and is in element content.") defproperty(Text, "wholeText", doc="The text of all logically-adjacent text nodes.") def _get_containing_element(node): c = node.parentNode while c is not None: if c.nodeType == Node.ELEMENT_NODE: return c c = c.parentNode return None def _get_containing_entref(node): c = node.parentNode while c is not None: if c.nodeType == Node.ENTITY_REFERENCE_NODE: return c c = c.parentNode return None 
class Comment(CharacterData): nodeType = Node.COMMENT_NODE nodeName = "#comment" def __init__(self, data): CharacterData.__init__(self) self._data = data def writexml(self, writer, indent="", addindent="", newl=""): if "--" in self.data: raise ValueError("'--' is not allowed in a comment node") writer.write("%s<!--%s-->%s" % (indent, self.data, newl)) class CDATASection(Text): __slots__ = () nodeType = Node.CDATA_SECTION_NODE nodeName = "#cdata-section" def writexml(self, writer, indent="", addindent="", newl=""): if self.data.find("]]>") >= 0: raise ValueError("']]>' not allowed in a CDATA section") writer.write("<![CDATA[%s]]>" % self.data) class ReadOnlySequentialNamedNodeMap(object): __slots__ = '_seq', def __init__(self, seq=()): # seq should be a list or tuple self._seq = seq def __len__(self): return len(self._seq) def _get_length(self): return len(self._seq) def getNamedItem(self, name): for n in self._seq: if n.nodeName == name: return n def getNamedItemNS(self, namespaceURI, localName): for n in self._seq: if n.namespaceURI == namespaceURI and n.localName == localName: return n def __getitem__(self, name_or_tuple): if isinstance(name_or_tuple, tuple): node = self.getNamedItemNS(*name_or_tuple) else: node = self.getNamedItem(name_or_tuple) if node is None: raise KeyError(name_or_tuple) return node def item(self, index): if index < 0: return None try: return self._seq[index] except IndexError: return None def removeNamedItem(self, name): raise xml.dom.NoModificationAllowedErr( "NamedNodeMap instance is read-only") def removeNamedItemNS(self, namespaceURI, localName): raise xml.dom.NoModificationAllowedErr( "NamedNodeMap instance is read-only") def setNamedItem(self, node): raise xml.dom.NoModificationAllowedErr( "NamedNodeMap instance is read-only") def setNamedItemNS(self, node): raise xml.dom.NoModificationAllowedErr( "NamedNodeMap instance is read-only") def __getstate__(self): return [self._seq] def __setstate__(self, state): self._seq = state[0] 
defproperty(ReadOnlySequentialNamedNodeMap, "length", doc="Number of entries in the NamedNodeMap.") class Identified: """Mix-in class that supports the publicId and systemId attributes.""" __slots__ = 'publicId', 'systemId' def _identified_mixin_init(self, publicId, systemId): self.publicId = publicId self.systemId = systemId def _get_publicId(self): return self.publicId def _get_systemId(self): return self.systemId class DocumentType(Identified, Childless, Node): nodeType = Node.DOCUMENT_TYPE_NODE nodeValue = None name = None publicId = None systemId = None internalSubset = None def __init__(self, qualifiedName): self.entities = ReadOnlySequentialNamedNodeMap() self.notations = ReadOnlySequentialNamedNodeMap() if qualifiedName: prefix, localname = _nssplit(qualifiedName) self.name = localname self.nodeName = self.name def _get_internalSubset(self): return self.internalSubset def cloneNode(self, deep): if self.ownerDocument is None: # it's ok clone = DocumentType(None) clone.name = self.name clone.nodeName = self.name operation = xml.dom.UserDataHandler.NODE_CLONED if deep: clone.entities._seq = [] clone.notations._seq = [] for n in self.notations._seq: notation = Notation(n.nodeName, n.publicId, n.systemId) clone.notations._seq.append(notation) n._call_user_data_handler(operation, n, notation) for e in self.entities._seq: entity = Entity(e.nodeName, e.publicId, e.systemId, e.notationName) entity.actualEncoding = e.actualEncoding entity.encoding = e.encoding entity.version = e.version clone.entities._seq.append(entity) e._call_user_data_handler(operation, n, entity) self._call_user_data_handler(operation, self, clone) return clone else: return None def writexml(self, writer, indent="", addindent="", newl=""): writer.write("<!DOCTYPE ") writer.write(self.name) if self.publicId: writer.write("%s PUBLIC '%s'%s '%s'" % (newl, self.publicId, newl, self.systemId)) elif self.systemId: writer.write("%s SYSTEM '%s'" % (newl, self.systemId)) if self.internalSubset is not 
None: writer.write(" [") writer.write(self.internalSubset) writer.write("]") writer.write(">"+newl) class Entity(Identified, Node): attributes = None nodeType = Node.ENTITY_NODE nodeValue = None actualEncoding = None encoding = None version = None def __init__(self, name, publicId, systemId, notation): self.nodeName = name self.notationName = notation self.childNodes = NodeList() self._identified_mixin_init(publicId, systemId) def _get_actualEncoding(self): return self.actualEncoding def _get_encoding(self): return self.encoding def _get_version(self): return self.version def appendChild(self, newChild): raise xml.dom.HierarchyRequestErr( "cannot append children to an entity node") def insertBefore(self, newChild, refChild): raise xml.dom.HierarchyRequestErr( "cannot insert children below an entity node") def removeChild(self, oldChild): raise xml.dom.HierarchyRequestErr( "cannot remove children from an entity node") def replaceChild(self, newChild, oldChild): raise xml.dom.HierarchyRequestErr( "cannot replace children of an entity node") class Notation(Identified, Childless, Node): nodeType = Node.NOTATION_NODE nodeValue = None def __init__(self, name, publicId, systemId): self.nodeName = name self._identified_mixin_init(publicId, systemId) class DOMImplementation(DOMImplementationLS): _features = [("core", "1.0"), ("core", "2.0"), ("core", None), ("xml", "1.0"), ("xml", "2.0"), ("xml", None), ("ls-load", "3.0"), ("ls-load", None), ] def hasFeature(self, feature, version): if version == "": version = None return (feature.lower(), version) in self._features def createDocument(self, namespaceURI, qualifiedName, doctype): if doctype and doctype.parentNode is not None: raise xml.dom.WrongDocumentErr( "doctype object owned by another DOM tree") doc = self._create_document() add_root_element = not (namespaceURI is None and qualifiedName is None and doctype is None) if not qualifiedName and add_root_element: # The spec is unclear what to raise here; SyntaxErr # would be 
the other obvious candidate. Since Xerces raises # InvalidCharacterErr, and since SyntaxErr is not listed # for createDocument, that seems to be the better choice. # XXX: need to check for illegal characters here and in # createElement. # DOM Level III clears this up when talking about the return value # of this function. If namespaceURI, qName and DocType are # Null the document is returned without a document element # Otherwise if doctype or namespaceURI are not None # Then we go back to the above problem raise xml.dom.InvalidCharacterErr("Element with no name") if add_root_element: prefix, localname = _nssplit(qualifiedName) if prefix == "xml" \ and namespaceURI != "http://www.w3.org/XML/1998/namespace": raise xml.dom.NamespaceErr("illegal use of 'xml' prefix") if prefix and not namespaceURI: raise xml.dom.NamespaceErr( "illegal use of prefix without namespaces") element = doc.createElementNS(namespaceURI, qualifiedName) if doctype: doc.appendChild(doctype) doc.appendChild(element) if doctype: doctype.parentNode = doctype.ownerDocument = doc doc.doctype = doctype doc.implementation = self return doc def createDocumentType(self, qualifiedName, publicId, systemId): doctype = DocumentType(qualifiedName) doctype.publicId = publicId doctype.systemId = systemId return doctype # DOM Level 3 (WD 9 April 2002) def getInterface(self, feature): if self.hasFeature(feature, None): return self else: return None # internal def _create_document(self): return Document() class ElementInfo(object): """Object that represents content-model information for an element. This implementation is not expected to be used in practice; DOM builders should provide implementations which do the right thing using information available to it. 
""" __slots__ = 'tagName', def __init__(self, name): self.tagName = name def getAttributeType(self, aname): return _no_type def getAttributeTypeNS(self, namespaceURI, localName): return _no_type def isElementContent(self): return False def isEmpty(self): """Returns true iff this element is declared to have an EMPTY content model.""" return False def isId(self, aname): """Returns true iff the named attribute is a DTD-style ID.""" return False def isIdNS(self, namespaceURI, localName): """Returns true iff the identified attribute is a DTD-style ID.""" return False def __getstate__(self): return self.tagName def __setstate__(self, state): self.tagName = state def _clear_id_cache(node): if node.nodeType == Node.DOCUMENT_NODE: node._id_cache.clear() node._id_search_stack = None elif _in_document(node): node.ownerDocument._id_cache.clear() node.ownerDocument._id_search_stack= None class Document(Node, DocumentLS): __slots__ = ('_elem_info', 'doctype', '_id_search_stack', 'childNodes', '_id_cache') _child_node_types = (Node.ELEMENT_NODE, Node.PROCESSING_INSTRUCTION_NODE, Node.COMMENT_NODE, Node.DOCUMENT_TYPE_NODE) implementation = DOMImplementation() nodeType = Node.DOCUMENT_NODE nodeName = "#document" nodeValue = None attributes = None parentNode = None previousSibling = nextSibling = None # Document attributes from Level 3 (WD 9 April 2002) actualEncoding = None encoding = None standalone = None version = None strictErrorChecking = False errorHandler = None documentURI = None _magic_id_count = 0 def __init__(self): self.doctype = None self.childNodes = NodeList() # mapping of (namespaceURI, localName) -> ElementInfo # and tagName -> ElementInfo self._elem_info = {} self._id_cache = {} self._id_search_stack = None def _get_elem_info(self, element): if element.namespaceURI: key = element.namespaceURI, element.localName else: key = element.tagName return self._elem_info.get(key) def _get_actualEncoding(self): return self.actualEncoding def _get_doctype(self): return 
self.doctype def _get_documentURI(self): return self.documentURI def _get_encoding(self): return self.encoding def _get_errorHandler(self): return self.errorHandler def _get_standalone(self): return self.standalone def _get_strictErrorChecking(self): return self.strictErrorChecking def _get_version(self): return self.version def appendChild(self, node): if node.nodeType not in self._child_node_types: raise xml.dom.HierarchyRequestErr( "%s cannot be child of %s" % (repr(node), repr(self))) if node.parentNode is not None: # This needs to be done before the next test since this # may *be* the document element, in which case it should # end up re-ordered to the end. node.parentNode.removeChild(node) if node.nodeType == Node.ELEMENT_NODE \ and self._get_documentElement(): raise xml.dom.HierarchyRequestErr( "two document elements disallowed") return Node.appendChild(self, node) def removeChild(self, oldChild): try: self.childNodes.remove(oldChild) except ValueError: raise xml.dom.NotFoundErr() oldChild.nextSibling = oldChild.previousSibling = None oldChild.parentNode = None if self.documentElement is oldChild: self.documentElement = None return oldChild def _get_documentElement(self): for node in self.childNodes: if node.nodeType == Node.ELEMENT_NODE: return node def unlink(self): if self.doctype is not None: self.doctype.unlink() self.doctype = None Node.unlink(self) def cloneNode(self, deep): if not deep: return None clone = self.implementation.createDocument(None, None, None) clone.encoding = self.encoding clone.standalone = self.standalone clone.version = self.version for n in self.childNodes: childclone = _clone_node(n, deep, clone) assert childclone.ownerDocument.isSameNode(clone) clone.childNodes.append(childclone) if childclone.nodeType == Node.DOCUMENT_NODE: assert clone.documentElement is None elif childclone.nodeType == Node.DOCUMENT_TYPE_NODE: assert clone.doctype is None clone.doctype = childclone childclone.parentNode = clone 
self._call_user_data_handler(xml.dom.UserDataHandler.NODE_CLONED, self, clone) return clone def createDocumentFragment(self): d = DocumentFragment() d.ownerDocument = self return d def createElement(self, tagName): e = Element(tagName) e.ownerDocument = self return e def createTextNode(self, data): if not isinstance(data, str): raise TypeError("node contents must be a string") t = Text() t.data = data t.ownerDocument = self return t def createCDATASection(self, data): if not isinstance(data, str): raise TypeError("node contents must be a string") c = CDATASection() c.data = data c.ownerDocument = self return c def createComment(self, data): c = Comment(data) c.ownerDocument = self return c def createProcessingInstruction(self, target, data): p = ProcessingInstruction(target, data) p.ownerDocument = self return p def createAttribute(self, qName): a = Attr(qName) a.ownerDocument = self a.value = "" return a def createElementNS(self, namespaceURI, qualifiedName): prefix, localName = _nssplit(qualifiedName) e = Element(qualifiedName, namespaceURI, prefix) e.ownerDocument = self return e def createAttributeNS(self, namespaceURI, qualifiedName): prefix, localName = _nssplit(qualifiedName) a = Attr(qualifiedName, namespaceURI, localName, prefix) a.ownerDocument = self a.value = "" return a # A couple of implementation-specific helpers to create node types # not supported by the W3C DOM specs: def _create_entity(self, name, publicId, systemId, notationName): e = Entity(name, publicId, systemId, notationName) e.ownerDocument = self return e def _create_notation(self, name, publicId, systemId): n = Notation(name, publicId, systemId) n.ownerDocument = self return n def getElementById(self, id): if id in self._id_cache: return self._id_cache[id] if not (self._elem_info or self._magic_id_count): return None stack = self._id_search_stack if stack is None: # we never searched before, or the cache has been cleared stack = [self.documentElement] self._id_search_stack = stack elif 
not stack: # Previous search was completed and cache is still valid; # no matching node. return None result = None while stack: node = stack.pop() # add child elements to stack for continued searching stack.extend([child for child in node.childNodes if child.nodeType in _nodeTypes_with_children]) # check this node info = self._get_elem_info(node) if info: # We have to process all ID attributes before # returning in order to get all the attributes set to # be IDs using Element.setIdAttribute*(). for attr in node.attributes.values(): if attr.namespaceURI: if info.isIdNS(attr.namespaceURI, attr.localName): self._id_cache[attr.value] = node if attr.value == id: result = node elif not node._magic_id_nodes: break elif info.isId(attr.name): self._id_cache[attr.value] = node if attr.value == id: result = node elif not node._magic_id_nodes: break elif attr._is_id: self._id_cache[attr.value] = node if attr.value == id: result = node elif node._magic_id_nodes == 1: break elif node._magic_id_nodes: for attr in node.attributes.values(): if attr._is_id: self._id_cache[attr.value] = node if attr.value == id: result = node if result is not None: break return result def getElementsByTagName(self, name): return _get_elements_by_tagName_helper(self, name, NodeList()) def getElementsByTagNameNS(self, namespaceURI, localName): return _get_elements_by_tagName_ns_helper( self, namespaceURI, localName, NodeList()) def isSupported(self, feature, version): return self.implementation.hasFeature(feature, version) def importNode(self, node, deep): if node.nodeType == Node.DOCUMENT_NODE: raise xml.dom.NotSupportedErr("cannot import document nodes") elif node.nodeType == Node.DOCUMENT_TYPE_NODE: raise xml.dom.NotSupportedErr("cannot import document type nodes") return _clone_node(node, deep, self) def writexml(self, writer, indent="", addindent="", newl="", encoding=None): if encoding is None: writer.write('<?xml version="1.0" ?>'+newl) else: writer.write('<?xml version="1.0" encoding="%s"?>%s' 
% ( encoding, newl)) for node in self.childNodes: node.writexml(writer, indent, addindent, newl) # DOM Level 3 (WD 9 April 2002) def renameNode(self, n, namespaceURI, name): if n.ownerDocument is not self: raise xml.dom.WrongDocumentErr( "cannot rename nodes from other documents;\n" "expected %s,\nfound %s" % (self, n.ownerDocument)) if n.nodeType not in (Node.ELEMENT_NODE, Node.ATTRIBUTE_NODE): raise xml.dom.NotSupportedErr( "renameNode() only applies to element and attribute nodes") if namespaceURI != EMPTY_NAMESPACE: if ':' in name: prefix, localName = name.split(':', 1) if ( prefix == "xmlns" and namespaceURI != xml.dom.XMLNS_NAMESPACE): raise xml.dom.NamespaceErr( "illegal use of 'xmlns' prefix") else: if ( name == "xmlns" and namespaceURI != xml.dom.XMLNS_NAMESPACE and n.nodeType == Node.ATTRIBUTE_NODE): raise xml.dom.NamespaceErr( "illegal use of the 'xmlns' attribute") prefix = None localName = name else: prefix = None localName = None if n.nodeType == Node.ATTRIBUTE_NODE: element = n.ownerElement if element is not None: is_id = n._is_id element.removeAttributeNode(n) else: element = None n.prefix = prefix n._localName = localName n.namespaceURI = namespaceURI n.nodeName = name if n.nodeType == Node.ELEMENT_NODE: n.tagName = name else: # attribute node n.name = name if element is not None: element.setAttributeNode(n) if is_id: element.setIdAttributeNode(n) # It's not clear from a semantic perspective whether we should # call the user data handlers for the NODE_RENAMED event since # we're re-using the existing node. The draft spec has been # interpreted as meaning "no, don't call the handler unless a # new node is created." return n defproperty(Document, "documentElement", doc="Top-level element of this document.") def _clone_node(node, deep, newOwnerDocument): """ Clone a node and give it the new owner document. 
Called by Node.cloneNode and Document.importNode """ if node.ownerDocument.isSameNode(newOwnerDocument): operation = xml.dom.UserDataHandler.NODE_CLONED else: operation = xml.dom.UserDataHandler.NODE_IMPORTED if node.nodeType == Node.ELEMENT_NODE: clone = newOwnerDocument.createElementNS(node.namespaceURI, node.nodeName) for attr in node.attributes.values(): clone.setAttributeNS(attr.namespaceURI, attr.nodeName, attr.value) a = clone.getAttributeNodeNS(attr.namespaceURI, attr.localName) a.specified = attr.specified if deep: for child in node.childNodes: c = _clone_node(child, deep, newOwnerDocument) clone.appendChild(c) elif node.nodeType == Node.DOCUMENT_FRAGMENT_NODE: clone = newOwnerDocument.createDocumentFragment() if deep: for child in node.childNodes: c = _clone_node(child, deep, newOwnerDocument) clone.appendChild(c) elif node.nodeType == Node.TEXT_NODE: clone = newOwnerDocument.createTextNode(node.data) elif node.nodeType == Node.CDATA_SECTION_NODE: clone = newOwnerDocument.createCDATASection(node.data) elif node.nodeType == Node.PROCESSING_INSTRUCTION_NODE: clone = newOwnerDocument.createProcessingInstruction(node.target, node.data) elif node.nodeType == Node.COMMENT_NODE: clone = newOwnerDocument.createComment(node.data) elif node.nodeType == Node.ATTRIBUTE_NODE: clone = newOwnerDocument.createAttributeNS(node.namespaceURI, node.nodeName) clone.specified = True clone.value = node.value elif node.nodeType == Node.DOCUMENT_TYPE_NODE: assert node.ownerDocument is not newOwnerDocument operation = xml.dom.UserDataHandler.NODE_IMPORTED clone = newOwnerDocument.implementation.createDocumentType( node.name, node.publicId, node.systemId) clone.ownerDocument = newOwnerDocument if deep: clone.entities._seq = [] clone.notations._seq = [] for n in node.notations._seq: notation = Notation(n.nodeName, n.publicId, n.systemId) notation.ownerDocument = newOwnerDocument clone.notations._seq.append(notation) if hasattr(n, '_call_user_data_handler'): 
n._call_user_data_handler(operation, n, notation) for e in node.entities._seq: entity = Entity(e.nodeName, e.publicId, e.systemId, e.notationName) entity.actualEncoding = e.actualEncoding entity.encoding = e.encoding entity.version = e.version entity.ownerDocument = newOwnerDocument clone.entities._seq.append(entity) if hasattr(e, '_call_user_data_handler'): e._call_user_data_handler(operation, n, entity) else: # Note the cloning of Document and DocumentType nodes is # implementation specific. minidom handles those cases # directly in the cloneNode() methods. raise xml.dom.NotSupportedErr("Cannot clone node %s" % repr(node)) # Check for _call_user_data_handler() since this could conceivably # used with other DOM implementations (one of the FourThought # DOMs, perhaps?). if hasattr(node, '_call_user_data_handler'): node._call_user_data_handler(operation, node, clone) return clone def _nssplit(qualifiedName): fields = qualifiedName.split(':', 1) if len(fields) == 2: return fields else: return (None, fields[0]) def _do_pulldom_parse(func, args, kwargs): events = func(*args, **kwargs) toktype, rootNode = events.getEvent() events.expandNode(rootNode) events.clear() return rootNode def parse(file, parser=None, bufsize=None): """Parse a file into a DOM by filename or file object.""" if parser is None and not bufsize: from xml.dom import expatbuilder return expatbuilder.parse(file) else: from xml.dom import pulldom return _do_pulldom_parse(pulldom.parse, (file,), {'parser': parser, 'bufsize': bufsize}) def parseString(string, parser=None): """Parse a file into a DOM from a string.""" if parser is None: from xml.dom import expatbuilder return expatbuilder.parseString(string) else: from xml.dom import pulldom return _do_pulldom_parse(pulldom.parseString, (string,), {'parser': parser}) def getDOMImplementation(features=None): if features: if isinstance(features, str): features = domreg._parse_feature_string(features) for f, v in features: if not 
Document.implementation.hasFeature(f, v): return None return Document.implementation
gpl-2.0
Qalthos/ansible
lib/ansible/plugins/terminal/voss.py
79
3455
# # (c) 2018 Extreme Networks Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from __future__ import (absolute_import, division, print_function) __metaclass__ = type import json import re from ansible.errors import AnsibleConnectionFailure from ansible.module_utils._text import to_text, to_bytes from ansible.plugins.terminal import TerminalBase class TerminalModule(TerminalBase): terminal_stdout_re = [ re.compile(br"[\r\n]+[^\s#>]+(?:[>#])$", re.M) ] terminal_stderr_re = [ re.compile(br"% ?Error"), re.compile(br"% ?Bad secret"), re.compile(br"[\r\n%] Bad passwords"), re.compile(br"invalid input", re.I), re.compile(br"(?:incomplete|ambiguous) command", re.I), re.compile(br"connection timed out", re.I), re.compile(br"[^\r\n]+ not found"), re.compile(br"'[^']' +returned error code: ?\d+"), re.compile(br"Discontiguous Subnet Mask"), re.compile(br"Conflicting IP address"), re.compile(br"[\r\n]Error: ?[\S]+"), re.compile(br"[%\S] ?Informational: ?[\s]+", re.I), re.compile(br"Command authorization failed") ] def on_open_shell(self): try: self._exec_cli_command(u'terminal more disable') except AnsibleConnectionFailure: raise AnsibleConnectionFailure('unable to set terminal parameters') def on_become(self, passwd=None): if self._get_prompt().endswith(b'#'): return cmd = {u'command': u'enable'} if passwd: # Note: python-3.5 cannot combine u"" and r"" together. 
Thus make # an r string and use to_text to ensure it's text on both py2 and py3. cmd[u'prompt'] = to_text(r"[\r\n](?:Local_)?[Pp]assword: ?$", errors='surrogate_or_strict') cmd[u'answer'] = passwd cmd[u'prompt_retry_check'] = True try: self._exec_cli_command(to_bytes(json.dumps(cmd), errors='surrogate_or_strict')) prompt = self._get_prompt() if prompt is None or not prompt.endswith(b'#'): raise AnsibleConnectionFailure('failed to elevate privilege to enable mode still at prompt [%s]' % prompt) except AnsibleConnectionFailure as e: prompt = self._get_prompt() raise AnsibleConnectionFailure('unable to elevate privilege to enable mode, at prompt [%s] with error: %s' % (prompt, e.message)) def on_unbecome(self): prompt = self._get_prompt() if prompt is None: # if prompt is None most likely the terminal is hung up at a prompt return if prompt.endswith(b')#'): self._exec_cli_command(b'end') self._exec_cli_command(b'disable') elif prompt.endswith(b'#'): self._exec_cli_command(b'disable')
gpl-3.0
elky/django
django/templatetags/l10n.py
40
1689
from django.template import Library, Node, TemplateSyntaxError from django.utils import formats register = Library() @register.filter(is_safe=False) def localize(value): """ Force a value to be rendered as a localized value, regardless of the value of ``settings.USE_L10N``. """ return str(formats.localize(value, use_l10n=True)) @register.filter(is_safe=False) def unlocalize(value): """ Force a value to be rendered as a non-localized value, regardless of the value of ``settings.USE_L10N``. """ return str(value) class LocalizeNode(Node): def __init__(self, nodelist, use_l10n): self.nodelist = nodelist self.use_l10n = use_l10n def __repr__(self): return '<%s>' % self.__class__.__name__ def render(self, context): old_setting = context.use_l10n context.use_l10n = self.use_l10n output = self.nodelist.render(context) context.use_l10n = old_setting return output @register.tag('localize') def localize_tag(parser, token): """ Force or prevents localization of values, regardless of the value of `settings.USE_L10N`. Sample usage:: {% localize off %} var pi = {{ 3.1415 }}; {% endlocalize %} """ use_l10n = None bits = list(token.split_contents()) if len(bits) == 1: use_l10n = True elif len(bits) > 2 or bits[1] not in ('on', 'off'): raise TemplateSyntaxError("%r argument should be 'on' or 'off'" % bits[0]) else: use_l10n = bits[1] == 'on' nodelist = parser.parse(('endlocalize',)) parser.delete_first_token() return LocalizeNode(nodelist, use_l10n)
bsd-3-clause
ahmetabdi/SickRage
lib/unidecode/x08e.py
252
4659
data = ( 'Chu ', # 0x00 'Jing ', # 0x01 'Nie ', # 0x02 'Xiao ', # 0x03 'Bo ', # 0x04 'Chi ', # 0x05 'Qun ', # 0x06 'Mou ', # 0x07 'Shu ', # 0x08 'Lang ', # 0x09 'Yong ', # 0x0a 'Jiao ', # 0x0b 'Chou ', # 0x0c 'Qiao ', # 0x0d '[?] ', # 0x0e 'Ta ', # 0x0f 'Jian ', # 0x10 'Qi ', # 0x11 'Wo ', # 0x12 'Wei ', # 0x13 'Zhuo ', # 0x14 'Jie ', # 0x15 'Ji ', # 0x16 'Nie ', # 0x17 'Ju ', # 0x18 'Ju ', # 0x19 'Lun ', # 0x1a 'Lu ', # 0x1b 'Leng ', # 0x1c 'Huai ', # 0x1d 'Ju ', # 0x1e 'Chi ', # 0x1f 'Wan ', # 0x20 'Quan ', # 0x21 'Ti ', # 0x22 'Bo ', # 0x23 'Zu ', # 0x24 'Qie ', # 0x25 'Ji ', # 0x26 'Cu ', # 0x27 'Zong ', # 0x28 'Cai ', # 0x29 'Zong ', # 0x2a 'Peng ', # 0x2b 'Zhi ', # 0x2c 'Zheng ', # 0x2d 'Dian ', # 0x2e 'Zhi ', # 0x2f 'Yu ', # 0x30 'Duo ', # 0x31 'Dun ', # 0x32 'Chun ', # 0x33 'Yong ', # 0x34 'Zhong ', # 0x35 'Di ', # 0x36 'Zhe ', # 0x37 'Chen ', # 0x38 'Chuai ', # 0x39 'Jian ', # 0x3a 'Gua ', # 0x3b 'Tang ', # 0x3c 'Ju ', # 0x3d 'Fu ', # 0x3e 'Zu ', # 0x3f 'Die ', # 0x40 'Pian ', # 0x41 'Rou ', # 0x42 'Nuo ', # 0x43 'Ti ', # 0x44 'Cha ', # 0x45 'Tui ', # 0x46 'Jian ', # 0x47 'Dao ', # 0x48 'Cuo ', # 0x49 'Xi ', # 0x4a 'Ta ', # 0x4b 'Qiang ', # 0x4c 'Zhan ', # 0x4d 'Dian ', # 0x4e 'Ti ', # 0x4f 'Ji ', # 0x50 'Nie ', # 0x51 'Man ', # 0x52 'Liu ', # 0x53 'Zhan ', # 0x54 'Bi ', # 0x55 'Chong ', # 0x56 'Lu ', # 0x57 'Liao ', # 0x58 'Cu ', # 0x59 'Tang ', # 0x5a 'Dai ', # 0x5b 'Suo ', # 0x5c 'Xi ', # 0x5d 'Kui ', # 0x5e 'Ji ', # 0x5f 'Zhi ', # 0x60 'Qiang ', # 0x61 'Di ', # 0x62 'Man ', # 0x63 'Zong ', # 0x64 'Lian ', # 0x65 'Beng ', # 0x66 'Zao ', # 0x67 'Nian ', # 0x68 'Bie ', # 0x69 'Tui ', # 0x6a 'Ju ', # 0x6b 'Deng ', # 0x6c 'Ceng ', # 0x6d 'Xian ', # 0x6e 'Fan ', # 0x6f 'Chu ', # 0x70 'Zhong ', # 0x71 'Dun ', # 0x72 'Bo ', # 0x73 'Cu ', # 0x74 'Zu ', # 0x75 'Jue ', # 0x76 'Jue ', # 0x77 'Lin ', # 0x78 'Ta ', # 0x79 'Qiao ', # 0x7a 'Qiao ', # 0x7b 'Pu ', # 0x7c 'Liao ', # 0x7d 'Dun ', # 0x7e 'Cuan ', # 0x7f 'Kuang ', # 0x80 'Zao ', # 0x81 'Ta ', # 0x82 'Bi ', 
# 0x83 'Bi ', # 0x84 'Zhu ', # 0x85 'Ju ', # 0x86 'Chu ', # 0x87 'Qiao ', # 0x88 'Dun ', # 0x89 'Chou ', # 0x8a 'Ji ', # 0x8b 'Wu ', # 0x8c 'Yue ', # 0x8d 'Nian ', # 0x8e 'Lin ', # 0x8f 'Lie ', # 0x90 'Zhi ', # 0x91 'Li ', # 0x92 'Zhi ', # 0x93 'Chan ', # 0x94 'Chu ', # 0x95 'Duan ', # 0x96 'Wei ', # 0x97 'Long ', # 0x98 'Lin ', # 0x99 'Xian ', # 0x9a 'Wei ', # 0x9b 'Zuan ', # 0x9c 'Lan ', # 0x9d 'Xie ', # 0x9e 'Rang ', # 0x9f 'Xie ', # 0xa0 'Nie ', # 0xa1 'Ta ', # 0xa2 'Qu ', # 0xa3 'Jie ', # 0xa4 'Cuan ', # 0xa5 'Zuan ', # 0xa6 'Xi ', # 0xa7 'Kui ', # 0xa8 'Jue ', # 0xa9 'Lin ', # 0xaa 'Shen ', # 0xab 'Gong ', # 0xac 'Dan ', # 0xad 'Segare ', # 0xae 'Qu ', # 0xaf 'Ti ', # 0xb0 'Duo ', # 0xb1 'Duo ', # 0xb2 'Gong ', # 0xb3 'Lang ', # 0xb4 'Nerau ', # 0xb5 'Luo ', # 0xb6 'Ai ', # 0xb7 'Ji ', # 0xb8 'Ju ', # 0xb9 'Tang ', # 0xba 'Utsuke ', # 0xbb '[?] ', # 0xbc 'Yan ', # 0xbd 'Shitsuke ', # 0xbe 'Kang ', # 0xbf 'Qu ', # 0xc0 'Lou ', # 0xc1 'Lao ', # 0xc2 'Tuo ', # 0xc3 'Zhi ', # 0xc4 'Yagate ', # 0xc5 'Ti ', # 0xc6 'Dao ', # 0xc7 'Yagate ', # 0xc8 'Yu ', # 0xc9 'Che ', # 0xca 'Ya ', # 0xcb 'Gui ', # 0xcc 'Jun ', # 0xcd 'Wei ', # 0xce 'Yue ', # 0xcf 'Xin ', # 0xd0 'Di ', # 0xd1 'Xuan ', # 0xd2 'Fan ', # 0xd3 'Ren ', # 0xd4 'Shan ', # 0xd5 'Qiang ', # 0xd6 'Shu ', # 0xd7 'Tun ', # 0xd8 'Chen ', # 0xd9 'Dai ', # 0xda 'E ', # 0xdb 'Na ', # 0xdc 'Qi ', # 0xdd 'Mao ', # 0xde 'Ruan ', # 0xdf 'Ren ', # 0xe0 'Fan ', # 0xe1 'Zhuan ', # 0xe2 'Hong ', # 0xe3 'Hu ', # 0xe4 'Qu ', # 0xe5 'Huang ', # 0xe6 'Di ', # 0xe7 'Ling ', # 0xe8 'Dai ', # 0xe9 'Ao ', # 0xea 'Zhen ', # 0xeb 'Fan ', # 0xec 'Kuang ', # 0xed 'Ang ', # 0xee 'Peng ', # 0xef 'Bei ', # 0xf0 'Gu ', # 0xf1 'Ku ', # 0xf2 'Pao ', # 0xf3 'Zhu ', # 0xf4 'Rong ', # 0xf5 'E ', # 0xf6 'Ba ', # 0xf7 'Zhou ', # 0xf8 'Zhi ', # 0xf9 'Yao ', # 0xfa 'Ke ', # 0xfb 'Yi ', # 0xfc 'Qing ', # 0xfd 'Shi ', # 0xfe 'Ping ', # 0xff )
gpl-3.0
UNIZAR-30226-2017-05/WebVideojuegos
web/proySoftware/controladores/perfilUsuarioVisible.py
1
1218
#!/usr/bin/env python # -*- coding: utf-8 -*- from proySoftware import app from flask import request from flask import render_template from flask import flash from flask_wtf import FlaskForm from wtforms import Form, BooleanField, StringField, PasswordField, validators from wtforms.validators import * from ..models import * from ..views import * from ..utils import * from registro import * from perfilUsuario import * @app.route('/perfil/<name>', methods=['GET']) def perfilUsuarioVisible(name): #perfil get, devuelve la informacion del perfil usuario = get_user_by_name(name) #action=get_action_list() #favorites=get_favorite_list() #form = UpdateList() imgForm = ImgPerfilForm() acciones = get_actions(usuario.id) UsuarioVideojuegos = get_videogames_user(usuario.id) #return render_template("_views/perfilUsuarioVisible.html", user=usuario, logueado=data) return render_template("_views/perfilUsuarioVisible.html", user=usuario, UsuarioVideojuegos = UsuarioVideojuegos, acciones = acciones) @app.route('/perfilVisID/<pk>', methods=['GET']) def perfilVisID(pk): return make_response(redirect(url_for('perfilUsuarioVisible', name=get_user_name(pk))))
mit
RydrDojo/Ridr
pylotVenv/lib/python2.7/site-packages/sqlalchemy/util/compat.py
70
6809
# util/compat.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Handle Python version/platform incompatibilities.

Exposes a single version-neutral vocabulary (``u()``, ``b()``, ``print_``,
``reraise()``, ``b64encode()`` ...) so the rest of the library can run
unchanged on Python 2 and Python 3.
"""

import sys

try:
    import threading
except ImportError:
    # interpreter built without thread support: use the stub module
    import dummy_threading as threading

# Interpreter version / platform flags used throughout the library.
py33 = sys.version_info >= (3, 3)
py32 = sys.version_info >= (3, 2)
py3k = sys.version_info >= (3, 0)
py2k = sys.version_info < (3, 0)
py265 = sys.version_info >= (2, 6, 5)
jython = sys.platform.startswith('java')
pypy = hasattr(sys, 'pypy_version_info')
win32 = sys.platform.startswith('win')
cpython = not pypy and not jython  # TODO: something better for this ?

import collections
next = next

if py3k:
    import pickle
else:
    # prefer the C implementation on Python 2
    try:
        import cPickle as pickle
    except ImportError:
        import pickle

# work around http://bugs.python.org/issue2646
# (Python < 2.6.5 rejects unicode keyword-argument names)
if py265:
    safe_kwarg = lambda arg: arg
else:
    safe_kwarg = str

# Shape of the legacy ``inspect.getargspec`` result, declared once so both
# version branches below can expose the same named tuple type.
ArgSpec = collections.namedtuple("ArgSpec",
                                 ["args", "varargs", "keywords", "defaults"])

if py3k:
    import builtins

    from inspect import getfullargspec as inspect_getfullargspec
    from urllib.parse import (quote_plus, unquote_plus,
                              parse_qsl, quote, unquote)
    import configparser
    from io import StringIO

    from io import BytesIO as byte_buffer

    def inspect_getargspec(func):
        # emulate the removed ``inspect.getargspec`` via the first four
        # fields of ``getfullargspec``
        return ArgSpec(
            *inspect_getfullargspec(func)[0:4]
        )

    string_types = str,
    binary_type = bytes
    text_type = str
    int_types = int,
    iterbytes = iter

    def u(s):
        return s

    def ue(s):
        return s

    def b(s):
        return s.encode("latin-1")

    if py32:
        callable = callable
    else:
        # ``callable()`` was removed in 3.0 and restored in 3.2
        def callable(fn):
            return hasattr(fn, '__call__')

    def cmp(a, b):
        # replacement for the builtin ``cmp`` removed in Python 3
        return (a > b) - (a < b)

    from functools import reduce

    print_ = getattr(builtins, "print")

    import_ = getattr(builtins, '__import__')

    import itertools
    itertools_filterfalse = itertools.filterfalse
    itertools_filter = filter
    itertools_imap = map
    from itertools import zip_longest

    import base64

    def b64encode(x):
        # bytes in, text out
        return base64.b64encode(x).decode('ascii')

    def b64decode(x):
        # text in, bytes out
        return base64.b64decode(x.encode('ascii'))

else:
    from inspect import getargspec as inspect_getfullargspec
    inspect_getargspec = inspect_getfullargspec
    from urllib import quote_plus, unquote_plus, quote, unquote
    from urlparse import parse_qsl
    import ConfigParser as configparser
    from StringIO import StringIO
    from cStringIO import StringIO as byte_buffer

    string_types = basestring,
    binary_type = str
    text_type = unicode
    int_types = int, long

    def iterbytes(buf):
        return (ord(byte) for byte in buf)

    def u(s):
        # this differs from what six does, which doesn't support non-ASCII
        # strings - we only use u() with
        # literal source strings, and all our source files with non-ascii
        # in them (all are tests) are utf-8 encoded.
        return unicode(s, "utf-8")

    def ue(s):
        return unicode(s, "unicode_escape")

    def b(s):
        return s

    def import_(*args):
        if len(args) == 4:
            # Py2 ``__import__`` requires the "fromlist" to hold plain strs
            args = args[0:3] + ([str(arg) for arg in args[3]],)
        return __import__(*args)

    callable = callable
    cmp = cmp
    reduce = reduce

    import base64
    b64encode = base64.b64encode
    b64decode = base64.b64decode

    def print_(*args, **kwargs):
        # Minimal stand-in for the Py3 ``print`` function.
        # NOTE(review): unlike the builtin, no separator or trailing newline
        # is emitted - callers are expected to supply their own.
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return
        # BUGFIX: this loop previously iterated ``enumerate(args)``, which
        # wrote the stringified ``(index, value)`` tuples rather than the
        # arguments themselves.
        for arg in args:
            if not isinstance(arg, basestring):
                arg = str(arg)
            fp.write(arg)

    import itertools
    itertools_filterfalse = itertools.ifilterfalse
    itertools_filter = itertools.ifilter
    itertools_imap = itertools.imap
    from itertools import izip_longest as zip_longest

import time
if win32 or jython:
    # higher-resolution timer on these platforms
    time_func = time.clock
else:
    time_func = time.time

from collections import namedtuple
from operator import attrgetter as dottedgetter


if py3k:
    def reraise(tp, value, tb=None, cause=None):
        if cause is not None:
            value.__cause__ = cause
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    def raise_from_cause(exception, exc_info=None):
        if exc_info is None:
            exc_info = sys.exc_info()
        exc_type, exc_value, exc_tb = exc_info
        reraise(type(exception), exception, tb=exc_tb, cause=exc_value)
else:
    # the three-argument raise is a syntax error on Py3, so hide it in exec()
    exec("def reraise(tp, value, tb=None, cause=None):\n"
         " raise tp, value, tb\n")

    def raise_from_cause(exception, exc_info=None):
        # not as nice as that of Py3K, but at least preserves
        # the code line where the issue occurred
        if exc_info is None:
            exc_info = sys.exc_info()
        exc_type, exc_value, exc_tb = exc_info
        reraise(type(exception), exception, tb=exc_tb)

if py3k:
    exec_ = getattr(builtins, 'exec')
else:
    def exec_(func_text, globals_, lcl=None):
        # ``exec`` is a statement on Py2; wrap it so Py3 can still parse
        # this module
        if lcl is None:
            exec('exec func_text in globals_')
        else:
            exec('exec func_text in globals_, lcl')


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    Drops the middle class upon creation.

    Source: http://lucumr.pocoo.org/2013/5/21/porting-to-python-3-redux/

    """
    class metaclass(meta):
        __call__ = type.__call__
        __init__ = type.__init__

        def __new__(cls, name, this_bases, d):
            if this_bases is None:
                # creating ``temporary_class`` itself
                return type.__new__(cls, name, (), d)
            # creating the real subclass: cut the temporary class out of
            # the hierarchy entirely
            return meta(name, bases, d)
    return metaclass('temporary_class', None, {})


from contextlib import contextmanager

try:
    from contextlib import nested
except ImportError:
    # removed in py3k, credit to mitsuhiko for
    # workaround

    @contextmanager
    def nested(*managers):
        exits = []
        vars = []
        exc = (None, None, None)
        try:
            for mgr in managers:
                exit = mgr.__exit__
                enter = mgr.__enter__
                vars.append(enter())
                exits.append(exit)
            yield vars
        except:
            exc = sys.exc_info()
        finally:
            # unwind in reverse order, giving each manager the chance to
            # see (and possibly suppress) the active exception
            while exits:
                exit = exits.pop()
                try:
                    if exit(*exc):
                        exc = (None, None, None)
                except:
                    exc = sys.exc_info()
            if exc != (None, None, None):
                reraise(exc[0], exc[1], exc[2])
mit
CSD-Public/stonix
src/tests/rules/unit_tests/zzzTestRuleDisableMediaAutomaticActions.py
1
7734
#!/usr/bin/env python3
###############################################################################
# Copyright 2019. Triad National Security, LLC. All rights reserved.
# This program was produced under U.S. Government contract 89233218CNA000001
# for Los Alamos National Laboratory (LANL), which is operated by Triad
# National Security, LLC for the U.S. Department of Energy/National Nuclear
# Security Administration.
#
# All rights in the program are reserved by Triad National Security, LLC, and
# the U.S. Department of Energy/National Nuclear Security Administration. The
# Government is granted for itself and others acting on its behalf a
# nonexclusive, paid-up, irrevocable worldwide license in this material to
# reproduce, prepare derivative works, distribute copies to the public,
# perform publicly and display publicly, and to permit others to do so.
###############################################################################
'''
This is a Unit Test for Rule DisableMediaAutomaticActions.

(Fixed: the docstring previously named ConfigureAppleSoftwareUpdate, which is
a different rule - this module tests DisableMediaAutomaticActions.)

@author: ekkehard j. koch
@change: 02/27/2013 Original Implementation
@change: 2016/02/10 roy Added sys.path.append for being able to unit test this
        file as well as with the test harness.
'''
import unittest
import sys

sys.path.append("../../../..")

from src.tests.lib.RuleTestTemplate import RuleTest
from src.stonix_resources.CommandHelper import CommandHelper
from src.tests.lib.logdispatcher_mock import LogPriority
from src.stonix_resources.rules.DisableMediaAutomaticActions import DisableMediaAutomaticActions


class zzzTestRuleDisableMediaAutomaticActions(RuleTest):
    '''Unit test for the DisableMediaAutomaticActions rule.

    Drives the rule through the standard report/fix/undo cycle and uses the
    macOS ``defaults`` tool to seed and inspect the four
    ``com.apple.digihub`` preferences the rule manages.
    '''

    # The four com.apple.digihub preference keys managed by the rule.
    DIGIHUB_KEYS = ("com.apple.digihub.blank.cd.appeared",
                    "com.apple.digihub.blank.dvd.appeared",
                    "com.apple.digihub.cd.picture.appeared",
                    "com.apple.digihub.dvd.video.appeared")

    def setUp(self):
        RuleTest.setUp(self)
        self.rule = DisableMediaAutomaticActions(self.config,
                                                 self.environ,
                                                 self.logdispatch,
                                                 self.statechglogger)
        self.rulename = self.rule.rulename
        self.rulenumber = self.rule.rulenumber
        self.ch = CommandHelper(self.logdispatch)
        # full path to the macOS ``defaults`` utility
        self.dc = "/usr/bin/defaults"

    def tearDown(self):
        pass

    def runTest(self):
        self.simpleRuleTest()

    def _run_digihub_commands(self, action):
        '''Run ``defaults -currentHost <action>`` for every digihub key.

        Stops at the first failing command, mirroring the original chained
        ``if success:`` blocks. Each command is logged at DEBUG before it is
        executed.

        :param action: ``defaults`` subcommand to run ("delete" or "read")
        :returns: boolean - If successful True; If failure False
        '''
        success = True
        for key in self.DIGIHUB_KEYS:
            if not success:
                break
            command = [self.dc, "-currentHost", action,
                       "/Library/Preferences/com.apple.digihub", key]
            self.logdispatch.log(LogPriority.DEBUG, str(command))
            success = self.ch.executeCommand(command)
        return success

    def setConditionsForRule(self):
        '''This makes sure the intial report fails by executing
        ``defaults -currentHost delete /Library/Preferences/com.apple.digihub
        <key>`` for each of the four digihub keys.

        :param self: essential if you override this definition
        :returns: boolean - If successful True; If failure False
        @author: ekkehard j. koch
        '''
        success = self._run_digihub_commands("delete")
        if success:
            success = self.checkReportForRule(False, True)
        return success

    def checkReportForRule(self, pCompliance, pRuleSuccess):
        '''To see what happended run
        ``defaults -currentHost read /Library/Preferences/com.apple.digihub
        <key>`` for each of the four digihub keys.

        :param self: essential if you override this definition
        :param pCompliance:
        :param pRuleSuccess:
        :returns: boolean - If successful True; If failure False
        @author: ekkehard j. koch
        '''
        self.logdispatch.log(LogPriority.DEBUG, "pCompliance = " + \
                             str(pCompliance) + ".")
        self.logdispatch.log(LogPriority.DEBUG, "pRuleSuccess = " + \
                             str(pRuleSuccess) + ".")
        success = self._run_digihub_commands("read")
        return success

    def checkFixForRule(self, pRuleSuccess):
        self.logdispatch.log(LogPriority.DEBUG, "pRuleSuccess = " + \
                             str(pRuleSuccess) + ".")
        success = self.checkReportForRule(True, pRuleSuccess)
        return success

    def checkUndoForRule(self, pRuleSuccess):
        self.logdispatch.log(LogPriority.DEBUG, "pRuleSuccess = " + \
                             str(pRuleSuccess) + ".")
        success = self.checkReportForRule(False, pRuleSuccess)
        return success


if __name__ == "__main__":
    # import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
gpl-2.0
spaceone/univention-management-console
univention/umc/util/module.py
1
1305
# -*- coding: utf-8 -*- # # Univention Management Console # # Copyright 2014 Univention GmbH # # http://www.univention.de/ # # All rights reserved. # # The source code of this program is made available # under the terms of the GNU Affero General Public License version 3 # (GNU AGPL V3) as published by the Free Software Foundation. # # Binary versions of this program provided by Univention to you as # well as other copyrighted, protected or trademarked materials like # Logos, graphics, fonts, specific documentations and configurations, # cryptographic keys etc. are subject to a license agreement between # you and Univention and not subject to the GNU AGPL V3. # # In the case you use this program under the terms of the GNU AGPL V3, # the program is provided in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public # License with the Debian GNU/Linux or Univention distribution in file # /usr/share/common-licenses/AGPL-3; if not, see # <http://www.gnu.org/licenses/>. from univention.management.console.module import Manager, Module __all__ = ['Manager', 'Module']
agpl-3.0
seanwestfall/django
django/utils/deprecation.py
199
2627
from __future__ import absolute_import import inspect import warnings class RemovedInDjango20Warning(PendingDeprecationWarning): pass class RemovedInDjango110Warning(DeprecationWarning): pass RemovedInNextVersionWarning = RemovedInDjango110Warning class warn_about_renamed_method(object): def __init__(self, class_name, old_method_name, new_method_name, deprecation_warning): self.class_name = class_name self.old_method_name = old_method_name self.new_method_name = new_method_name self.deprecation_warning = deprecation_warning def __call__(self, f): def wrapped(*args, **kwargs): warnings.warn( "`%s.%s` is deprecated, use `%s` instead." % (self.class_name, self.old_method_name, self.new_method_name), self.deprecation_warning, 2) return f(*args, **kwargs) return wrapped class RenameMethodsBase(type): """ Handles the deprecation paths when renaming a method. It does the following: 1) Define the new method if missing and complain about it. 2) Define the old method if missing. 3) Complain whenever an old method is called. See #15363 for more details. """ renamed_methods = () def __new__(cls, name, bases, attrs): new_class = super(RenameMethodsBase, cls).__new__(cls, name, bases, attrs) for base in inspect.getmro(new_class): class_name = base.__name__ for renamed_method in cls.renamed_methods: old_method_name = renamed_method[0] old_method = base.__dict__.get(old_method_name) new_method_name = renamed_method[1] new_method = base.__dict__.get(new_method_name) deprecation_warning = renamed_method[2] wrapper = warn_about_renamed_method(class_name, *renamed_method) # Define the new method if missing and complain about it if not new_method and old_method: warnings.warn( "`%s.%s` method should be renamed `%s`." % (class_name, old_method_name, new_method_name), deprecation_warning, 2) setattr(base, new_method_name, old_method) setattr(base, old_method_name, wrapper(old_method)) # Define the old method as a wrapped call to the new method. 
if not old_method and new_method: setattr(base, old_method_name, wrapper(new_method)) return new_class
bsd-3-clause
dodaro/preasp
tests/asp/weakConstraints/laby3.gringo.test.py
4
113788
input = """ 1 2 0 0 1 3 0 0 1 4 0 0 1 5 0 0 1 6 0 0 1 7 0 0 1 8 0 0 1 9 0 0 1 10 0 0 1 11 0 0 1 12 0 0 1 13 0 0 1 14 0 0 1 15 0 0 1 16 0 0 1 17 0 0 1 18 0 0 1 19 0 0 1 20 0 0 1 21 0 0 1 22 0 0 1 23 0 0 1 24 0 0 1 25 0 0 1 26 0 0 1 27 0 0 1 28 0 0 1 29 0 0 1 30 0 0 1 31 0 0 1 32 0 0 1 33 0 0 1 34 0 0 1 35 0 0 1 36 0 0 1 37 0 0 1 38 0 0 1 39 0 0 1 40 0 0 1 41 0 0 1 42 0 0 1 43 0 0 1 44 0 0 1 45 0 0 1 46 0 0 1 47 0 0 1 48 0 0 1 49 0 0 1 50 0 0 1 51 0 0 1 52 0 0 1 53 0 0 1 54 0 0 1 55 0 0 1 56 0 0 1 57 0 0 1 58 0 0 1 59 0 0 1 60 0 0 1 61 0 0 1 62 0 0 1 63 0 0 1 64 0 0 1 65 0 0 1 66 0 0 1 67 0 0 1 68 0 0 1 69 0 0 1 70 0 0 1 71 0 0 1 72 0 0 1 73 0 0 1 74 0 0 1 75 0 0 1 76 0 0 1 77 0 0 1 78 0 0 1 79 0 0 1 80 0 0 1 81 0 0 1 82 0 0 1 83 0 0 1 84 0 0 1 85 0 0 1 86 0 0 1 87 0 0 1 88 0 0 1 89 0 0 1 90 0 0 1 91 0 0 1 92 0 0 1 93 0 0 1 94 0 0 1 95 0 0 1 96 0 0 1 97 0 0 1 98 0 0 1 99 0 0 1 100 0 0 1 101 0 0 1 102 0 0 1 103 0 0 1 104 0 0 1 105 0 0 1 106 0 0 1 107 0 0 1 108 0 0 1 109 0 0 1 110 0 0 1 111 0 0 1 112 0 0 1 113 0 0 1 114 0 0 1 115 0 0 1 116 0 0 1 117 0 0 1 118 0 0 1 119 0 0 1 120 0 0 1 121 0 0 1 122 0 0 1 123 0 0 1 124 0 0 1 125 0 0 1 126 0 0 1 127 0 0 1 128 0 0 1 129 0 0 1 130 0 0 1 131 0 0 1 132 0 0 1 133 0 0 1 134 0 0 1 135 0 0 1 136 0 0 1 137 0 0 1 138 0 0 1 139 0 0 1 140 0 0 1 141 0 0 1 142 0 0 1 143 0 0 1 144 0 0 1 145 0 0 1 146 0 0 1 147 0 0 1 148 0 0 1 149 0 0 1 150 0 0 1 151 0 0 1 152 0 0 1 153 0 0 1 154 0 0 1 155 0 0 1 156 0 0 1 157 0 0 1 158 0 0 1 159 0 0 1 160 0 0 1 161 0 0 1 162 0 0 1 163 0 0 1 164 0 0 1 165 0 0 1 166 0 0 1 167 0 0 1 168 0 0 1 169 0 0 1 170 0 0 1 171 0 0 1 172 0 0 1 173 0 0 1 174 0 0 1 175 0 0 1 176 0 0 1 177 0 0 1 178 0 0 1 179 0 0 1 180 0 0 1 181 0 0 1 182 0 0 1 183 0 0 1 184 0 0 1 185 0 0 1 186 0 0 1 187 0 0 1 188 0 0 1 189 0 0 1 190 0 0 1 191 0 0 1 192 0 0 1 193 0 0 1 194 0 0 1 195 0 0 1 196 0 0 1 197 0 0 1 198 0 0 1 199 0 0 1 200 0 0 1 201 0 0 1 202 0 0 1 203 0 0 1 204 0 0 1 205 0 0 1 206 0 0 1 207 0 0 1 208 0 0 1 209 0 0 1 210 0 0 1 
211 0 0 1 212 0 0 1 213 0 0 1 214 0 0 1 215 0 0 1 216 0 0 1 217 0 0 1 218 0 0 1 219 0 0 1 220 0 0 1 221 0 0 1 222 0 0 1 223 0 0 1 224 0 0 1 225 0 0 1 226 0 0 1 227 0 0 1 228 0 0 1 229 0 0 1 230 0 0 1 231 0 0 1 232 0 0 1 233 0 0 1 234 0 0 1 235 0 0 1 236 0 0 1 237 0 0 1 238 0 0 1 239 0 0 1 240 0 0 1 241 0 0 1 242 0 0 1 243 0 0 1 244 0 0 1 245 0 0 1 246 0 0 1 247 0 0 1 248 0 0 1 249 0 0 1 250 0 0 1 251 0 0 1 252 0 0 1 253 0 0 1 254 0 0 1 255 0 0 1 256 0 0 1 257 0 0 1 258 0 0 1 259 0 0 1 260 0 0 1 261 0 0 1 262 0 0 1 263 0 0 1 264 0 0 1 265 0 0 1 266 0 0 1 267 0 0 1 268 0 0 1 269 0 0 1 270 0 0 1 271 0 0 1 272 0 0 1 273 0 0 1 274 0 0 1 275 0 0 1 276 0 0 1 277 0 0 1 278 0 0 1 279 0 0 1 280 0 0 1 281 0 0 1 282 0 0 1 283 0 0 1 284 0 0 1 285 0 0 1 286 0 0 1 287 0 0 1 288 0 0 1 289 0 0 1 290 0 0 1 291 0 0 1 292 0 0 1 293 0 0 1 294 0 0 1 295 0 0 1 296 0 0 1 297 0 0 1 298 0 0 1 299 0 0 1 300 0 0 1 301 0 0 1 302 0 0 1 303 0 0 1 304 0 0 1 305 0 0 1 306 0 0 1 307 0 0 1 308 0 0 1 309 0 0 1 310 0 0 1 311 0 0 1 312 0 0 1 313 0 0 1 314 0 0 1 315 0 0 1 316 0 0 1 317 0 0 1 318 0 0 1 319 0 0 1 320 0 0 1 321 0 0 1 322 0 0 1 323 0 0 1 324 0 0 1 325 0 0 1 326 0 0 1 327 0 0 1 328 0 0 1 329 0 0 1 330 0 0 1 331 0 0 1 332 0 0 1 333 0 0 1 334 0 0 1 335 0 0 1 336 0 0 1 337 0 0 1 338 0 0 1 339 0 0 1 340 0 0 1 341 0 0 1 342 0 0 1 343 0 0 1 344 0 0 1 345 0 0 1 346 0 0 1 347 0 0 1 348 0 0 1 349 0 0 1 350 0 0 1 351 0 0 1 352 0 0 1 353 0 0 1 354 0 0 1 355 0 0 1 356 0 0 1 357 0 0 1 358 0 0 1 359 0 0 1 360 0 0 1 361 0 0 1 362 0 0 1 363 0 0 1 364 0 0 1 365 0 0 1 366 0 0 1 367 0 0 1 368 0 0 1 369 0 0 1 370 0 0 1 371 0 0 1 372 0 0 1 373 0 0 1 374 0 0 1 375 0 0 1 376 0 0 1 377 0 0 1 378 0 0 1 379 0 0 1 380 0 0 1 381 0 0 1 382 0 0 1 383 0 0 1 384 0 0 1 385 0 0 1 386 0 0 1 387 0 0 1 388 0 0 1 389 0 0 1 390 0 0 1 391 0 0 1 392 0 0 1 393 0 0 1 394 0 0 1 395 0 0 1 396 0 0 1 397 0 0 1 398 0 0 1 399 0 0 1 400 0 0 1 401 0 0 1 402 0 0 1 403 0 0 1 404 0 0 1 405 0 0 1 406 0 0 1 407 0 0 1 408 0 0 1 409 0 0 1 410 0 0 1 
411 0 0 1 412 0 0 1 413 0 0 1 414 0 0 1 415 0 0 1 416 0 0 1 417 0 0 1 418 0 0 1 419 0 0 1 420 0 0 1 421 0 0 1 422 0 0 1 423 0 0 1 424 0 0 1 425 0 0 1 426 0 0 1 427 0 0 1 428 1 1 429 1 430 4 4 431 432 433 434 1 435 4 4 436 437 438 439 1 440 4 4 436 441 438 442 1 443 4 4 444 441 445 442 1 446 4 4 447 448 449 450 1 451 4 4 447 432 449 434 1 452 4 4 453 454 455 456 1 457 4 4 458 448 459 450 1 460 4 4 461 437 462 439 1 463 4 4 458 454 459 456 1 464 4 4 447 454 449 456 1 465 4 4 466 467 468 469 1 470 4 4 458 437 459 439 1 471 4 4 458 467 459 469 1 472 4 4 461 454 462 456 1 473 4 4 461 441 462 442 1 474 4 4 466 475 468 476 1 477 4 4 458 478 459 479 1 480 4 4 461 448 462 450 1 481 4 4 444 454 445 456 1 482 4 4 447 467 449 469 1 483 4 4 484 437 485 439 1 486 4 4 461 487 462 488 1 489 4 4 444 478 445 479 1 490 4 4 461 432 462 434 1 491 4 4 492 487 493 488 1 494 4 4 447 441 449 442 1 495 4 4 492 432 493 434 1 496 4 4 453 467 455 469 1 497 4 4 453 478 455 479 1 498 4 4 466 487 468 488 1 499 4 4 492 500 493 501 1 502 4 4 453 441 455 442 1 503 4 4 453 448 455 450 1 504 4 4 447 437 449 439 1 505 4 4 458 500 459 501 1 506 4 4 507 500 508 501 1 509 4 4 431 454 433 456 1 510 4 4 447 487 449 488 1 511 4 4 507 432 508 434 1 512 4 4 507 487 508 488 1 513 4 4 484 432 485 434 1 514 4 4 444 432 445 434 1 515 4 4 458 487 459 488 1 516 4 4 453 517 455 518 1 519 4 4 461 467 462 469 1 520 4 4 431 448 433 450 1 521 4 4 507 448 508 450 1 522 4 4 458 517 459 518 1 523 4 4 507 454 508 456 1 524 4 4 461 500 462 501 1 525 4 4 507 437 508 439 1 526 4 4 436 454 438 456 1 527 4 4 453 487 455 488 1 528 4 4 466 517 468 518 1 529 4 4 530 531 532 533 1 534 4 4 535 536 537 538 1 539 4 4 535 540 537 541 1 542 4 4 543 540 544 541 1 545 4 4 546 547 548 549 1 550 4 4 546 531 548 533 1 551 4 4 552 553 554 555 1 556 4 4 557 547 558 549 1 559 4 4 560 536 561 538 1 562 4 4 557 553 558 555 1 563 4 4 546 553 548 555 1 564 4 4 565 566 567 568 1 569 4 4 557 536 558 538 1 570 4 4 557 566 558 568 1 571 4 4 560 553 561 
555 1 572 4 4 560 540 561 541 1 573 4 4 565 574 567 575 1 576 4 4 557 577 558 578 1 579 4 4 560 547 561 549 1 580 4 4 543 553 544 555 1 581 4 4 546 566 548 568 1 582 4 4 583 536 584 538 1 585 4 4 560 586 561 587 1 588 4 4 543 577 544 578 1 589 4 4 560 531 561 533 1 590 4 4 591 586 592 587 1 593 4 4 546 540 548 541 1 594 4 4 591 531 592 533 1 595 4 4 552 566 554 568 1 596 4 4 552 577 554 578 1 597 4 4 565 586 567 587 1 598 4 4 591 599 592 600 1 601 4 4 552 540 554 541 1 602 4 4 552 547 554 549 1 603 4 4 546 536 548 538 1 604 4 4 557 599 558 600 1 605 4 4 606 599 607 600 1 608 4 4 530 553 532 555 1 609 4 4 546 586 548 587 1 610 4 4 606 531 607 533 1 611 4 4 606 586 607 587 1 612 4 4 583 531 584 533 1 613 4 4 543 531 544 533 1 614 4 4 557 586 558 587 1 615 4 4 552 616 554 617 1 618 4 4 560 566 561 568 1 619 4 4 530 547 532 549 1 620 4 4 606 547 607 549 1 621 4 4 557 616 558 617 1 622 4 4 606 553 607 555 1 623 4 4 560 599 561 600 1 624 4 4 606 536 607 538 1 625 4 4 535 553 537 555 1 626 4 4 552 586 554 587 1 627 4 4 565 616 567 617 1 628 4 4 629 630 631 632 1 633 4 4 634 635 636 637 1 638 4 4 634 639 636 640 1 641 4 4 642 639 643 640 1 644 4 4 645 646 647 648 1 649 4 4 645 630 647 632 1 650 4 4 651 652 653 654 1 655 4 4 656 646 657 648 1 658 4 4 659 635 660 637 1 661 4 4 656 652 657 654 1 662 4 4 645 652 647 654 1 663 4 4 664 665 666 667 1 668 4 4 656 635 657 637 1 669 4 4 656 665 657 667 1 670 4 4 659 652 660 654 1 671 4 4 659 639 660 640 1 672 4 4 664 673 666 674 1 675 4 4 656 676 657 677 1 678 4 4 659 646 660 648 1 679 4 4 642 652 643 654 1 680 4 4 645 665 647 667 1 681 4 4 682 635 683 637 1 684 4 4 659 685 660 686 1 687 4 4 642 676 643 677 1 688 4 4 659 630 660 632 1 689 4 4 690 685 691 686 1 692 4 4 645 639 647 640 1 693 4 4 690 630 691 632 1 694 4 4 651 665 653 667 1 695 4 4 651 676 653 677 1 696 4 4 664 685 666 686 1 697 4 4 690 698 691 699 1 700 4 4 651 639 653 640 1 701 4 4 651 646 653 648 1 702 4 4 645 635 647 637 1 703 4 4 656 698 657 699 1 704 4 4 705 698 
706 699 1 707 4 4 629 652 631 654 1 708 4 4 645 685 647 686 1 709 4 4 705 630 706 632 1 710 4 4 705 685 706 686 1 711 4 4 682 630 683 632 1 712 4 4 642 630 643 632 1 713 4 4 656 685 657 686 1 714 4 4 651 715 653 716 1 717 4 4 659 665 660 667 1 718 4 4 629 646 631 648 1 719 4 4 705 646 706 648 1 720 4 4 656 715 657 716 1 721 4 4 705 652 706 654 1 722 4 4 659 698 660 699 1 723 4 4 705 635 706 637 1 724 4 4 634 652 636 654 1 725 4 4 651 685 653 686 1 726 4 4 664 715 666 716 1 727 4 4 728 729 730 731 1 732 4 4 733 734 735 736 1 737 4 4 733 738 735 739 1 740 4 4 741 738 742 739 1 743 4 4 744 745 746 747 1 748 4 4 744 729 746 731 1 749 4 4 750 751 752 753 1 754 4 4 755 745 756 747 1 757 4 4 758 734 759 736 1 760 4 4 755 751 756 753 1 761 4 4 744 751 746 753 1 762 4 4 763 764 765 766 1 767 4 4 755 734 756 736 1 768 4 4 755 764 756 766 1 769 4 4 758 751 759 753 1 770 4 4 758 738 759 739 1 771 4 4 763 772 765 773 1 774 4 4 755 775 756 776 1 777 4 4 758 745 759 747 1 778 4 4 741 751 742 753 1 779 4 4 744 764 746 766 1 780 4 4 781 734 782 736 1 783 4 4 758 784 759 785 1 786 4 4 741 775 742 776 1 787 4 4 758 729 759 731 1 788 4 4 789 784 790 785 1 791 4 4 744 738 746 739 1 792 4 4 789 729 790 731 1 793 4 4 750 764 752 766 1 794 4 4 750 775 752 776 1 795 4 4 763 784 765 785 1 796 4 4 789 797 790 798 1 799 4 4 750 738 752 739 1 800 4 4 750 745 752 747 1 801 4 4 744 734 746 736 1 802 4 4 755 797 756 798 1 803 4 4 804 797 805 798 1 806 4 4 728 751 730 753 1 807 4 4 744 784 746 785 1 808 4 4 804 729 805 731 1 809 4 4 804 784 805 785 1 810 4 4 781 729 782 731 1 811 4 4 741 729 742 731 1 812 4 4 755 784 756 785 1 813 4 4 750 814 752 815 1 816 4 4 758 764 759 766 1 817 4 4 728 745 730 747 1 818 4 4 804 745 805 747 1 819 4 4 755 814 756 815 1 820 4 4 804 751 805 753 1 821 4 4 758 797 759 798 1 822 4 4 804 734 805 736 1 823 4 4 733 751 735 753 1 824 4 4 750 784 752 785 1 825 4 4 763 814 765 815 1 826 4 4 827 828 829 830 1 831 4 4 832 833 834 835 1 836 4 4 832 837 834 838 1 839 4 4 840 
837 841 838 1 842 4 4 843 844 845 846 1 847 4 4 843 828 845 830 1 848 4 4 849 850 851 852 1 853 4 4 854 844 855 846 1 856 4 4 857 833 858 835 1 859 4 4 854 850 855 852 1 860 4 4 843 850 845 852 1 861 4 4 862 863 864 865 1 866 4 4 854 833 855 835 1 867 4 4 854 863 855 865 1 868 4 4 857 850 858 852 1 869 4 4 857 837 858 838 1 870 4 4 862 871 864 872 1 873 4 4 854 874 855 875 1 876 4 4 857 844 858 846 1 877 4 4 840 850 841 852 1 878 4 4 843 863 845 865 1 879 4 4 880 833 881 835 1 882 4 4 857 883 858 884 1 885 4 4 840 874 841 875 1 886 4 4 857 828 858 830 1 887 4 4 888 883 889 884 1 890 4 4 843 837 845 838 1 891 4 4 888 828 889 830 1 892 4 4 849 863 851 865 1 893 4 4 849 874 851 875 1 894 4 4 862 883 864 884 1 895 4 4 888 896 889 897 1 898 4 4 849 837 851 838 1 899 4 4 849 844 851 846 1 900 4 4 843 833 845 835 1 901 4 4 854 896 855 897 1 902 4 4 903 896 904 897 1 905 4 4 827 850 829 852 1 906 4 4 843 883 845 884 1 907 4 4 903 828 904 830 1 908 4 4 903 883 904 884 1 909 4 4 880 828 881 830 1 910 4 4 840 828 841 830 1 911 4 4 854 883 855 884 1 912 4 4 849 913 851 914 1 915 4 4 857 863 858 865 1 916 4 4 827 844 829 846 1 917 4 4 903 844 904 846 1 918 4 4 854 913 855 914 1 919 4 4 903 850 904 852 1 920 4 4 857 896 858 897 1 921 4 4 903 833 904 835 1 922 4 4 832 850 834 852 1 923 4 4 849 883 851 884 1 924 4 4 862 913 864 914 1 925 1 0 474 1 926 1 0 528 1 927 1 0 527 1 928 1 0 524 1 929 1 0 522 1 930 1 0 521 1 931 1 0 520 1 932 1 0 516 1 933 1 0 515 1 934 1 0 515 1 935 1 0 515 1 936 1 0 514 1 937 1 0 512 1 938 1 0 509 1 939 1 0 506 1 940 1 0 505 1 941 1 0 504 1 942 1 0 504 1 943 1 0 502 1 944 1 0 502 1 945 1 0 502 1 946 1 0 499 1 947 1 0 496 1 948 1 0 486 1 949 1 0 480 1 950 1 0 472 1 951 1 0 471 1 952 1 0 464 1 953 1 0 460 1 954 1 0 457 1 955 1 0 446 1 956 1 0 443 1 957 1 0 443 1 958 1 0 435 1 959 1 0 435 1 960 1 0 435 1 961 2 0 925 573 1 962 2 0 926 627 1 963 2 0 927 626 1 964 2 0 928 623 1 965 2 0 929 621 1 966 2 0 930 620 1 967 2 0 931 619 1 968 2 0 932 615 1 969 2 0 935 
614 1 970 2 0 934 614 1 971 2 0 933 614 1 972 2 0 936 613 1 973 2 0 937 611 1 974 2 0 938 608 1 975 2 0 939 605 1 976 2 0 940 604 1 977 2 0 942 603 1 978 2 0 941 603 1 979 2 0 945 601 1 980 2 0 944 601 1 981 2 0 943 601 1 982 2 0 946 598 1 983 2 0 947 595 1 984 2 0 948 585 1 985 2 0 949 579 1 986 2 0 950 571 1 987 2 0 951 570 1 988 2 0 952 563 1 989 2 0 953 559 1 990 2 0 954 556 1 991 2 0 955 545 1 992 2 0 957 542 1 993 2 0 956 542 1 994 2 0 960 534 1 995 2 0 959 534 1 996 2 0 958 534 1 997 2 0 961 672 1 998 2 0 962 726 1 999 2 0 963 725 1 1000 2 0 964 722 1 1001 2 0 965 720 1 1002 2 0 966 719 1 1003 2 0 967 718 1 1004 2 0 968 714 1 1005 2 0 971 713 1 1006 2 0 970 713 1 1007 2 0 969 713 1 1008 2 0 972 712 1 1009 2 0 973 710 1 1010 2 0 974 707 1 1011 2 0 975 704 1 1012 2 0 976 703 1 1013 2 0 978 702 1 1014 2 0 977 702 1 1015 2 0 981 700 1 1016 2 0 980 700 1 1017 2 0 979 700 1 1018 2 0 982 697 1 1019 2 0 983 694 1 1020 2 0 984 684 1 1021 2 0 985 678 1 1022 2 0 986 670 1 1023 2 0 987 669 1 1024 2 0 988 662 1 1025 2 0 989 658 1 1026 2 0 990 655 1 1027 2 0 991 644 1 1028 2 0 993 641 1 1029 2 0 992 641 1 1030 2 0 996 633 1 1031 2 0 995 633 1 1032 2 0 994 633 1 1033 2 0 997 771 1 1034 2 0 998 825 1 1035 2 0 999 824 1 1036 2 0 1000 821 1 1037 2 0 1001 819 1 1038 2 0 1002 818 1 1039 2 0 1003 817 1 1040 2 0 1004 813 1 1041 2 0 1007 812 1 1042 2 0 1006 812 1 1043 2 0 1005 812 1 1044 2 0 1008 811 1 1045 2 0 1009 809 1 1046 2 0 1010 806 1 1047 2 0 1011 803 1 1048 2 0 1012 802 1 1049 2 0 1014 801 1 1050 2 0 1013 801 1 1051 2 0 1017 799 1 1052 2 0 1016 799 1 1053 2 0 1015 799 1 1054 2 0 1018 796 1 1055 2 0 1019 793 1 1056 2 0 1020 783 1 1057 2 0 1021 777 1 1058 2 0 1022 769 1 1059 2 0 1023 768 1 1060 2 0 1024 761 1 1061 2 0 1025 757 1 1062 2 0 1026 754 1 1063 2 0 1027 743 1 1064 2 0 1029 740 1 1065 2 0 1028 740 1 1066 2 0 1032 732 1 1067 2 0 1031 732 1 1068 2 0 1030 732 1 1069 2 0 1033 870 1 1070 2 0 1034 924 1 1071 2 0 1035 923 1 1072 2 0 1036 920 1 1073 2 0 1037 918 1 1074 2 0 
1038 917 1 1075 2 0 1039 916 1 1076 2 0 1040 912 1 1077 2 0 1043 911 1 1078 2 0 1042 911 1 1079 2 0 1041 911 1 1080 2 0 1044 910 1 1081 2 0 1045 908 1 1082 2 0 1046 905 1 1083 2 0 1047 902 1 1084 2 0 1048 901 1 1085 2 0 1050 900 1 1086 2 0 1049 900 1 1087 2 0 1053 898 1 1088 2 0 1052 898 1 1089 2 0 1051 898 1 1090 2 0 1054 895 1 1091 2 0 1055 892 1 1092 2 0 1056 882 1 1093 2 0 1057 876 1 1094 2 0 1058 868 1 1095 2 0 1059 867 1 1096 2 0 1060 860 1 1097 2 0 1061 856 1 1098 2 0 1062 853 1 1099 2 0 1063 842 1 1100 2 0 1065 839 1 1101 2 0 1064 839 1 1102 2 0 1068 831 1 1103 2 0 1067 831 1 1104 2 0 1066 831 3 44 436 484 444 466 453 458 447 461 507 492 431 437 454 467 432 475 517 478 441 487 500 448 438 485 445 468 455 459 449 462 508 493 433 439 456 469 434 476 518 479 442 488 501 450 1 0 1105 2 1106 44 0 1 436 484 444 466 453 458 447 461 507 492 431 437 454 467 432 475 517 478 441 487 500 448 438 485 445 468 455 459 449 462 508 493 433 439 456 469 434 476 518 479 442 488 501 450 1 1 2 1 1106 1105 2 1107 44 0 2 436 484 444 466 453 458 447 461 507 492 431 437 454 467 432 475 517 478 441 487 500 448 438 485 445 468 455 459 449 462 508 493 433 439 456 469 434 476 518 479 442 488 501 450 1 1 2 0 1107 1105 1 1105 1 0 428 1 1108 1 0 433 1 1109 1 0 493 1 1110 1 0 508 1 1111 1 0 508 1 1112 1 0 508 1 1113 1 0 508 1 1114 1 0 462 1 1115 1 0 462 1 1116 1 0 462 1 1117 1 0 462 1 1118 1 0 462 1 1119 1 0 462 1 1120 1 0 462 1 1121 1 0 449 1 1122 1 0 449 1 1123 1 0 449 1 1124 1 0 449 1 1125 1 0 449 1 1126 1 0 459 1 1127 1 0 459 1 1128 1 0 459 1 1129 1 0 459 1 1130 1 0 459 1 1131 1 0 459 1 1132 1 0 455 1 1133 1 0 455 1 1134 1 0 455 1 1135 1 0 455 1 1136 1 0 455 1 1137 1 0 468 1 1138 1 0 445 1 1139 1 0 438 1 1140 1 0 431 1 1141 1 0 492 1 1142 1 0 507 1 1143 1 0 507 1 1144 1 0 507 1 1145 1 0 507 1 1146 1 0 461 1 1147 1 0 461 1 1148 1 0 461 1 1149 1 0 461 1 1150 1 0 461 1 1151 1 0 461 1 1152 1 0 461 1 1153 1 0 447 1 1154 1 0 447 1 1155 1 0 447 1 1156 1 0 447 1 1157 1 0 447 1 1158 1 0 458 1 
1159 1 0 458 1 1160 1 0 458 1 1161 1 0 458 1 1162 1 0 458 1 1163 1 0 458 1 1164 1 0 453 1 1165 1 0 453 1 1166 1 0 453 1 1167 1 0 453 1 1168 1 0 453 1 1169 1 0 466 1 1170 1 0 444 1 1171 1 0 436 1 1172 1 0 450 1 1173 1 0 450 1 1174 1 0 450 1 1175 1 0 450 1 1176 1 0 450 1 1177 1 0 501 1 1178 1 0 501 1 1179 1 0 488 1 1180 1 0 488 1 1181 1 0 488 1 1182 1 0 488 1 1183 1 0 488 1 1184 1 0 488 1 1185 1 0 442 1 1186 1 0 479 1 1187 1 0 518 1 1188 1 0 518 1 1189 1 0 434 1 1190 1 0 434 1 1191 1 0 434 1 1192 1 0 434 1 1193 1 0 434 1 1194 1 0 434 1 1195 1 0 469 1 1196 1 0 469 1 1197 1 0 469 1 1198 1 0 469 1 1199 1 0 456 1 1200 1 0 456 1 1201 1 0 456 1 1202 1 0 456 1 1203 1 0 456 1 1204 1 0 439 1 1205 1 0 439 1 1206 1 0 439 1 1207 1 0 439 1 1208 1 0 448 1 1209 1 0 448 1 1210 1 0 448 1 1211 1 0 448 1 1212 1 0 448 1 1213 1 0 500 1 1214 1 0 500 1 1215 1 0 487 1 1216 1 0 487 1 1217 1 0 487 1 1218 1 0 487 1 1219 1 0 487 1 1220 1 0 487 1 1221 1 0 441 1 1222 1 0 478 1 1223 1 0 517 1 1224 1 0 517 1 1225 1 0 432 1 1226 1 0 432 1 1227 1 0 432 1 1228 1 0 432 1 1229 1 0 432 1 1230 1 0 432 1 1231 1 0 467 1 1232 1 0 467 1 1233 1 0 467 1 1234 1 0 467 1 1235 1 0 454 1 1236 1 0 454 1 1237 1 0 454 1 1238 1 0 454 1 1239 1 0 454 1 1240 1 0 437 1 1241 1 0 437 1 1242 1 0 437 1 1243 1 0 437 1 1244 1 0 1137 1 1245 1 0 1243 1 1246 1 0 1242 1 1247 1 0 1242 1 1248 1 0 1238 1 1249 1 0 1237 1 1250 1 0 1234 1 1251 1 0 1231 1 1252 1 0 1230 1 1253 1 0 1225 1 1254 1 0 1224 1 1255 1 0 1223 1 1256 1 0 1220 1 1257 1 0 1220 1 1258 1 0 1220 1 1259 1 0 1217 1 1260 1 0 1216 1 1261 1 0 1215 1 1262 1 0 1214 1 1263 1 0 1213 1 1264 1 0 1212 1 1265 1 0 1211 1 1266 1 0 1210 1 1267 1 0 1209 1 1268 1 0 1207 1 1269 1 0 1205 1 1270 1 0 1205 1 1271 1 0 1205 1 1272 1 0 1204 1 1273 1 0 1204 1 1274 1 0 1203 1 1275 1 0 1202 1 1276 1 0 1199 1 1277 1 0 1196 1 1278 1 0 1195 1 1279 1 0 1188 1 1280 1 0 1187 1 1281 1 0 1183 1 1282 1 0 1182 1 1283 1 0 1181 1 1259 1 0 1181 1 1284 1 0 1181 1 1285 1 0 1180 1 1286 1 0 1178 1 1287 1 0 1177 1 1288 
1 0 1176 1 1265 1 0 1175 1 1289 1 0 1173 1 1290 1 0 1172 1 1291 1 0 1170 1 1292 1 0 1170 1 1293 1 0 1169 1 945 1 0 1167 1 1294 1 0 1166 1 1295 1 0 1166 1 1296 1 0 1166 1 1297 1 0 1165 1 935 1 0 1162 1 1298 1 0 1161 1 1299 1 0 1159 1 1245 1 0 1157 1 1300 1 0 1153 1 1289 1 0 1153 1 1301 1 0 1152 1 948 1 0 1150 1 1302 1 0 1149 1 1303 1 0 1147 1 949 1 0 1146 1 1304 1 0 1144 1 1305 1 0 1143 1 1285 1 0 1141 1 1306 1 0 1139 1 1307 1 0 1139 1 1276 1 0 1139 1 1256 1 0 1136 1 1257 1 0 1136 1 927 1 0 1136 1 1295 1 0 1134 1 1308 1 0 1131 1 1298 1 0 1131 1 940 1 0 1131 1 1309 1 0 1130 1 1310 1 0 1127 1 1247 1 0 1126 1 1311 1 0 1124 1 1312 1 0 1123 1 1313 1 0 1123 1 1314 1 0 1121 1 928 1 0 1120 1 1315 1 0 1119 1 1316 1 0 1116 1 1275 1 0 1115 1 953 1 0 1114 1 1317 1 0 1112 1 1263 1 0 1111 1 1318 1 0 1110 1 1319 1 0 1108 1 1320 2 0 1244 627 1 1321 3 0 1244 1254 1255 1 1322 2 0 1254 627 1 1323 2 0 1258 626 1 1324 2 0 1257 626 1 1325 2 0 1256 626 1 1326 2 0 1307 625 1 1327 2 0 1306 625 1 1328 2 0 1276 625 1 1329 2 0 1318 624 1 1330 2 0 1268 624 1 1331 2 0 1301 623 1 1332 2 0 1262 623 1 1333 2 0 1274 622 1 1334 2 0 1279 621 1 1335 2 0 1317 620 1 1336 2 0 1288 620 1 1337 2 0 1315 618 1 1338 2 0 1280 615 1 1339 2 0 1255 615 1 1340 2 0 1281 614 1 1341 2 0 1252 612 1 1342 2 0 1304 611 1 1343 2 0 1282 611 1 1344 2 0 1284 609 1 1345 2 0 1283 609 1 1346 2 0 1259 609 1 1347 2 0 1305 605 1 1348 2 0 1286 605 1 1349 2 0 1263 605 1 1350 2 0 1308 604 1 1351 2 0 1298 604 1 1352 2 0 1314 603 1 1353 2 0 1245 603 1 1354 2 0 1264 602 1 1355 2 0 1287 598 1 1356 2 0 1260 597 1 1357 2 0 1296 596 1 1358 2 0 1295 596 1 1359 2 0 1294 596 1 1360 2 0 1250 595 1 1361 2 0 1285 590 1 1362 2 0 1292 588 1 1363 2 0 1291 588 1 1364 2 0 1261 585 1 1365 2 0 1271 582 1 1366 2 0 1270 582 1 1367 2 0 1269 582 1 1368 2 0 1311 581 1 1369 2 0 1277 581 1 1370 2 0 1316 579 1 1371 2 0 1265 579 1 1372 2 0 1309 576 1 1373 2 0 1293 573 1 1374 2 0 1302 572 1 1375 2 0 1275 571 1 1376 2 0 1278 570 1 1377 2 0 1247 569 1 1378 2 0 1246 
569 1 1379 2 0 1251 564 1 1380 2 0 1313 563 1 1381 2 0 1312 563 1 1382 2 0 1248 563 1 1383 2 0 1299 562 1 1384 2 0 1249 562 1 1385 2 0 1303 559 1 1386 2 0 1273 559 1 1387 2 0 1272 559 1 1388 2 0 1310 556 1 1389 2 0 1266 556 1 1390 2 0 1297 551 1 1391 2 0 1300 545 1 1392 2 0 1289 545 1 1393 2 0 1267 545 1 1394 2 0 1253 529 1 1395 2 0 1320 726 1 1396 2 0 1321 615 1 1320 3 0 1396 1339 1322 1 1397 3 0 1396 968 965 1 1398 3 0 1396 1338 1357 1 1244 3 0 1321 1255 1254 1 1399 3 0 1321 932 929 1 1400 3 0 1321 1280 1296 1 1396 3 0 1320 1322 1339 1 1401 2 0 1322 726 1 1402 2 0 1325 725 1 1403 2 0 1324 725 1 1404 2 0 1323 725 1 1405 2 0 1328 724 1 1406 2 0 1327 724 1 1407 2 0 1326 724 1 1408 2 0 1330 723 1 1409 2 0 1329 723 1 1410 2 0 1332 722 1 1411 2 0 1331 722 1 1412 2 0 1333 721 1 1413 2 0 1334 720 1 1414 2 0 1336 719 1 1415 2 0 1335 719 1 1416 2 0 1337 717 1 1417 2 0 1339 714 1 1418 2 0 1338 714 1 1419 2 0 1340 713 1 1420 2 0 1341 711 1 1421 2 0 1343 710 1 1422 2 0 1342 710 1 1423 2 0 1346 708 1 1424 2 0 1345 708 1 1425 2 0 1344 708 1 1426 2 0 1349 704 1 1427 2 0 1348 704 1 1428 2 0 1347 704 1 1429 2 0 1351 703 1 1430 2 0 1350 703 1 1431 2 0 1353 702 1 1432 2 0 1352 702 1 1433 2 0 1354 701 1 1434 2 0 1355 697 1 1435 2 0 1356 696 1 1436 2 0 1359 695 1 1437 2 0 1358 695 1 1438 2 0 1357 695 1 1439 2 0 1360 694 1 1440 2 0 1361 689 1 1441 2 0 1363 687 1 1442 2 0 1362 687 1 1443 2 0 1364 684 1 1444 2 0 1367 681 1 1445 2 0 1366 681 1 1446 2 0 1365 681 1 1447 2 0 1369 680 1 1448 2 0 1368 680 1 1449 2 0 1371 678 1 1450 2 0 1370 678 1 1451 2 0 1372 675 1 1452 2 0 1373 672 1 1453 2 0 1374 671 1 1454 2 0 1375 670 1 1455 2 0 1376 669 1 1456 2 0 1378 668 1 1457 2 0 1377 668 1 1458 2 0 1379 663 1 1459 2 0 1382 662 1 1460 2 0 1381 662 1 1461 2 0 1380 662 1 1462 2 0 1384 661 1 1463 2 0 1383 661 1 1464 2 0 1387 658 1 1465 2 0 1386 658 1 1466 2 0 1385 658 1 1467 2 0 1389 655 1 1468 2 0 1388 655 1 1469 2 0 1390 650 1 1470 2 0 1393 644 1 1471 2 0 1392 644 1 1472 2 0 1391 644 1 1473 2 0 1394 
628 1 1474 2 0 1395 825 1 1475 2 0 1397 720 1 1476 2 0 1396 714 1 1477 2 0 1398 695 1 1397 2 0 1399 621 1 1398 2 0 1400 596 1 1478 3 0 1477 1437 1451 1 1476 3 0 1477 1438 1418 1 1479 3 0 1477 1436 1017 1 1395 3 0 1476 1417 1401 1 1475 3 0 1476 1004 1001 1 1477 3 0 1476 1418 1438 1 1476 3 0 1475 1001 1004 1 1480 3 0 1400 1295 1309 1 1321 3 0 1400 1296 1280 1 1481 3 0 1400 1294 945 1 1321 3 0 1399 929 932 1 1482 3 0 1398 1358 1372 1 1396 3 0 1398 1357 1338 1 1483 3 0 1398 1359 979 1 1396 3 0 1397 965 968 1 1476 3 0 1395 1401 1417 1 1484 2 0 1401 825 1 1485 2 0 1404 824 1 1486 2 0 1403 824 1 1487 2 0 1402 824 1 1488 2 0 1407 823 1 1489 2 0 1406 823 1 1490 2 0 1405 823 1 1491 2 0 1409 822 1 1492 2 0 1408 822 1 1493 2 0 1411 821 1 1494 2 0 1410 821 1 1495 2 0 1412 820 1 1496 2 0 1413 819 1 1497 2 0 1415 818 1 1498 2 0 1414 818 1 1499 2 0 1416 816 1 1500 2 0 1418 813 1 1501 2 0 1417 813 1 1502 2 0 1419 812 1 1503 2 0 1420 810 1 1504 2 0 1422 809 1 1505 2 0 1421 809 1 1506 2 0 1425 807 1 1507 2 0 1424 807 1 1508 2 0 1423 807 1 1509 2 0 1428 803 1 1510 2 0 1427 803 1 1511 2 0 1426 803 1 1512 2 0 1430 802 1 1513 2 0 1429 802 1 1514 2 0 1432 801 1 1515 2 0 1431 801 1 1516 2 0 1433 800 1 1517 2 0 1434 796 1 1518 2 0 1435 795 1 1519 2 0 1438 794 1 1520 2 0 1437 794 1 1521 2 0 1436 794 1 1522 2 0 1439 793 1 1523 2 0 1440 788 1 1524 2 0 1442 786 1 1525 2 0 1441 786 1 1526 2 0 1443 783 1 1527 2 0 1446 780 1 1528 2 0 1445 780 1 1529 2 0 1444 780 1 1530 2 0 1448 779 1 1531 2 0 1447 779 1 1532 2 0 1450 777 1 1533 2 0 1449 777 1 1534 2 0 1451 774 1 1535 2 0 1452 771 1 1536 2 0 1453 770 1 1537 2 0 1454 769 1 1538 2 0 1455 768 1 1539 2 0 1457 767 1 1540 2 0 1456 767 1 1541 2 0 1458 762 1 1542 2 0 1461 761 1 1543 2 0 1460 761 1 1544 2 0 1459 761 1 1545 2 0 1463 760 1 1546 2 0 1462 760 1 1547 2 0 1466 757 1 1548 2 0 1465 757 1 1549 2 0 1464 757 1 1550 2 0 1468 754 1 1551 2 0 1467 754 1 1552 2 0 1469 749 1 1553 2 0 1472 743 1 1554 2 0 1471 743 1 1555 2 0 1470 743 1 1556 2 0 1473 727 1 
1557 2 0 1474 924 1 1558 2 0 1475 819 1 1559 2 0 1476 813 1 1560 2 0 1479 799 1 1561 2 0 1477 794 1 1562 2 0 1478 774 1 1479 2 0 1483 700 1 1478 2 0 1482 675 1 1483 2 0 1481 601 1 1482 2 0 1480 576 1 1561 3 0 1562 1534 1520 1 1562 3 0 1561 1520 1534 1 1559 3 0 1561 1519 1500 1 1560 3 0 1561 1521 1051 1 1561 3 0 1560 1051 1521 1 1563 3 0 1560 1053 1035 1 1474 3 0 1559 1501 1484 1 1558 3 0 1559 1040 1037 1 1561 3 0 1559 1500 1519 1 1559 3 0 1558 1037 1040 1 1398 3 0 1483 979 1359 1 1564 3 0 1483 981 963 1 1398 3 0 1482 1372 1358 1 1400 3 0 1481 945 1294 1 1565 3 0 1481 943 927 1 1400 3 0 1480 1309 1295 1 1477 3 0 1479 1017 1436 1 1566 3 0 1479 1015 999 1 1477 3 0 1478 1451 1437 1 1559 3 0 1474 1484 1501 1 1567 2 0 1484 924 1 1568 2 0 1487 923 1 1569 2 0 1486 923 1 1570 2 0 1485 923 1 1571 2 0 1490 922 1 1572 2 0 1489 922 1 1573 2 0 1488 922 1 1574 2 0 1492 921 1 1575 2 0 1491 921 1 1576 2 0 1494 920 1 1577 2 0 1493 920 1 1578 2 0 1495 919 1 1579 2 0 1496 918 1 1580 2 0 1498 917 1 1581 2 0 1497 917 1 1582 2 0 1499 915 1 1583 2 0 1501 912 1 1584 2 0 1500 912 1 1585 2 0 1502 911 1 1586 2 0 1503 909 1 1587 2 0 1505 908 1 1588 2 0 1504 908 1 1589 2 0 1508 906 1 1590 2 0 1507 906 1 1591 2 0 1506 906 1 1592 2 0 1511 902 1 1593 2 0 1510 902 1 1594 2 0 1509 902 1 1595 2 0 1513 901 1 1596 2 0 1512 901 1 1597 2 0 1515 900 1 1598 2 0 1514 900 1 1599 2 0 1516 899 1 1600 2 0 1517 895 1 1601 2 0 1518 894 1 1602 2 0 1521 893 1 1603 2 0 1520 893 1 1604 2 0 1519 893 1 1605 2 0 1522 892 1 1606 2 0 1523 887 1 1607 2 0 1525 885 1 1608 2 0 1524 885 1 1609 2 0 1526 882 1 1610 2 0 1529 879 1 1611 2 0 1528 879 1 1612 2 0 1527 879 1 1613 2 0 1531 878 1 1614 2 0 1530 878 1 1615 2 0 1533 876 1 1616 2 0 1532 876 1 1617 2 0 1534 873 1 1618 2 0 1535 870 1 1619 2 0 1536 869 1 1620 2 0 1537 868 1 1621 2 0 1538 867 1 1622 2 0 1540 866 1 1623 2 0 1539 866 1 1624 2 0 1541 861 1 1625 2 0 1544 860 1 1626 2 0 1543 860 1 1627 2 0 1542 860 1 1628 2 0 1546 859 1 1629 2 0 1545 859 1 1630 2 0 1549 856 1 1631 2 
0 1548 856 1 1632 2 0 1547 856 1 1633 2 0 1551 853 1 1634 2 0 1550 853 1 1635 2 0 1552 848 1 1636 2 0 1555 842 1 1637 2 0 1554 842 1 1638 2 0 1553 842 1 1639 2 0 1556 826 1 1640 2 0 1563 923 1 1641 2 0 1558 918 1 1642 2 0 1559 912 1 1643 2 0 1560 898 1 1644 2 0 1561 893 1 1645 2 0 1562 873 1 1563 2 0 1566 824 1 1566 2 0 1564 725 1 1564 2 0 1565 626 1 1644 3 0 1645 1617 1603 1 1645 3 0 1644 1603 1617 1 1642 3 0 1644 1604 1584 1 1643 3 0 1644 1602 1089 1 1644 3 0 1643 1089 1602 1 1640 3 0 1643 1087 1071 1 1557 3 0 1642 1583 1567 1 1641 3 0 1642 1076 1073 1 1644 3 0 1642 1584 1604 1 1642 3 0 1641 1073 1076 1 1646 3 0 1640 1569 1079 1 1643 3 0 1640 1071 1087 1 1647 3 0 1566 1403 1007 1 1479 3 0 1566 999 1015 1 1648 3 0 1565 1257 935 1 1481 3 0 1565 927 943 1 1649 3 0 1564 1324 969 1 1483 3 0 1564 963 981 1 1650 3 0 1563 1486 1041 1 1560 3 0 1563 1035 1053 1 1642 3 0 1557 1567 1583 1 1646 2 0 1650 911 1 1650 2 0 1647 812 1 1647 2 0 1649 713 1 1649 2 0 1648 614 1 1563 3 0 1650 1041 1486 1 1651 3 0 1650 1042 1506 1 1564 3 0 1649 969 1324 1 1652 3 0 1649 970 1344 1 1565 3 0 1648 935 1257 1 1653 3 0 1648 934 1284 1 1566 3 0 1647 1007 1403 1 1654 3 0 1647 1006 1425 1 1640 3 0 1646 1079 1569 1 1655 3 0 1646 1078 1591 1 1655 2 0 1651 906 1 1651 2 0 1654 807 1 1654 2 0 1652 708 1 1652 2 0 1653 609 1 1646 3 0 1655 1591 1078 1 1656 3 0 1655 1589 1609 1 1647 3 0 1654 1425 1006 1 1657 3 0 1654 1423 1443 1 1648 3 0 1653 1284 934 1 1658 3 0 1653 1259 1261 1 1649 3 0 1652 1344 970 1 1659 3 0 1652 1346 1364 1 1650 3 0 1651 1506 1042 1 1660 3 0 1651 1508 1526 1 1656 2 0 1660 882 1 1660 2 0 1657 783 1 1657 2 0 1659 684 1 1659 2 0 1658 585 1 1651 3 0 1660 1526 1508 1 1661 3 0 1660 1056 1045 1 1652 3 0 1659 1364 1346 1 1662 3 0 1659 984 973 1 1653 3 0 1658 1261 1259 1 1663 3 0 1658 948 937 1 1654 3 0 1657 1443 1423 1 1664 3 0 1657 1020 1009 1 1655 3 0 1656 1609 1589 1 1665 3 0 1656 1092 1081 1 1665 2 0 1661 908 1 1661 2 0 1664 809 1 1664 2 0 1662 710 1 1662 2 0 1663 611 1 1656 3 0 1665 
1081 1092 1 1666 3 0 1665 1587 1606 1 1667 3 0 1665 1588 1594 1 1657 3 0 1664 1009 1020 1 1668 3 0 1664 1421 1440 1 1669 3 0 1664 1422 1428 1 1658 3 0 1663 937 948 1 1670 3 0 1663 1282 1285 1 1671 3 0 1663 1304 1305 1 1659 3 0 1662 973 984 1 1672 3 0 1662 1343 1361 1 1673 3 0 1662 1342 1347 1 1660 3 0 1661 1045 1056 1 1674 3 0 1661 1505 1523 1 1675 3 0 1661 1504 1509 1 1667 2 0 1675 902 1 1666 2 0 1674 887 1 1675 2 0 1669 803 1 1674 2 0 1668 788 1 1669 2 0 1673 704 1 1668 2 0 1672 689 1 1673 2 0 1671 605 1 1672 2 0 1670 590 1 1676 3 0 1675 1511 1036 1 1677 3 0 1675 1510 1054 1 1661 3 0 1675 1509 1504 1 1678 3 0 1675 1047 1038 1 1661 3 0 1674 1523 1505 1 1679 3 0 1673 1349 964 1 1680 3 0 1673 1348 982 1 1662 3 0 1673 1347 1342 1 1681 3 0 1673 975 966 1 1662 3 0 1672 1361 1343 1 1682 3 0 1671 1263 928 1 1683 3 0 1671 1286 946 1 1663 3 0 1671 1305 1304 1 1684 3 0 1671 939 930 1 1663 3 0 1670 1285 1282 1 1685 3 0 1669 1426 1000 1 1686 3 0 1669 1427 1018 1 1664 3 0 1669 1428 1422 1 1687 3 0 1669 1011 1002 1 1664 3 0 1668 1440 1421 1 1688 3 0 1667 1592 1072 1 1689 3 0 1667 1593 1090 1 1665 3 0 1667 1594 1588 1 1690 3 0 1667 1083 1074 1 1665 3 0 1666 1606 1587 1 1688 2 0 1676 920 1 1690 2 0 1678 917 1 1689 2 0 1677 895 1 1676 2 0 1685 821 1 1678 2 0 1687 818 1 1677 2 0 1686 796 1 1685 2 0 1679 722 1 1687 2 0 1681 719 1 1686 2 0 1680 697 1 1679 2 0 1682 623 1 1681 2 0 1684 620 1 1680 2 0 1683 598 1 1691 3 0 1690 1580 1616 1 1667 3 0 1690 1074 1083 1 1667 3 0 1689 1090 1593 1 1667 3 0 1688 1072 1592 1 1691 3 0 1688 1576 1615 1 1692 3 0 1687 1414 1450 1 1669 3 0 1687 1002 1011 1 1669 3 0 1686 1018 1427 1 1669 3 0 1685 1000 1426 1 1692 3 0 1685 1410 1449 1 1693 3 0 1684 1288 1316 1 1671 3 0 1684 930 939 1 1671 3 0 1683 946 1286 1 1671 3 0 1682 928 1263 1 1693 3 0 1682 1262 1265 1 1694 3 0 1681 1336 1370 1 1673 3 0 1681 966 975 1 1673 3 0 1680 982 1348 1 1673 3 0 1679 964 1349 1 1694 3 0 1679 1332 1371 1 1695 3 0 1678 1498 1532 1 1675 3 0 1678 1038 1047 1 1675 3 0 1677 1054 
1510 1 1675 3 0 1676 1036 1511 1 1695 3 0 1676 1494 1533 1 1691 2 0 1695 876 1 1695 2 0 1692 777 1 1692 2 0 1694 678 1 1694 2 0 1693 579 1 1696 3 0 1695 1057 1554 1 1678 3 0 1695 1532 1498 1 1676 3 0 1695 1533 1494 1 1697 3 0 1694 985 1392 1 1681 3 0 1694 1370 1336 1 1679 3 0 1694 1371 1332 1 1698 3 0 1693 949 1289 1 1684 3 0 1693 1316 1288 1 1682 3 0 1693 1265 1262 1 1699 3 0 1692 1021 1471 1 1687 3 0 1692 1450 1414 1 1685 3 0 1692 1449 1410 1 1700 3 0 1691 1093 1637 1 1690 3 0 1691 1616 1580 1 1688 3 0 1691 1615 1576 1 1700 2 0 1696 842 1 1696 2 0 1699 743 1 1699 2 0 1697 644 1 1697 2 0 1698 545 1 1701 3 0 1700 1636 1098 1 1691 3 0 1700 1637 1093 1 1702 3 0 1699 1470 1026 1 1692 3 0 1699 1471 1021 1 1703 3 0 1698 1267 954 1 1693 3 0 1698 1289 949 1 1704 3 0 1697 1393 990 1 1694 3 0 1697 1392 985 1 1705 3 0 1696 1555 1062 1 1695 3 0 1696 1554 1057 1 1701 2 0 1705 853 1 1705 2 0 1702 754 1 1702 2 0 1704 655 1 1704 2 0 1703 556 1 1706 3 0 1705 1550 1516 1 1696 3 0 1705 1062 1555 1 1707 3 0 1705 1551 1512 1 1708 3 0 1704 1388 1354 1 1697 3 0 1704 990 1393 1 1709 3 0 1704 1389 1350 1 1710 3 0 1703 1310 1264 1 1698 3 0 1703 954 1267 1 1711 3 0 1703 1266 1308 1 1712 3 0 1702 1468 1433 1 1699 3 0 1702 1026 1470 1 1713 3 0 1702 1467 1430 1 1714 3 0 1701 1634 1599 1 1700 3 0 1701 1098 1636 1 1715 3 0 1701 1633 1596 1 1715 2 0 1707 901 1 1714 2 0 1706 899 1 1707 2 0 1713 802 1 1706 2 0 1712 800 1 1713 2 0 1709 703 1 1712 2 0 1708 701 1 1709 2 0 1711 604 1 1708 2 0 1710 602 1 1701 3 0 1715 1596 1633 1 1701 3 0 1714 1599 1634 1 1702 3 0 1713 1430 1467 1 1702 3 0 1712 1433 1468 1 1703 3 0 1711 1308 1266 1 1703 3 0 1710 1264 1310 1 1704 3 0 1709 1350 1389 1 1704 3 0 1708 1354 1388 1 1705 3 0 1707 1512 1551 1 1705 3 0 1706 1516 1550 1 1716 1 0 1237 1 1717 1 0 1202 1 1718 1 0 1157 1 1719 1 0 1124 1 1720 1 0 464 1 1721 2 1 1722 1720 1 1721 2 1 1723 1719 1 1721 2 1 1724 1718 1 1721 2 1 1725 1717 1 1721 2 1 1726 1716 1 1727 2 0 1720 563 1 1728 2 0 1719 581 1 1729 2 0 1718 603 1 1730 
2 0 1717 571 1 1731 2 0 1716 562 3 44 535 583 543 565 552 557 546 560 606 591 530 536 553 566 531 574 616 577 540 586 599 547 537 584 544 567 554 558 548 561 607 592 532 538 555 568 533 575 617 578 541 587 600 549 1 0 1732 2 1733 44 0 1 535 583 543 565 552 557 546 560 606 591 530 536 553 566 531 574 616 577 540 586 599 547 537 584 544 567 554 558 548 561 607 592 532 538 555 568 533 575 617 578 541 587 600 549 1 1 2 1 1733 1732 2 1734 44 0 2 535 583 543 565 552 557 546 560 606 591 530 536 553 566 531 574 616 577 540 586 599 547 537 584 544 567 554 558 548 561 607 592 532 538 555 568 533 575 617 578 541 587 600 549 1 1 2 0 1734 1732 1 1732 1 0 1721 1 1735 2 1 1736 1731 1 1735 2 1 1737 1730 1 1735 2 1 1738 1729 1 1735 2 1 1739 1728 1 1735 2 1 1740 1727 1 1741 2 0 1731 661 1 1742 2 0 1730 670 1 1743 2 0 1729 702 1 1744 2 0 1728 680 1 1745 2 0 1727 662 1 1746 1 0 532 1 1747 1 0 592 1 1748 1 0 607 1 1749 1 0 607 1 1750 1 0 607 1 1751 1 0 607 1 1752 1 0 561 1 1753 1 0 561 1 1754 1 0 561 1 1755 1 0 561 1 1756 1 0 561 1 1757 1 0 561 1 1758 1 0 561 1 1759 1 0 548 1 1760 1 0 548 1 1761 1 0 548 1 1762 1 0 548 1 1763 1 0 548 1 1764 1 0 558 1 1765 1 0 558 1 1766 1 0 558 1 1767 1 0 558 1 1768 1 0 558 1 1769 1 0 558 1 1770 1 0 554 1 1771 1 0 554 1 1772 1 0 554 1 1773 1 0 554 1 1774 1 0 554 1 1775 1 0 567 1 1776 1 0 544 1 1777 1 0 537 1 1778 1 0 530 1 1779 1 0 591 1 1780 1 0 606 1 1781 1 0 606 1 1782 1 0 606 1 1783 1 0 606 1 1784 1 0 560 1 1785 1 0 560 1 1786 1 0 560 1 1787 1 0 560 1 1788 1 0 560 1 1789 1 0 560 1 1790 1 0 560 1 1791 1 0 546 1 1792 1 0 546 1 1793 1 0 546 1 1794 1 0 546 1 1795 1 0 546 1 1796 1 0 557 1 1797 1 0 557 1 1798 1 0 557 1 1799 1 0 557 1 1800 1 0 557 1 1801 1 0 557 1 1802 1 0 552 1 1803 1 0 552 1 1804 1 0 552 1 1805 1 0 552 1 1806 1 0 552 1 1807 1 0 565 1 1808 1 0 543 1 1809 1 0 535 1 1810 1 0 549 1 1811 1 0 549 1 1812 1 0 549 1 1813 1 0 549 1 1814 1 0 549 1 1815 1 0 600 1 1816 1 0 600 1 1817 1 0 587 1 1818 1 0 587 1 1819 1 0 587 1 1820 1 0 587 1 1821 1 0 587 
1 1822 1 0 587 1 1823 1 0 541 1 1824 1 0 578 1 1825 1 0 617 1 1826 1 0 617 1 1827 1 0 533 1 1828 1 0 533 1 1829 1 0 533 1 1830 1 0 533 1 1831 1 0 533 1 1832 1 0 533 1 1833 1 0 568 1 1834 1 0 568 1 1835 1 0 568 1 1836 1 0 568 1 1837 1 0 555 1 1838 1 0 555 1 1839 1 0 555 1 1840 1 0 555 1 1841 1 0 555 1 1842 1 0 538 1 1843 1 0 538 1 1844 1 0 538 1 1845 1 0 538 1 1846 1 0 547 1 1847 1 0 547 1 1848 1 0 547 1 1849 1 0 547 1 1850 1 0 547 1 1851 1 0 599 1 1852 1 0 599 1 1853 1 0 586 1 1854 1 0 586 1 1855 1 0 586 1 1856 1 0 586 1 1857 1 0 586 1 1858 1 0 586 1 1859 1 0 540 1 1860 1 0 577 1 1861 1 0 616 1 1862 1 0 616 1 1863 1 0 531 1 1864 1 0 531 1 1865 1 0 531 1 1866 1 0 531 1 1867 1 0 531 1 1868 1 0 531 1 1869 1 0 566 1 1870 1 0 566 1 1871 1 0 566 1 1872 1 0 566 1 1873 1 0 553 1 1874 1 0 553 1 1875 1 0 553 1 1876 1 0 553 1 1877 1 0 553 1 1878 1 0 536 1 1879 1 0 536 1 1880 1 0 536 1 1881 1 0 536 3 44 634 682 642 664 651 656 645 659 705 690 629 635 652 665 630 673 715 676 639 685 698 646 636 683 643 666 653 657 647 660 706 691 631 637 654 667 632 674 716 677 640 686 699 648 1 0 1882 2 1883 44 0 1 634 682 642 664 651 656 645 659 705 690 629 635 652 665 630 673 715 676 639 685 698 646 636 683 643 666 653 657 647 660 706 691 631 637 654 667 632 674 716 677 640 686 699 648 1 1 2 1 1883 1882 2 1884 44 0 2 634 682 642 664 651 656 645 659 705 690 629 635 652 665 630 673 715 676 639 685 698 646 636 683 643 666 653 657 647 660 706 691 631 637 654 667 632 674 716 677 640 686 699 648 1 1 2 0 1884 1882 1 1882 1 0 1735 1 1885 2 1 1886 1745 1 1885 2 1 1887 1744 1 1885 2 1 1888 1743 1 1885 2 1 1889 1742 1 1885 2 1 1890 1741 1 1891 2 0 1745 761 1 1892 2 0 1744 779 1 1893 2 0 1743 801 1 1894 2 0 1742 769 1 1895 2 0 1741 760 1 1731 2 0 1720 1875 1 1730 2 0 1720 1840 1 1729 2 0 1720 1795 1 1728 2 0 1720 1762 1 1896 2 0 1719 1870 1 1897 2 0 1719 1836 1 1727 2 0 1719 1792 1 1898 2 0 1719 1760 1 1899 2 0 1718 1880 1 1900 2 0 1718 1842 1 1901 2 0 1718 1791 1 1727 2 0 1718 1761 1 1727 2 0 1717 1876 
1 1902 2 0 1717 1841 1 1900 2 0 1717 1785 1 1897 2 0 1717 1757 1 1903 2 0 1716 1874 1 1727 2 0 1716 1839 1 1899 2 0 1716 1798 1 1896 2 0 1716 1767 1 1320 2 0 1321 1862 1 1396 2 0 1399 1861 1 1398 2 0 1480 1860 1 1564 2 0 1648 1858 1 1649 2 0 1653 1857 1 1662 2 0 1670 1856 1 1652 2 0 1658 1855 1 1904 2 0 1565 1854 1 1659 2 0 1663 1853 1 1679 2 0 1671 1852 1 1673 2 0 1683 1851 1 1708 2 0 1703 1850 1 1694 2 0 1684 1849 1 1704 2 0 1698 1848 1 1697 2 0 1693 1847 1 1397 2 0 1321 1826 1 1396 2 0 1244 1825 1 1482 2 0 1400 1824 1 1649 2 0 1565 1821 1 1662 2 0 1658 1820 1 1652 2 0 1648 1819 1 1672 2 0 1663 1818 1 1659 2 0 1653 1817 1 1673 2 0 1682 1816 1 1680 2 0 1671 1815 1 1681 2 0 1693 1814 1 1694 2 0 1698 1813 1 1704 2 0 1710 1812 1 1697 2 0 1703 1811 1 961 2 0 1244 1807 1 1396 2 0 1400 1806 1 1483 2 0 1565 1805 1 1398 2 0 1481 1804 1 1397 2 0 1480 1801 1 1649 2 0 1711 1800 1 1709 2 0 1703 1799 1 1905 2 0 1653 1794 1 1679 2 0 1693 1790 1 1659 2 0 1682 1788 1 1906 2 0 1658 1787 1 1662 2 0 1671 1782 1 1673 2 0 1684 1781 1 1672 2 0 1683 1779 1 1320 2 0 925 1775 1 1564 2 0 1481 1774 1 1483 2 0 1400 1773 1 1398 2 0 1321 1772 1 1907 2 0 1710 1770 1 1709 2 0 1648 1769 1 1482 2 0 1399 1768 1 1704 2 0 1711 1765 1 1908 2 0 1703 1764 1 1738 2 0 1698 1759 1 1679 2 0 1658 1758 1 1694 2 0 1682 1754 1 1909 2 0 1693 1752 1 1681 2 0 1671 1750 1 1673 2 0 1663 1749 1 1910 2 0 1684 1748 1 1680 2 0 1670 1747 1 1352 2 0 1303 1881 1 977 2 0 1273 1881 1 978 2 0 1272 1881 1 1353 2 0 953 1881 1 1911 2 0 1314 1880 1 1912 2 0 1245 1880 1 1377 2 0 942 1880 1 1378 2 0 941 1880 1 1385 2 0 1318 1879 1 989 2 0 1268 1879 1 994 2 0 1271 1878 1 995 2 0 1270 1878 1 996 2 0 1269 1878 1 986 2 0 1274 1877 1 988 2 0 1275 1876 1 1382 2 0 950 1876 1 1913 2 0 1313 1875 1 1914 2 0 1312 1875 1 1383 2 0 1248 1875 1 1384 2 0 952 1875 1 1915 2 0 1299 1874 1 1916 2 0 1249 1874 1 1917 2 0 1307 1873 1 1918 2 0 1306 1873 1 974 2 0 1276 1873 1 983 2 0 1278 1872 1 1360 2 0 951 1872 1 1369 2 0 1315 1871 1 1919 2 0 1311 1870 1 
987 2 0 1277 1870 1 1920 2 0 1250 1869 1 1379 2 0 947 1869 1 1341 2 0 936 1868 1 1921 2 0 1253 1866 1 962 2 0 1280 1862 1 1922 2 0 1255 1862 1 1322 2 0 932 1862 1 968 2 0 1279 1861 1 1339 2 0 929 1861 1 1923 2 0 1309 1860 1 1924 2 0 1302 1859 1 963 2 0 1281 1858 1 1323 2 0 935 1858 1 1324 2 0 934 1858 1 1325 2 0 933 1858 1 969 2 0 1284 1857 1 971 2 0 1283 1857 1 970 2 0 1259 1857 1 973 2 0 1285 1856 1 1344 2 0 1261 1855 1 1346 2 0 948 1855 1 1925 2 0 1258 1854 1 1926 2 0 1257 1854 1 1927 2 0 1256 1854 1 1356 2 0 927 1854 1 1928 2 0 1304 1853 1 984 2 0 1282 1853 1 1364 2 0 937 1853 1 1929 2 0 1305 1852 1 964 2 0 1286 1852 1 1331 2 0 1263 1852 1 1332 2 0 939 1852 1 975 2 0 1287 1851 1 1349 2 0 946 1851 1 1930 2 0 1310 1850 1 1931 2 0 1266 1850 1 1354 2 0 954 1850 1 1932 2 0 1317 1849 1 985 2 0 1288 1849 1 1371 2 0 930 1849 1 1933 2 0 1300 1848 1 990 2 0 1289 1848 1 1388 2 0 1267 1848 1 1389 2 0 955 1848 1 1392 2 0 1316 1847 1 991 2 0 1265 1847 1 1393 2 0 949 1847 1 967 2 0 1290 1846 1 1329 2 0 1303 1845 1 1934 2 0 1273 1845 1 1935 2 0 1272 1845 1 1330 2 0 953 1845 1 977 2 0 1247 1844 1 978 2 0 1246 1844 1 1365 2 0 960 1843 1 1366 2 0 959 1843 1 1367 2 0 958 1843 1 1385 2 0 1314 1842 1 989 2 0 1245 1842 1 1386 2 0 942 1842 1 1387 2 0 941 1842 1 1936 2 0 1275 1841 1 1333 2 0 950 1841 1 1937 2 0 1313 1840 1 1938 2 0 1312 1840 1 986 2 0 1248 1840 1 1375 2 0 952 1840 1 1382 2 0 1299 1839 1 988 2 0 1249 1839 1 1913 2 0 1297 1838 1 1328 2 0 938 1837 1 1939 2 0 1311 1836 1 1337 2 0 1277 1836 1 983 2 0 1251 1835 1 1940 2 0 1278 1834 1 1369 2 0 951 1834 1 987 2 0 1250 1833 1 1376 2 0 947 1833 1 972 2 0 1252 1832 1 1941 2 0 1253 1827 1 1942 2 0 1280 1826 1 965 2 0 1255 1826 1 1334 2 0 932 1826 1 968 2 0 1254 1825 1 1338 2 0 926 1825 1 1943 2 0 1296 1824 1 1944 2 0 1295 1824 1 1945 2 0 1294 1824 1 963 2 0 1260 1822 1 969 2 0 1258 1821 1 970 2 0 1257 1821 1 971 2 0 1256 1821 1 1340 2 0 927 1821 1 973 2 0 1261 1820 1 1343 2 0 948 1820 1 1946 2 0 1281 1819 1 1344 2 0 935 1819 1 
1346 2 0 934 1819 1 1345 2 0 933 1819 1 1947 2 0 1304 1818 1 1948 2 0 1282 1818 1 1361 2 0 937 1818 1 1364 2 0 1284 1817 1 1928 2 0 1283 1817 1 984 2 0 1259 1817 1 1349 2 0 1301 1816 1 975 2 0 1262 1816 1 1348 2 0 928 1816 1 1949 2 0 1305 1815 1 1950 2 0 1286 1815 1 982 2 0 1263 1815 1 1355 2 0 939 1815 1 1951 2 0 1316 1814 1 966 2 0 1265 1814 1 1336 2 0 949 1814 1 1932 2 0 1300 1813 1 1370 2 0 1289 1813 1 985 2 0 1267 1813 1 1371 2 0 955 1813 1 990 2 0 1264 1812 1 1393 2 0 1310 1811 1 991 2 0 1266 1811 1 1392 2 0 954 1811 1 1952 2 0 931 1810 1 995 2 0 1307 1809 1 996 2 0 1306 1809 1 994 2 0 1276 1809 1 1362 2 0 957 1808 1 1363 2 0 956 1808 1 1953 2 0 1254 1807 1 1373 2 0 926 1807 1 1954 2 0 1296 1806 1 968 2 0 1295 1806 1 1338 2 0 1294 1806 1 1955 2 0 1258 1805 1 980 2 0 1257 1805 1 981 2 0 1256 1805 1 979 2 0 927 1805 1 1357 2 0 945 1804 1 1358 2 0 944 1804 1 1359 2 0 943 1804 1 1915 2 0 1250 1803 1 1390 2 0 947 1803 1 965 2 0 1309 1801 1 971 2 0 1308 1800 1 970 2 0 1298 1800 1 969 2 0 940 1800 1 976 2 0 1310 1799 1 1956 2 0 1266 1799 1 1351 2 0 954 1799 1 1911 2 0 1299 1798 1 1912 2 0 1249 1798 1 1913 2 0 1278 1797 1 1383 2 0 951 1797 1 990 2 0 1247 1796 1 1933 2 0 1246 1796 1 977 2 0 1313 1795 1 978 2 0 1312 1795 1 1352 2 0 1248 1795 1 1353 2 0 952 1795 1 1957 2 0 1284 1794 1 1958 2 0 1283 1794 1 1924 2 0 1259 1794 1 988 2 0 1311 1792 1 1382 2 0 1277 1792 1 991 2 0 1314 1791 1 1393 2 0 1245 1791 1 1392 2 0 942 1791 1 1391 2 0 941 1791 1 964 2 0 1316 1790 1 1929 2 0 1265 1790 1 1331 2 0 949 1790 1 1364 2 0 1301 1788 1 1928 2 0 1262 1788 1 984 2 0 928 1788 1 1959 2 0 1261 1787 1 1374 2 0 948 1787 1 986 2 0 1315 1786 1 989 2 0 1275 1785 1 1385 2 0 950 1785 1 1371 2 0 1303 1784 1 1370 2 0 1273 1784 1 1932 2 0 1272 1784 1 985 2 0 953 1784 1 1329 2 0 1274 1783 1 1960 2 0 1305 1782 1 1343 2 0 1286 1782 1 973 2 0 1263 1782 1 1342 2 0 939 1782 1 975 2 0 1317 1781 1 1349 2 0 1288 1781 1 1347 2 0 930 1781 1 966 2 0 1318 1780 1 1336 2 0 1268 1780 1 1947 2 0 1287 1779 1 
1361 2 0 946 1779 1 967 2 0 1319 1778 1 1328 2 0 960 1777 1 1326 2 0 959 1777 1 1327 2 0 958 1777 1 992 2 0 1292 1776 1 993 2 0 1291 1776 1 962 2 0 1293 1775 1 963 2 0 945 1774 1 1324 2 0 944 1774 1 1325 2 0 943 1774 1 979 2 0 1296 1773 1 980 2 0 1295 1773 1 981 2 0 1294 1773 1 1359 2 0 1280 1772 1 1923 2 0 1255 1772 1 1358 2 0 932 1772 1 983 2 0 1297 1771 1 1961 2 0 1264 1770 1 1956 2 0 1281 1769 1 976 2 0 935 1769 1 1351 2 0 934 1769 1 1350 2 0 933 1769 1 1944 2 0 1279 1768 1 1372 2 0 929 1768 1 987 2 0 1299 1767 1 1919 2 0 1249 1767 1 1913 2 0 1247 1766 1 1914 2 0 1246 1766 1 1933 2 0 1308 1765 1 990 2 0 1298 1765 1 1388 2 0 940 1765 1 1912 2 0 1310 1764 1 1911 2 0 1266 1764 1 1377 2 0 954 1764 1 1940 2 0 1313 1762 1 1962 2 0 1312 1762 1 1369 2 0 1248 1762 1 1368 2 0 952 1762 1 1382 2 0 1314 1761 1 988 2 0 1245 1761 1 1380 2 0 942 1761 1 1381 2 0 941 1761 1 1963 2 0 1311 1760 1 1964 2 0 1277 1760 1 978 2 0 1300 1759 1 977 2 0 1289 1759 1 1353 2 0 1267 1759 1 1352 2 0 955 1759 1 1331 2 0 1261 1758 1 964 2 0 948 1758 1 1939 2 0 1275 1757 1 1337 2 0 950 1757 1 1965 2 0 1315 1756 1 984 2 0 1302 1755 1 985 2 0 1301 1754 1 1932 2 0 1262 1754 1 1370 2 0 928 1754 1 986 2 0 1303 1753 1 1937 2 0 1273 1753 1 1938 2 0 1272 1753 1 1375 2 0 953 1753 1 1386 2 0 1316 1752 1 1385 2 0 1265 1752 1 989 2 0 949 1752 1 1333 2 0 1318 1751 1 1936 2 0 1268 1751 1 966 2 0 1305 1750 1 1951 2 0 1286 1750 1 1336 2 0 1263 1750 1 1335 2 0 939 1750 1 975 2 0 1304 1749 1 1348 2 0 1282 1749 1 1349 2 0 937 1749 1 1935 2 0 1317 1748 1 1330 2 0 1288 1748 1 1329 2 0 930 1748 1 982 2 0 1285 1747 1 1966 2 0 931 1746 1 1967 1 0 631 1 1968 1 0 691 1 1969 1 0 706 1 1970 1 0 706 1 1971 1 0 706 1 1972 1 0 706 1 1973 1 0 660 1 1974 1 0 660 1 1975 1 0 660 1 1976 1 0 660 1 1977 1 0 660 1 1978 1 0 660 1 1979 1 0 660 1 1980 1 0 647 1 1981 1 0 647 1 1982 1 0 647 1 1983 1 0 647 1 1984 1 0 647 1 1985 1 0 657 1 1986 1 0 657 1 1987 1 0 657 1 1988 1 0 657 1 1989 1 0 657 1 1990 1 0 657 1 1991 1 0 653 1 1992 1 0 653 1 
1993 1 0 653 1 1994 1 0 653 1 1995 1 0 653 1 1996 1 0 666 1 1997 1 0 643 1 1998 1 0 636 1 1999 1 0 629 1 2000 1 0 690 1 2001 1 0 705 1 2002 1 0 705 1 2003 1 0 705 1 2004 1 0 705 1 2005 1 0 659 1 2006 1 0 659 1 2007 1 0 659 1 2008 1 0 659 1 2009 1 0 659 1 2010 1 0 659 1 2011 1 0 659 1 2012 1 0 645 1 2013 1 0 645 1 2014 1 0 645 1 2015 1 0 645 1 2016 1 0 645 1 2017 1 0 656 1 2018 1 0 656 1 2019 1 0 656 1 2020 1 0 656 1 2021 1 0 656 1 2022 1 0 656 1 2023 1 0 651 1 2024 1 0 651 1 2025 1 0 651 1 2026 1 0 651 1 2027 1 0 651 1 2028 1 0 664 1 2029 1 0 642 1 2030 1 0 634 1 2031 1 0 648 1 2032 1 0 648 1 2033 1 0 648 1 2034 1 0 648 1 2035 1 0 648 1 2036 1 0 699 1 2037 1 0 699 1 2038 1 0 686 1 2039 1 0 686 1 2040 1 0 686 1 2041 1 0 686 1 2042 1 0 686 1 2043 1 0 686 1 2044 1 0 640 1 2045 1 0 677 1 2046 1 0 716 1 2047 1 0 716 1 2048 1 0 632 1 2049 1 0 632 1 2050 1 0 632 1 2051 1 0 632 1 2052 1 0 632 1 2053 1 0 632 1 2054 1 0 667 1 2055 1 0 667 1 2056 1 0 667 1 2057 1 0 667 1 2058 1 0 654 1 2059 1 0 654 1 2060 1 0 654 1 2061 1 0 654 1 2062 1 0 654 1 2063 1 0 637 1 2064 1 0 637 1 2065 1 0 637 1 2066 1 0 637 1 2067 1 0 646 1 2068 1 0 646 1 2069 1 0 646 1 2070 1 0 646 1 2071 1 0 646 1 2072 1 0 698 1 2073 1 0 698 1 2074 1 0 685 1 2075 1 0 685 1 2076 1 0 685 1 2077 1 0 685 1 2078 1 0 685 1 2079 1 0 685 1 2080 1 0 639 1 2081 1 0 676 1 2082 1 0 715 1 2083 1 0 715 1 2084 1 0 630 1 2085 1 0 630 1 2086 1 0 630 1 2087 1 0 630 1 2088 1 0 630 1 2089 1 0 630 1 2090 1 0 665 1 2091 1 0 665 1 2092 1 0 665 1 2093 1 0 665 1 2094 1 0 652 1 2095 1 0 652 1 2096 1 0 652 1 2097 1 0 652 1 2098 1 0 652 1 2099 1 0 635 1 2100 1 0 635 1 2101 1 0 635 1 2102 1 0 635 3 44 733 781 741 763 750 755 744 758 804 789 728 734 751 764 729 772 814 775 738 784 797 745 735 782 742 765 752 756 746 759 805 790 730 736 753 766 731 773 815 776 739 785 798 747 1 0 2103 2 2104 44 0 1 733 781 741 763 750 755 744 758 804 789 728 734 751 764 729 772 814 775 738 784 797 745 735 782 742 765 752 756 746 759 805 790 730 736 753 766 731 
773 815 776 739 785 798 747 1 1 2 1 2104 2103 2 2105 44 0 2 733 781 741 763 750 755 744 758 804 789 728 734 751 764 729 772 814 775 738 784 797 745 735 782 742 765 752 756 746 759 805 790 730 736 753 766 731 773 815 776 739 785 798 747 1 1 2 0 2105 2103 1 2103 1 0 1885 1 1735 2 1 2106 1903 1 1735 2 1 2107 1902 1 1735 2 1 1697 1901 1 1735 2 1 1909 1900 1 1735 2 1 1908 1899 1 1735 2 1 2108 1898 1 1735 2 1 2109 1897 1 1735 2 1 2110 1896 1 2111 2 1 2112 1895 1 2111 2 1 2113 1894 1 2111 2 1 2114 1893 1 2111 2 1 2115 1892 1 2111 2 1 2116 1891 1 1741 2 0 1903 2059 1 2117 2 0 1903 1992 1 2118 2 0 1903 650 1 1742 2 0 1902 2098 1 2119 2 0 1902 2004 1 2120 2 0 1902 721 1 2121 2 0 1901 2069 1 2122 2 0 1901 2034 1 1743 2 0 1901 1980 1 2123 2 0 1901 644 1 1743 2 0 1900 2102 1 2119 2 0 1900 2066 1 2122 2 0 1900 2005 1 1742 2 0 1900 1974 1 2124 2 0 1900 658 1 1743 2 0 1899 2065 1 2121 2 0 1899 2017 1 1741 2 0 1899 1987 1 2125 2 0 1899 668 1 2126 2 0 1898 2050 1 1744 2 0 1898 2014 1 2127 2 0 1898 649 1 1744 2 0 1897 2092 1 1742 2 0 1897 2007 1 2126 2 0 1897 1977 1 2128 2 0 1897 717 1 2117 2 0 1896 2093 1 1744 2 0 1896 2055 1 1741 2 0 1896 2018 1 2129 2 0 1896 669 1 2130 2 0 1895 859 1 2131 2 0 1894 868 1 2132 2 0 1893 900 1 2133 2 0 1892 878 1 2134 2 0 1891 860 1 2118 2 0 1731 2095 1 1745 2 0 1731 2060 1 2125 2 0 1731 2019 1 2129 2 0 1731 1988 1 1745 2 0 1730 2097 1 2120 2 0 1730 2062 1 2124 2 0 1730 2006 1 2128 2 0 1730 1978 1 2125 2 0 1729 2101 1 2124 2 0 1729 2063 1 2123 2 0 1729 2012 1 1745 2 0 1729 1982 1 2129 2 0 1728 2091 1 2128 2 0 1728 2057 1 1745 2 0 1728 2013 1 2127 2 0 1728 1981 1 1741 2 0 1727 2096 1 1742 2 0 1727 2061 1 1743 2 0 1727 2016 1 1744 2 0 1727 1983 1 1888 2 0 1909 2102 1 2135 2 0 1738 2101 1 2136 2 0 1910 2100 1 1395 2 0 1396 2083 1 1476 2 0 1397 2082 1 1477 2 0 1482 2081 1 2137 2 0 1906 2080 1 1566 2 0 1649 2079 1 1647 2 0 1652 2078 1 1664 2 0 1672 2077 1 1654 2 0 1659 2076 1 2138 2 0 1564 2075 1 1657 2 0 1662 2074 1 1685 2 0 1673 2073 1 1669 2 0 1680 2072 
1 1712 2 0 1704 2071 1 1692 2 0 1681 2070 1 1702 2 0 1697 2069 1 1699 2 0 1694 2068 1 2139 2 0 1909 2066 1 1888 2 0 1908 2065 1 2136 2 0 1738 2063 1 1475 2 0 1396 2047 1 1476 2 0 1320 2046 1 1478 2 0 1398 2045 1 2140 2 0 1905 2044 1 1566 2 0 1904 2043 1 1647 2 0 1564 2042 1 1664 2 0 1659 2041 1 1654 2 0 1649 2040 1 1668 2 0 1662 2039 1 1657 2 0 1652 2038 1 1669 2 0 1679 2037 1 1686 2 0 1673 2036 1 1687 2 0 1694 2035 1 1692 2 0 1697 2034 1 1702 2 0 1708 2033 1 1699 2 0 1704 2032 1 997 2 0 1320 2028 1 1476 2 0 1398 2027 1 1479 2 0 1564 2026 1 1477 2 0 1483 2025 1 1712 2 0 1907 2023 1 1475 2 0 1482 2022 1 1647 2 0 1709 2021 1 1713 2 0 1704 2020 1 1702 2 0 1908 2017 1 2137 2 0 1652 2015 1 1699 2 0 1738 2012 1 1685 2 0 1694 2011 1 1657 2 0 1679 2009 1 2140 2 0 1659 2008 1 1692 2 0 1909 2005 1 1664 2 0 1673 2003 1 1669 2 0 1681 2002 1 1687 2 0 1910 2001 1 1668 2 0 1680 2000 1 1395 2 0 961 1996 1 1566 2 0 1483 1995 1 1479 2 0 1398 1994 1 1477 2 0 1396 1993 1 2141 2 0 1708 1991 1 1713 2 0 1649 1990 1 1478 2 0 1397 1989 1 1890 2 0 1908 1987 1 1702 2 0 1709 1986 1 2135 2 0 1704 1985 1 1654 2 0 1905 1984 1 1886 2 0 1738 1982 1 1888 2 0 1697 1980 1 1685 2 0 1659 1979 1 1657 2 0 1906 1976 1 1692 2 0 1679 1975 1 1889 2 0 1909 1974 1 2136 2 0 1694 1973 1 2142 2 0 1910 1972 1 1687 2 0 1673 1971 1 1669 2 0 1662 1970 1 2139 2 0 1681 1969 1 1686 2 0 1672 1968 1 2139 2 0 1910 723 1 1888 2 0 1738 702 1 2138 2 0 1904 696 1 2137 2 0 1905 692 1 2140 2 0 1906 671 1 2135 2 0 1908 668 1 2136 2 0 1909 658 1 1886 3 0 1889 1454 1461 1 2136 3 0 1889 1022 1464 1 1889 3 0 1886 1461 1454 1 1888 3 0 1886 1459 1013 1 1887 3 0 1886 1460 1447 1 2143 3 0 1890 1462 1469 1 2135 3 0 1890 1463 1456 1 2136 3 0 2139 1408 1465 1 1888 3 0 2136 1025 1014 1 2139 3 0 2136 1465 1408 1 1889 3 0 2136 1464 1022 1 1888 3 0 2135 1457 1431 1 1890 3 0 2135 1456 1463 1 2135 3 0 1888 1431 1457 1 2136 3 0 1888 1014 1025 1 1886 3 0 1888 1013 1459 1 1909 3 0 1910 1330 1386 1 2107 3 0 1910 1935 1333 1 1738 3 0 1909 989 977 1 
1910 3 0 1909 1386 1330 1 1737 3 0 1909 1387 986 1 1908 3 0 1738 1353 1377 1 1909 3 0 1738 977 989 1 1740 3 0 1738 978 1382 1 1738 3 0 1908 1377 1353 1 1736 3 0 1908 1378 1383 1 1905 3 0 1906 1959 1924 1 1906 3 0 1905 1924 1959 1 1652 3 0 1905 1958 1946 1 1564 3 0 1904 1926 1323 1 1649 3 0 1709 1956 971 1 1672 3 0 1680 1949 1947 1 1659 3 0 1679 1929 1928 1 1680 3 0 1672 1947 1949 1 1679 3 0 1659 1928 1929 1 1905 3 0 1652 1946 1958 1 1709 3 0 1649 971 1956 1 1904 3 0 1564 1323 1926 1 1397 3 0 1482 1943 1942 1 1482 3 0 1397 1942 1943 1 1013 2 0 1387 2102 1 1014 2 0 1386 2102 1 1432 2 0 1385 2102 1 1431 2 0 989 2102 1 2144 2 0 1353 2101 1 2145 2 0 1352 2101 1 1456 2 0 978 2101 1 1457 2 0 977 2101 1 1464 2 0 1935 2100 1 1465 2 0 1934 2100 1 1025 2 0 1330 2100 1 1466 2 0 1329 2100 1 1030 2 0 1367 2099 1 1031 2 0 1366 2099 1 1032 2 0 1365 2099 1 1454 2 0 1936 2098 1 1022 2 0 1333 2098 1 1460 2 0 1938 2097 1 1461 2 0 1937 2097 1 1024 2 0 1375 2097 1 1459 2 0 986 2097 1 1463 2 0 1382 2096 1 2146 2 0 1381 2096 1 2147 2 0 1380 2096 1 1462 2 0 988 2096 1 2148 2 0 1914 2095 1 1469 2 0 1913 2095 1 2149 2 0 1384 2095 1 2150 2 0 1383 2095 1 1010 2 0 1328 2094 1 2151 2 0 1327 2094 1 2152 2 0 1326 2094 1 2153 2 0 1919 2093 1 1019 2 0 1376 2093 1 1439 2 0 987 2093 1 1448 2 0 1939 2092 1 1447 2 0 1337 2092 1 2154 2 0 1962 2091 1 1455 2 0 1940 2091 1 1023 2 0 1369 2091 1 2155 2 0 1368 2091 1 2156 2 0 1360 2090 1 1458 2 0 983 2090 1 1420 2 0 972 2089 1 2157 2 0 1921 2088 1 2158 2 0 1394 2087 1 2159 2 0 1965 2085 1 1473 2 0 1941 2084 1 2160 2 0 1954 2083 1 2161 2 0 1339 2083 1 998 2 0 1338 2083 1 1401 2 0 968 2083 1 1418 2 0 1942 2082 1 1004 2 0 1334 2082 1 1417 2 0 965 2082 1 1436 2 0 1945 2081 1 1437 2 0 1944 2081 1 1438 2 0 1943 2081 1 2162 2 0 1372 2081 1 2163 2 0 1959 2080 1 2164 2 0 1374 2080 1 999 2 0 1340 2079 1 1402 2 0 971 2079 1 1403 2 0 970 2079 1 1404 2 0 969 2079 1 1419 2 0 1946 2078 1 1006 2 0 1346 2078 1 1005 2 0 1345 2078 1 1007 2 0 1344 2078 1 1421 2 0 1948 2077 1 1422 
2 0 1947 2077 1 1009 2 0 1361 2077 1 1424 2 0 1928 2076 1 1425 2 0 1364 2076 1 1423 2 0 984 2076 1 2165 2 0 1325 2075 1 2166 2 0 1324 2075 1 2167 2 0 1323 2075 1 1435 2 0 963 2075 1 2168 2 0 1960 2074 1 1020 2 0 1343 2074 1 2169 2 0 1342 2074 1 1443 2 0 973 2074 1 1411 2 0 1349 2073 1 1000 2 0 1348 2073 1 2170 2 0 1347 2073 1 1410 2 0 975 2073 1 1427 2 0 1950 2072 1 1428 2 0 1949 2072 1 1011 2 0 1355 2072 1 1426 2 0 982 2072 1 2171 2 0 1933 2071 1 2172 2 0 1389 2071 1 2173 2 0 1388 2071 1 1433 2 0 990 2071 1 1450 2 0 1951 2070 1 1021 2 0 1336 2070 1 2174 2 0 1335 2070 1 1449 2 0 966 2070 1 1468 2 0 1393 2069 1 1026 2 0 1392 2069 1 2175 2 0 1391 2069 1 1467 2 0 991 2069 1 1472 2 0 1932 2068 1 1027 2 0 1371 2068 1 1471 2 0 1370 2068 1 1470 2 0 985 2068 1 1003 2 0 1952 2067 1 2176 2 0 1387 2066 1 2177 2 0 1386 2066 1 1409 2 0 1385 2066 1 1408 2 0 989 2066 1 1431 2 0 1912 2065 1 1432 2 0 1911 2065 1 1013 2 0 1378 2065 1 1014 2 0 1377 2065 1 1444 2 0 996 2064 1 1445 2 0 995 2064 1 1446 2 0 994 2064 1 1025 2 0 1353 2063 1 1466 2 0 1352 2063 1 1464 2 0 978 2063 1 1465 2 0 977 2063 1 2178 2 0 1938 2062 1 2179 2 0 1937 2062 1 2180 2 0 1375 2062 1 1412 2 0 986 2062 1 1022 2 0 1382 2061 1 2181 2 0 1381 2061 1 2182 2 0 1380 2061 1 1454 2 0 988 2061 1 1460 2 0 1914 2060 1 1461 2 0 1913 2060 1 1024 2 0 1384 2060 1 1459 2 0 1383 2060 1 1462 2 0 1916 2059 1 1463 2 0 1915 2059 1 2147 2 0 1390 2059 1 1406 2 0 1918 2058 1 1407 2 0 1917 2058 1 1405 2 0 974 2058 1 2183 2 0 1962 2057 1 2184 2 0 1940 2057 1 1416 2 0 1369 2057 1 2185 2 0 1368 2057 1 1439 2 0 1920 2056 1 1019 2 0 1379 2056 1 1448 2 0 1919 2055 1 2186 2 0 1376 2055 1 1447 2 0 987 2055 1 1023 2 0 1360 2054 1 1455 2 0 983 2054 1 1008 2 0 1341 2053 1 2187 2 0 1965 2052 1 2188 2 0 1964 2050 1 2189 2 0 1963 2050 1 1473 2 0 1921 2049 1 2190 2 0 1394 2048 1 2191 2 0 1954 2047 1 1001 2 0 1339 2047 1 2192 2 0 1338 2047 1 1413 2 0 968 2047 1 1417 2 0 1922 2046 1 1004 2 0 1322 2046 1 1418 2 0 962 2046 1 1451 2 0 1923 2045 1 2193 2 0 
1359 2045 1 2194 2 0 1358 2045 1 2195 2 0 1357 2045 1 2196 2 0 1958 2044 1 2197 2 0 1957 2044 1 1453 2 0 1924 2044 1 1402 2 0 1927 2043 1 1403 2 0 1926 2043 1 1404 2 0 1925 2043 1 999 2 0 1356 2043 1 1005 2 0 1325 2042 1 1006 2 0 1324 2042 1 1007 2 0 1323 2042 1 1419 2 0 963 2042 1 1422 2 0 1928 2041 1 1009 2 0 1364 2041 1 1421 2 0 984 2041 1 2198 2 0 1340 2040 1 1424 2 0 971 2040 1 1423 2 0 970 2040 1 1425 2 0 969 2040 1 2199 2 0 1960 2039 1 2200 2 0 1343 2039 1 2201 2 0 1342 2039 1 1440 2 0 973 2039 1 2168 2 0 1946 2038 1 1020 2 0 1346 2038 1 2169 2 0 1345 2038 1 1443 2 0 1344 2038 1 1428 2 0 1929 2037 1 1011 2 0 1332 2037 1 1426 2 0 1331 2037 1 1427 2 0 964 2037 1 1018 2 0 1349 2036 1 2202 2 0 1348 2036 1 2203 2 0 1347 2036 1 1434 2 0 975 2036 1 1415 2 0 1932 2035 1 1002 2 0 1371 2035 1 2204 2 0 1370 2035 1 1414 2 0 985 2035 1 1021 2 0 1393 2034 1 1450 2 0 1392 2034 1 2174 2 0 1391 2034 1 1449 2 0 991 2034 1 1467 2 0 1931 2033 1 1468 2 0 1930 2033 1 1026 2 0 1354 2033 1 1472 2 0 1933 2032 1 1027 2 0 1389 2032 1 1470 2 0 1388 2032 1 1471 2 0 990 2032 1 2205 2 0 967 2031 1 1032 2 0 1328 2030 1 1030 2 0 1327 2030 1 1031 2 0 1326 2030 1 1441 2 0 993 2029 1 1442 2 0 992 2029 1 2206 2 0 1922 2028 1 2207 2 0 1322 2028 1 1452 2 0 962 2028 1 1417 2 0 1923 2027 1 1418 2 0 1359 2027 1 1004 2 0 1358 2027 1 2208 2 0 1357 2027 1 1015 2 0 1325 2026 1 1016 2 0 1324 2026 1 2209 2 0 1323 2026 1 1017 2 0 963 2026 1 2162 2 0 1955 2025 1 1436 2 0 981 2025 1 1437 2 0 980 2025 1 1438 2 0 979 2025 1 2150 2 0 1360 2024 1 1469 2 0 983 2024 1 1433 2 0 1961 2023 1 2192 2 0 1945 2022 1 1413 2 0 1944 2022 1 2191 2 0 1943 2022 1 1001 2 0 1372 2022 1 1419 2 0 1956 2021 1 1006 2 0 1351 2021 1 1005 2 0 1350 2021 1 1007 2 0 976 2021 1 1430 2 0 1933 2020 1 2210 2 0 1389 2020 1 1012 2 0 1388 2020 1 1429 2 0 990 2020 1 1456 2 0 1914 2019 1 1457 2 0 1913 2019 1 2144 2 0 1384 2019 1 2145 2 0 1383 2019 1 1462 2 0 1919 2018 1 2147 2 0 1376 2018 1 1463 2 0 987 2018 1 1468 2 0 1912 2017 1 1467 2 0 1911 
2017 1 2175 2 0 1378 2017 1 1026 2 0 1377 2017 1 1432 2 0 1382 2016 1 1013 2 0 1381 2016 1 1014 2 0 1380 2016 1 1431 2 0 988 2016 1 2211 2 0 1946 2015 1 2164 2 0 1346 2015 1 2212 2 0 1345 2015 1 2163 2 0 1344 2015 1 1447 2 0 1964 2014 1 1448 2 0 1963 2014 1 1460 2 0 1962 2013 1 1461 2 0 1940 2013 1 1459 2 0 1369 2013 1 1024 2 0 1368 2013 1 1470 2 0 1353 2012 1 1027 2 0 1352 2012 1 1472 2 0 978 2012 1 1471 2 0 977 2012 1 1410 2 0 1932 2011 1 2170 2 0 1371 2011 1 1000 2 0 1370 2011 1 1411 2 0 985 2011 1 1416 2 0 1965 2010 1 2168 2 0 1929 2009 1 2169 2 0 1332 2009 1 1443 2 0 1331 2009 1 1020 2 0 964 2009 1 2196 2 0 1928 2008 1 2197 2 0 1364 2008 1 1453 2 0 984 2008 1 1454 2 0 1939 2007 1 1022 2 0 1337 2007 1 1464 2 0 1938 2006 1 1465 2 0 1937 2006 1 1025 2 0 1375 2006 1 1466 2 0 986 2006 1 2174 2 0 1387 2005 1 1450 2 0 1386 2005 1 1449 2 0 1385 2005 1 1021 2 0 989 2005 1 1408 2 0 1936 2004 1 1409 2 0 1333 2004 1 1009 2 0 1349 2003 1 1421 2 0 1348 2003 1 2213 2 0 1347 2003 1 1422 2 0 975 2003 1 1427 2 0 1951 2002 1 1426 2 0 1336 2002 1 1011 2 0 1335 2002 1 1428 2 0 966 2002 1 1415 2 0 1935 2001 1 2204 2 0 1934 2001 1 1414 2 0 1330 2001 1 1002 2 0 1329 2001 1 2200 2 0 1950 2000 1 2199 2 0 1949 2000 1 2201 2 0 1355 2000 1 1440 2 0 982 2000 1 1003 2 0 1966 1999 1 1406 2 0 996 1998 1 1407 2 0 995 1998 1 1405 2 0 994 1998 1 1028 2 0 1363 1997 1 1029 2 0 1362 1997 1 1401 2 0 1953 1996 1 998 2 0 1373 1996 1 1404 2 0 1955 1995 1 1402 2 0 981 1995 1 1403 2 0 980 1995 1 999 2 0 979 1995 1 2209 2 0 1923 1994 1 1015 2 0 1359 1994 1 1016 2 0 1358 1994 1 1017 2 0 1357 1994 1 1438 2 0 1954 1993 1 2162 2 0 1339 1993 1 1436 2 0 1338 1993 1 1437 2 0 968 1993 1 2153 2 0 1916 1992 1 1439 2 0 1915 1992 1 1019 2 0 1390 1992 1 2214 2 0 1931 1991 1 2215 2 0 1930 1991 1 2216 2 0 1354 1991 1 2210 2 0 1340 1990 1 1430 2 0 971 1990 1 1429 2 0 970 1990 1 1012 2 0 969 1990 1 2193 2 0 1942 1989 1 2194 2 0 1334 1989 1 1451 2 0 965 1989 1 2154 2 0 1914 1988 1 1455 2 0 1913 1988 1 2155 2 0 1384 1988 1 
1023 2 0 1383 1988 1 1462 2 0 1912 1987 1 1463 2 0 1911 1987 1 2146 2 0 1378 1987 1 2147 2 0 1377 1987 1 1467 2 0 1956 1986 1 1026 2 0 1351 1986 1 2175 2 0 1350 1986 1 1468 2 0 976 1986 1 1456 2 0 1933 1985 1 2145 2 0 1389 1985 1 2144 2 0 1388 1985 1 1457 2 0 990 1985 1 1424 2 0 1958 1984 1 1425 2 0 1957 1984 1 1423 2 0 1924 1984 1 1447 2 0 1382 1983 1 2217 2 0 1381 1983 1 2186 2 0 1380 1983 1 1448 2 0 988 1983 1 1024 2 0 1353 1982 1 1459 2 0 1352 1982 1 1460 2 0 978 1982 1 1461 2 0 977 1982 1 2218 2 0 1962 1981 1 2219 2 0 1940 1981 1 2159 2 0 1369 1981 1 2220 2 0 1368 1981 1 1431 2 0 1393 1980 1 1014 2 0 1392 1980 1 1013 2 0 1391 1980 1 1432 2 0 991 1980 1 1410 2 0 1928 1979 1 1411 2 0 1364 1979 1 1000 2 0 984 1979 1 2183 2 0 1938 1978 1 2184 2 0 1937 1978 1 2185 2 0 1375 1978 1 1416 2 0 986 1978 1 2189 2 0 1939 1977 1 2188 2 0 1337 1977 1 1443 2 0 1959 1976 1 1020 2 0 1374 1976 1 1449 2 0 1929 1975 1 2174 2 0 1332 1975 1 1021 2 0 1331 1975 1 1450 2 0 964 1975 1 2181 2 0 1387 1974 1 2182 2 0 1386 1974 1 1022 2 0 1385 1974 1 1454 2 0 989 1974 1 1464 2 0 1932 1973 1 1466 2 0 1371 1973 1 1465 2 0 1370 1973 1 1025 2 0 985 1973 1 2178 2 0 1935 1972 1 2179 2 0 1934 1972 1 2180 2 0 1330 1972 1 1412 2 0 1329 1972 1 1414 2 0 1349 1971 1 2204 2 0 1348 1971 1 1002 2 0 1347 1971 1 1415 2 0 975 1971 1 1428 2 0 1960 1970 1 1427 2 0 1343 1970 1 1011 2 0 1342 1970 1 1426 2 0 973 1970 1 2177 2 0 1951 1969 1 1408 2 0 1336 1969 1 2176 2 0 1335 1969 1 1409 2 0 966 1969 1 2202 2 0 1948 1968 1 1434 2 0 1947 1968 1 1018 2 0 1361 1968 1 2221 2 0 967 1967 1 2161 2 0 1922 726 1 2176 2 0 1935 723 1 2177 2 0 1934 723 1 2170 2 0 1929 722 1 2180 2 0 1936 721 1 2192 2 0 1942 720 1 2204 2 0 1951 719 1 2185 2 0 1939 717 1 2208 2 0 1954 714 1 2213 2 0 1960 710 1 2198 2 0 1946 708 1 2151 2 0 1918 707 1 2152 2 0 1917 707 1 2210 2 0 1956 703 1 2172 2 0 1931 701 1 2173 2 0 1930 701 1 2209 2 0 1955 700 1 2202 2 0 1950 697 1 2203 2 0 1949 697 1 2165 2 0 1927 696 1 2166 2 0 1926 696 1 2167 2 0 1925 696 1 
2162 2 0 1923 695 1 2158 2 0 1921 693 1 2212 2 0 1958 692 1 2163 2 0 1957 692 1 2164 2 0 1924 692 1 2200 2 0 1948 689 1 2201 2 0 1947 689 1 2188 2 0 1965 688 1 2169 2 0 1928 684 1 2217 2 0 1962 680 1 2186 2 0 1940 680 1 2174 2 0 1932 678 1 2193 2 0 1945 675 1 2194 2 0 1944 675 1 2195 2 0 1943 675 1 2207 2 0 1953 672 1 2197 2 0 1959 671 1 2181 2 0 1938 670 1 2182 2 0 1937 670 1 2155 2 0 1919 669 1 2144 2 0 1912 668 1 2145 2 0 1911 668 1 2156 2 0 1920 663 1 2146 2 0 1914 661 1 2147 2 0 1913 661 1 2175 2 0 1933 655 1 2149 2 0 1916 650 1 2150 2 0 1915 650 1 2159 2 0 1964 649 1 2220 2 0 1963 649 1 2222 1 0 730 1 2223 1 0 790 1 2224 1 0 805 1 2225 1 0 805 1 2226 1 0 805 1 2227 1 0 805 1 2228 1 0 759 1 2229 1 0 759 1 2230 1 0 759 1 2231 1 0 759 1 2232 1 0 759 1 2233 1 0 759 1 2234 1 0 759 1 2235 1 0 746 1 2236 1 0 746 1 2237 1 0 746 1 2238 1 0 746 1 2239 1 0 746 1 2240 1 0 756 1 2241 1 0 756 1 2242 1 0 756 1 2243 1 0 756 1 2244 1 0 756 1 2245 1 0 756 1 2246 1 0 752 1 2247 1 0 752 1 2248 1 0 752 1 2249 1 0 752 1 2250 1 0 752 1 2251 1 0 765 1 2252 1 0 742 1 2253 1 0 735 1 2254 1 0 728 1 2255 1 0 789 1 2256 1 0 804 1 2257 1 0 804 1 2258 1 0 804 1 2259 1 0 804 1 2260 1 0 758 1 2261 1 0 758 1 2262 1 0 758 1 2263 1 0 758 1 2264 1 0 758 1 2265 1 0 758 1 2266 1 0 758 1 2267 1 0 744 1 2268 1 0 744 1 2269 1 0 744 1 2270 1 0 744 1 2271 1 0 744 1 2272 1 0 755 1 2273 1 0 755 1 2274 1 0 755 1 2275 1 0 755 1 2276 1 0 755 1 2277 1 0 755 1 2278 1 0 750 1 2279 1 0 750 1 2280 1 0 750 1 2281 1 0 750 1 2282 1 0 750 1 2283 1 0 763 1 2284 1 0 741 1 2285 1 0 733 1 2286 1 0 747 1 2287 1 0 747 1 2288 1 0 747 1 2289 1 0 747 1 2290 1 0 747 1 2291 1 0 798 1 2292 1 0 798 1 2293 1 0 785 1 2294 1 0 785 1 2295 1 0 785 1 2296 1 0 785 1 2297 1 0 785 1 2298 1 0 785 1 2299 1 0 739 1 2300 1 0 776 1 2301 1 0 815 1 2302 1 0 815 1 2303 1 0 731 1 2304 1 0 731 1 2305 1 0 731 1 2306 1 0 731 1 2307 1 0 731 1 2308 1 0 731 1 2309 1 0 766 1 2310 1 0 766 1 2311 1 0 766 1 2312 1 0 766 1 2313 1 0 753 1 2314 1 0 753 1 2315 
1 0 753 1 2316 1 0 753 1 2317 1 0 753 1 2318 1 0 736 1 2319 1 0 736 1 2320 1 0 736 1 2321 1 0 736 1 2322 1 0 745 1 2323 1 0 745 1 2324 1 0 745 1 2325 1 0 745 1 2326 1 0 745 1 2327 1 0 797 1 2328 1 0 797 1 2329 1 0 784 1 2330 1 0 784 1 2331 1 0 784 1 2332 1 0 784 1 2333 1 0 784 1 2334 1 0 784 1 2335 1 0 738 1 2336 1 0 775 1 2337 1 0 814 1 2338 1 0 814 1 2339 1 0 729 1 2340 1 0 729 1 2341 1 0 729 1 2342 1 0 729 1 2343 1 0 729 1 2344 1 0 729 1 2345 1 0 764 1 2346 1 0 764 1 2347 1 0 764 1 2348 1 0 764 1 2349 1 0 751 1 2350 1 0 751 1 2351 1 0 751 1 2352 1 0 751 1 2353 1 0 751 1 2354 1 0 734 1 2355 1 0 734 1 2356 1 0 734 1 2357 1 0 734 3 44 832 880 840 862 849 854 843 857 903 888 827 833 850 863 828 871 913 874 837 883 896 844 834 881 841 864 851 855 845 858 904 889 829 835 852 865 830 872 914 875 838 884 897 846 1 0 2358 2 2359 44 0 1 832 880 840 862 849 854 843 857 903 888 827 833 850 863 828 871 913 874 837 883 896 844 834 881 841 864 851 855 845 858 904 889 829 835 852 865 830 872 914 875 838 884 897 846 1 1 2 1 2359 2358 2 2360 44 0 2 832 880 840 862 849 854 843 857 903 888 827 833 850 863 828 871 913 874 837 883 896 844 834 881 841 864 851 855 845 858 904 889 829 835 852 865 830 872 914 875 838 884 897 846 1 1 2 0 2360 2358 1 2358 1 0 2111 1 2361 2 1 2362 2134 1 2361 2 1 2363 2133 1 2361 2 1 2364 2132 1 2361 2 1 2365 2131 1 2361 2 1 2366 2130 1 1885 2 1 2367 2129 1 1885 2 1 2368 2128 1 1885 2 1 2369 2127 1 1885 2 1 2370 2126 1 1885 2 1 2135 2125 1 1885 2 1 2136 2124 1 1885 2 1 1699 2123 1 1885 2 1 1692 2122 1 1885 2 1 1702 2121 1 1885 2 1 2142 2120 1 1885 2 1 2139 2119 1 1885 2 1 2143 2118 1 1885 2 1 2371 2117 1 2372 2 0 2129 2348 1 1892 2 0 2129 2310 1 1895 2 0 2129 2273 1 2373 2 0 2129 768 1 1892 2 0 2128 2347 1 1894 2 0 2128 2262 1 2374 2 0 2128 2232 1 2375 2 0 2128 816 1 2374 2 0 2127 2305 1 1892 2 0 2127 2269 1 2376 2 0 2127 748 1 2376 2 0 2126 2340 1 2377 2 0 2126 2307 1 2375 2 0 2126 2265 1 2374 2 0 2126 787 1 1893 2 0 2125 2320 1 2378 2 0 2125 2272 1 1895 2 
0 2125 2242 1 2379 2 0 2125 767 1 1893 2 0 2124 2357 1 2380 2 0 2124 2321 1 2381 2 0 2124 2260 1 1894 2 0 2124 2229 1 2382 2 0 2124 757 1 2378 2 0 2123 2324 1 2381 2 0 2123 2289 1 1893 2 0 2123 2235 1 2383 2 0 2123 743 1 2383 2 0 2122 2323 1 2384 2 0 2122 2290 1 2385 2 0 2122 2266 1 2382 2 0 2122 2228 1 2381 2 0 2122 777 1 2386 2 0 2121 2326 1 2383 2 0 2121 2287 1 2387 2 0 2121 2275 1 2379 2 0 2121 2240 1 2378 2 0 2121 754 1 1894 2 0 2120 2353 1 2380 2 0 2120 2259 1 2388 2 0 2120 820 1 2382 2 0 2119 2355 1 2384 2 0 2119 2256 1 2388 2 0 2119 2227 1 2380 2 0 2119 822 1 1895 2 0 2118 2314 1 2372 2 0 2118 2247 1 2389 2 0 2118 749 1 2390 2 0 2117 2345 1 2373 2 0 2117 2309 1 2389 2 0 2117 2279 1 2372 2 0 2117 793 1 1895 2 0 1745 2351 1 1894 2 0 1745 2316 1 1893 2 0 1745 2271 1 1892 2 0 1745 2238 1 2373 2 0 1744 2346 1 2375 2 0 1744 2312 1 1891 2 0 1744 2268 1 2376 2 0 1744 2236 1 2379 2 0 1743 2356 1 2382 2 0 1743 2318 1 2383 2 0 1743 2267 1 1891 2 0 1743 2237 1 1891 2 0 1742 2352 1 2388 2 0 1742 2317 1 2382 2 0 1742 2261 1 2375 2 0 1742 2233 1 2389 2 0 1741 2350 1 1891 2 0 1741 2315 1 2379 2 0 1741 2274 1 2373 2 0 1741 2243 1 2114 2 0 2136 2357 1 2391 2 0 1888 2356 1 2392 2 0 2139 2355 1 2113 2 0 2142 2353 1 2116 2 0 1889 2352 1 2112 2 0 1886 2351 1 2393 2 0 1890 2350 1 2394 2 0 1887 2346 1 1474 2 0 1476 2338 1 1559 2 0 1475 2337 1 1561 2 0 1478 2336 1 2395 2 0 2140 2335 1 1563 2 0 1647 2334 1 1650 2 0 1654 2333 1 1661 2 0 1668 2332 1 1651 2 0 1657 2331 1 2396 2 0 1566 2330 1 1660 2 0 1664 2329 1 1676 2 0 1669 2328 1 1675 2 0 1686 2327 1 1706 2 0 1702 2326 1 1695 2 0 1687 2325 1 1705 2 0 1699 2324 1 1696 2 0 1692 2323 1 2397 2 0 2136 2321 1 2114 2 0 2135 2320 1 2392 2 0 1888 2318 1 2398 2 0 1889 2317 1 2113 2 0 1886 2316 1 2116 2 0 1890 2315 1 2112 2 0 2143 2314 1 2399 2 0 1887 2312 1 1558 2 0 1476 2302 1 1559 2 0 1395 2301 1 1562 2 0 1477 2300 1 2400 2 0 2137 2299 1 1563 2 0 2138 2298 1 1650 2 0 1566 2297 1 1661 2 0 1657 2296 1 1651 2 0 1647 2295 1 1674 2 0 1664 2294 1 
1660 2 0 1654 2293 1 1675 2 0 1685 2292 1 1677 2 0 1669 2291 1 1678 2 0 1692 2290 1 1695 2 0 1699 2289 1 1705 2 0 1712 2288 1 1696 2 0 1702 2287 1 1033 2 0 1395 2283 1 1559 2 0 1477 2282 1 1560 2 0 1566 2281 1 1561 2 0 1479 2280 1 1706 2 0 2141 2278 1 1558 2 0 1478 2277 1 1650 2 0 1713 2276 1 1707 2 0 1702 2275 1 2391 2 0 1890 2274 1 1705 2 0 2135 2272 1 2114 2 0 1886 2271 1 2395 2 0 1654 2270 1 2116 2 0 1887 2268 1 1696 2 0 1888 2267 1 1676 2 0 1692 2266 1 1660 2 0 1685 2264 1 2400 2 0 1657 2263 1 2392 2 0 1889 2261 1 1695 2 0 2136 2260 1 2397 2 0 2142 2259 1 1661 2 0 1669 2258 1 1675 2 0 1687 2257 1 1678 2 0 2139 2256 1 1674 2 0 1686 2255 1 1474 2 0 997 2251 1 1563 2 0 1479 2250 1 1560 2 0 1477 2249 1 1561 2 0 1476 2248 1 2401 2 0 2143 2247 1 2402 2 0 1712 2246 1 1707 2 0 1647 2245 1 1562 2 0 1475 2244 1 2394 2 0 1890 2243 1 2112 2 0 2135 2242 1 1705 2 0 1713 2241 1 2391 2 0 1702 2240 1 1651 2 0 2137 2239 1 2115 2 0 1886 2238 1 2116 2 0 1888 2237 1 2403 2 0 1887 2236 1 2114 2 0 1699 2235 1 1676 2 0 1657 2234 1 2399 2 0 1889 2233 1 1660 2 0 2140 2231 1 1695 2 0 1685 2230 1 2113 2 0 2136 2229 1 2392 2 0 1692 2228 1 2398 2 0 2139 2227 1 1678 2 0 1669 2226 1 1675 2 0 1664 2225 1 2397 2 0 1687 2224 1 1677 2 0 1668 2223 1 1889 2 0 2107 2098 1 1886 2 0 1737 2097 1 1890 2 0 1740 2096 1 2143 2 0 1736 2095 1 2142 2 0 1737 2062 1 1889 2 0 1740 2061 1 1886 2 0 1736 2060 1 2135 2 0 1736 2019 1 1888 2 0 1740 2016 1 2136 2 0 1737 2006 1 2139 2 0 2107 2004 1 2367 2 0 1736 1988 1 1887 2 0 1740 1983 1 2368 2 0 1737 1978 1 2397 2 0 2139 822 1 2398 2 0 2142 820 1 2114 2 0 1888 801 1 2396 2 0 2138 795 1 2395 2 0 2137 791 1 2115 2 0 1887 779 1 2400 2 0 2140 770 1 2113 2 0 1889 769 1 2391 2 0 2135 767 1 2116 2 0 1886 761 1 2112 2 0 1890 760 1 2392 2 0 2136 757 1 2393 2 0 2143 749 1 2142 2 0 2107 721 1 1889 2 0 1737 670 1 1886 2 0 1740 662 1 1890 2 0 1736 661 1 1887 3 0 2368 2185 2186 1 1889 3 0 2368 1416 2181 1 2370 3 0 2368 2183 2188 1 2371 3 0 2367 2155 1019 1 1887 3 0 2367 1455 1448 
1 1890 3 0 2367 1023 2146 1 2394 3 0 2115 1530 1538 1 2116 3 0 2115 1531 1543 1 2392 3 0 2397 1492 1548 1 2115 3 0 2394 1538 1530 1 2112 3 0 2393 1552 1546 1 2393 3 0 2112 1546 1552 1 2391 3 0 2112 1545 1540 1 2113 3 0 2116 1542 1537 1 2114 3 0 2116 1544 1050 1 2115 3 0 2116 1543 1531 1 2116 3 0 2113 1537 1542 1 2392 3 0 2113 1058 1549 1 2114 3 0 2392 1061 1049 1 2397 3 0 2392 1548 1492 1 2113 3 0 2392 1549 1058 1 2114 3 0 2391 1539 1515 1 2112 3 0 2391 1540 1545 1 2391 3 0 2114 1515 1539 1 2392 3 0 2114 1049 1061 1 2116 3 0 2114 1050 1544 1 2106 3 0 1736 1384 1390 1 1740 3 0 1736 1913 988 1 1908 3 0 1736 1383 1378 1 2110 3 0 1736 1914 987 1 1736 3 0 1740 988 1913 1 1737 3 0 1740 1380 1375 1 1738 3 0 1740 1382 978 1 1739 3 0 1740 1381 1369 1 1740 3 0 1737 1375 1380 1 2107 3 0 1737 1937 1936 1 1909 3 0 1737 986 1387 1 2109 3 0 1737 1938 1337 1 1737 3 0 2107 1936 1937 1 1910 3 0 2107 1333 1935 1 1890 3 0 2143 1469 1462 1 2371 3 0 2143 2148 1439 1 2367 3 0 1887 1448 1455 1 2368 3 0 1887 2186 2185 1 1886 3 0 1887 1447 1460 1 2369 3 0 1887 2217 2159 1 1889 3 0 2142 2180 2182 1 2139 3 0 2142 1412 2176 1 2142 3 0 1889 2182 2180 1 2368 3 0 1889 2181 1416 1 1890 3 0 1886 1024 2147 1 1886 3 0 1890 2147 1024 1 2367 3 0 1890 2146 1023 1 2137 3 0 2140 2197 2164 1 1657 3 0 2140 2196 2168 1 2142 3 0 2139 2176 1412 1 1566 3 0 2138 2166 1404 1 2140 3 0 2137 2164 2197 1 1654 3 0 2137 2212 2198 1 1647 3 0 1713 2210 1005 1 1668 3 0 1686 2203 2201 1 1657 3 0 1685 2170 2169 1 1686 3 0 1668 2201 2203 1 2140 3 0 1657 2168 2196 1 1685 3 0 1657 2169 2170 1 2137 3 0 1654 2198 2212 1 1713 3 0 1647 1005 2210 1 2138 3 0 1566 1404 2166 1 1475 3 0 1478 2195 2192 1 1478 3 0 1475 2192 2195 1 997 3 0 1395 2160 1452 1 1395 3 0 997 1452 2160 1 1514 2 0 1466 2357 1 1049 2 0 1465 2357 1 1050 2 0 1464 2357 1 1515 2 0 1025 2357 1 2404 2 0 1432 2356 1 2405 2 0 1431 2356 1 1539 2 0 1014 2356 1 1540 2 0 1013 2356 1 1548 2 0 2177 2355 1 1549 2 0 2176 2355 1 1547 2 0 1409 2355 1 1061 2 0 1408 2355 1 1066 2 0 
1446 2354 1 1067 2 0 1445 2354 1 1068 2 0 1444 2354 1 1537 2 0 2180 2353 1 2406 2 0 2179 2353 1 2407 2 0 2178 2353 1 1058 2 0 1412 2353 1 1542 2 0 2182 2352 1 1543 2 0 2181 2352 1 1060 2 0 1454 2352 1 1544 2 0 1022 2352 1 2408 2 0 1461 2351 1 2409 2 0 1460 2351 1 1545 2 0 1459 2351 1 1546 2 0 1024 2351 1 1552 2 0 2147 2350 1 2410 2 0 2146 2350 1 2411 2 0 1463 2350 1 2412 2 0 1462 2350 1 2413 2 0 1407 2349 1 2414 2 0 1406 2349 1 1046 2 0 1405 2349 1 2415 2 0 2155 2348 1 2416 2 0 2154 2348 1 1055 2 0 1455 2348 1 1522 2 0 1023 2348 1 1530 2 0 2185 2347 1 2417 2 0 2184 2347 1 2418 2 0 2183 2347 1 1531 2 0 1416 2347 1 2419 2 0 2217 2346 1 1538 2 0 2186 2346 1 2420 2 0 1448 2346 1 1059 2 0 1447 2346 1 2421 2 0 2153 2345 1 2422 2 0 1439 2345 1 1541 2 0 1019 2345 1 1503 2 0 1008 2344 1 2423 2 0 2158 2343 1 2424 2 0 1473 2342 1 2425 2 0 2187 2341 1 2426 2 0 2157 2341 1 2427 2 0 2189 2340 1 2428 2 0 2188 2340 1 1556 2 0 2190 2339 1 2429 2 0 2208 2338 1 1034 2 0 1418 2338 1 2430 2 0 1417 2338 1 1484 2 0 1004 2338 1 1500 2 0 2192 2337 1 2431 2 0 2191 2337 1 1040 2 0 1413 2337 1 1501 2 0 1001 2337 1 1519 2 0 2195 2336 1 1520 2 0 2194 2336 1 1521 2 0 2193 2336 1 2432 2 0 1451 2336 1 2433 2 0 2197 2335 1 2434 2 0 2196 2335 1 2435 2 0 1453 2335 1 1035 2 0 1419 2334 1 1485 2 0 1007 2334 1 1486 2 0 1006 2334 1 1487 2 0 1005 2334 1 1502 2 0 2198 2333 1 1041 2 0 1425 2333 1 1043 2 0 1424 2333 1 1042 2 0 1423 2333 1 1504 2 0 2201 2332 1 1505 2 0 2200 2332 1 2436 2 0 2199 2332 1 1045 2 0 1440 2332 1 1507 2 0 2169 2331 1 2437 2 0 2168 2331 1 1506 2 0 1443 2331 1 1508 2 0 1020 2331 1 2438 2 0 1404 2330 1 2439 2 0 1403 2330 1 2440 2 0 1402 2330 1 1518 2 0 999 2330 1 2441 2 0 2213 2329 1 2442 2 0 1422 2329 1 1056 2 0 1421 2329 1 1526 2 0 1009 2329 1 2443 2 0 1428 2328 1 1036 2 0 1427 2328 1 1493 2 0 1426 2328 1 1494 2 0 1011 2328 1 1509 2 0 2203 2327 1 1510 2 0 2202 2327 1 1047 2 0 1434 2327 1 1511 2 0 1018 2327 1 2444 2 0 2175 2326 1 2445 2 0 1468 2326 1 2446 2 0 1467 2326 1 1516 2 0 1026 
2326 1 1532 2 0 2204 2325 1 2447 2 0 1415 2325 1 1057 2 0 1414 2325 1 1533 2 0 1002 2325 1 2448 2 0 1472 2324 1 1062 2 0 1471 2324 1 1550 2 0 1470 2324 1 1551 2 0 1027 2324 1 1553 2 0 2174 2323 1 1554 2 0 1450 2323 1 1063 2 0 1449 2323 1 1555 2 0 1021 2323 1 1039 2 0 2205 2322 1 1491 2 0 1466 2321 1 2449 2 0 1465 2321 1 2450 2 0 1464 2321 1 1492 2 0 1025 2321 1 1514 2 0 2145 2320 1 1515 2 0 2144 2320 1 1049 2 0 1457 2320 1 1050 2 0 1456 2320 1 1527 2 0 1032 2319 1 1528 2 0 1031 2319 1 1529 2 0 1030 2319 1 1547 2 0 1432 2318 1 1061 2 0 1431 2318 1 1548 2 0 1014 2318 1 1549 2 0 1013 2318 1 2451 2 0 2182 2317 1 2452 2 0 2181 2317 1 2453 2 0 1454 2317 1 1495 2 0 1022 2317 1 2406 2 0 1461 2316 1 2407 2 0 1460 2316 1 1058 2 0 1459 2316 1 1537 2 0 1024 2316 1 1542 2 0 2147 2315 1 1543 2 0 2146 2315 1 1544 2 0 1463 2315 1 1060 2 0 1462 2315 1 1545 2 0 2150 2314 1 1546 2 0 2149 2314 1 2409 2 0 2148 2314 1 2408 2 0 1469 2314 1 1488 2 0 2152 2313 1 1489 2 0 2151 2313 1 1490 2 0 1010 2313 1 2454 2 0 2217 2312 1 2455 2 0 2186 2312 1 2456 2 0 1448 2312 1 1499 2 0 1447 2312 1 1522 2 0 2156 2311 1 1055 2 0 1458 2311 1 1530 2 0 2155 2310 1 2418 2 0 2154 2310 1 2417 2 0 1455 2310 1 1531 2 0 1023 2310 1 2420 2 0 2153 2309 1 1059 2 0 1439 2309 1 1538 2 0 1019 2309 1 1044 2 0 1420 2308 1 2457 2 0 2189 2307 1 2458 2 0 2188 2307 1 2459 2 0 2187 2306 1 2424 2 0 2157 2306 1 2460 2 0 2220 2305 1 2426 2 0 2219 2305 1 2461 2 0 2218 2305 1 2425 2 0 2159 2305 1 1556 2 0 2158 2304 1 2462 2 0 1473 2303 1 2463 2 0 2208 2302 1 2464 2 0 1418 2302 1 1037 2 0 1417 2302 1 1496 2 0 1004 2302 1 1501 2 0 2161 2301 1 2431 2 0 2160 2301 1 1040 2 0 1401 2301 1 1500 2 0 998 2301 1 1534 2 0 2162 2300 1 2465 2 0 1438 2300 1 2466 2 0 1437 2300 1 2467 2 0 1436 2300 1 2468 2 0 2212 2299 1 2469 2 0 2211 2299 1 1536 2 0 2164 2299 1 2470 2 0 2163 2299 1 1485 2 0 2167 2298 1 1486 2 0 2166 2298 1 1487 2 0 2165 2298 1 1035 2 0 1435 2298 1 1041 2 0 1404 2297 1 1042 2 0 1403 2297 1 1043 2 0 1402 2297 1 1502 2 0 999 2297 1 
1504 2 0 2169 2296 1 2436 2 0 2168 2296 1 1045 2 0 1443 2296 1 1505 2 0 1020 2296 1 2437 2 0 1419 2295 1 1506 2 0 1007 2295 1 1508 2 0 1006 2295 1 1507 2 0 1005 2295 1 2471 2 0 2213 2294 1 2472 2 0 1422 2294 1 2473 2 0 1421 2294 1 1523 2 0 1009 2294 1 2441 2 0 2198 2293 1 1526 2 0 1425 2293 1 2442 2 0 1424 2293 1 1056 2 0 1423 2293 1 1509 2 0 2170 2292 1 1511 2 0 1411 2292 1 1047 2 0 1410 2292 1 1510 2 0 1000 2292 1 2474 2 0 1428 2291 1 2475 2 0 1427 2291 1 1054 2 0 1426 2291 1 1517 2 0 1011 2291 1 1497 2 0 2174 2290 1 2476 2 0 1450 2290 1 1038 2 0 1449 2290 1 1498 2 0 1021 2290 1 2447 2 0 1472 2289 1 1532 2 0 1471 2289 1 1057 2 0 1470 2289 1 1533 2 0 1027 2289 1 1550 2 0 2173 2288 1 1551 2 0 2172 2288 1 2448 2 0 2171 2288 1 1062 2 0 1433 2288 1 1553 2 0 2175 2287 1 1555 2 0 1468 2287 1 1063 2 0 1467 2287 1 1554 2 0 1026 2287 1 2477 2 0 1003 2286 1 1067 2 0 1407 2285 1 1068 2 0 1406 2285 1 1066 2 0 1405 2285 1 1524 2 0 1029 2284 1 1525 2 0 1028 2284 1 2478 2 0 2161 2283 1 2479 2 0 2160 2283 1 2480 2 0 1401 2283 1 1535 2 0 998 2283 1 1501 2 0 2162 2282 1 2431 2 0 1438 2282 1 1040 2 0 1437 2282 1 1500 2 0 1436 2282 1 2481 2 0 1404 2281 1 1052 2 0 1403 2281 1 1053 2 0 1402 2281 1 1051 2 0 999 2281 1 2432 2 0 2209 2280 1 1519 2 0 1017 2280 1 1520 2 0 1016 2280 1 1521 2 0 1015 2280 1 2412 2 0 2153 2279 1 2411 2 0 1439 2279 1 1552 2 0 1019 2279 1 1516 2 0 2216 2278 1 2445 2 0 2215 2278 1 2446 2 0 2214 2278 1 2463 2 0 2195 2277 1 1496 2 0 2194 2277 1 2464 2 0 2193 2277 1 1037 2 0 1451 2277 1 1502 2 0 2210 2276 1 1043 2 0 1430 2276 1 1042 2 0 1429 2276 1 1041 2 0 1012 2276 1 1512 2 0 2175 2275 1 1048 2 0 1468 2275 1 2482 2 0 1467 2275 1 1513 2 0 1026 2275 1 1539 2 0 2147 2274 1 1540 2 0 2146 2274 1 2404 2 0 1463 2274 1 2405 2 0 1462 2274 1 1546 2 0 2155 2273 1 2409 2 0 2154 2273 1 2408 2 0 1455 2273 1 1545 2 0 1023 2273 1 1551 2 0 2145 2272 1 1550 2 0 2144 2272 1 1062 2 0 1457 2272 1 2448 2 0 1456 2272 1 1049 2 0 1461 2271 1 1050 2 0 1460 2271 1 1514 2 0 1459 2271 1 1515 2 
0 1024 2271 1 2483 2 0 2198 2270 1 2433 2 0 1425 2270 1 2434 2 0 1424 2270 1 2435 2 0 1423 2270 1 1530 2 0 2220 2269 1 2417 2 0 2219 2269 1 2418 2 0 2218 2269 1 1531 2 0 2159 2269 1 1543 2 0 2217 2268 1 1542 2 0 2186 2268 1 1060 2 0 1448 2268 1 1544 2 0 1447 2268 1 1063 2 0 1432 2267 1 1555 2 0 1431 2267 1 1554 2 0 1014 2267 1 1553 2 0 1013 2267 1 1494 2 0 2174 2266 1 1036 2 0 1450 2266 1 2443 2 0 1449 2266 1 1493 2 0 1021 2266 1 2456 2 0 2189 2265 1 1499 2 0 2188 2265 1 2441 2 0 2170 2264 1 1526 2 0 1411 2264 1 2442 2 0 1410 2264 1 1056 2 0 1000 2264 1 2468 2 0 2169 2263 1 2469 2 0 2168 2263 1 2470 2 0 1443 2263 1 1536 2 0 1020 2263 1 1537 2 0 2185 2262 1 2406 2 0 2184 2262 1 2407 2 0 2183 2262 1 1058 2 0 1416 2262 1 1548 2 0 2182 2261 1 1549 2 0 2181 2261 1 1061 2 0 1454 2261 1 1547 2 0 1022 2261 1 1533 2 0 1466 2260 1 1532 2 0 1465 2260 1 2447 2 0 1464 2260 1 1057 2 0 1025 2260 1 1492 2 0 2180 2259 1 2449 2 0 2179 2259 1 2450 2 0 2178 2259 1 1491 2 0 1412 2259 1 2436 2 0 1428 2258 1 1505 2 0 1427 2258 1 1045 2 0 1426 2258 1 1504 2 0 1011 2258 1 1510 2 0 2204 2257 1 1047 2 0 1415 2257 1 1511 2 0 1414 2257 1 1509 2 0 1002 2257 1 2476 2 0 2177 2256 1 1497 2 0 2176 2256 1 1038 2 0 1409 2256 1 1498 2 0 1408 2256 1 2471 2 0 2203 2255 1 2473 2 0 2202 2255 1 2472 2 0 1434 2255 1 1523 2 0 1018 2255 1 1039 2 0 2221 2254 1 1490 2 0 1032 2253 1 1488 2 0 1031 2253 1 1489 2 0 1030 2253 1 1064 2 0 1442 2252 1 1065 2 0 1441 2252 1 1484 2 0 2207 2251 1 2430 2 0 2206 2251 1 1034 2 0 1452 2251 1 1485 2 0 2209 2250 1 1035 2 0 1017 2250 1 1486 2 0 1016 2250 1 1487 2 0 1015 2250 1 2481 2 0 2162 2249 1 1051 2 0 1438 2249 1 1052 2 0 1437 2249 1 1053 2 0 1436 2249 1 1519 2 0 2208 2248 1 1521 2 0 1418 2248 1 2432 2 0 1417 2248 1 1520 2 0 1004 2248 1 1522 2 0 2150 2247 1 2415 2 0 2149 2247 1 2416 2 0 2148 2247 1 1055 2 0 1469 2247 1 2484 2 0 2173 2246 1 2485 2 0 2172 2246 1 2486 2 0 2171 2246 1 2487 2 0 1433 2246 1 2482 2 0 1419 2245 1 1048 2 0 1007 2245 1 1513 2 0 1006 2245 1 1512 2 0 
1005 2245 1 2467 2 0 2192 2244 1 2465 2 0 2191 2244 1 2466 2 0 1413 2244 1 1534 2 0 1001 2244 1 1538 2 0 2147 2243 1 2419 2 0 2146 2243 1 1059 2 0 1463 2243 1 2420 2 0 1462 2243 1 1545 2 0 2145 2242 1 1546 2 0 2144 2242 1 2408 2 0 1457 2242 1 2409 2 0 1456 2242 1 1551 2 0 2210 2241 1 2448 2 0 1430 2241 1 1062 2 0 1429 2241 1 1550 2 0 1012 2241 1 1540 2 0 2175 2240 1 2405 2 0 1468 2240 1 2404 2 0 1467 2240 1 1539 2 0 1026 2240 1 1507 2 0 2212 2239 1 2437 2 0 2211 2239 1 1508 2 0 2164 2239 1 1506 2 0 2163 2239 1 2417 2 0 1461 2238 1 2418 2 0 1460 2238 1 1531 2 0 1459 2238 1 1530 2 0 1024 2238 1 1544 2 0 1432 2237 1 1060 2 0 1431 2237 1 1542 2 0 1014 2237 1 1543 2 0 1013 2237 1 2488 2 0 2217 2236 1 2489 2 0 2186 2236 1 2427 2 0 1448 2236 1 2428 2 0 1447 2236 1 1050 2 0 1472 2235 1 1049 2 0 1471 2235 1 1515 2 0 1470 2235 1 1514 2 0 1027 2235 1 1494 2 0 2169 2234 1 2443 2 0 2168 2234 1 1493 2 0 1443 2234 1 1036 2 0 1020 2234 1 2455 2 0 2182 2233 1 2454 2 0 2181 2233 1 2456 2 0 1454 2233 1 1499 2 0 1022 2233 1 2460 2 0 2185 2232 1 2426 2 0 2184 2232 1 2461 2 0 2183 2232 1 2425 2 0 1416 2232 1 1526 2 0 2197 2231 1 2442 2 0 2196 2231 1 1056 2 0 1453 2231 1 1533 2 0 2170 2230 1 1057 2 0 1411 2230 1 2447 2 0 1410 2230 1 1532 2 0 1000 2230 1 1058 2 0 1466 2229 1 2406 2 0 1465 2229 1 2407 2 0 1464 2229 1 1537 2 0 1025 2229 1 1549 2 0 2174 2228 1 1548 2 0 1450 2228 1 1547 2 0 1449 2228 1 1061 2 0 1021 2228 1 2451 2 0 2177 2227 1 2452 2 0 2176 2227 1 1495 2 0 1409 2227 1 2453 2 0 1408 2227 1 1038 2 0 1428 2226 1 2476 2 0 1427 2226 1 1498 2 0 1426 2226 1 1497 2 0 1011 2226 1 1509 2 0 2213 2225 1 1047 2 0 1422 2225 1 1510 2 0 1421 2225 1 1511 2 0 1009 2225 1 2449 2 0 2204 2224 1 2450 2 0 1415 2224 1 1492 2 0 1414 2224 1 1491 2 0 1002 2224 1 1517 2 0 2201 2223 1 2475 2 0 2200 2223 1 2474 2 0 2199 2223 1 1054 2 0 1440 2223 1 2490 2 0 1003 2222 1 2430 2 0 2161 825 1 2429 2 0 2160 825 1 2449 2 0 2177 822 1 2450 2 0 2176 822 1 2443 2 0 2170 821 1 2453 2 0 2180 820 1 2451 2 0 2179 820 1 
2452 2 0 2178 820 1 2464 2 0 2192 819 1 2463 2 0 2191 819 1 2476 2 0 2204 818 1 2456 2 0 2185 816 1 2455 2 0 2184 816 1 2454 2 0 2183 816 1 2431 2 0 2208 813 1 2436 2 0 2213 809 1 2458 2 0 2187 808 1 2423 2 0 2157 808 1 2437 2 0 2198 807 1 2413 2 0 2152 806 1 2414 2 0 2151 806 1 2482 2 0 2210 802 1 2445 2 0 2173 800 1 2446 2 0 2172 800 1 2444 2 0 2171 800 1 2481 2 0 2209 799 1 2474 2 0 2203 796 1 2475 2 0 2202 796 1 2438 2 0 2167 795 1 2439 2 0 2166 795 1 2440 2 0 2165 795 1 2432 2 0 2162 794 1 2415 2 0 2153 793 1 2424 2 0 2158 792 1 2434 2 0 2212 791 1 2483 2 0 2211 791 1 2435 2 0 2164 791 1 2433 2 0 2163 791 1 2472 2 0 2201 788 1 2473 2 0 2200 788 1 2471 2 0 2199 788 1 2460 2 0 2189 787 1 2425 2 0 2188 787 1 2442 2 0 2169 783 1 2441 2 0 2168 783 1 2418 2 0 2217 779 1 2417 2 0 2186 779 1 2447 2 0 2174 777 1 2465 2 0 2195 774 1 2466 2 0 2194 774 1 2467 2 0 2193 774 1 2480 2 0 2207 771 1 2478 2 0 2206 771 1 2470 2 0 2197 770 1 2468 2 0 2196 770 1 2406 2 0 2182 769 1 2407 2 0 2181 769 1 2420 2 0 2155 768 1 2419 2 0 2154 768 1 2404 2 0 2145 767 1 2405 2 0 2144 767 1 2422 2 0 2156 762 1 2408 2 0 2147 760 1 2409 2 0 2146 760 1 2448 2 0 2175 754 1 2411 2 0 2150 749 1 2412 2 0 2149 749 1 2410 2 0 2148 749 1 2427 2 0 2220 748 1 2489 2 0 2219 748 1 2488 2 0 2218 748 1 2428 2 0 2159 748 1 2491 1 0 829 1 2492 1 0 889 1 2493 1 0 904 1 2494 1 0 904 1 2495 1 0 904 1 2496 1 0 904 1 2497 1 0 858 1 2498 1 0 858 1 2499 1 0 858 1 2500 1 0 858 1 2501 1 0 858 1 2502 1 0 858 1 2503 1 0 858 1 2504 1 0 845 1 2505 1 0 845 1 2506 1 0 845 1 2507 1 0 845 1 2508 1 0 845 1 2509 1 0 855 1 2510 1 0 855 1 2511 1 0 855 1 2512 1 0 855 1 2513 1 0 855 1 2514 1 0 855 1 2515 1 0 851 1 2516 1 0 851 1 2517 1 0 851 1 2518 1 0 851 1 2519 1 0 851 1 2520 1 0 864 1 2521 1 0 841 1 2522 1 0 834 1 2523 1 0 827 1 2524 1 0 888 1 2525 1 0 903 1 2526 1 0 903 1 2527 1 0 903 1 2528 1 0 903 1 2529 1 0 857 1 2530 1 0 857 1 2531 1 0 857 1 2532 1 0 857 1 2533 1 0 857 1 2534 1 0 857 1 2535 1 0 857 1 2536 1 0 843 1 2537 1 0 
843 1 2538 1 0 843 1 2539 1 0 843 1 2540 1 0 843 1 2541 1 0 854 1 2542 1 0 854 1 2543 1 0 854 1 2544 1 0 854 1 2545 1 0 854 1 2546 1 0 854 1 2547 1 0 849 1 2548 1 0 849 1 2549 1 0 849 1 2550 1 0 849 1 2551 1 0 849 1 2552 1 0 862 1 2553 1 0 840 1 2554 1 0 832 1 2555 1 0 846 1 2556 1 0 846 1 2557 1 0 846 1 2558 1 0 846 1 2559 1 0 846 1 2560 1 0 897 1 2561 1 0 897 1 2562 1 0 884 1 2563 1 0 884 1 2564 1 0 884 1 2565 1 0 884 1 2566 1 0 884 1 2567 1 0 884 1 2568 1 0 838 1 2569 1 0 875 1 2570 1 0 914 1 2571 1 0 914 1 2572 1 0 830 1 2573 1 0 830 1 2574 1 0 830 1 2575 1 0 830 1 2576 1 0 830 1 2577 1 0 830 1 2578 1 0 865 1 2579 1 0 865 1 2580 1 0 865 1 2581 1 0 865 1 2582 1 0 852 1 2583 1 0 852 1 2584 1 0 852 1 2585 1 0 852 1 2586 1 0 852 1 2587 1 0 835 1 2588 1 0 835 1 2589 1 0 835 1 2590 1 0 835 1 2591 1 0 844 1 2592 1 0 844 1 2593 1 0 844 1 2594 1 0 844 1 2595 1 0 844 1 2596 1 0 896 1 2597 1 0 896 1 2598 1 0 883 1 2599 1 0 883 1 2600 1 0 883 1 2601 1 0 883 1 2602 1 0 883 1 2603 1 0 883 1 2604 1 0 837 1 2605 1 0 874 1 2606 1 0 913 1 2607 1 0 913 1 2608 1 0 828 1 2609 1 0 828 1 2610 1 0 828 1 2611 1 0 828 1 2612 1 0 828 1 2613 1 0 828 1 2614 1 0 863 1 2615 1 0 863 1 2616 1 0 863 1 2617 1 0 863 1 2618 1 0 850 1 2619 1 0 850 1 2620 1 0 850 1 2621 1 0 850 1 2622 1 0 850 1 2623 1 0 833 1 2624 1 0 833 1 2625 1 0 833 1 2626 1 0 833 1 2111 2 1 2627 2390 1 2111 2 1 2393 2389 1 2111 2 1 2398 2388 1 2111 2 1 1707 2387 1 2111 2 1 1706 2386 1 2111 2 1 1676 2385 1 2111 2 1 1678 2384 1 2111 2 1 1696 2383 1 2111 2 1 2392 2382 1 2111 2 1 1695 2381 1 2111 2 1 2397 2380 1 2111 2 1 2391 2379 1 2111 2 1 1705 2378 1 2111 2 1 2628 2377 1 2111 2 1 2403 2376 1 2111 2 1 2399 2375 1 2111 2 1 2629 2374 1 2111 2 1 2394 2373 1 2111 2 1 2401 2372 1 2630 2 0 2390 2580 1 2631 2 0 2390 861 1 2130 2 0 2389 2583 1 2630 2 0 2389 2516 1 2632 2 0 2389 848 1 2131 2 0 2388 2622 1 2633 2 0 2388 2528 1 2634 2 0 2388 919 1 2635 2 0 2387 2545 1 2636 2 0 2387 2510 1 2637 2 0 2387 901 1 2636 2 0 2386 2557 1 2638 2 0 
2386 2515 1 2639 2 0 2386 899 1 2640 2 0 2385 2561 1 2641 2 0 2385 2533 1 2642 2 0 2385 2499 1 2643 2 0 2385 920 1 2642 2 0 2384 2594 1 2640 2 0 2384 2526 1 2633 2 0 2384 2493 1 2644 2 0 2384 917 1 2636 2 0 2383 2593 1 2642 2 0 2383 2558 1 2132 2 0 2383 2504 1 2645 2 0 2383 842 1 2132 2 0 2382 2626 1 2633 2 0 2382 2590 1 2642 2 0 2382 2529 1 2131 2 0 2382 2498 1 2646 2 0 2382 856 1 2645 2 0 2381 2592 1 2644 2 0 2381 2559 1 2643 2 0 2381 2535 1 2646 2 0 2381 2497 1 2642 2 0 2381 876 1 2646 2 0 2380 2624 1 2644 2 0 2380 2525 1 2634 2 0 2380 2496 1 2633 2 0 2380 921 1 2132 2 0 2379 2589 1 2636 2 0 2379 2541 1 2130 2 0 2379 2511 1 2647 2 0 2379 866 1 2639 2 0 2378 2595 1 2645 2 0 2378 2556 1 2637 2 0 2378 2544 1 2647 2 0 2378 2509 1 2636 2 0 2378 853 1 2648 2 0 2377 2610 1 2649 2 0 2377 2575 1 2650 2 0 2377 907 1 2648 2 0 2376 2574 1 2133 2 0 2376 2538 1 2651 2 0 2376 847 1 2133 2 0 2375 2616 1 2131 2 0 2375 2531 1 2648 2 0 2375 2501 1 2652 2 0 2375 915 1 2651 2 0 2374 2609 1 2650 2 0 2374 2576 1 2652 2 0 2374 2534 1 2648 2 0 2374 886 1 2630 2 0 2373 2617 1 2133 2 0 2373 2579 1 2130 2 0 2373 2542 1 2653 2 0 2373 867 1 2631 2 0 2372 2614 1 2653 2 0 2372 2578 1 2632 2 0 2372 2548 1 2630 2 0 2372 892 1 2632 2 0 1895 2619 1 2134 2 0 1895 2584 1 2647 2 0 1895 2543 1 2653 2 0 1895 2512 1 2134 2 0 1894 2621 1 2634 2 0 1894 2586 1 2646 2 0 1894 2530 1 2652 2 0 1894 2502 1 2647 2 0 1893 2625 1 2646 2 0 1893 2587 1 2645 2 0 1893 2536 1 2134 2 0 1893 2506 1 2653 2 0 1892 2615 1 2652 2 0 1892 2581 1 2134 2 0 1892 2537 1 2651 2 0 1892 2505 1 2130 2 0 1891 2620 1 2131 2 0 1891 2585 1 2132 2 0 1891 2540 1 2133 2 0 1891 2507 1 2364 2 0 2392 2626 1 2654 2 0 2114 2625 1 2655 2 0 2397 2624 1 2365 2 0 2398 2622 1 2362 2 0 2113 2621 1 2366 2 0 2116 2620 1 2656 2 0 2112 2619 1 2657 2 0 2394 2617 1 2363 2 0 2399 2616 1 2658 2 0 2115 2615 1 2659 2 0 2401 2614 1 1557 2 0 1559 2607 1 1642 2 0 1558 2606 1 1644 2 0 1562 2605 1 2660 2 0 2400 2604 1 1640 2 0 1650 2603 1 1646 2 0 1651 2602 1 1665 2 
0 1674 2601 1 1655 2 0 1660 2600 1 2661 2 0 1563 2599 1 1656 2 0 1661 2598 1 1688 2 0 1675 2597 1 1667 2 0 1677 2596 1 1714 2 0 1705 2595 1 1691 2 0 1678 2594 1 1701 2 0 1696 2593 1 1700 2 0 1695 2592 1 2662 2 0 2392 2590 1 2364 2 0 2391 2589 1 2655 2 0 2114 2587 1 2663 2 0 2113 2586 1 2365 2 0 2116 2585 1 2362 2 0 2112 2584 1 2366 2 0 2393 2583 1 2664 2 0 2115 2581 1 2363 2 0 2394 2579 1 2658 2 0 2401 2578 1 2665 2 0 2403 2574 1 1641 2 0 1559 2571 1 1642 2 0 1474 2570 1 1645 2 0 1561 2569 1 2666 2 0 2395 2568 1 1640 2 0 2396 2567 1 1646 2 0 1563 2566 1 1665 2 0 1660 2565 1 1655 2 0 1650 2564 1 1666 2 0 1661 2563 1 1656 2 0 1651 2562 1 1667 2 0 1676 2561 1 1689 2 0 1675 2560 1 1690 2 0 1695 2559 1 1691 2 0 1696 2558 1 1701 2 0 1706 2557 1 1700 2 0 1705 2556 1 1069 2 0 1474 2552 1 1642 2 0 1561 2551 1 1643 2 0 1563 2550 1 1644 2 0 1560 2549 1 2656 2 0 2401 2548 1 1714 2 0 2402 2547 1 1641 2 0 1562 2546 1 1646 2 0 1707 2545 1 1715 2 0 1705 2544 1 2654 2 0 2112 2543 1 2366 2 0 2394 2542 1 1701 2 0 2391 2541 1 2364 2 0 2116 2540 1 2660 2 0 1651 2539 1 2363 2 0 2403 2538 1 2362 2 0 2115 2537 1 1700 2 0 2114 2536 1 1688 2 0 1695 2535 1 1656 2 0 1676 2533 1 2666 2 0 1660 2532 1 2365 2 0 2399 2531 1 2655 2 0 2113 2530 1 1691 2 0 2392 2529 1 2662 2 0 2398 2528 1 1665 2 0 1675 2527 1 1667 2 0 1678 2526 1 1690 2 0 2397 2525 1 1666 2 0 1677 2524 1 1557 2 0 1033 2520 1 1640 2 0 1560 2519 1 1643 2 0 1561 2518 1 1644 2 0 1559 2517 1 2657 2 0 2393 2516 1 2667 2 0 1706 2515 1 1715 2 0 1650 2514 1 1645 2 0 1558 2513 1 2658 2 0 2112 2512 1 2366 2 0 2391 2511 1 1701 2 0 1707 2510 1 2654 2 0 1705 2509 1 1655 2 0 2395 2508 1 2363 2 0 2116 2507 1 2362 2 0 2114 2506 1 2668 2 0 2115 2505 1 2364 2 0 1696 2504 1 1688 2 0 1660 2503 1 2664 2 0 2113 2502 1 2665 2 0 2399 2501 1 1656 2 0 2400 2500 1 1691 2 0 1676 2499 1 2365 2 0 2392 2498 1 2655 2 0 1695 2497 1 2663 2 0 2397 2496 1 1690 2 0 1675 2495 1 1667 2 0 1661 2494 1 2662 2 0 1678 2493 1 1689 2 0 1674 2492 1 2401 2 0 2367 2348 1 2115 2 0 
2368 2347 1 2627 2 0 2371 2345 1 2403 2 0 2370 2340 1 2115 2 0 2367 2310 1 2394 2 0 2371 2309 1 2628 2 0 2370 2307 1 2629 2 0 2369 2305 1 2393 2 0 2371 2279 1 2112 2 0 2367 2273 1 2115 2 0 2369 2269 1 2399 2 0 2370 2265 1 2113 2 0 2368 2262 1 2629 2 0 2368 2232 1 2371 2 0 2110 2093 1 1887 2 0 2109 2092 1 2367 2 0 1739 2091 1 1890 2 0 2106 2059 1 2368 2 0 1739 2057 1 1887 2 0 2110 2055 1 1890 2 0 2110 2018 1 1886 2 0 1739 2013 1 1889 2 0 2109 2007 1 2371 2 0 2106 1992 1 2369 2 0 1739 1981 1 2370 2 0 2109 1977 1 2662 2 0 2397 921 1 2663 2 0 2398 919 1 2664 2 0 2399 915 1 2364 2 0 2114 900 1 2661 2 0 2396 894 1 2657 2 0 2401 892 1 2660 2 0 2395 890 1 2363 2 0 2115 878 1 2666 2 0 2400 869 1 2365 2 0 2113 868 1 2658 2 0 2394 867 1 2654 2 0 2391 866 1 2362 2 0 2116 860 1 2366 2 0 2112 859 1 2655 2 0 2392 856 1 2656 2 0 2393 848 1 2668 2 0 2403 847 1 2399 2 0 2368 816 1 2401 2 0 2371 793 1 2629 2 0 2370 787 1 2394 2 0 2367 768 1 2403 2 0 2369 748 1 2368 2 0 2109 717 1 1887 2 0 1739 680 1 2367 2 0 2110 669 1 2143 2 0 2106 650 1 2403 3 0 2629 2460 2489 1 2628 3 0 2629 2426 2457 1 2399 3 0 2629 2425 2454 1 2629 3 0 2628 2457 2426 1 2401 3 0 2627 1541 2415 1 2655 3 0 2662 1574 1631 1 2363 3 0 2658 1621 1614 1 2658 3 0 2363 1614 1621 1 2362 3 0 2363 1613 1626 1 2366 3 0 2656 1635 1628 1 2656 3 0 2366 1628 1635 1 2654 3 0 2366 1629 1622 1 2365 3 0 2362 1627 1620 1 2364 3 0 2362 1625 1085 1 2363 3 0 2362 1626 1613 1 2362 3 0 2365 1620 1627 1 2655 3 0 2365 1094 1630 1 2364 3 0 2655 1097 1086 1 2662 3 0 2655 1631 1574 1 2365 3 0 2655 1630 1094 1 2364 3 0 2654 1623 1597 1 2366 3 0 2654 1622 1629 1 2654 3 0 2364 1597 1623 1 2655 3 0 2364 1086 1097 1 2362 3 0 2364 1085 1625 1 2370 3 0 2369 2219 2189 1 1887 3 0 2369 2159 2217 1 1739 3 0 2109 1939 1940 1 1737 3 0 2109 1337 1938 1 2110 3 0 1739 1368 1376 1 2109 3 0 1739 1940 1939 1 1740 3 0 1739 1369 1381 1 2108 3 0 1739 1962 1964 1 2669 3 0 2110 1919 983 1 1739 3 0 2110 1376 1368 1 1736 3 0 2110 987 1914 1 1736 3 0 2106 1390 1384 1 
2670 3 0 2371 2153 1458 1 2367 3 0 2371 1019 2155 1 2143 3 0 2371 1439 2148 1 2369 3 0 2370 2189 2219 1 2368 3 0 2370 2188 2183 1 2629 3 0 2403 2489 2460 1 2115 3 0 2403 2428 2418 1 2399 3 0 2115 2417 2456 1 2403 3 0 2115 2418 2428 1 2627 3 0 2401 2415 1541 1 2394 3 0 2401 1055 2420 1 2393 3 0 2401 1522 2410 1 2395 3 0 2400 2470 2435 1 1660 3 0 2400 2468 2441 1 2115 3 0 2399 2456 2417 1 2113 3 0 2399 1499 2407 1 2629 3 0 2399 2454 2425 1 2113 3 0 2398 2453 2406 1 2397 3 0 2398 1495 2450 1 2398 3 0 2397 2450 1495 1 1563 3 0 2396 2439 1485 1 2400 3 0 2395 2435 2470 1 1651 3 0 2395 2434 2437 1 2401 3 0 2394 2420 1055 1 2112 3 0 2394 1059 2409 1 2401 3 0 2393 2410 1522 1 2116 3 0 2112 2408 1060 1 2394 3 0 2112 2409 1059 1 2112 3 0 2116 1060 2408 1 2398 3 0 2113 2406 2453 1 2399 3 0 2113 2407 1499 1 1650 3 0 1707 2482 1043 1 1674 3 0 1677 2474 2472 1 1660 3 0 1676 2443 2442 1 1677 3 0 1674 2472 2474 1 2400 3 0 1660 2441 2468 1 1676 3 0 1660 2442 2443 1 2395 3 0 1651 2437 2434 1 1707 3 0 1650 1043 2482 1 2396 3 0 1563 1485 2439 1 1558 3 0 1562 2465 2464 1 1562 3 0 1558 2464 2465 1 1033 3 0 1474 2429 1535 1 1474 3 0 1033 1535 2429 1 1085 2 0 1549 2626 1 1086 2 0 1548 2626 1 1598 2 0 1547 2626 1 1597 2 0 1061 2626 1 2671 2 0 1515 2625 1 2672 2 0 1514 2625 1 1622 2 0 1050 2625 1 1623 2 0 1049 2625 1 1630 2 0 2450 2624 1 1631 2 0 2449 2624 1 1097 2 0 1492 2624 1 1632 2 0 1491 2624 1 1102 2 0 1529 2623 1 1103 2 0 1528 2623 1 1104 2 0 1527 2623 1 1620 2 0 2453 2622 1 2673 2 0 2452 2622 1 2674 2 0 2451 2622 1 1094 2 0 1495 2622 1 1626 2 0 2407 2621 1 1627 2 0 2406 2621 1 1096 2 0 1537 2621 1 1625 2 0 1058 2621 1 1629 2 0 1544 2620 1 2675 2 0 1543 2620 1 2676 2 0 1542 2620 1 1628 2 0 1060 2620 1 2677 2 0 2409 2619 1 1635 2 0 2408 2619 1 2678 2 0 1546 2619 1 2679 2 0 1545 2619 1 1082 2 0 1490 2618 1 2680 2 0 1489 2618 1 2681 2 0 1488 2618 1 2682 2 0 2420 2617 1 2683 2 0 2419 2617 1 1091 2 0 1538 2617 1 1605 2 0 1059 2617 1 1614 2 0 2456 2616 1 2684 2 0 2455 2616 1 2685 2 0 2454 
2616 1 1613 2 0 1499 2616 1 2686 2 0 2418 2615 1 1621 2 0 2417 2615 1 1095 2 0 1531 2615 1 2687 2 0 1530 2615 1 2688 2 0 2416 2614 1 2689 2 0 2415 2614 1 2690 2 0 1522 2614 1 1624 2 0 1055 2614 1 1586 2 0 1044 2613 1 2691 2 0 2459 2612 1 2692 2 0 2424 2612 1 2693 2 0 1556 2611 1 2694 2 0 2458 2610 1 2695 2 0 2457 2610 1 2696 2 0 2423 2610 1 2697 2 0 2461 2609 1 2698 2 0 2460 2609 1 2699 2 0 2426 2609 1 2700 2 0 2425 2609 1 1639 2 0 2462 2608 1 2701 2 0 2431 2607 1 2702 2 0 1501 2607 1 1070 2 0 1500 2607 1 1567 2 0 1040 2607 1 1584 2 0 2464 2606 1 2703 2 0 2463 2606 1 1076 2 0 1496 2606 1 1583 2 0 1037 2606 1 1602 2 0 2467 2605 1 1603 2 0 2466 2605 1 1604 2 0 2465 2605 1 2704 2 0 1534 2605 1 2705 2 0 2470 2604 1 2706 2 0 2469 2604 1 2707 2 0 2468 2604 1 2708 2 0 1536 2604 1 1071 2 0 1502 2603 1 1568 2 0 1043 2603 1 1569 2 0 1042 2603 1 1570 2 0 1041 2603 1 1585 2 0 2437 2602 1 1078 2 0 1508 2602 1 1077 2 0 1507 2602 1 1079 2 0 1506 2602 1 1587 2 0 2473 2601 1 1588 2 0 2472 2601 1 2709 2 0 2471 2601 1 1081 2 0 1523 2601 1 1590 2 0 2442 2600 1 2710 2 0 2441 2600 1 1591 2 0 1526 2600 1 1589 2 0 1056 2600 1 2711 2 0 1487 2599 1 2712 2 0 1486 2599 1 2713 2 0 1485 2599 1 1601 2 0 1035 2599 1 2714 2 0 2436 2598 1 1092 2 0 1505 2598 1 2715 2 0 1504 2598 1 1609 2 0 1045 2598 1 1577 2 0 1511 2597 1 1072 2 0 1510 2597 1 2716 2 0 1509 2597 1 1576 2 0 1047 2597 1 1593 2 0 2475 2596 1 1594 2 0 2474 2596 1 1083 2 0 1517 2596 1 1592 2 0 1054 2596 1 2717 2 0 2448 2595 1 2718 2 0 1551 2595 1 2719 2 0 1550 2595 1 1599 2 0 1062 2595 1 1616 2 0 2476 2594 1 1093 2 0 1498 2594 1 2720 2 0 1497 2594 1 1615 2 0 1038 2594 1 1634 2 0 1555 2593 1 1098 2 0 1554 2593 1 2721 2 0 1553 2593 1 1633 2 0 1063 2593 1 1638 2 0 2447 2592 1 1099 2 0 1533 2592 1 1637 2 0 1532 2592 1 1636 2 0 1057 2592 1 1075 2 0 2477 2591 1 2722 2 0 1549 2590 1 2723 2 0 1548 2590 1 1575 2 0 1547 2590 1 1574 2 0 1061 2590 1 1597 2 0 2405 2589 1 1598 2 0 2404 2589 1 1085 2 0 1540 2589 1 1086 2 0 1539 2589 1 1610 2 0 1068 2588 
1 1611 2 0 1067 2588 1 1612 2 0 1066 2588 1 1097 2 0 1515 2587 1 1632 2 0 1514 2587 1 1630 2 0 1050 2587 1 1631 2 0 1049 2587 1 2724 2 0 2407 2586 1 2725 2 0 2406 2586 1 2726 2 0 1537 2586 1 1578 2 0 1058 2586 1 1094 2 0 1544 2585 1 2673 2 0 1543 2585 1 2674 2 0 1542 2585 1 1620 2 0 1060 2585 1 1626 2 0 2409 2584 1 1627 2 0 2408 2584 1 1096 2 0 1546 2584 1 1625 2 0 1545 2584 1 1628 2 0 2412 2583 1 1629 2 0 2411 2583 1 2675 2 0 2410 2583 1 2676 2 0 1552 2583 1 1572 2 0 2414 2582 1 1573 2 0 2413 2582 1 1571 2 0 1046 2582 1 2727 2 0 2418 2581 1 2728 2 0 2417 2581 1 1582 2 0 1531 2581 1 2729 2 0 1530 2581 1 1605 2 0 2422 2580 1 2682 2 0 2421 2580 1 1091 2 0 1541 2580 1 1614 2 0 2420 2579 1 2685 2 0 2419 2579 1 2684 2 0 1538 2579 1 1613 2 0 1059 2579 1 2686 2 0 2416 2578 1 2687 2 0 2415 2578 1 1095 2 0 1522 2578 1 1621 2 0 1055 2578 1 1080 2 0 1503 2577 1 2730 2 0 2461 2576 1 2731 2 0 2460 2576 1 2692 2 0 2426 2576 1 2691 2 0 2425 2576 1 2732 2 0 2458 2575 1 2733 2 0 2457 2575 1 2693 2 0 2423 2575 1 2696 2 0 2489 2574 1 2734 2 0 2488 2574 1 2694 2 0 2428 2574 1 2695 2 0 2427 2574 1 2735 2 0 2459 2573 1 1639 2 0 2424 2573 1 2736 2 0 1556 2572 1 2737 2 0 2431 2571 1 1073 2 0 1501 2571 1 2738 2 0 1500 2571 1 1579 2 0 1040 2571 1 1583 2 0 2430 2570 1 2703 2 0 2429 2570 1 1076 2 0 1484 2570 1 1584 2 0 1034 2570 1 1617 2 0 2432 2569 1 2739 2 0 1521 2569 1 2740 2 0 1520 2569 1 2741 2 0 1519 2569 1 2742 2 0 2483 2568 1 1619 2 0 2435 2568 1 2743 2 0 2434 2568 1 2744 2 0 2433 2568 1 1568 2 0 2440 2567 1 1569 2 0 2439 2567 1 1570 2 0 2438 2567 1 1071 2 0 1518 2567 1 1077 2 0 1487 2566 1 1078 2 0 1486 2566 1 1079 2 0 1485 2566 1 1585 2 0 1035 2566 1 1588 2 0 2442 2565 1 2709 2 0 2441 2565 1 1081 2 0 1526 2565 1 1587 2 0 1056 2565 1 2710 2 0 1502 2564 1 1590 2 0 1043 2564 1 1589 2 0 1042 2564 1 1591 2 0 1041 2564 1 2745 2 0 2436 2563 1 2746 2 0 1505 2563 1 2747 2 0 1504 2563 1 1606 2 0 1045 2563 1 2714 2 0 2437 2562 1 1092 2 0 1508 2562 1 2715 2 0 1507 2562 1 1609 2 0 1506 2562 1 
1594 2 0 2443 2561 1 1083 2 0 1494 2561 1 1592 2 0 1493 2561 1 1593 2 0 1036 2561 1 1090 2 0 1511 2560 1 2748 2 0 1510 2560 1 2749 2 0 1509 2560 1 1600 2 0 1047 2560 1 1581 2 0 2447 2559 1 1074 2 0 1533 2559 1 2750 2 0 1532 2559 1 1580 2 0 1057 2559 1 1093 2 0 1555 2558 1 1616 2 0 1554 2558 1 2720 2 0 1553 2558 1 1615 2 0 1063 2558 1 1633 2 0 2446 2557 1 1634 2 0 2445 2557 1 2721 2 0 2444 2557 1 1098 2 0 1516 2557 1 1638 2 0 2448 2556 1 1099 2 0 1551 2556 1 1636 2 0 1550 2556 1 1637 2 0 1062 2556 1 2751 2 0 1039 2555 1 1104 2 0 1490 2554 1 1102 2 0 1489 2554 1 1103 2 0 1488 2554 1 1607 2 0 1065 2553 1 1608 2 0 1064 2553 1 2752 2 0 2430 2552 1 2753 2 0 2429 2552 1 2754 2 0 1484 2552 1 1618 2 0 1034 2552 1 1583 2 0 2432 2551 1 1584 2 0 1521 2551 1 1076 2 0 1520 2551 1 2703 2 0 1519 2551 1 1087 2 0 1487 2550 1 1088 2 0 1486 2550 1 2755 2 0 1485 2550 1 1089 2 0 1035 2550 1 2704 2 0 2481 2549 1 1602 2 0 1053 2549 1 1603 2 0 1052 2549 1 1604 2 0 1051 2549 1 2677 2 0 2416 2548 1 2678 2 0 2415 2548 1 2679 2 0 1522 2548 1 1635 2 0 1055 2548 1 1599 2 0 2487 2547 1 2717 2 0 2486 2547 1 2718 2 0 2485 2547 1 2719 2 0 2484 2547 1 2738 2 0 2467 2546 1 1579 2 0 2466 2546 1 2737 2 0 2465 2546 1 1073 2 0 1534 2546 1 1585 2 0 2482 2545 1 1078 2 0 1513 2545 1 1077 2 0 1512 2545 1 1079 2 0 1048 2545 1 1596 2 0 2448 2544 1 2756 2 0 1551 2544 1 1084 2 0 1550 2544 1 1595 2 0 1062 2544 1 1622 2 0 2409 2543 1 1623 2 0 2408 2543 1 2671 2 0 1546 2543 1 2672 2 0 1545 2543 1 1628 2 0 2420 2542 1 2675 2 0 2419 2542 1 2676 2 0 1538 2542 1 1629 2 0 1059 2542 1 1634 2 0 2405 2541 1 1633 2 0 2404 2541 1 2721 2 0 1540 2541 1 1098 2 0 1539 2541 1 1598 2 0 1544 2540 1 1085 2 0 1543 2540 1 1086 2 0 1542 2540 1 1597 2 0 1060 2540 1 2706 2 0 2437 2539 1 2708 2 0 1508 2539 1 2707 2 0 1507 2539 1 2705 2 0 1506 2539 1 2684 2 0 2489 2538 1 2685 2 0 2488 2538 1 1613 2 0 2428 2538 1 1614 2 0 2427 2538 1 1626 2 0 2418 2537 1 1627 2 0 2417 2537 1 1625 2 0 1531 2537 1 1096 2 0 1530 2537 1 1636 2 0 1515 2536 1 1099 
2 0 1514 2536 1 1638 2 0 1050 2536 1 1637 2 0 1049 2536 1 1576 2 0 2447 2535 1 2716 2 0 1533 2535 1 1072 2 0 1532 2535 1 1577 2 0 1057 2535 1 2727 2 0 2461 2534 1 2729 2 0 2460 2534 1 2728 2 0 2426 2534 1 1582 2 0 2425 2534 1 2714 2 0 2443 2533 1 2715 2 0 1494 2533 1 1609 2 0 1493 2533 1 1092 2 0 1036 2533 1 2743 2 0 2442 2532 1 2742 2 0 2441 2532 1 2744 2 0 1526 2532 1 1619 2 0 1056 2532 1 1620 2 0 2456 2531 1 2674 2 0 2455 2531 1 2673 2 0 2454 2531 1 1094 2 0 1499 2531 1 1630 2 0 2407 2530 1 1631 2 0 2406 2530 1 1097 2 0 1537 2530 1 1632 2 0 1058 2530 1 2720 2 0 1549 2529 1 1616 2 0 1548 2529 1 1615 2 0 1547 2529 1 1093 2 0 1061 2529 1 1574 2 0 2453 2528 1 2722 2 0 2452 2528 1 2723 2 0 2451 2528 1 1575 2 0 1495 2528 1 1081 2 0 1511 2527 1 1587 2 0 1510 2527 1 2709 2 0 1509 2527 1 1588 2 0 1047 2527 1 1593 2 0 2476 2526 1 1592 2 0 1498 2526 1 1083 2 0 1497 2526 1 1594 2 0 1038 2526 1 1581 2 0 2450 2525 1 2750 2 0 2449 2525 1 1580 2 0 1492 2525 1 1074 2 0 1491 2525 1 2746 2 0 2475 2524 1 2745 2 0 2474 2524 1 2747 2 0 1517 2524 1 1606 2 0 1054 2524 1 1075 2 0 2490 2523 1 1572 2 0 1068 2522 1 1573 2 0 1067 2522 1 1571 2 0 1066 2522 1 1100 2 0 1525 2521 1 1101 2 0 1524 2521 1 1567 2 0 2480 2520 1 2701 2 0 2479 2520 1 2702 2 0 2478 2520 1 1070 2 0 1535 2520 1 1570 2 0 2481 2519 1 1568 2 0 1053 2519 1 1569 2 0 1052 2519 1 1071 2 0 1051 2519 1 2755 2 0 2432 2518 1 1087 2 0 1521 2518 1 1088 2 0 1520 2518 1 1089 2 0 1519 2518 1 1604 2 0 2431 2517 1 2704 2 0 1501 2517 1 1602 2 0 1500 2517 1 1603 2 0 1040 2517 1 2682 2 0 2412 2516 1 1605 2 0 2411 2516 1 2683 2 0 2410 2516 1 1091 2 0 1552 2516 1 2757 2 0 2446 2515 1 2758 2 0 2445 2515 1 2759 2 0 2444 2515 1 2760 2 0 1516 2515 1 2756 2 0 1502 2514 1 1596 2 0 1043 2514 1 1595 2 0 1042 2514 1 1084 2 0 1041 2514 1 2739 2 0 2464 2513 1 2741 2 0 2463 2513 1 2740 2 0 1496 2513 1 1617 2 0 1037 2513 1 2686 2 0 2409 2512 1 1621 2 0 2408 2512 1 2687 2 0 1546 2512 1 1095 2 0 1545 2512 1 1628 2 0 2405 2511 1 1629 2 0 2404 2511 1 2675 2 0 
1540 2511 1 2676 2 0 1539 2511 1 1633 2 0 2482 2510 1 1098 2 0 1513 2510 1 2721 2 0 1512 2510 1 1634 2 0 1048 2510 1 1622 2 0 2448 2509 1 2672 2 0 1551 2509 1 2671 2 0 1550 2509 1 1623 2 0 1062 2509 1 2710 2 0 2483 2508 1 1589 2 0 2435 2508 1 1590 2 0 2434 2508 1 1591 2 0 2433 2508 1 1613 2 0 1544 2507 1 2685 2 0 1543 2507 1 2684 2 0 1542 2507 1 1614 2 0 1060 2507 1 1096 2 0 1515 2506 1 1625 2 0 1514 2506 1 1626 2 0 1050 2506 1 1627 2 0 1049 2506 1 2697 2 0 2418 2505 1 2699 2 0 2417 2505 1 2700 2 0 1531 2505 1 2698 2 0 1530 2505 1 1597 2 0 1555 2504 1 1086 2 0 1554 2504 1 1085 2 0 1553 2504 1 1598 2 0 1063 2504 1 1576 2 0 2442 2503 1 2716 2 0 2441 2503 1 1577 2 0 1526 2503 1 1072 2 0 1056 2503 1 2727 2 0 2407 2502 1 2728 2 0 2406 2502 1 2729 2 0 1537 2502 1 1582 2 0 1058 2502 1 2695 2 0 2456 2501 1 2696 2 0 2455 2501 1 2734 2 0 2454 2501 1 2694 2 0 1499 2501 1 1609 2 0 2470 2500 1 2714 2 0 2469 2500 1 2715 2 0 2468 2500 1 1092 2 0 1536 2500 1 1615 2 0 2443 2499 1 2720 2 0 1494 2499 1 1093 2 0 1493 2499 1 1616 2 0 1036 2499 1 2673 2 0 1549 2498 1 2674 2 0 1548 2498 1 1094 2 0 1547 2498 1 1620 2 0 1061 2498 1 1630 2 0 2447 2497 1 1632 2 0 1533 2497 1 1631 2 0 1532 2497 1 1097 2 0 1057 2497 1 2724 2 0 2450 2496 1 2725 2 0 2449 2496 1 2726 2 0 1492 2496 1 1578 2 0 1491 2496 1 1580 2 0 1511 2495 1 2750 2 0 1510 2495 1 1074 2 0 1509 2495 1 1581 2 0 1047 2495 1 1594 2 0 2436 2494 1 1593 2 0 1505 2494 1 1083 2 0 1504 2494 1 1592 2 0 1045 2494 1 2723 2 0 2476 2493 1 1574 2 0 1498 2493 1 2722 2 0 1497 2493 1 1575 2 0 1038 2493 1 2748 2 0 2473 2492 1 1600 2 0 2472 2492 1 2749 2 0 2471 2492 1 1090 2 0 1523 2492 1 2761 2 0 1039 2491 1 2702 2 0 2430 924 1 2701 2 0 2429 924 1 2722 2 0 2450 921 1 2723 2 0 2449 921 1 2716 2 0 2443 920 1 2726 2 0 2453 919 1 2724 2 0 2452 919 1 2725 2 0 2451 919 1 2738 2 0 2464 918 1 2737 2 0 2463 918 1 2750 2 0 2476 917 1 2729 2 0 2456 915 1 2728 2 0 2455 915 1 2727 2 0 2454 915 1 2703 2 0 2431 912 1 2709 2 0 2436 908 1 2691 2 0 2458 907 1 2731 2 0 
2457 907 1 2692 2 0 2423 907 1 2710 2 0 2437 906 1 2680 2 0 2414 905 1 2681 2 0 2413 905 1 2756 2 0 2482 901 1 2718 2 0 2446 899 1 2719 2 0 2445 899 1 2717 2 0 2444 899 1 2755 2 0 2481 898 1 2748 2 0 2475 895 1 2749 2 0 2474 895 1 2711 2 0 2440 894 1 2712 2 0 2439 894 1 2713 2 0 2438 894 1 2704 2 0 2432 893 1 2683 2 0 2416 892 1 2682 2 0 2415 892 1 2732 2 0 2459 891 1 2693 2 0 2424 891 1 2706 2 0 2483 890 1 2708 2 0 2435 890 1 2707 2 0 2434 890 1 2705 2 0 2433 890 1 2746 2 0 2473 887 1 2747 2 0 2472 887 1 2745 2 0 2471 887 1 2734 2 0 2461 886 1 2695 2 0 2460 886 1 2696 2 0 2426 886 1 2694 2 0 2425 886 1 2715 2 0 2442 882 1 2714 2 0 2441 882 1 2685 2 0 2418 878 1 2684 2 0 2417 878 1 2720 2 0 2447 876 1 2739 2 0 2467 873 1 2740 2 0 2466 873 1 2741 2 0 2465 873 1 2754 2 0 2480 870 1 2753 2 0 2479 870 1 2752 2 0 2478 870 1 2744 2 0 2470 869 1 2742 2 0 2469 869 1 2743 2 0 2468 869 1 2673 2 0 2407 868 1 2674 2 0 2406 868 1 2687 2 0 2420 867 1 2686 2 0 2419 867 1 2671 2 0 2405 866 1 2672 2 0 2404 866 1 2690 2 0 2422 861 1 2689 2 0 2421 861 1 2675 2 0 2409 859 1 2676 2 0 2408 859 1 2721 2 0 2448 853 1 2678 2 0 2412 848 1 2679 2 0 2411 848 1 2677 2 0 2410 848 1 2699 2 0 2489 847 1 2697 2 0 2488 847 1 2700 2 0 2428 847 1 2698 2 0 2427 847 1 2361 2 1 2658 2653 1 2361 2 1 2664 2652 1 2361 2 1 2668 2651 1 2361 2 1 2762 2650 1 2361 2 1 2763 2649 1 2361 2 1 2665 2648 1 2361 2 1 2654 2647 1 2361 2 1 2655 2646 1 2361 2 1 1700 2645 1 2361 2 1 1690 2644 1 2361 2 1 1688 2643 1 2361 2 1 1691 2642 1 2361 2 1 1656 2641 1 2361 2 1 1667 2640 1 2361 2 1 1714 2639 1 2361 2 1 2667 2638 1 2361 2 1 1715 2637 1 2361 2 1 1701 2636 1 2361 2 1 1646 2635 1 2361 2 1 2663 2634 1 2361 2 1 2662 2633 1 2361 2 1 2656 2632 1 2361 2 1 2659 2631 1 2361 2 1 2657 2630 1 2665 2 0 2628 2610 1 2668 2 0 2629 2609 1 2657 2 0 2627 2580 1 2762 2 0 2629 2576 1 2763 2 0 2628 2575 1 2664 2 0 2629 2534 1 2401 2 0 2670 2311 1 2670 2 0 2669 2090 1 2367 2 0 2669 2054 1 2370 2 0 2108 2050 1 2143 2 0 2669 2024 1 1887 2 0 2108 
2014 1 2762 2 0 2628 907 1 2665 2 0 2629 886 1 2659 2 0 2627 861 1 2627 2 0 2670 762 1 2371 2 0 2669 694 1 2369 2 0 2108 649 1 2762 3 0 2763 2733 2692 1 2665 3 0 2762 2731 2696 1 2763 3 0 2762 2692 2733 1 2371 3 0 2670 1458 2153 1 2110 3 0 2669 983 1919 1 1739 3 0 2108 1964 1962 1 2665 3 0 2668 2699 2695 1 2363 3 0 2668 2700 2685 1 2660 3 0 2666 2744 2708 1 1656 3 0 2666 2743 2714 1 2668 3 0 2665 2695 2699 1 2762 3 0 2665 2696 2731 1 2664 3 0 2665 2694 2727 1 2363 3 0 2664 2729 2684 1 2365 3 0 2664 1582 2673 1 2665 3 0 2664 2727 2694 1 2365 3 0 2663 2726 2674 1 2662 3 0 2663 1578 2722 1 2663 3 0 2662 2722 1578 1 1640 3 0 2661 2712 1570 1 2666 3 0 2660 2708 2744 1 1655 3 0 2660 2707 2710 1 2657 3 0 2659 1624 2682 1 2657 3 0 2658 2687 1091 1 2366 3 0 2658 1095 2675 1 2664 3 0 2363 2684 2729 1 2668 3 0 2363 2685 2700 1 2659 3 0 2657 2682 1624 1 2658 3 0 2657 1091 2687 1 2656 3 0 2657 1605 2677 1 2657 3 0 2656 2677 1605 1 2362 3 0 2366 2676 1096 1 2658 3 0 2366 2675 1095 1 2366 3 0 2362 1096 2676 1 2663 3 0 2365 2674 2726 1 2664 3 0 2365 2673 1582 1 1646 3 0 1715 2756 1077 1 1666 3 0 1689 2749 2747 1 1656 3 0 1688 2716 2715 1 1689 3 0 1666 2747 2749 1 2666 3 0 1656 2714 2743 1 1688 3 0 1656 2715 2716 1 2660 3 0 1655 2710 2707 1 1715 3 0 1646 1077 2756 1 1641 3 0 1645 2741 2738 1 1645 3 0 1641 2738 2741 1 2661 3 0 1640 1570 2712 1 1069 3 0 1557 2701 1618 1 1557 3 0 1069 1618 2701 1 1 1 0 2361 6 0 220 0 438 537 636 735 834 485 584 683 782 881 445 544 643 742 841 468 567 666 765 864 455 554 653 752 851 459 558 657 756 855 449 548 647 746 845 462 561 660 759 858 508 607 706 805 904 493 592 691 790 889 433 532 631 730 829 436 535 634 733 832 484 583 682 781 880 444 543 642 741 840 466 565 664 763 862 453 552 651 750 849 458 557 656 755 854 447 546 645 744 843 461 560 659 758 857 507 606 705 804 903 492 591 690 789 888 431 530 629 728 827 439 538 637 736 835 456 555 654 753 852 469 568 667 766 865 434 533 632 731 830 476 575 674 773 872 518 617 716 815 914 479 578 677 776 
875 442 541 640 739 838 488 587 686 785 884 501 600 699 798 897 450 549 648 747 846 437 536 635 734 833 454 553 652 751 850 467 566 665 764 863 432 531 630 729 828 475 574 673 772 871 517 616 715 814 913 478 577 676 775 874 441 540 639 738 837 487 586 685 784 883 500 599 698 797 896 448 547 646 745 844 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 431 push(11,w,1) 432 push(4,s,1) 433 push(11,e,1) 434 push(4,n,1) 436 push(1,w,1) 437 push(1,s,1) 438 push(1,e,1) 439 push(1,n,1) 441 push(8,s,1) 442 push(8,n,1) 444 push(3,w,1) 445 push(3,e,1) 447 push(7,w,1) 448 push(11,s,1) 449 push(7,e,1) 450 push(11,n,1) 453 push(5,w,1) 454 push(2,s,1) 455 push(5,e,1) 456 push(2,n,1) 458 push(6,w,1) 459 push(6,e,1) 461 push(8,w,1) 462 push(8,e,1) 466 push(4,w,1) 467 push(3,s,1) 468 push(4,e,1) 469 push(3,n,1) 475 push(5,s,1) 476 push(5,n,1) 478 push(7,s,1) 479 push(7,n,1) 484 push(2,w,1) 485 push(2,e,1) 487 push(9,s,1) 488 push(9,n,1) 492 push(10,w,1) 493 push(10,e,1) 500 push(10,s,1) 501 push(10,n,1) 507 push(9,w,1) 508 push(9,e,1) 517 push(6,s,1) 518 push(6,n,1) 530 push(11,w,2) 531 push(4,s,2) 532 push(11,e,2) 533 push(4,n,2) 535 push(1,w,2) 536 push(1,s,2) 537 push(1,e,2) 538 push(1,n,2) 540 push(8,s,2) 541 push(8,n,2) 543 push(3,w,2) 544 push(3,e,2) 546 push(7,w,2) 547 push(11,s,2) 548 push(7,e,2) 549 push(11,n,2) 552 push(5,w,2) 553 push(2,s,2) 554 push(5,e,2) 555 push(2,n,2) 557 push(6,w,2) 558 push(6,e,2) 560 push(8,w,2) 561 push(8,e,2) 565 push(4,w,2) 566 push(3,s,2) 567 push(4,e,2) 568 push(3,n,2) 574 push(5,s,2) 575 push(5,n,2) 577 push(7,s,2) 578 push(7,n,2) 583 push(2,w,2) 584 
push(2,e,2) 586 push(9,s,2) 587 push(9,n,2) 591 push(10,w,2) 592 push(10,e,2) 599 push(10,s,2) 600 push(10,n,2) 606 push(9,w,2) 607 push(9,e,2) 616 push(6,s,2) 617 push(6,n,2) 629 push(11,w,3) 630 push(4,s,3) 631 push(11,e,3) 632 push(4,n,3) 634 push(1,w,3) 635 push(1,s,3) 636 push(1,e,3) 637 push(1,n,3) 639 push(8,s,3) 640 push(8,n,3) 642 push(3,w,3) 643 push(3,e,3) 645 push(7,w,3) 646 push(11,s,3) 647 push(7,e,3) 648 push(11,n,3) 651 push(5,w,3) 652 push(2,s,3) 653 push(5,e,3) 654 push(2,n,3) 656 push(6,w,3) 657 push(6,e,3) 659 push(8,w,3) 660 push(8,e,3) 664 push(4,w,3) 665 push(3,s,3) 666 push(4,e,3) 667 push(3,n,3) 673 push(5,s,3) 674 push(5,n,3) 676 push(7,s,3) 677 push(7,n,3) 682 push(2,w,3) 683 push(2,e,3) 685 push(9,s,3) 686 push(9,n,3) 690 push(10,w,3) 691 push(10,e,3) 698 push(10,s,3) 699 push(10,n,3) 705 push(9,w,3) 706 push(9,e,3) 715 push(6,s,3) 716 push(6,n,3) 728 push(11,w,4) 729 push(4,s,4) 730 push(11,e,4) 731 push(4,n,4) 733 push(1,w,4) 734 push(1,s,4) 735 push(1,e,4) 736 push(1,n,4) 738 push(8,s,4) 739 push(8,n,4) 741 push(3,w,4) 742 push(3,e,4) 744 push(7,w,4) 745 push(11,s,4) 746 push(7,e,4) 747 push(11,n,4) 750 push(5,w,4) 751 push(2,s,4) 752 push(5,e,4) 753 push(2,n,4) 755 push(6,w,4) 756 push(6,e,4) 758 push(8,w,4) 759 push(8,e,4) 763 push(4,w,4) 764 push(3,s,4) 765 push(4,e,4) 766 push(3,n,4) 772 push(5,s,4) 773 push(5,n,4) 775 push(7,s,4) 776 push(7,n,4) 781 push(2,w,4) 782 push(2,e,4) 784 push(9,s,4) 785 push(9,n,4) 789 push(10,w,4) 790 push(10,e,4) 797 push(10,s,4) 798 push(10,n,4) 804 push(9,w,4) 805 push(9,e,4) 814 push(6,s,4) 815 push(6,n,4) 827 push(11,w,5) 828 push(4,s,5) 829 push(11,e,5) 830 push(4,n,5) 832 push(1,w,5) 833 push(1,s,5) 834 push(1,e,5) 835 push(1,n,5) 837 push(8,s,5) 838 push(8,n,5) 840 push(3,w,5) 841 push(3,e,5) 843 push(7,w,5) 844 push(11,s,5) 845 push(7,e,5) 846 push(11,n,5) 849 push(5,w,5) 850 push(2,s,5) 851 push(5,e,5) 852 push(2,n,5) 854 push(6,w,5) 855 push(6,e,5) 857 push(8,w,5) 858 push(8,e,5) 862 
push(4,w,5) 863 push(3,s,5) 864 push(4,e,5) 865 push(3,n,5) 871 push(5,s,5) 872 push(5,n,5) 874 push(7,s,5) 875 push(7,n,5) 880 push(2,w,5) 881 push(2,e,5) 883 push(9,s,5) 884 push(9,n,5) 888 push(10,w,5) 889 push(10,e,5) 896 push(10,s,5) 897 push(10,n,5) 903 push(9,w,5) 904 push(9,e,5) 913 push(6,s,5) 914 push(6,n,5) 122 reach(4,5,0) 925 reach(4,5,1) 961 reach(4,5,2) 997 reach(4,5,3) 1033 reach(4,5,4) 1069 reach(4,5,5) 1244 reach(4,6,1) 1320 reach(4,6,2) 1321 reach(5,6,1) 1395 reach(4,6,3) 1396 reach(5,6,2) 1397 reach(6,6,2) 1398 reach(5,7,2) 1399 reach(6,6,1) 1400 reach(5,7,1) 1474 reach(4,6,4) 1475 reach(6,6,3) 1476 reach(5,6,3) 1477 reach(5,7,3) 1478 reach(6,7,3) 1479 reach(5,8,3) 1480 reach(6,7,1) 1481 reach(5,8,1) 1482 reach(6,7,2) 1483 reach(5,8,2) 1557 reach(4,6,5) 1558 reach(6,6,4) 1559 reach(5,6,4) 1560 reach(5,8,4) 1561 reach(5,7,4) 1562 reach(6,7,4) 1563 reach(5,9,4) 1564 reach(5,9,2) 1565 reach(5,9,1) 1566 reach(5,9,3) 1640 reach(5,9,5) 1641 reach(6,6,5) 1642 reach(5,6,5) 1643 reach(5,8,5) 1644 reach(5,7,5) 1645 reach(6,7,5) 1646 reach(6,9,5) 1647 reach(6,9,3) 1648 reach(6,9,1) 1649 reach(6,9,2) 1650 reach(6,9,4) 1651 reach(7,9,4) 1652 reach(7,9,2) 1653 reach(7,9,1) 1654 reach(7,9,3) 1655 reach(7,9,5) 1656 reach(8,9,5) 1657 reach(8,9,3) 1658 reach(8,9,1) 1659 reach(8,9,2) 1660 reach(8,9,4) 1661 reach(9,9,4) 1662 reach(9,9,2) 1663 reach(9,9,1) 1664 reach(9,9,3) 1665 reach(9,9,5) 1666 reach(10,9,5) 1667 reach(9,10,5) 1668 reach(10,9,3) 1669 reach(9,10,3) 1670 reach(10,9,1) 1671 reach(9,10,1) 1672 reach(10,9,2) 1673 reach(9,10,2) 1674 reach(10,9,4) 1675 reach(9,10,4) 1676 reach(8,10,4) 1677 reach(10,10,4) 1678 reach(9,11,4) 1679 reach(8,10,2) 1680 reach(10,10,2) 1681 reach(9,11,2) 1682 reach(8,10,1) 1683 reach(10,10,1) 1684 reach(9,11,1) 1685 reach(8,10,3) 1686 reach(10,10,3) 1687 reach(9,11,3) 1688 reach(8,10,5) 1689 reach(10,10,5) 1690 reach(9,11,5) 1691 reach(8,11,5) 1692 reach(8,11,3) 1693 reach(8,11,1) 1694 reach(8,11,2) 1695 reach(8,11,4) 1696 
reach(7,11,4) 1697 reach(7,11,2) 1698 reach(7,11,1) 1699 reach(7,11,3) 1700 reach(7,11,5) 1701 reach(6,11,5) 1702 reach(6,11,3) 1703 reach(6,11,1) 1704 reach(6,11,2) 1705 reach(6,11,4) 1706 reach(5,11,4) 1707 reach(6,10,4) 1708 reach(5,11,2) 1709 reach(6,10,2) 1710 reach(5,11,1) 1711 reach(6,10,1) 1712 reach(5,11,3) 1713 reach(6,10,3) 1714 reach(5,11,5) 1715 reach(6,10,5) 1736 reach(6,2,2) 1737 reach(8,2,2) 1738 reach(7,1,2) 1739 reach(7,3,2) 1740 reach(7,2,2) 1886 reach(7,2,3) 1887 reach(7,3,3) 1888 reach(7,1,3) 1889 reach(8,2,3) 1890 reach(6,2,3) 1904 reach(4,9,2) 1905 reach(7,8,2) 1906 reach(8,8,2) 1907 reach(5,1,2) 1908 reach(6,1,2) 1909 reach(8,1,2) 1910 reach(9,1,2) 2106 reach(5,2,2) 2107 reach(9,2,2) 2108 reach(7,4,2) 2109 reach(8,3,2) 2110 reach(6,3,2) 2112 reach(6,2,4) 2113 reach(8,2,4) 2114 reach(7,1,4) 2115 reach(7,3,4) 2116 reach(7,2,4) 2135 reach(6,1,3) 2136 reach(8,1,3) 2137 reach(7,8,3) 2138 reach(4,9,3) 2139 reach(9,1,3) 2140 reach(8,8,3) 2141 reach(5,1,3) 2142 reach(9,2,3) 2143 reach(5,2,3) 2362 reach(7,2,5) 2363 reach(7,3,5) 2364 reach(7,1,5) 2365 reach(8,2,5) 2366 reach(6,2,5) 2367 reach(6,3,3) 2368 reach(8,3,3) 2369 reach(7,4,3) 2370 reach(8,4,3) 2371 reach(5,3,3) 2391 reach(6,1,4) 2392 reach(8,1,4) 2393 reach(5,2,4) 2394 reach(6,3,4) 2395 reach(7,8,4) 2396 reach(4,9,4) 2397 reach(9,1,4) 2398 reach(9,2,4) 2399 reach(8,3,4) 2400 reach(8,8,4) 2401 reach(5,3,4) 2402 reach(5,1,4) 2403 reach(7,4,4) 2627 reach(4,3,4) 2628 reach(9,4,4) 2629 reach(8,4,4) 2654 reach(6,1,5) 2655 reach(8,1,5) 2656 reach(5,2,5) 2657 reach(5,3,5) 2658 reach(6,3,5) 2659 reach(4,3,5) 2660 reach(7,8,5) 2661 reach(4,9,5) 2662 reach(9,1,5) 2663 reach(9,2,5) 2664 reach(8,3,5) 2665 reach(8,4,5) 2666 reach(8,8,5) 2667 reach(5,1,5) 2668 reach(7,4,5) 2669 reach(5,3,2) 2670 reach(4,3,3) 2762 reach(9,4,5) 2763 reach(10,4,5) 0 B+ 0 B- 1 0 1 """ output = """ COST 3@1 """
apache-2.0
blueboxgroup/neutron
neutron/tests/unit/vmware/test_nsx_sync.py
9
33059
# Copyright 2013 VMware, Inc. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import contextlib import time import mock from oslo.config import cfg from oslo.serialization import jsonutils from neutron.api.v2 import attributes as attr from neutron.common import constants from neutron.common import exceptions as n_exc from neutron import context from neutron.extensions import l3 from neutron.openstack.common import log from neutron.plugins.vmware.api_client import client from neutron.plugins.vmware.api_client import exception as api_exc from neutron.plugins.vmware.api_client import version from neutron.plugins.vmware.common import sync from neutron.plugins.vmware.dbexts import db from neutron.plugins.vmware import nsx_cluster as cluster from neutron.plugins.vmware import nsxlib from neutron.plugins.vmware import plugin from neutron.tests import base from neutron.tests.unit import test_api_v2 from neutron.tests.unit import testlib_api from neutron.tests.unit import vmware from neutron.tests.unit.vmware.apiclient import fake LOG = log.getLogger(__name__) _uuid = test_api_v2._uuid LSWITCHES = [{'uuid': _uuid(), 'name': 'ls-1'}, {'uuid': _uuid(), 'name': 'ls-2'}] LSWITCHPORTS = [{'uuid': _uuid(), 'name': 'lp-1'}, {'uuid': _uuid(), 'name': 'lp-2'}] LROUTERS = [{'uuid': _uuid(), 'name': 'lr-1'}, {'uuid': _uuid(), 'name': 'lr-2'}] class CacheTestCase(base.BaseTestCase): """Test suite providing coverage for the Cache class.""" def setUp(self): self.nsx_cache = 
sync.NsxCache() for lswitch in LSWITCHES: self.nsx_cache._uuid_dict_mappings[lswitch['uuid']] = ( self.nsx_cache._lswitches) self.nsx_cache._lswitches[lswitch['uuid']] = ( {'data': lswitch, 'hash': hash(jsonutils.dumps(lswitch))}) for lswitchport in LSWITCHPORTS: self.nsx_cache._uuid_dict_mappings[lswitchport['uuid']] = ( self.nsx_cache._lswitchports) self.nsx_cache._lswitchports[lswitchport['uuid']] = ( {'data': lswitchport, 'hash': hash(jsonutils.dumps(lswitchport))}) for lrouter in LROUTERS: self.nsx_cache._uuid_dict_mappings[lrouter['uuid']] = ( self.nsx_cache._lrouters) self.nsx_cache._lrouters[lrouter['uuid']] = ( {'data': lrouter, 'hash': hash(jsonutils.dumps(lrouter))}) super(CacheTestCase, self).setUp() def test_get_lswitches(self): ls_uuids = self.nsx_cache.get_lswitches() self.assertEqual(set(ls_uuids), set([ls['uuid'] for ls in LSWITCHES])) def test_get_lswitchports(self): lp_uuids = self.nsx_cache.get_lswitchports() self.assertEqual(set(lp_uuids), set([lp['uuid'] for lp in LSWITCHPORTS])) def test_get_lrouters(self): lr_uuids = self.nsx_cache.get_lrouters() self.assertEqual(set(lr_uuids), set([lr['uuid'] for lr in LROUTERS])) def test_get_lswitches_changed_only(self): ls_uuids = self.nsx_cache.get_lswitches(changed_only=True) self.assertEqual(0, len(ls_uuids)) def test_get_lswitchports_changed_only(self): lp_uuids = self.nsx_cache.get_lswitchports(changed_only=True) self.assertEqual(0, len(lp_uuids)) def test_get_lrouters_changed_only(self): lr_uuids = self.nsx_cache.get_lrouters(changed_only=True) self.assertEqual(0, len(lr_uuids)) def _verify_update(self, new_resource, changed=True, hit=True): cached_resource = self.nsx_cache[new_resource['uuid']] self.assertEqual(new_resource, cached_resource['data']) self.assertEqual(hit, cached_resource.get('hit', False)) self.assertEqual(changed, cached_resource.get('changed', False)) def test_update_lswitch_new_item(self): new_switch_uuid = _uuid() new_switch = {'uuid': new_switch_uuid, 'name': 'new_switch'} 
self.nsx_cache.update_lswitch(new_switch) self.assertIn(new_switch_uuid, self.nsx_cache._lswitches.keys()) self._verify_update(new_switch) def test_update_lswitch_existing_item(self): switch = LSWITCHES[0] switch['name'] = 'new_name' self.nsx_cache.update_lswitch(switch) self.assertIn(switch['uuid'], self.nsx_cache._lswitches.keys()) self._verify_update(switch) def test_update_lswitchport_new_item(self): new_switchport_uuid = _uuid() new_switchport = {'uuid': new_switchport_uuid, 'name': 'new_switchport'} self.nsx_cache.update_lswitchport(new_switchport) self.assertIn(new_switchport_uuid, self.nsx_cache._lswitchports.keys()) self._verify_update(new_switchport) def test_update_lswitchport_existing_item(self): switchport = LSWITCHPORTS[0] switchport['name'] = 'new_name' self.nsx_cache.update_lswitchport(switchport) self.assertIn(switchport['uuid'], self.nsx_cache._lswitchports.keys()) self._verify_update(switchport) def test_update_lrouter_new_item(self): new_router_uuid = _uuid() new_router = {'uuid': new_router_uuid, 'name': 'new_router'} self.nsx_cache.update_lrouter(new_router) self.assertIn(new_router_uuid, self.nsx_cache._lrouters.keys()) self._verify_update(new_router) def test_update_lrouter_existing_item(self): router = LROUTERS[0] router['name'] = 'new_name' self.nsx_cache.update_lrouter(router) self.assertIn(router['uuid'], self.nsx_cache._lrouters.keys()) self._verify_update(router) def test_process_updates_initial(self): # Clear cache content to simulate first-time filling self.nsx_cache._lswitches.clear() self.nsx_cache._lswitchports.clear() self.nsx_cache._lrouters.clear() self.nsx_cache.process_updates(LSWITCHES, LROUTERS, LSWITCHPORTS) for resource in LSWITCHES + LROUTERS + LSWITCHPORTS: self._verify_update(resource) def test_process_updates_no_change(self): self.nsx_cache.process_updates(LSWITCHES, LROUTERS, LSWITCHPORTS) for resource in LSWITCHES + LROUTERS + LSWITCHPORTS: self._verify_update(resource, changed=False) def 
test_process_updates_with_changes(self): LSWITCHES[0]['name'] = 'altered' self.nsx_cache.process_updates(LSWITCHES, LROUTERS, LSWITCHPORTS) for resource in LSWITCHES + LROUTERS + LSWITCHPORTS: changed = (True if resource['uuid'] == LSWITCHES[0]['uuid'] else False) self._verify_update(resource, changed=changed) def _test_process_updates_with_removals(self): lswitches = LSWITCHES[:] lswitch = lswitches.pop() self.nsx_cache.process_updates(lswitches, LROUTERS, LSWITCHPORTS) for resource in LSWITCHES + LROUTERS + LSWITCHPORTS: hit = (False if resource['uuid'] == lswitch['uuid'] else True) self._verify_update(resource, changed=False, hit=hit) return (lswitch, lswitches) def test_process_updates_with_removals(self): self._test_process_updates_with_removals() def test_process_updates_cleanup_after_delete(self): deleted_lswitch, lswitches = self._test_process_updates_with_removals() self.nsx_cache.process_deletes() self.nsx_cache.process_updates(lswitches, LROUTERS, LSWITCHPORTS) self.assertNotIn(deleted_lswitch['uuid'], self.nsx_cache._lswitches) def test_update_resource_does_not_cleanup_deleted_resources(self): deleted_lswitch, lswitches = self._test_process_updates_with_removals() self.nsx_cache.process_deletes() self.nsx_cache.update_lswitch(deleted_lswitch) self.assertIn(deleted_lswitch['uuid'], self.nsx_cache._lswitches) def _verify_delete(self, resource, deleted=True, hit=True): cached_resource = self.nsx_cache[resource['uuid']] data_field = 'data_bk' if deleted else 'data' self.assertEqual(resource, cached_resource[data_field]) self.assertEqual(hit, cached_resource.get('hit', False)) self.assertEqual(deleted, cached_resource.get('changed', False)) def _set_hit(self, resources, uuid_to_delete=None): for resource in resources: if resource['data']['uuid'] != uuid_to_delete: resource['hit'] = True def test_process_deletes_no_change(self): # Mark all resources as hit self._set_hit(self.nsx_cache._lswitches.values()) self._set_hit(self.nsx_cache._lswitchports.values()) 
self._set_hit(self.nsx_cache._lrouters.values()) self.nsx_cache.process_deletes() for resource in LSWITCHES + LROUTERS + LSWITCHPORTS: self._verify_delete(resource, hit=False, deleted=False) def test_process_deletes_with_removals(self): # Mark all resources but one as hit uuid_to_delete = LSWITCHPORTS[0]['uuid'] self._set_hit(self.nsx_cache._lswitches.values(), uuid_to_delete) self._set_hit(self.nsx_cache._lswitchports.values(), uuid_to_delete) self._set_hit(self.nsx_cache._lrouters.values(), uuid_to_delete) self.nsx_cache.process_deletes() for resource in LSWITCHES + LROUTERS + LSWITCHPORTS: deleted = resource['uuid'] == uuid_to_delete self._verify_delete(resource, hit=False, deleted=deleted) class SyncLoopingCallTestCase(base.BaseTestCase): def test_looping_calls(self): # Avoid runs of the synchronization process - just start # the looping call with mock.patch.object( sync.NsxSynchronizer, '_synchronize_state', return_value=0.01): synchronizer = sync.NsxSynchronizer(mock.ANY, mock.ANY, 100, 0, 0) time.sleep(0.03) # stop looping call before asserting synchronizer._sync_looping_call.stop() # Just verify the looping call has been called, trying # to assess the exact number of calls would be unreliable self.assertTrue(synchronizer._synchronize_state.call_count) class SyncTestCase(testlib_api.SqlTestCase): def setUp(self): # mock api client self.fc = fake.FakeClient(vmware.STUBS_PATH) mock_api = mock.patch(vmware.NSXAPI_NAME, autospec=True) # Avoid runs of the synchronizer looping call # These unit tests will excplicitly invoke synchronization patch_sync = mock.patch.object(sync, '_start_loopingcall') self.mock_api = mock_api.start() patch_sync.start() self.mock_api.return_value.login.return_value = "the_cookie" # Emulate tests against NSX 3.x self.mock_api.return_value.get_version.return_value = ( version.Version("3.1")) self.mock_api.return_value.request.side_effect = self.fc.fake_request self.fake_cluster = cluster.NSXCluster( name='fake-cluster', 
nsx_controllers=['1.1.1.1:999'], default_tz_uuid=_uuid(), nsx_user='foo', nsx_password='bar') self.fake_cluster.api_client = client.NsxApiClient( ('1.1.1.1', '999', True), self.fake_cluster.nsx_user, self.fake_cluster.nsx_password, http_timeout=self.fake_cluster.http_timeout, retries=self.fake_cluster.retries, redirects=self.fake_cluster.redirects) # Instantiate Neutron plugin # and setup needed config variables args = ['--config-file', vmware.get_fake_conf('neutron.conf.test'), '--config-file', vmware.get_fake_conf('nsx.ini.test')] self.config_parse(args=args) cfg.CONF.set_override('allow_overlapping_ips', True) self._plugin = plugin.NsxPlugin() # Mock neutron manager plugin load functions to speed up tests mock_nm_get_plugin = mock.patch('neutron.manager.NeutronManager.' 'get_plugin') mock_nm_get_service_plugins = mock.patch( 'neutron.manager.NeutronManager.get_service_plugins') self.mock_nm_get_plugin = mock_nm_get_plugin.start() self.mock_nm_get_plugin.return_value = self._plugin mock_nm_get_service_plugins.start() super(SyncTestCase, self).setUp() self.addCleanup(self.fc.reset_all) @contextlib.contextmanager def _populate_data(self, ctx, net_size=2, port_size=2, router_size=2): def network(idx): return {'network': {'name': 'net-%s' % idx, 'admin_state_up': True, 'shared': False, 'port_security_enabled': True, 'tenant_id': 'foo'}} def subnet(idx, net_id): return {'subnet': {'cidr': '10.10.%s.0/24' % idx, 'name': 'sub-%s' % idx, 'gateway_ip': attr.ATTR_NOT_SPECIFIED, 'allocation_pools': attr.ATTR_NOT_SPECIFIED, 'ip_version': 4, 'dns_nameservers': attr.ATTR_NOT_SPECIFIED, 'host_routes': attr.ATTR_NOT_SPECIFIED, 'enable_dhcp': True, 'network_id': net_id, 'tenant_id': 'foo'}} def port(idx, net_id): return {'port': {'network_id': net_id, 'name': 'port-%s' % idx, 'admin_state_up': True, 'device_id': 'miao', 'device_owner': 'bau', 'fixed_ips': attr.ATTR_NOT_SPECIFIED, 'mac_address': attr.ATTR_NOT_SPECIFIED, 'tenant_id': 'foo'}} def router(idx): # Use random uuids as 
names return {'router': {'name': 'rtr-%s' % idx, 'admin_state_up': True, 'tenant_id': 'foo'}} networks = [] ports = [] routers = [] for i in range(net_size): net = self._plugin.create_network(ctx, network(i)) networks.append(net) self._plugin.create_subnet(ctx, subnet(i, net['id'])) for j in range(port_size): ports.append(self._plugin.create_port( ctx, port("%s-%s" % (i, j), net['id']))) for i in range(router_size): routers.append(self._plugin.create_router(ctx, router(i))) # Do not return anything as the user does need the actual # data created yield # Remove everything for router in routers: self._plugin.delete_router(ctx, router['id']) for port in ports: self._plugin.delete_port(ctx, port['id']) # This will remove networks and subnets for network in networks: self._plugin.delete_network(ctx, network['id']) def _get_tag_dict(self, tags): return dict((tag['scope'], tag['tag']) for tag in tags) def _test_sync(self, exp_net_status, exp_port_status, exp_router_status, action_callback=None, sp=None): ls_uuid = self.fc._fake_lswitch_dict.keys()[0] neutron_net_id = self._get_tag_dict( self.fc._fake_lswitch_dict[ls_uuid]['tags'])['quantum_net_id'] lp_uuid = self.fc._fake_lswitch_lport_dict.keys()[0] neutron_port_id = self._get_tag_dict( self.fc._fake_lswitch_lport_dict[lp_uuid]['tags'])['q_port_id'] lr_uuid = self.fc._fake_lrouter_dict.keys()[0] neutron_rtr_id = self._get_tag_dict( self.fc._fake_lrouter_dict[lr_uuid]['tags'])['q_router_id'] if action_callback: action_callback(ls_uuid, lp_uuid, lr_uuid) # Make chunk big enough to read everything if not sp: sp = sync.SyncParameters(100) self._plugin._synchronizer._synchronize_state(sp) # Verify element is in expected status # TODO(salv-orlando): Verify status for all elements ctx = context.get_admin_context() neutron_net = self._plugin.get_network(ctx, neutron_net_id) neutron_port = self._plugin.get_port(ctx, neutron_port_id) neutron_rtr = self._plugin.get_router(ctx, neutron_rtr_id) self.assertEqual(exp_net_status, 
neutron_net['status']) self.assertEqual(exp_port_status, neutron_port['status']) self.assertEqual(exp_router_status, neutron_rtr['status']) def _action_callback_status_down(self, ls_uuid, lp_uuid, lr_uuid): self.fc._fake_lswitch_dict[ls_uuid]['status'] = 'false' self.fc._fake_lswitch_lport_dict[lp_uuid]['status'] = 'false' self.fc._fake_lrouter_dict[lr_uuid]['status'] = 'false' def test_initial_sync(self): ctx = context.get_admin_context() with self._populate_data(ctx): self._test_sync( constants.NET_STATUS_ACTIVE, constants.PORT_STATUS_ACTIVE, constants.NET_STATUS_ACTIVE) def test_initial_sync_with_resources_down(self): ctx = context.get_admin_context() with self._populate_data(ctx): self._test_sync( constants.NET_STATUS_DOWN, constants.PORT_STATUS_DOWN, constants.NET_STATUS_DOWN, self._action_callback_status_down) def test_resync_with_resources_down(self): ctx = context.get_admin_context() with self._populate_data(ctx): sp = sync.SyncParameters(100) self._plugin._synchronizer._synchronize_state(sp) # Ensure the synchronizer performs a resync sp.init_sync_performed = True self._test_sync( constants.NET_STATUS_DOWN, constants.PORT_STATUS_DOWN, constants.NET_STATUS_DOWN, self._action_callback_status_down, sp=sp) def _action_callback_del_resource(self, ls_uuid, lp_uuid, lr_uuid): del self.fc._fake_lswitch_dict[ls_uuid] del self.fc._fake_lswitch_lport_dict[lp_uuid] del self.fc._fake_lrouter_dict[lr_uuid] def test_initial_sync_with_resources_removed(self): ctx = context.get_admin_context() with self._populate_data(ctx): self._test_sync( constants.NET_STATUS_ERROR, constants.PORT_STATUS_ERROR, constants.NET_STATUS_ERROR, self._action_callback_del_resource) def test_resync_with_resources_removed(self): ctx = context.get_admin_context() with self._populate_data(ctx): sp = sync.SyncParameters(100) self._plugin._synchronizer._synchronize_state(sp) # Ensure the synchronizer performs a resync sp.init_sync_performed = True self._test_sync( constants.NET_STATUS_ERROR, 
constants.PORT_STATUS_ERROR, constants.NET_STATUS_ERROR, self._action_callback_del_resource, sp=sp) def _test_sync_with_chunk_larger_maxpagesize( self, net_size, port_size, router_size, chunk_size, exp_calls): ctx = context.get_admin_context() real_func = nsxlib.get_single_query_page sp = sync.SyncParameters(chunk_size) with self._populate_data(ctx, net_size=net_size, port_size=port_size, router_size=router_size): with mock.patch.object(sync, 'MAX_PAGE_SIZE', 15): # The following mock is just for counting calls, # but we will still run the actual function with mock.patch.object( nsxlib, 'get_single_query_page', side_effect=real_func) as mock_get_page: self._test_sync( constants.NET_STATUS_ACTIVE, constants.PORT_STATUS_ACTIVE, constants.NET_STATUS_ACTIVE, sp=sp) # As each resource type does not exceed the maximum page size, # the method should be called once for each resource type self.assertEqual(exp_calls, mock_get_page.call_count) def test_sync_chunk_larger_maxpagesize_no_multiple_requests(self): # total resource size = 20 # total size for each resource does not exceed max page size (15) self._test_sync_with_chunk_larger_maxpagesize( net_size=5, port_size=2, router_size=5, chunk_size=20, exp_calls=3) def test_sync_chunk_larger_maxpagesize_triggers_multiple_requests(self): # total resource size = 48 # total size for each resource does exceed max page size (15) self._test_sync_with_chunk_larger_maxpagesize( net_size=16, port_size=1, router_size=16, chunk_size=48, exp_calls=6) def test_sync_multi_chunk(self): # The fake NSX API client cannot be used for this test ctx = context.get_admin_context() # Generate 4 networks, 1 port per network, and 4 routers with self._populate_data(ctx, net_size=4, port_size=1, router_size=4): fake_lswitches = jsonutils.loads( self.fc.handle_get('/ws.v1/lswitch'))['results'] fake_lrouters = jsonutils.loads( self.fc.handle_get('/ws.v1/lrouter'))['results'] fake_lswitchports = jsonutils.loads( 
self.fc.handle_get('/ws.v1/lswitch/*/lport'))['results'] return_values = [ # Chunk 0 - lswitches (fake_lswitches, None, 4), # Chunk 0 - lrouters (fake_lrouters[:2], 'xxx', 4), # Chunk 0 - lports (size only) ([], 'start', 4), # Chunk 1 - lrouters (2 more) (lswitches are skipped) (fake_lrouters[2:], None, None), # Chunk 1 - lports (fake_lswitchports, None, 4)] def fake_fetch_data(*args, **kwargs): return return_values.pop(0) # 2 Chunks, with 6 resources each. # 1st chunk lswitches and lrouters # 2nd chunk lrouters and lports # Mock _fetch_data with mock.patch.object( self._plugin._synchronizer, '_fetch_data', side_effect=fake_fetch_data): sp = sync.SyncParameters(6) def do_chunk(chunk_idx, ls_cursor, lr_cursor, lp_cursor): self._plugin._synchronizer._synchronize_state(sp) self.assertEqual(chunk_idx, sp.current_chunk) self.assertEqual(ls_cursor, sp.ls_cursor) self.assertEqual(lr_cursor, sp.lr_cursor) self.assertEqual(lp_cursor, sp.lp_cursor) # check 1st chunk do_chunk(1, None, 'xxx', 'start') # check 2nd chunk do_chunk(0, None, None, None) # Chunk size should have stayed the same self.assertEqual(sp.chunk_size, 6) def test_synchronize_network(self): ctx = context.get_admin_context() with self._populate_data(ctx): # Put a network down to verify synchronization ls_uuid = self.fc._fake_lswitch_dict.keys()[0] q_net_id = self._get_tag_dict( self.fc._fake_lswitch_dict[ls_uuid]['tags'])['quantum_net_id'] self.fc._fake_lswitch_dict[ls_uuid]['status'] = 'false' q_net_data = self._plugin._get_network(ctx, q_net_id) self._plugin._synchronizer.synchronize_network(ctx, q_net_data) # Reload from db q_nets = self._plugin.get_networks(ctx) for q_net in q_nets: if q_net['id'] == q_net_id: exp_status = constants.NET_STATUS_DOWN else: exp_status = constants.NET_STATUS_ACTIVE self.assertEqual(exp_status, q_net['status']) def test_synchronize_network_not_found_in_db_no_raise(self): ctx = context.get_admin_context() with self._populate_data(ctx): # Put a network down to verify 
synchronization ls_uuid = self.fc._fake_lswitch_dict.keys()[0] q_net_id = self._get_tag_dict( self.fc._fake_lswitch_dict[ls_uuid]['tags'])['quantum_net_id'] self.fc._fake_lswitch_dict[ls_uuid]['status'] = 'false' q_net_data = self._plugin._get_network(ctx, q_net_id) with mock.patch.object(self._plugin, '_get_network') as _get_network: _get_network.side_effect = n_exc.NetworkNotFound( net_id=q_net_data['id']) self._plugin._synchronizer.synchronize_network(ctx, q_net_data) def test_synchronize_network_on_get(self): cfg.CONF.set_override('always_read_status', True, 'NSX_SYNC') ctx = context.get_admin_context() with self._populate_data(ctx): # Put a network down to verify punctual synchronization ls_uuid = self.fc._fake_lswitch_dict.keys()[0] q_net_id = self._get_tag_dict( self.fc._fake_lswitch_dict[ls_uuid]['tags'])['quantum_net_id'] self.fc._fake_lswitch_dict[ls_uuid]['status'] = 'false' q_net_data = self._plugin.get_network(ctx, q_net_id) self.assertEqual(constants.NET_STATUS_DOWN, q_net_data['status']) def test_synchronize_port_not_found_in_db_no_raise(self): ctx = context.get_admin_context() with self._populate_data(ctx): # Put a port down to verify synchronization lp_uuid = self.fc._fake_lswitch_lport_dict.keys()[0] lport = self.fc._fake_lswitch_lport_dict[lp_uuid] q_port_id = self._get_tag_dict(lport['tags'])['q_port_id'] lport['status'] = 'true' q_port_data = self._plugin._get_port(ctx, q_port_id) with mock.patch.object(self._plugin, '_get_port') as _get_port: _get_port.side_effect = n_exc.PortNotFound( port_id=q_port_data['id']) self._plugin._synchronizer.synchronize_port(ctx, q_port_data) def test_synchronize_port(self): ctx = context.get_admin_context() with self._populate_data(ctx): # Put a port down to verify synchronization lp_uuid = self.fc._fake_lswitch_lport_dict.keys()[0] lport = self.fc._fake_lswitch_lport_dict[lp_uuid] q_port_id = self._get_tag_dict(lport['tags'])['q_port_id'] lport['status'] = 'true' q_port_data = self._plugin._get_port(ctx, 
q_port_id) self._plugin._synchronizer.synchronize_port(ctx, q_port_data) # Reload from db q_ports = self._plugin.get_ports(ctx) for q_port in q_ports: if q_port['id'] == q_port_id: exp_status = constants.PORT_STATUS_ACTIVE else: exp_status = constants.PORT_STATUS_DOWN self.assertEqual(exp_status, q_port['status']) def test_synchronize_port_on_get(self): cfg.CONF.set_override('always_read_status', True, 'NSX_SYNC') ctx = context.get_admin_context() with self._populate_data(ctx): # Put a port down to verify punctual synchronization lp_uuid = self.fc._fake_lswitch_lport_dict.keys()[0] lport = self.fc._fake_lswitch_lport_dict[lp_uuid] q_port_id = self._get_tag_dict(lport['tags'])['q_port_id'] lport['status'] = 'false' q_port_data = self._plugin.get_port(ctx, q_port_id) self.assertEqual(constants.PORT_STATUS_DOWN, q_port_data['status']) def test_synchronize_routernot_found_in_db_no_raise(self): ctx = context.get_admin_context() with self._populate_data(ctx): # Put a router down to verify synchronization lr_uuid = self.fc._fake_lrouter_dict.keys()[0] q_rtr_id = self._get_tag_dict( self.fc._fake_lrouter_dict[lr_uuid]['tags'])['q_router_id'] self.fc._fake_lrouter_dict[lr_uuid]['status'] = 'false' q_rtr_data = self._plugin._get_router(ctx, q_rtr_id) with mock.patch.object(self._plugin, '_get_router') as _get_router: _get_router.side_effect = l3.RouterNotFound( router_id=q_rtr_data['id']) self._plugin._synchronizer.synchronize_router(ctx, q_rtr_data) def test_synchronize_router(self): ctx = context.get_admin_context() with self._populate_data(ctx): # Put a router down to verify synchronization lr_uuid = self.fc._fake_lrouter_dict.keys()[0] q_rtr_id = self._get_tag_dict( self.fc._fake_lrouter_dict[lr_uuid]['tags'])['q_router_id'] self.fc._fake_lrouter_dict[lr_uuid]['status'] = 'false' q_rtr_data = self._plugin._get_router(ctx, q_rtr_id) self._plugin._synchronizer.synchronize_router(ctx, q_rtr_data) # Reload from db q_routers = self._plugin.get_routers(ctx) for q_rtr in 
q_routers: if q_rtr['id'] == q_rtr_id: exp_status = constants.NET_STATUS_DOWN else: exp_status = constants.NET_STATUS_ACTIVE self.assertEqual(exp_status, q_rtr['status']) def test_synchronize_router_nsx_mapping_not_found(self): ctx = context.get_admin_context() with self._populate_data(ctx): # Put a router down to verify synchronization lr_uuid = self.fc._fake_lrouter_dict.keys()[0] q_rtr_id = self._get_tag_dict( self.fc._fake_lrouter_dict[lr_uuid]['tags'])['q_router_id'] self.fc._fake_lrouter_dict[lr_uuid]['status'] = 'false' q_rtr_data = self._plugin._get_router(ctx, q_rtr_id) # delete router mapping from db. db.delete_neutron_nsx_router_mapping(ctx.session, q_rtr_id) # pop router from fake nsx client router_data = self.fc._fake_lrouter_dict.pop(lr_uuid) self._plugin._synchronizer.synchronize_router(ctx, q_rtr_data) # Reload from db q_routers = self._plugin.get_routers(ctx) for q_rtr in q_routers: if q_rtr['id'] == q_rtr_id: exp_status = constants.NET_STATUS_ERROR else: exp_status = constants.NET_STATUS_ACTIVE self.assertEqual(exp_status, q_rtr['status']) # put the router database since we don't handle missing # router data in the fake nsx api_client self.fc._fake_lrouter_dict[lr_uuid] = router_data def test_synchronize_router_on_get(self): cfg.CONF.set_override('always_read_status', True, 'NSX_SYNC') ctx = context.get_admin_context() with self._populate_data(ctx): # Put a router down to verify punctual synchronization lr_uuid = self.fc._fake_lrouter_dict.keys()[0] q_rtr_id = self._get_tag_dict( self.fc._fake_lrouter_dict[lr_uuid]['tags'])['q_router_id'] self.fc._fake_lrouter_dict[lr_uuid]['status'] = 'false' q_rtr_data = self._plugin.get_router(ctx, q_rtr_id) self.assertEqual(constants.NET_STATUS_DOWN, q_rtr_data['status']) def test_sync_nsx_failure_backoff(self): self.mock_api.return_value.request.side_effect = api_exc.RequestTimeout # chunk size won't matter here sp = sync.SyncParameters(999) for i in range(10): self.assertEqual( min(64, 2 ** i), 
self._plugin._synchronizer._synchronize_state(sp))
apache-2.0
p0psicles/SickRage
lib/tornado/log.py
82
9819
#!/usr/bin/env python # # Copyright 2012 Facebook # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Logging support for Tornado. Tornado uses three logger streams: * ``tornado.access``: Per-request logging for Tornado's HTTP servers (and potentially other servers in the future) * ``tornado.application``: Logging of errors from application code (i.e. uncaught exceptions from callbacks) * ``tornado.general``: General-purpose logging, including any errors or warnings from Tornado itself. These streams may be configured independently using the standard library's `logging` module. For example, you may wish to send ``tornado.access`` logs to a separate file for analysis. 
""" from __future__ import absolute_import, division, print_function, with_statement import logging import logging.handlers import sys from tornado.escape import _unicode from tornado.util import unicode_type, basestring_type try: import curses except ImportError: curses = None # Logger objects for internal tornado use access_log = logging.getLogger("tornado.access") app_log = logging.getLogger("tornado.application") gen_log = logging.getLogger("tornado.general") def _stderr_supports_color(): color = False if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): try: curses.setupterm() if curses.tigetnum("colors") > 0: color = True except Exception: pass return color def _safe_unicode(s): try: return _unicode(s) except UnicodeDecodeError: return repr(s) class LogFormatter(logging.Formatter): """Log formatter used in Tornado. Key features of this formatter are: * Color support when logging to a terminal that supports it. * Timestamps on every log line. * Robust against str/bytes encoding problems. This formatter is enabled automatically by `tornado.options.parse_command_line` (unless ``--logging=none`` is used). """ DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s' DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S' DEFAULT_COLORS = { logging.DEBUG: 4, # Blue logging.INFO: 2, # Green logging.WARNING: 3, # Yellow logging.ERROR: 1, # Red } def __init__(self, color=True, fmt=DEFAULT_FORMAT, datefmt=DEFAULT_DATE_FORMAT, colors=DEFAULT_COLORS): r""" :arg bool color: Enables color support. :arg string fmt: Log message format. It will be applied to the attributes dict of log records. The text between ``%(color)s`` and ``%(end_color)s`` will be colored depending on the level if color support is on. :arg dict colors: color mappings from logging level to terminal color code :arg string datefmt: Datetime format. Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``. .. 
versionchanged:: 3.2 Added ``fmt`` and ``datefmt`` arguments. """ logging.Formatter.__init__(self, datefmt=datefmt) self._fmt = fmt self._colors = {} if color and _stderr_supports_color(): # The curses module has some str/bytes confusion in # python3. Until version 3.2.3, most methods return # bytes, but only accept strings. In addition, we want to # output these strings with the logging module, which # works with unicode strings. The explicit calls to # unicode() below are harmless in python2 but will do the # right conversion in python 3. fg_color = (curses.tigetstr("setaf") or curses.tigetstr("setf") or "") if (3, 0) < sys.version_info < (3, 2, 3): fg_color = unicode_type(fg_color, "ascii") for levelno, code in colors.items(): self._colors[levelno] = unicode_type(curses.tparm(fg_color, code), "ascii") self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii") else: self._normal = '' def format(self, record): try: message = record.getMessage() assert isinstance(message, basestring_type) # guaranteed by logging # Encoding notes: The logging module prefers to work with character # strings, but only enforces that log messages are instances of # basestring. In python 2, non-ascii bytestrings will make # their way through the logging framework until they blow up with # an unhelpful decoding error (with this formatter it happens # when we attach the prefix, but there are other opportunities for # exceptions further along in the framework). # # If a byte string makes it this far, convert it to unicode to # ensure it will make it out to the logs. Use repr() as a fallback # to ensure that all byte strings can be converted successfully, # but don't do it by default so we don't add extra quotes to ascii # bytestrings. This is a bit of a hacky place to do this, but # it's worth it since the encoding errors that would otherwise # result are so useless (and tornado is fond of using utf8-encoded # byte strings whereever possible). 
record.message = _safe_unicode(message) except Exception as e: record.message = "Bad message (%r): %r" % (e, record.__dict__) record.asctime = self.formatTime(record, self.datefmt) if record.levelno in self._colors: record.color = self._colors[record.levelno] record.end_color = self._normal else: record.color = record.end_color = '' formatted = self._fmt % record.__dict__ if record.exc_info: if not record.exc_text: record.exc_text = self.formatException(record.exc_info) if record.exc_text: # exc_text contains multiple lines. We need to _safe_unicode # each line separately so that non-utf8 bytes don't cause # all the newlines to turn into '\n'. lines = [formatted.rstrip()] lines.extend(_safe_unicode(ln) for ln in record.exc_text.split('\n')) formatted = '\n'.join(lines) return formatted.replace("\n", "\n ") def enable_pretty_logging(options=None, logger=None): """Turns on formatted logging output as configured. This is called automatically by `tornado.options.parse_command_line` and `tornado.options.parse_config_file`. """ if options is None: from tornado.options import options if options.logging is None or options.logging.lower() == 'none': return if logger is None: logger = logging.getLogger() logger.setLevel(getattr(logging, options.logging.upper())) if options.log_file_prefix: channel = logging.handlers.RotatingFileHandler( filename=options.log_file_prefix, maxBytes=options.log_file_max_size, backupCount=options.log_file_num_backups) channel.setFormatter(LogFormatter(color=False)) logger.addHandler(channel) if (options.log_to_stderr or (options.log_to_stderr is None and not logger.handlers)): # Set up color if we are in a tty and curses is installed channel = logging.StreamHandler() channel.setFormatter(LogFormatter()) logger.addHandler(channel) def define_logging_options(options=None): """Add logging-related flags to ``options``. 
These options are present automatically on the default options instance; this method is only necessary if you have created your own `.OptionParser`. .. versionadded:: 4.2 This function existed in prior versions but was broken and undocumented until 4.2. """ if options is None: # late import to prevent cycle from tornado.options import options options.define("logging", default="info", help=("Set the Python log level. If 'none', tornado won't touch the " "logging configuration."), metavar="debug|info|warning|error|none") options.define("log_to_stderr", type=bool, default=None, help=("Send log output to stderr (colorized if possible). " "By default use stderr if --log_file_prefix is not set and " "no other logging is configured.")) options.define("log_file_prefix", type=str, default=None, metavar="PATH", help=("Path prefix for log files. " "Note that if you are running multiple tornado processes, " "log_file_prefix must be different for each of them (e.g. " "include the port number)")) options.define("log_file_max_size", type=int, default=100 * 1000 * 1000, help="max size of log files before rollover") options.define("log_file_num_backups", type=int, default=10, help="number of log files to keep") options.add_parse_callback(lambda: enable_pretty_logging(options))
gpl-3.0
Nyker510/scikit-learn
examples/neighbors/plot_classification.py
287
1790
""" ================================ Nearest Neighbors Classification ================================ Sample usage of Nearest Neighbors classification. It will plot the decision boundaries for each class. """ print(__doc__) import numpy as np import matplotlib.pyplot as plt from matplotlib.colors import ListedColormap from sklearn import neighbors, datasets n_neighbors = 15 # import some data to play with iris = datasets.load_iris() X = iris.data[:, :2] # we only take the first two features. We could # avoid this ugly slicing by using a two-dim dataset y = iris.target h = .02 # step size in the mesh # Create color maps cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF']) cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF']) for weights in ['uniform', 'distance']: # we create an instance of Neighbours Classifier and fit the data. clf = neighbors.KNeighborsClassifier(n_neighbors, weights=weights) clf.fit(X, y) # Plot the decision boundary. For that, we will assign a color to each # point in the mesh [x_min, m_max]x[y_min, y_max]. x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1 y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1 xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h)) Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]) # Put the result into a color plot Z = Z.reshape(xx.shape) plt.figure() plt.pcolormesh(xx, yy, Z, cmap=cmap_light) # Plot also the training points plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold) plt.xlim(xx.min(), xx.max()) plt.ylim(yy.min(), yy.max()) plt.title("3-Class classification (k = %i, weights = '%s')" % (n_neighbors, weights)) plt.show()
bsd-3-clause
kkamkou/gitmostwanted.com
migration/versions/420f9b8b9e_attitude_constraint_corrections.py
1
1048
"""attitude constraint corrections Revision ID: 420f9b8b9e Revises: 162f93d4393 Create Date: 2015-06-24 16:52:02.606637 """ # revision identifiers, used by Alembic. revision = '420f9b8b9e' down_revision = '162f93d4393' branch_labels = None depends_on = None from alembic import op def upgrade(): op.drop_constraint('fk_repos_id', 'users_attitude', type_='foreignkey') op.drop_constraint('fk_users_id', 'users_attitude', type_='foreignkey') op.create_foreign_key( 'fk_users_id', 'users_attitude', 'users', ['user_id'], ['id'], ondelete='CASCADE' ) op.create_foreign_key( 'fk_repos_id', 'users_attitude', 'repos', ['repo_id'], ['id'], ondelete='CASCADE' ) def downgrade(): op.drop_constraint('fk_repos_id', 'users_attitude', type_='foreignkey') op.drop_constraint('fk_users_id', 'users_attitude', type_='foreignkey') op.create_foreign_key('fk_users_id', 'users_attitude', 'users', ['user_id'], ['id']) op.create_foreign_key('fk_repos_id', 'users_attitude', 'repos', ['repo_id'], ['id'])
mit
repotvsupertuga/tvsupertuga.repository
script.module.resolveurl/lib/resolveurl/plugins/unitplay.py
2
2095
""" resolveurl XBMC Addon Copyright (C) 2018 jsergio This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import re from lib import helpers from resolveurl import common from resolveurl.resolver import ResolveUrl, ResolverError class UnitPlayResolver(ResolveUrl): name = "unitplay" domains = ["unitplay.net"] pattern = '(?://|\.)(unitplay\.net)/tt([0-9]+)' def __init__(self): self.net = common.Net() def get_media_url(self, host, media_id): web_url = self.get_url(host, media_id) headers = {'User-Agent': common.RAND_UA} html = self.net.http_GET(web_url, headers=headers).content if html: player_id = re.search('''SvplayerID\|([a-z0-9]+)''', html, re.I) if player_id: player_url = 'https://unitplay.net//CallPlayer' data = {'id': player_id.group(1)} headers.update({'Referer': web_url}) _html = self.net.http_POST(player_url, data, headers=headers).content if _html: _html = _html.decode("hex") sources = helpers.scrape_sources(_html) if sources: return helpers.pick_source(sources) + helpers.append_headers(headers) raise ResolverError("Unable to locate video") def get_url(self, host, media_id): return self._default_get_url(host, media_id, template='https://{host}/tt{media_id}')
gpl-2.0
SomethingExplosive/android_kernel_asus_flo
tools/perf/scripts/python/check-perf-trace.py
11214
2503
# perf script event handlers, generated by perf script -g python # (c) 2010, Tom Zanussi <tzanussi@gmail.com> # Licensed under the terms of the GNU GPL License version 2 # # This script tests basic functionality such as flag and symbol # strings, common_xxx() calls back into perf, begin, end, unhandled # events, etc. Basically, if this script runs successfully and # displays expected results, Python scripting support should be ok. import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from Core import * from perf_trace_context import * unhandled = autodict() def trace_begin(): print "trace_begin" pass def trace_end(): print_unhandled() def irq__softirq_entry(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, vec): print_header(event_name, common_cpu, common_secs, common_nsecs, common_pid, common_comm) print_uncommon(context) print "vec=%s\n" % \ (symbol_str("irq__softirq_entry", "vec", vec)), def kmem__kmalloc(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, call_site, ptr, bytes_req, bytes_alloc, gfp_flags): print_header(event_name, common_cpu, common_secs, common_nsecs, common_pid, common_comm) print_uncommon(context) print "call_site=%u, ptr=%u, bytes_req=%u, " \ "bytes_alloc=%u, gfp_flags=%s\n" % \ (call_site, ptr, bytes_req, bytes_alloc, flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)), def trace_unhandled(event_name, context, event_fields_dict): try: unhandled[event_name] += 1 except TypeError: unhandled[event_name] = 1 def print_header(event_name, cpu, secs, nsecs, pid, comm): print "%-20s %5u %05u.%09u %8u %-20s " % \ (event_name, cpu, secs, nsecs, pid, comm), # print trace fields not included in handler args def print_uncommon(context): print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \ % (common_pc(context), trace_flag_str(common_flags(context)), \ common_lock_depth(context)) def 
print_unhandled(): keys = unhandled.keys() if not keys: return print "\nunhandled events:\n\n", print "%-40s %10s\n" % ("event", "count"), print "%-40s %10s\n" % ("----------------------------------------", \ "-----------"), for event_name in keys: print "%-40s %10d\n" % (event_name, unhandled[event_name])
gpl-2.0
manaris/jythonMusic
library/jython2.5.3/Lib/encodings/cp037.py
593
13377
""" Python Character Mapping Codec cp037 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP037.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp037', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> NULL u'\x01' # 0x01 -> START OF HEADING u'\x02' # 0x02 -> START OF TEXT u'\x03' # 0x03 -> END OF TEXT u'\x9c' # 0x04 -> CONTROL u'\t' # 0x05 -> HORIZONTAL TABULATION u'\x86' # 0x06 -> CONTROL u'\x7f' # 0x07 -> DELETE u'\x97' # 0x08 -> CONTROL u'\x8d' # 0x09 -> CONTROL u'\x8e' # 0x0A -> CONTROL u'\x0b' # 0x0B -> VERTICAL TABULATION u'\x0c' # 0x0C -> FORM FEED u'\r' # 0x0D -> CARRIAGE RETURN u'\x0e' # 0x0E -> SHIFT OUT u'\x0f' # 0x0F -> SHIFT IN u'\x10' # 0x10 -> DATA LINK ESCAPE u'\x11' # 0x11 -> DEVICE CONTROL ONE u'\x12' # 0x12 -> DEVICE CONTROL TWO u'\x13' # 0x13 -> DEVICE CONTROL THREE u'\x9d' # 0x14 -> CONTROL u'\x85' # 0x15 -> CONTROL u'\x08' # 0x16 -> BACKSPACE u'\x87' # 0x17 -> CONTROL u'\x18' # 0x18 -> CANCEL u'\x19' # 0x19 -> END OF MEDIUM u'\x92' # 0x1A -> CONTROL u'\x8f' # 0x1B -> CONTROL u'\x1c' # 0x1C -> FILE SEPARATOR u'\x1d' # 0x1D -> GROUP SEPARATOR u'\x1e' # 
0x1E -> RECORD SEPARATOR u'\x1f' # 0x1F -> UNIT SEPARATOR u'\x80' # 0x20 -> CONTROL u'\x81' # 0x21 -> CONTROL u'\x82' # 0x22 -> CONTROL u'\x83' # 0x23 -> CONTROL u'\x84' # 0x24 -> CONTROL u'\n' # 0x25 -> LINE FEED u'\x17' # 0x26 -> END OF TRANSMISSION BLOCK u'\x1b' # 0x27 -> ESCAPE u'\x88' # 0x28 -> CONTROL u'\x89' # 0x29 -> CONTROL u'\x8a' # 0x2A -> CONTROL u'\x8b' # 0x2B -> CONTROL u'\x8c' # 0x2C -> CONTROL u'\x05' # 0x2D -> ENQUIRY u'\x06' # 0x2E -> ACKNOWLEDGE u'\x07' # 0x2F -> BELL u'\x90' # 0x30 -> CONTROL u'\x91' # 0x31 -> CONTROL u'\x16' # 0x32 -> SYNCHRONOUS IDLE u'\x93' # 0x33 -> CONTROL u'\x94' # 0x34 -> CONTROL u'\x95' # 0x35 -> CONTROL u'\x96' # 0x36 -> CONTROL u'\x04' # 0x37 -> END OF TRANSMISSION u'\x98' # 0x38 -> CONTROL u'\x99' # 0x39 -> CONTROL u'\x9a' # 0x3A -> CONTROL u'\x9b' # 0x3B -> CONTROL u'\x14' # 0x3C -> DEVICE CONTROL FOUR u'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE u'\x9e' # 0x3E -> CONTROL u'\x1a' # 0x3F -> SUBSTITUTE u' ' # 0x40 -> SPACE u'\xa0' # 0x41 -> NO-BREAK SPACE u'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX u'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS u'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE u'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE u'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE u'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE u'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA u'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE u'\xa2' # 0x4A -> CENT SIGN u'.' 
# 0x4B -> FULL STOP u'<' # 0x4C -> LESS-THAN SIGN u'(' # 0x4D -> LEFT PARENTHESIS u'+' # 0x4E -> PLUS SIGN u'|' # 0x4F -> VERTICAL LINE u'&' # 0x50 -> AMPERSAND u'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE u'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX u'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS u'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE u'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE u'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX u'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS u'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE u'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN) u'!' # 0x5A -> EXCLAMATION MARK u'$' # 0x5B -> DOLLAR SIGN u'*' # 0x5C -> ASTERISK u')' # 0x5D -> RIGHT PARENTHESIS u';' # 0x5E -> SEMICOLON u'\xac' # 0x5F -> NOT SIGN u'-' # 0x60 -> HYPHEN-MINUS u'/' # 0x61 -> SOLIDUS u'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX u'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS u'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE u'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE u'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE u'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE u'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA u'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE u'\xa6' # 0x6A -> BROKEN BAR u',' # 0x6B -> COMMA u'%' # 0x6C -> PERCENT SIGN u'_' # 0x6D -> LOW LINE u'>' # 0x6E -> GREATER-THAN SIGN u'?' 
# 0x6F -> QUESTION MARK u'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE u'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE u'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX u'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS u'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE u'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE u'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX u'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS u'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE u'`' # 0x79 -> GRAVE ACCENT u':' # 0x7A -> COLON u'#' # 0x7B -> NUMBER SIGN u'@' # 0x7C -> COMMERCIAL AT u"'" # 0x7D -> APOSTROPHE u'=' # 0x7E -> EQUALS SIGN u'"' # 0x7F -> QUOTATION MARK u'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE u'a' # 0x81 -> LATIN SMALL LETTER A u'b' # 0x82 -> LATIN SMALL LETTER B u'c' # 0x83 -> LATIN SMALL LETTER C u'd' # 0x84 -> LATIN SMALL LETTER D u'e' # 0x85 -> LATIN SMALL LETTER E u'f' # 0x86 -> LATIN SMALL LETTER F u'g' # 0x87 -> LATIN SMALL LETTER G u'h' # 0x88 -> LATIN SMALL LETTER H u'i' # 0x89 -> LATIN SMALL LETTER I u'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC) u'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE u'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC) u'\xb1' # 0x8F -> PLUS-MINUS SIGN u'\xb0' # 0x90 -> DEGREE SIGN u'j' # 0x91 -> LATIN SMALL LETTER J u'k' # 0x92 -> LATIN SMALL LETTER K u'l' # 0x93 -> LATIN SMALL LETTER L u'm' # 0x94 -> LATIN SMALL LETTER M u'n' # 0x95 -> LATIN SMALL LETTER N u'o' # 0x96 -> LATIN SMALL LETTER O u'p' # 0x97 -> LATIN SMALL LETTER P u'q' # 0x98 -> LATIN SMALL LETTER Q u'r' # 0x99 -> LATIN SMALL LETTER R u'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR u'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR u'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE u'\xb8' # 0x9D -> CEDILLA u'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE u'\xa4' # 0x9F -> CURRENCY SIGN u'\xb5' # 0xA0 -> MICRO 
SIGN u'~' # 0xA1 -> TILDE u's' # 0xA2 -> LATIN SMALL LETTER S u't' # 0xA3 -> LATIN SMALL LETTER T u'u' # 0xA4 -> LATIN SMALL LETTER U u'v' # 0xA5 -> LATIN SMALL LETTER V u'w' # 0xA6 -> LATIN SMALL LETTER W u'x' # 0xA7 -> LATIN SMALL LETTER X u'y' # 0xA8 -> LATIN SMALL LETTER Y u'z' # 0xA9 -> LATIN SMALL LETTER Z u'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK u'\xbf' # 0xAB -> INVERTED QUESTION MARK u'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC) u'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE u'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC) u'\xae' # 0xAF -> REGISTERED SIGN u'^' # 0xB0 -> CIRCUMFLEX ACCENT u'\xa3' # 0xB1 -> POUND SIGN u'\xa5' # 0xB2 -> YEN SIGN u'\xb7' # 0xB3 -> MIDDLE DOT u'\xa9' # 0xB4 -> COPYRIGHT SIGN u'\xa7' # 0xB5 -> SECTION SIGN u'\xb6' # 0xB6 -> PILCROW SIGN u'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER u'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF u'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS u'[' # 0xBA -> LEFT SQUARE BRACKET u']' # 0xBB -> RIGHT SQUARE BRACKET u'\xaf' # 0xBC -> MACRON u'\xa8' # 0xBD -> DIAERESIS u'\xb4' # 0xBE -> ACUTE ACCENT u'\xd7' # 0xBF -> MULTIPLICATION SIGN u'{' # 0xC0 -> LEFT CURLY BRACKET u'A' # 0xC1 -> LATIN CAPITAL LETTER A u'B' # 0xC2 -> LATIN CAPITAL LETTER B u'C' # 0xC3 -> LATIN CAPITAL LETTER C u'D' # 0xC4 -> LATIN CAPITAL LETTER D u'E' # 0xC5 -> LATIN CAPITAL LETTER E u'F' # 0xC6 -> LATIN CAPITAL LETTER F u'G' # 0xC7 -> LATIN CAPITAL LETTER G u'H' # 0xC8 -> LATIN CAPITAL LETTER H u'I' # 0xC9 -> LATIN CAPITAL LETTER I u'\xad' # 0xCA -> SOFT HYPHEN u'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX u'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS u'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE u'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE u'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE u'}' # 0xD0 -> RIGHT CURLY BRACKET u'J' # 0xD1 -> LATIN CAPITAL LETTER J u'K' # 0xD2 -> LATIN CAPITAL LETTER K u'L' # 0xD3 -> LATIN CAPITAL LETTER L u'M' # 0xD4 -> LATIN CAPITAL LETTER M u'N' # 
0xD5 -> LATIN CAPITAL LETTER N u'O' # 0xD6 -> LATIN CAPITAL LETTER O u'P' # 0xD7 -> LATIN CAPITAL LETTER P u'Q' # 0xD8 -> LATIN CAPITAL LETTER Q u'R' # 0xD9 -> LATIN CAPITAL LETTER R u'\xb9' # 0xDA -> SUPERSCRIPT ONE u'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX u'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS u'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE u'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE u'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS u'\\' # 0xE0 -> REVERSE SOLIDUS u'\xf7' # 0xE1 -> DIVISION SIGN u'S' # 0xE2 -> LATIN CAPITAL LETTER S u'T' # 0xE3 -> LATIN CAPITAL LETTER T u'U' # 0xE4 -> LATIN CAPITAL LETTER U u'V' # 0xE5 -> LATIN CAPITAL LETTER V u'W' # 0xE6 -> LATIN CAPITAL LETTER W u'X' # 0xE7 -> LATIN CAPITAL LETTER X u'Y' # 0xE8 -> LATIN CAPITAL LETTER Y u'Z' # 0xE9 -> LATIN CAPITAL LETTER Z u'\xb2' # 0xEA -> SUPERSCRIPT TWO u'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX u'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS u'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE u'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE u'0' # 0xF0 -> DIGIT ZERO u'1' # 0xF1 -> DIGIT ONE u'2' # 0xF2 -> DIGIT TWO u'3' # 0xF3 -> DIGIT THREE u'4' # 0xF4 -> DIGIT FOUR u'5' # 0xF5 -> DIGIT FIVE u'6' # 0xF6 -> DIGIT SIX u'7' # 0xF7 -> DIGIT SEVEN u'8' # 0xF8 -> DIGIT EIGHT u'9' # 0xF9 -> DIGIT NINE u'\xb3' # 0xFA -> SUPERSCRIPT THREE u'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX u'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS u'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE u'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE u'\x9f' # 0xFF -> CONTROL ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
gpl-3.0
chengdh/openerp-ktv
openerp/addons/l10n_ve/__openerp__.py
8
2013
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved # $Id$ # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2008 Netquatro, C.A. (<http://openerp.netquatro.com>). All Rights Reserved # $Id$ # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { "name" : "Venezuela - Accounting", "version" : "1.0", "author" : ['OpenERP SA', "Netquatro"], "category" : "Localization/Account Charts", "description": ''' This is the module to manage the accounting chart for Venezuela in OpenERP. =========================================================================== Este módulo es para manejar un catálogo de cuentas ejemplo para Venezuela. ''', "depends" : ["account", "base_vat", "account_chart"], "demo_xml" : [], "update_xml" : ['account_tax_code.xml',"account_chart.xml", 'account_tax.xml','l10n_chart_ve_wizard.xml'], "auto_install": False, "installable": True, "certificate" : "00763145921185574557", 'images': ['images/config_chart_l10n_ve.jpeg','images/l10n_ve_chart.jpeg'], } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
TimoRoth/oggm
oggm/tests/conftest.py
1
9836
"""Pytest fixtures to be used in other test modules""" import os import shutil import logging import getpass from functools import wraps import numpy as np import pytest import shapely.geometry as shpg import matplotlib.pyplot as plt from oggm.shop import cru, histalp, ecmwf from oggm import cfg, tasks from oggm.core import flowline from oggm.tests.funcs import init_hef, get_test_dir from oggm import utils from oggm.utils import mkdir, _downloads from oggm.utils import oggm_urlretrieve from oggm.tests import HAS_MPL_FOR_TESTS, HAS_INTERNET logger = logging.getLogger(__name__) def pytest_configure(config): for marker in ["slow", "download", "creds", "internet", "test_env", "graphic "]: config.addinivalue_line("markers", marker) if config.pluginmanager.hasplugin('xdist'): try: from ilock import ILock utils.lock = ILock("oggm_xdist_download_lock_" + getpass.getuser()) logger.info("ilock locking setup successfully for xdist tests") except BaseException: logger.warning("could not setup ilock locking for distributed " "tests") def pytest_addoption(parser): parser.addoption("--run-slow", action="store_true", default=False, help="Run slow tests") parser.addoption("--run-download", action="store_true", default=False, help="Run download tests") parser.addoption("--run-creds", action="store_true", default=False, help="Run download tests requiring credentials") parser.addoption("--run-test-env", metavar="ENVNAME", default="", help="Run only specified test env") parser.addoption("--no-run-internet", action="store_true", default=False, help="Don't run any tests accessing the internet") def pytest_collection_modifyitems(config, items): use_internet = HAS_INTERNET and not config.getoption("--no-run-internet") skip_slow = not config.getoption("--run-slow") skip_download = not use_internet or not config.getoption("--run-download") skip_cred = skip_download or not config.getoption("--run-creds") run_test_env = config.getoption("--run-test-env") slow_marker = 
pytest.mark.skip(reason="need --run-slow option to run") download_marker = pytest.mark.skip(reason="need --run-download option to " "run, internet access is " "required") cred_marker = pytest.mark.skip(reason="need --run-creds option to run, " "internet access is required") internet_marker = pytest.mark.skip(reason="internet access is required") test_env_marker = pytest.mark.skip(reason="only test_env=%s tests are run" % run_test_env) graphic_marker = pytest.mark.skip(reason="requires mpl V1.5+ and " "pytest-mpl") for item in items: if skip_slow and "slow" in item.keywords: item.add_marker(slow_marker) if skip_download and "download" in item.keywords: item.add_marker(download_marker) if skip_cred and "creds" in item.keywords: item.add_marker(cred_marker) if not use_internet and "internet" in item.keywords: item.add_marker(internet_marker) if run_test_env: test_env = item.get_closest_marker("test_env") if not test_env or test_env.args[0] != run_test_env: item.add_marker(test_env_marker) if "graphic" in item.keywords: def wrap_graphic_test(test): @wraps(test) def test_wrapper(*args, **kwargs): try: return test(*args, **kwargs) finally: plt.close() return test_wrapper item.obj = wrap_graphic_test(item.obj) if not HAS_MPL_FOR_TESTS: item.add_marker(graphic_marker) @pytest.fixture(autouse=True) def patch_data_urls(monkeypatch): """This makes sure we never download the big files with our tests""" url = 'https://cluster.klima.uni-bremen.de/~oggm/test_climate/' monkeypatch.setattr(cru, 'CRU_SERVER', url + 'cru/') monkeypatch.setattr(cru, 'CRU_BASE', 'cru_ts3.23.1901.2014.{}.dat.nc') monkeypatch.setattr(histalp, 'HISTALP_SERVER', url + 'histalp/') monkeypatch.setattr(ecmwf, 'ECMWF_SERVER', url) basenames = { 'ERA5': { 'inv': 'era5/monthly/v1.0/era5_invariant.nc', 'pre': 'era5/monthly/v1.0/era5_monthly_prcp_1979-2018.nc', 'tmp': 'era5/monthly/v1.0/era5_monthly_t2m_1979-2018.nc' }, 'ERA5L': { 'inv': 'era5-land/monthly/v1.0/era5_land_invariant_flat.nc', 'pre': 
'era5-land/monthly/v1.0/era5_land_monthly_prcp_1981-2018_flat' '.nc', 'tmp': 'era5-land/monthly/v1.0/era5_land_monthly_t2m_1981-2018_flat.nc' }, 'CERA': { 'inv': 'cera-20c/monthly/v1.0/cera-20c_invariant.nc', 'pre': 'cera-20c/monthly/v1.0/cera-20c_pcp_1901-2010.nc', 'tmp': 'cera-20c/monthly/v1.0/cera-20c_t2m_1901-2010.nc' }, 'ERA5dr': { 'inv': 'era5/monthly/vdr/ERA5_geopotential_monthly.nc', 'lapserates': 'era5/monthly/vdr/ERA5_lapserates_monthly.nc', 'tmp': 'era5/monthly/vdr/ERA5_temp_monthly.nc', 'tempstd': 'era5/monthly/vdr/ERA5_tempstd_monthly.nc', 'pre': 'era5/monthly/vdr/ERA5_totalprecip_monthly.nc', } } monkeypatch.setattr(ecmwf, 'BASENAMES', basenames) def secure_url_retrieve(url, *args, **kwargs): """A simple patch to OGGM's download function to make sure we don't download elsewhere than expected.""" assert ('github' in url or 'cluster.klima.uni-bremen.de/~oggm/ref_mb_params' in url or 'cluster.klima.uni-bremen.de/~oggm/test_gdirs/' in url or 'cluster.klima.uni-bremen.de/~oggm/demo_gdirs/' in url or 'cluster.klima.uni-bremen.de/~oggm/test_climate/' in url or 'klima.uni-bremen.de/~oggm/climate/cru/cru_cl2.nc.zip' in url ) return oggm_urlretrieve(url, *args, **kwargs) @pytest.fixture(autouse=True) def patch_url_retrieve(monkeypatch): monkeypatch.setattr(_downloads, 'oggm_urlretrieve', secure_url_retrieve) @pytest.fixture() def dummy_constant_bed(): dx = 1. hmax = 3000. hmin = 1000. nx = 200 map_dx = 100. widths = 3. surface_h = np.linspace(hmax, hmin, nx) bed_h = surface_h widths = surface_h * 0. + widths coords = np.arange(0, nx - 0.5, 1) line = shpg.LineString(np.vstack([coords, coords * 0.]).T) return [flowline.RectangularBedFlowline(line, dx, map_dx, surface_h, bed_h, widths)] @pytest.fixture(scope='session') def test_dir(): """ Provides a reference to the test directory for the entire test session. Named after the current git revision. As a session-scoped fixture, this will only be created once and then injected to each test that depends on it. 
""" return get_test_dir() def _setup_case_dir(call, test_dir): casedir = os.path.join(test_dir, call.__name__) mkdir(casedir, reset=True) return casedir def _teardown_case_dir(casedir): if os.path.exists(casedir): shutil.rmtree(casedir) @pytest.fixture(scope='function') def case_dir(request, test_dir): """ Provides a unique directory for the current test function, a child of the session test directory (test_dir > case_dir). Named after the current test function. As a function-scoped fixture, a new directory is created for each function that uses this and then cleaned up when the case completes. """ cd = _setup_case_dir(request.function, test_dir) yield cd _teardown_case_dir(cd) @pytest.fixture(scope='class') def class_case_dir(request, test_dir): """ Provides a unique directory for the current test class, a child of the session test directory (test_dir > class_case_dir). Named after the current test class. As a class-scoped fixture, a class directory is created once for the current class and used by each test inside it. It is cleaned up when the all the cases in the class complete. """ cd = _setup_case_dir(request.cls, test_dir) yield cd _teardown_case_dir(cd) @pytest.fixture(scope='module') def hef_gdir_base(request, test_dir): """ Provides an initialized Hintereisferner glacier directory. As a module fixture, the initialization is run only once per test module that uses it. IMPORTANT: To preserve a constant starting condition, hef_gdir_base should almost never be directly injected into a test case. Test cases should use the below hef_gdir fixture to provide a directory that has been copied into an ephemeral case directory. 
""" try: module = request.module border = module.DOM_BORDER if module.DOM_BORDER is not None else 40 return init_hef(border=border) except AttributeError: return init_hef() @pytest.fixture(scope='class') def hef_gdir(hef_gdir_base, class_case_dir): """ Provides a copy of the base Hintereisenferner glacier directory in a case directory specific to the current test class. All cases in the test class will use the same copy of this glacier directory. """ return tasks.copy_to_basedir(hef_gdir_base, base_dir=class_case_dir, setup='all')
bsd-3-clause
mrquim/mrquimrepo
plugin.video.poseidon/resources/lib/modules/log_utils.py
30
3921
""" tknorris shared module Copyright (C) 2016 tknorris This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import time import cProfile import StringIO import pstats import json import xbmc from resources.lib.modules import control from xbmc import LOGDEBUG, LOGERROR, LOGFATAL, LOGINFO, LOGNONE, LOGNOTICE, LOGSEVERE, LOGWARNING # @UnusedImport name = control.addonInfo('name') def log(msg, level=LOGDEBUG): req_level = level # override message level to force logging when addon logging turned on if control.setting('addon_debug') == 'true' and level == LOGDEBUG: level = LOGNOTICE try: if isinstance(msg, unicode): msg = '%s (ENCODED)' % (msg.encode('utf-8')) xbmc.log('[%s] %s' % (name, msg), level) except Exception as e: try: xbmc.log('Logging Failure: %s' % (e), level) except: pass # just give up class Profiler(object): def __init__(self, file_path, sort_by='time', builtins=False): self._profiler = cProfile.Profile(builtins=builtins) self.file_path = file_path self.sort_by = sort_by def profile(self, f): def method_profile_on(*args, **kwargs): try: self._profiler.enable() result = self._profiler.runcall(f, *args, **kwargs) self._profiler.disable() return result except Exception as e: log('Profiler Error: %s' % (e), LOGWARNING) return f(*args, **kwargs) def method_profile_off(*args, **kwargs): return f(*args, **kwargs) if _is_debugging(): return method_profile_on else: return method_profile_off def __del__(self): 
self.dump_stats() def dump_stats(self): if self._profiler is not None: s = StringIO.StringIO() params = (self.sort_by,) if isinstance(self.sort_by, basestring) else self.sort_by ps = pstats.Stats(self._profiler, stream=s).sort_stats(*params) ps.print_stats() if self.file_path is not None: with open(self.file_path, 'w') as f: f.write(s.getvalue()) def trace(method): def method_trace_on(*args, **kwargs): start = time.time() result = method(*args, **kwargs) end = time.time() log('{name!r} time: {time:2.4f}s args: |{args!r}| kwargs: |{kwargs!r}|'.format(name=method.__name__, time=end - start, args=args, kwargs=kwargs), LOGDEBUG) return result def method_trace_off(*args, **kwargs): return method(*args, **kwargs) if _is_debugging(): return method_trace_on else: return method_trace_off def _is_debugging(): command = {'jsonrpc': '2.0', 'id': 1, 'method': 'Settings.getSettings', 'params': {'filter': {'section': 'system', 'category': 'logging'}}} js_data = execute_jsonrpc(command) for item in js_data.get('result', {}).get('settings', {}): if item['id'] == 'debug.showloginfo': return item['value'] return False def execute_jsonrpc(command): if not isinstance(command, basestring): command = json.dumps(command) response = control.jsonrpc(command) return json.loads(response)
gpl-2.0
jakar/odoo-bank-statement-reconcile
__unported__/account_statement_ofx_import/parser/ofx_parser.py
15
3538
# -*- coding: utf-8 -*- ############################################################################## # # Author: Pedro Manuel Baeza Romero # Copyright 2013 Servicios Tecnológicos Avanzados # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import tempfile import datetime from openerp.tools.translate import _ from openerp.addons.account_statement_base_import.parser import \ BankStatementImportParser try: import ofxparse except: raise Exception(_('Please install python lib ofxparse')) class OfxParser(BankStatementImportParser): """Class for defining parser for OFX file format.""" @classmethod def parser_for(cls, parser_name): """Used by the new_bank_statement_parser class factory. Return true if the providen name is 'ofx_so'. 
""" return parser_name == 'ofx_so' def _custom_format(self, *args, **kwargs): """No other work on data are needed in this parser.""" return True def _pre(self, *args, **kwargs): """No pre-treatment needed for this parser.""" return True def _parse(self, *args, **kwargs): """Launch the parsing itself.""" ofx_file = tempfile.NamedTemporaryFile() ofx_file.seek(0) ofx_file.write(self.filebuffer) ofx_file.flush() ofx = ofxparse.OfxParser.parse(file(ofx_file.name)) ofx_file.close() res = [] for transaction in ofx.account.statement.transactions: res.append({ 'date': transaction.date, 'amount': transaction.amount, 'ref': transaction.type, 'label': transaction.payee, }) self.result_row_list = res return True def _validate(self, *args, **kwargs): """Nothing to do here. ofxparse trigger possible format errors.""" return True def _post(self, *args, **kwargs): """Nothing is needed to do after parsing.""" return True def get_st_line_vals(self, line, *args, **kwargs): """This method must return a dict of vals that can be passed to create method of statement line in order to record it. It is the responsibility of every parser to give this dict of vals, so each one can implement his own way of recording the lines. :param: line: a dict of vals that represent a line of result_row_list :return: dict of values to give to the create method of statement line """ return { 'name': line.get('label', line.get('ref', '/')), 'date': line.get('date', datetime.datetime.now().date()), 'amount': line.get('amount', 0.0), 'ref': line.get('ref', '/'), 'label': line.get('label', ''), }
agpl-3.0
yared-bezum/tweepy
tweepy/parsers.py
51
2754
# Tweepy # Copyright 2009-2010 Joshua Roesslein # See LICENSE for details. from __future__ import print_function from tweepy.models import ModelFactory from tweepy.utils import import_simplejson from tweepy.error import TweepError class Parser(object): def parse(self, method, payload): """ Parse the response payload and return the result. Returns a tuple that contains the result data and the cursors (or None if not present). """ raise NotImplementedError def parse_error(self, payload): """ Parse the error message from payload. If unable to parse the message, throw an exception and default error message will be used. """ raise NotImplementedError class RawParser(Parser): def __init__(self): pass def parse(self, method, payload): return payload def parse_error(self, payload): return payload class JSONParser(Parser): payload_format = 'json' def __init__(self): self.json_lib = import_simplejson() def parse(self, method, payload): try: json = self.json_lib.loads(payload) except Exception as e: raise TweepError('Failed to parse JSON payload: %s' % e) needs_cursors = 'cursor' in method.session.params if needs_cursors and isinstance(json, dict): if 'previous_cursor' in json: if 'next_cursor' in json: cursors = json['previous_cursor'], json['next_cursor'] return json, cursors else: return json def parse_error(self, payload): error = self.json_lib.loads(payload) if error.has_key('error'): return error['error'] else: return error['errors'] class ModelParser(JSONParser): def __init__(self, model_factory=None): JSONParser.__init__(self) self.model_factory = model_factory or ModelFactory def parse(self, method, payload): try: if method.payload_type is None: return model = getattr(self.model_factory, method.payload_type) except AttributeError: raise TweepError('No model for this payload type: ' '%s' % method.payload_type) json = JSONParser.parse(self, method, payload) if isinstance(json, tuple): json, cursors = json else: cursors = None if method.payload_list: result = 
model.parse_list(method.api, json) else: result = model.parse(method.api, json) if cursors: return result, cursors else: return result
mit
darkleons/lama
addons/marketing_campaign/report/__init__.py
441
1071
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import campaign_analysis # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
food52/thumbor
vows/healthcheck_vows.py
14
1668
#!/usr/bin/python # -*- coding: utf-8 -*- # thumbor imaging service # https://github.com/globocom/thumbor/wiki # Licensed under the MIT license: # http://www.opensource.org/licenses/mit-license # Copyright (c) 2011 globo.com timehome@corp.globo.com from pyvows import Vows, expect from tornado_pyvows.context import TornadoHTTPContext from thumbor.app import ThumborServiceApp from thumbor.config import Config from thumbor.context import Context @Vows.batch class HealthCheck(TornadoHTTPContext): def get_app(self): cfg = Config() ctx = Context(None, cfg, None) application = ThumborServiceApp(ctx) return application class WhenRunning(TornadoHTTPContext): def topic(self): response = self.get('/healthcheck') return (response.code, response.body) class StatusCode(TornadoHTTPContext): def topic(self, response): return response[0] def should_not_be_an_error(self, topic): expect(topic).to_equal(200) class Body(TornadoHTTPContext): def topic(self, response): return response[1] def should_equal_working(self, topic): expect(topic.lower().strip()).to_equal('working') class HeadHandler(TornadoHTTPContext): def topic(self): response = self.head('/healthcheck') return (response.code, response.body) class StatusCode(TornadoHTTPContext): def topic(self, response): return response[0] def should_not_be_an_error(self, topic): expect(topic).to_equal(200)
mit
toontownfunserver/Panda3D-1.9.0
direct/distributed/AstronClientRepository.py
4
10641
"""AstronClientRepository module: contains the AstronClientRepository class"""

from direct.directnotify import DirectNotifyGlobal
from ClientRepositoryBase import ClientRepositoryBase
from MsgTypes import *
from direct.distributed.PyDatagram import PyDatagram
from pandac.PandaModules import STUint16, STUint32


class AstronClientRepository(ClientRepositoryBase):
    """
    The Astron implementation of a clients repository for
    communication with an Astron ClientAgent.

    This repo will emit events for:
    * CLIENT_HELLO_RESP
    * CLIENT_EJECT ( error_code, reason )
    * CLIENT_OBJECT_LEAVING ( do_id )
    * CLIENT_ADD_INTEREST ( context, interest_id, parent_id, zone_id )
    * CLIENT_ADD_INTEREST_MULTIPLE ( icontext, interest_id, parent_id, [zone_ids] )
    * CLIENT_REMOVE_INTEREST ( context, interest_id )
    * CLIENT_DONE_INTEREST_RESP ( context, interest_id )
    * LOST_CONNECTION ()
    """

    notify = DirectNotifyGlobal.directNotify.newCategory("ClientRepository")

    # This is required by DoCollectionManager, even though it's not
    # used by this implementation.
    GameGlobalsId = 0

    def __init__(self, *args, **kwargs):
        ClientRepositoryBase.__init__(self, *args, **kwargs)
        # Make sure connection state is torn down when the client exits.
        base.finalExitCallbacks.append(self.shutdown)
        # Dispatch table: Astron message type -> handler method.
        # Handlers not defined here (handleUpdateField, handleObjectLocation,
        # handleInterestDoneMessage, handleEnterObjectRequired) come from
        # ClientRepositoryBase.
        self.message_handlers = {
            CLIENT_HELLO_RESP: self.handleHelloResp,
            CLIENT_EJECT: self.handleEject,
            CLIENT_ENTER_OBJECT_REQUIRED: self.handleEnterObjectRequired,
            CLIENT_ENTER_OBJECT_REQUIRED_OWNER: self.handleEnterObjectRequiredOwner,
            CLIENT_OBJECT_SET_FIELD: self.handleUpdateField,
            CLIENT_OBJECT_SET_FIELDS: self.handleUpdateFields,
            CLIENT_OBJECT_LEAVING: self.handleObjectLeaving,
            CLIENT_OBJECT_LOCATION: self.handleObjectLocation,
            CLIENT_ADD_INTEREST: self.handleAddInterest,
            CLIENT_ADD_INTEREST_MULTIPLE: self.handleAddInterestMultiple,
            CLIENT_REMOVE_INTEREST: self.handleRemoveInterest,
            CLIENT_DONE_INTEREST_RESP: self.handleInterestDoneMessage,
        }

    #
    # Message Handling
    #

    def handleDatagram(self, di):
        """Dispatch an incoming datagram to the appropriate handler."""
        msgType = self.getMsgType()
        if msgType in self.message_handlers:
            self.message_handlers[msgType](di)
        else:
            self.notify.error("Got unknown message type %d!" % (msgType,))
        self.considerHeartbeat()

    def handleHelloResp(self, di):
        """Server acknowledged our CLIENT_HELLO."""
        messenger.send("CLIENT_HELLO_RESP", [])

    def handleEject(self, di):
        """Server has disconnected us; broadcast the reason."""
        error_code = di.get_uint16()
        reason = di.get_string()
        messenger.send("CLIENT_EJECT", [error_code, reason])

    def handleEnterObjectRequired(self, di):
        """A distributed object entered our interest; generate a view."""
        do_id = di.getArg(STUint32)
        parent_id = di.getArg(STUint32)
        zone_id = di.getArg(STUint32)
        dclass_id = di.getArg(STUint16)
        dclass = self.dclassesByNumber[dclass_id]
        self.generateWithRequiredFields(dclass, do_id, di, parent_id, zone_id)

    def handleEnterObjectRequiredOwner(self, di):
        """An object we own entered; generate its owner view."""
        avatar_doId = di.getArg(STUint32)
        parentId = di.getArg(STUint32)
        zoneId = di.getArg(STUint32)
        dclass_id = di.getArg(STUint16)
        dclass = self.dclassesByNumber[dclass_id]
        self.generateWithRequiredFieldsOwner(dclass, avatar_doId, di)

    def generateWithRequiredFieldsOwner(self, dclass, doId, di):
        """Create (or revive) the owner view for doId and fill its fields."""
        if doId in self.doId2ownerView:
            # ...it is in our dictionary.
            # Just update it.
            self.notify.error('duplicate owner generate for %s (%s)' % (
                doId, dclass.getName()))
            distObj = self.doId2ownerView[doId]
            assert distObj.dclass == dclass
            distObj.generate()
            distObj.updateRequiredFields(dclass, di)
            # updateRequiredFields calls announceGenerate
        elif self.cacheOwner.contains(doId):
            # ...it is in the cache.
            # Pull it out of the cache:
            distObj = self.cacheOwner.retrieve(doId)
            assert distObj.dclass == dclass
            # put it in the dictionary:
            self.doId2ownerView[doId] = distObj
            # and update it.
            distObj.generate()
            distObj.updateRequiredFields(dclass, di)
            # updateRequiredFields calls announceGenerate
        else:
            # ...it is not in the dictionary or the cache.
            # Construct a new one
            classDef = dclass.getOwnerClassDef()
            # Fix: identity comparison with None (was `== None`).
            if classDef is None:
                self.notify.error(
                    "Could not create an undefined %s object. "
                    "Have you created an owner view?" % (dclass.getName()))
            distObj = classDef(self)
            distObj.dclass = dclass
            # Assign it an Id
            distObj.doId = doId
            # Put the new do in the dictionary
            self.doId2ownerView[doId] = distObj
            # Update the required fields
            distObj.generateInit()  # Only called when constructed
            distObj.generate()
            distObj.updateRequiredFields(dclass, di)
            # updateRequiredFields calls announceGenerate
        return distObj

    def handleUpdateFields(self, di):
        """Handle CLIENT_OBJECT_SET_FIELDS (multiple field updates).

        Not implemented yet; can't be tested without the server actually
        sending it.
        """
        self.notify.error("CLIENT_OBJECT_SET_FIELDS not implemented!")
        # Tentative implementation sketch:
        # do_id = di.getUint32()
        # field_count = di.getUint16()
        # for i in range(0, field_count):
        #     field_id = di.getUint16()
        #     field = self.get_dc_file().get_field_by_index(field_id)
        #     # FIXME: Get field type, unpack value, create and send message.

    def handleObjectLeaving(self, di):
        """An object left our interest; delete its local view."""
        do_id = di.get_uint32()
        dist_obj = self.doId2do.get(do_id)
        # Fix: guard against an unknown doId -- the original raised
        # AttributeError on None here; deleteObject() already logs a
        # warning for non-existent objects.
        if dist_obj is not None:
            dist_obj.delete()
        self.deleteObject(do_id)
        messenger.send("CLIENT_OBJECT_LEAVING", [do_id])

    def handleAddInterest(self, di):
        """Server opened a single-zone interest on our behalf."""
        context = di.get_uint32()
        interest_id = di.get_uint16()
        parent_id = di.get_uint32()
        zone_id = di.get_uint32()
        messenger.send("CLIENT_ADD_INTEREST",
                       [context, interest_id, parent_id, zone_id])

    def handleAddInterestMultiple(self, di):
        """Server opened a multi-zone interest on our behalf."""
        context = di.get_uint32()
        interest_id = di.get_uint16()
        parent_id = di.get_uint32()
        zone_ids = [di.get_uint32() for i in range(0, di.get_uint16())]
        messenger.send("CLIENT_ADD_INTEREST_MULTIPLE",
                       [context, interest_id, parent_id, zone_ids])

    def handleRemoveInterest(self, di):
        """Server closed an interest on our behalf."""
        context = di.get_uint32()
        interest_id = di.get_uint16()
        messenger.send("CLIENT_REMOVE_INTEREST", [context, interest_id])

    def deleteObject(self, doId):
        """
        implementation copied from ClientRepository.py

        Removes the object from the client's view of the world.  This
        should normally not be called directly except in the case of
        error recovery, since the server will normally be responsible
        for deleting and disabling objects as they go out of scope.

        After this is called, future updates by server on this object
        will be ignored (with a warning message).  The object will
        become valid again the next time the server sends a generate
        message for this doId.

        This is not a distributed message and does not delete the
        object on the server or on any other client.
        """
        if doId in self.doId2do:
            # If it is in the dictionary, remove it.
            obj = self.doId2do[doId]
            # Remove it from the dictionary
            del self.doId2do[doId]
            # Disable, announce, and delete the object itself...
            # unless delayDelete is on...
            obj.deleteOrDelay()
            if self.isLocalId(doId):
                self.freeDoId(doId)
        elif self.cache.contains(doId):
            # If it is in the cache, remove it.
            self.cache.delete(doId)
            if self.isLocalId(doId):
                self.freeDoId(doId)
        else:
            # Otherwise, ignore it
            self.notify.warning(
                "Asked to delete non-existent DistObj " + str(doId))

    #
    # Sending messages
    #

    def sendUpdate(self, distObj, fieldName, args):
        """ Sends a normal update for a single field. """
        dg = distObj.dclass.clientFormatUpdate(
            fieldName, distObj.doId, args)
        self.send(dg)

    # FIXME: The version string should default to a .prc variable.
    def sendHello(self, version_string):
        """Send CLIENT_HELLO with our DC hash and version string."""
        dg = PyDatagram()
        dg.add_uint16(CLIENT_HELLO)
        dg.add_uint32(self.get_dc_file().get_hash())
        dg.add_string(version_string)
        self.send(dg)

    def sendHeartbeat(self):
        """Send CLIENT_HEARTBEAT to keep the connection alive."""
        datagram = PyDatagram()
        datagram.addUint16(CLIENT_HEARTBEAT)
        self.send(datagram)

    def sendAddInterest(self, context, interest_id, parent_id, zone_id):
        """Request interest in a single zone under parent_id."""
        dg = PyDatagram()
        dg.add_uint16(CLIENT_ADD_INTEREST)
        dg.add_uint32(context)
        dg.add_uint16(interest_id)
        dg.add_uint32(parent_id)
        dg.add_uint32(zone_id)
        self.send(dg)

    def sendAddInterestMultiple(self, context, interest_id, parent_id, zone_ids):
        """Request interest in several zones under parent_id."""
        dg = PyDatagram()
        dg.add_uint16(CLIENT_ADD_INTEREST_MULTIPLE)
        dg.add_uint32(context)
        dg.add_uint16(interest_id)
        dg.add_uint32(parent_id)
        dg.add_uint16(len(zone_ids))
        for zone_id in zone_ids:
            dg.add_uint32(zone_id)
        self.send(dg)

    def sendRemoveInterest(self, context, interest_id):
        """Ask the server to close a previously-opened interest."""
        dg = PyDatagram()
        dg.add_uint16(CLIENT_REMOVE_INTEREST)
        dg.add_uint32(context)
        dg.add_uint16(interest_id)
        self.send(dg)

    #
    # Other stuff
    #

    def lostConnection(self):
        """Connection to the ClientAgent was lost unexpectedly."""
        messenger.send("LOST_CONNECTION")

    def disconnect(self):
        """
        This implicitly deletes all objects from the repository.
        """
        # Iterate over a snapshot of the keys: deleteObject() mutates
        # doId2do while we walk it.
        for do_id in list(self.doId2do.keys()):
            self.deleteObject(do_id)
        ClientRepositoryBase.disconnect(self)
bsd-3-clause
vrenaville/OCB
addons/website_mail/controllers/email_designer.py
20
3078
# -*- coding: utf-8 -*-

from urllib import urlencode

from openerp.addons.web import http
from openerp.addons.web.http import request
from openerp.tools.mail import html_sanitize


class WebsiteEmailDesigner(http.Controller):
    """Website controller exposing the inline email designer/editor."""

    @http.route('/website_mail/email_designer', type='http', auth="user", website=True)
    def index(self, model, res_id, template_model=None, **kw):
        """Render the email designer for record *res_id* of *model*.

        Redirects to '/' unless the model exists and exposes a body
        (body/body_html), a sender (email/email_from) and a subject
        (name/subject) column, and the record actually exists.
        """
        if not model or not model in request.registry or not res_id:
            return request.redirect('/')
        model_cols = request.registry[model]._all_columns
        # Reject models lacking any of the three required field groups.
        # Note: `and` binds tighter than `or`, so this reads as
        # (no body) or (no sender) or (no subject).
        if 'body' not in model_cols and 'body_html' not in model_cols or \
           'email' not in model_cols and 'email_from' not in model_cols or \
           'name' not in model_cols and 'subject' not in model_cols:
            return request.redirect('/')
        res_id = int(res_id)
        obj_ids = request.registry[model].exists(request.cr, request.uid, [res_id], context=request.context)
        if not obj_ids:
            return request.redirect('/')
        # try to find fields to display / edit -> as t-field is static, we have to limit
        # the available fields to a given subset
        email_from_field = 'email'
        if 'email_from' in model_cols:
            email_from_field = 'email_from'
        subject_field = 'name'
        if 'subject' in model_cols:
            subject_field = 'subject'
        body_field = 'body'
        if 'body_html' in model_cols:
            body_field = 'body_html'

        cr, uid, context = request.cr, request.uid, request.context
        record = request.registry[model].browse(cr, uid, res_id, context=context)
        values = {
            'record': record,
            'templates': None,
            'model': model,
            'res_id': res_id,
            'email_from_field': email_from_field,
            'subject_field': subject_field,
            'body_field': body_field,
        }

        if getattr(record, body_field):
            # Record already has a body: open the designer directly.
            values['mode'] = 'email_designer'
        else:
            # No body yet: show the template chooser first.  If the editor
            # flag was passed, strip it and redirect so the chooser isn't
            # rendered inside the edit toolbar.
            if kw.get('enable_editor'):
                kw.pop('enable_editor')
                fragments = dict(model=model, res_id=res_id, **kw)
                if template_model:
                    fragments['template_model'] = template_model
                return request.redirect('/website_mail/email_designer?%s' % urlencode(fragments))
            values['mode'] = 'email_template'

        tmpl_obj = request.registry['email.template']
        if template_model:
            tids = tmpl_obj.search(cr, uid, [('model', '=', template_model)], context=context)
        else:
            tids = tmpl_obj.search(cr, uid, [], context=context)
        templates = tmpl_obj.browse(cr, uid, tids, context=context)
        values['templates'] = templates
        values['html_sanitize'] = html_sanitize

        return request.website.render("website_mail.email_designer", values)

    @http.route(['/website_mail/snippets'], type='json', auth="user", website=True)
    def snippets(self):
        """Return the snippet palette markup for the email designer."""
        return request.website._render('website_mail.email_designer_snippets')
agpl-3.0
google-research/episodic-curiosity
episodic_curiosity/r_network.py
1
3675
# coding=utf-8
# Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""R-network and some related functions to train R-networks."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tempfile
from absl import logging
from third_party.keras_resnet import models
import numpy as np
import tensorflow as tf
from tensorflow import keras


class RNetwork(object):
  """Encapsulates a trained R network, with lazy loading of weights."""

  def __init__(self, input_shape, weight_path):
    """Inits the RNetwork.

    Args:
      input_shape: (height, width, channel)
      weight_path: Path to the weights of the r_network.
    """
    self._weight_path = weight_path
    # The siamese builder returns the full R-network plus two sub-networks
    # sharing its weights: one producing embeddings, one scoring pairs.
    (self._r_network, self._embedding_network,
     self._similarity_network) = models.ResnetBuilder.build_siamese_resnet_18(
         input_shape)
    # Compiled only so Keras allows weight loading; this instance is
    # never trained through this code path.
    self._r_network.compile(
        loss='categorical_crossentropy', optimizer=keras.optimizers.Adam())
    self._weights_loaded = False

  def _maybe_load_weights(self):
    """Loads R-network weights if needed.

    The RNetwork is used together with an environment used by ppo2.learn.
    Unfortunately, ppo2.learn initializes all global TF variables at the
    beginning of the training, which in particular, random-initializes the
    weights of the R Network. We therefore load the weights lazily,
    to make sure they are loaded after the global initialization happens
    in ppo2.learn.
    """
    if self._weights_loaded:
      return
    if self._weight_path is None:
      # Typically the case when doing online training of the R-network.
      return
    # Keras does not support reading weights from CNS, so we have to copy the
    # weights to a temporary local file.
    with tempfile.NamedTemporaryFile(prefix='r_net', suffix='.h5',
                                     delete=False) as tmp_file:
      tmp_path = tmp_file.name
    tf.gfile.Copy(self._weight_path, tmp_path, overwrite=True)
    logging.info('Loading weights from %s...', tmp_path)
    print('Loading into R network:')
    self._r_network.summary()
    self._r_network.load_weights(tmp_path)
    # Clean up the local copy once the weights are in memory.
    tf.gfile.Remove(tmp_path)
    self._weights_loaded = True

  def embed_observation(self, x):
    """Embeds an observation.

    Args:
      x: batched input observations. Expected to have the shape specified when
        the RNetwork was contructed (plus the batch dimension as first dim).

    Returns:
      embedding, shape [batch, models.EMBEDDING_DIM]
    """
    self._maybe_load_weights()
    return self._embedding_network.predict(x)

  def embedding_similarity(self, x, y):
    """Computes the similarity between two embeddings.

    Args:
      x: batch of the first embedding. Shape [batch, models.EMBEDDING_DIM].
      y: batch of the first embedding. Shape [batch, models.EMBEDDING_DIM].

    Returns:
      Similarity probabilities. 1 means very similar according to the net.
      0 means very dissimilar. Shape [batch].
    """
    self._maybe_load_weights()
    # Column 1 of the softmax output is the "similar" class probability.
    return self._similarity_network.predict([x, y],
                                            batch_size=1024)[:, 1]
apache-2.0
Zaltok/jugendjazzt
node_modules/node-forge/tests/policyserver.py
171
3551
#!/usr/bin/env python
"""
Flash Socket Policy Server.

- Starts Flash socket policy file server.
- Defaults to port 843.
- NOTE: Most operating systems require administrative privileges to use
  ports under 1024.

$ ./policyserver.py [options]
"""
"""
Also consider Adobe's solutions:
http://www.adobe.com/devnet/flashplayer/articles/socket_policy_files.html
"""
# NOTE(review): Python 2 only (SocketServer module); port to socketserver
# for Python 3.
from multiprocessing import Process
from optparse import OptionParser
import SocketServer
import logging

# Set address reuse for all TCPServers
SocketServer.TCPServer.allow_reuse_address = True

# Static socket policy file string.
# NOTE: This format is very strict. Edit with care.
# The trailing \0 terminator is required by the Flash policy protocol.
socket_policy_file = """\
<?xml version="1.0"?>\
<!DOCTYPE cross-domain-policy\
 SYSTEM "http://www.adobe.com/xml/dtds/cross-domain-policy.dtd">\
<cross-domain-policy>\
<allow-access-from domain="*" to-ports="*"/>\
</cross-domain-policy>\0"""


class PolicyHandler(SocketServer.BaseRequestHandler):
    """
    The RequestHandler class for our server.

    Returns a policy file when requested.
    """

    def handle(self):
        """Send policy string if proper request string is received."""
        # get some data
        # TODO: make this more robust (while loop, etc)
        # Strip the protocol's trailing NUL before comparing.
        self.data = self.request.recv(1024).rstrip('\0')
        logging.debug("%s wrote:%s" % (self.client_address[0], repr(self.data)))
        # if policy file request, send the file.
        if self.data == "<policy-file-request/>":
            logging.info("Policy server request from %s." % (self.client_address[0]))
            self.request.send(socket_policy_file)
        else:
            logging.info("Policy server received junk from %s: \"%s\"" % \
                (self.client_address[0], repr(self.data)))


class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    """TCP server handling each policy request on its own thread."""

    def serve_forever(self):
        """Handle one request at a time until shutdown or keyboard interrupt."""
        try:
            SocketServer.BaseServer.serve_forever(self)
        except KeyboardInterrupt:
            return


def main():
    """Run socket policy file servers."""
    usage = "Usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option("", "--host", dest="host", metavar="HOST",
            default="localhost", help="bind to HOST")
    parser.add_option("-p", "--port", dest="port", metavar="PORT",
            default=843, type="int", help="serve on PORT")
    parser.add_option("-d", "--debug", dest="debug",
            action="store_true", default=False, help="debugging output")
    parser.add_option("-v", "--verbose", dest="verbose",
            action="store_true", default=False, help="verbose output")
    (options, args) = parser.parse_args()

    # setup logging
    if options.debug:
        lvl = logging.DEBUG
    elif options.verbose:
        lvl = logging.INFO
    else:
        lvl = logging.WARNING
    logging.basicConfig(level=lvl, format="%(levelname)-8s %(message)s")

    # log basic info
    logging.info("Flash Socket Policy Server. Use ctrl-c to exit.")

    # create policy server
    logging.info("Socket policy serving on %s:%d." % (options.host, options.port))
    policyd = ThreadedTCPServer((options.host, options.port), PolicyHandler)

    # start server in a child process so the parent can watch for ctrl-c.
    policy_p = Process(target=policyd.serve_forever)
    policy_p.start()
    while policy_p.is_alive():
        try:
            policy_p.join(1)
        except KeyboardInterrupt:
            logging.info("Stopping test server...")


if __name__ == "__main__":
    main()
mit
castroflavio/ryu
ryu/services/protocols/bgp/utils/validation.py
9
6388
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
 Module provides utilities for validation.
"""
import socket

# Compatibility: `long` was removed in Python 3; alias it to `int` there so
# the (int, long) range checks below work on both major versions.
try:
    long
except NameError:
    long = int


def is_valid_ipv4(ipv4):
    """Returns True if given is a valid ipv4 address.

    Given value should be a dot-decimal notation string.
    """
    valid = True

    if not isinstance(ipv4, str):
        valid = False
    else:
        try:
            a, b, c, d = map(int, ipv4.split('.'))
            # Each octet must be in 0..255.
            if (a < 0 or a > 255 or b < 0 or b > 255 or c < 0 or c > 255 or
                    d < 0 or d > 255):
                valid = False
        except ValueError:
            # Non-numeric token or wrong number of octets.
            valid = False

    return valid


def is_valid_ipv4_prefix(ipv4_prefix):
    """Returns True if *ipv4_prefix* is a valid prefix with mask.

    Samples:
        - valid prefix: 1.1.1.0/32, 244.244.244.1/10
        - invalid prefix: 255.2.2.2/2, 2.2.2/22, etc.
    """
    if not isinstance(ipv4_prefix, str):
        return False

    valid = True
    tokens = ipv4_prefix.split('/')
    if len(tokens) != 2:
        valid = False
    else:
        if not is_valid_ipv4(tokens[0]):
            valid = False
        else:
            # Validate mask
            try:
                # Mask is a number
                mask = int(tokens[1])
                # Mask is number between 0 to 32
                if mask < 0 or mask > 32:
                    valid = False
            except ValueError:
                valid = False

    return valid


def is_valid_ipv6(ipv6):
    """Returns True if given `ipv6` is a valid IPv6 address

    Uses `socket.inet_pton` to determine validity.
    """
    valid = True
    try:
        socket.inet_pton(socket.AF_INET6, ipv6)
    except socket.error:
        valid = False

    return valid


def is_valid_ipv6_prefix(ipv6_prefix):
    """Returns True if given `ipv6_prefix` is a valid IPv6 prefix."""

    # Validate input type
    if not isinstance(ipv6_prefix, str):
        return False

    valid = True
    tokens = ipv6_prefix.split('/')
    if len(tokens) != 2:
        valid = False
    else:
        if not is_valid_ipv6(tokens[0]):
            valid = False
        else:
            # Validate mask
            try:
                # Mask is a number
                mask = int(tokens[1])
                # Mask is number between 0 to 128
                if mask < 0 or mask > 128:
                    valid = False
            except ValueError:
                valid = False

    return valid


def is_valid_old_asn(asn):
    """Returns true if given asn is a 16 bit number.

    Old AS numbers are 16 but unsigned number.
    """
    valid = True
    # AS number should be a 16 bit number
    if (not isinstance(asn, (int, long)) or (asn < 0) or
            (asn > ((2 ** 16) - 1))):
        valid = False

    return valid


def is_valid_vpnv4_prefix(prefix):
    """Returns True if given prefix is a string represent vpnv4 prefix.

    Vpnv4 prefix is made up of RD:Ipv4, where RD is represents route
    distinguisher and Ipv4 represents valid dot-decimal ipv4 notation string.
    """
    valid = True

    if not isinstance(prefix, str):
        valid = False
    else:
        # Split the prefix into route distinguisher and IP
        tokens = prefix.split(':')
        if len(tokens) != 3:
            valid = False
        else:
            # Check if first two tokens can form a valid RD
            try:
                # admin_subfield
                int(tokens[0])
                # assigned_subfield
                int(tokens[1])
            except ValueError:
                valid = False

            # Check if ip part is valid
            valid = is_valid_ipv4_prefix(tokens[2])

    return valid


def is_valid_med(med):
    """Returns True if value of *med* is valid as per RFC.

    According to RFC MED is a four octet non-negative integer.
    """
    valid = True

    if not isinstance(med, (int, long)):
        valid = False
    else:
        if med < 0 or med > (2 ** 32) - 1:
            valid = False

    return valid


def is_valid_mpls_label(label):
    """Validates `label` according to MPLS label rules

    RFC says:
    This 20-bit field.
    A value of 0 represents the "IPv4 Explicit NULL Label".
    A value of 1 represents the "Router Alert Label".
    A value of 2 represents the "IPv6 Explicit NULL Label".
    A value of 3 represents the "Implicit NULL Label".
    Values 4-15 are reserved.
    """
    valid = True
    # Reserved labels 4-15 and out-of-range values are invalid.
    if (not isinstance(label, (int, long)) or
            (label >= 4 and label <= 15) or
            (label < 0 or label > 2 ** 20)):
        valid = False

    return valid


def is_valid_route_dist(route_dist):
    """Validates *route_dist* as string representation of route distinguisher.

    Returns True if *route_dist* is as per our convention of RD, else False.
    Our convention is to represent RD as a string in format:
    *admin_sub_field:assigned_num_field* and *admin_sub_field* can be valid
    IPv4 string representation.
    Valid examples: '65000:222', '1.2.3.4:4432'.
    Invalid examples: '1.11.1: 333'
    """
    # TODO(PH): Provide complete implementation.
    return is_valid_ext_comm_attr(route_dist)


def is_valid_ext_comm_attr(attr):
    """Validates *attr* as string representation of RT or SOO.

    Returns True if *attr* is as per our convention of RT or SOO, else
    False. Our convention is to represent RT/SOO is a string with format:
    *global_admin_part:local_admin_path*
    """
    if not isinstance(attr, str):
        return False

    # Fix: the original unpacked `attr.split(':')` outside the try-block,
    # so inputs without exactly one ':' raised an uncaught ValueError
    # instead of returning False.
    tokens = attr.split(':')
    if len(tokens) != 2:
        return False
    first, second = tokens

    is_valid = True
    try:
        if '.' in first:
            # Dotted form: global admin part must be a valid IPv4 address.
            socket.inet_aton(first)
        else:
            int(first)
        int(second)
    except (ValueError, socket.error):
        is_valid = False

    return is_valid
apache-2.0
waytai/django
tests/middleware/tests.py
132
34932
# -*- coding: utf-8 -*- from __future__ import unicode_literals import gzip import random import re from io import BytesIO from unittest import skipIf from django.conf import settings from django.core import mail from django.core.exceptions import PermissionDenied from django.http import ( FileResponse, HttpRequest, HttpResponse, HttpResponseNotFound, HttpResponsePermanentRedirect, HttpResponseRedirect, StreamingHttpResponse, ) from django.middleware.clickjacking import XFrameOptionsMiddleware from django.middleware.common import ( BrokenLinkEmailsMiddleware, CommonMiddleware, ) from django.middleware.gzip import GZipMiddleware from django.middleware.http import ConditionalGetMiddleware from django.test import RequestFactory, SimpleTestCase, override_settings from django.utils import six from django.utils.encoding import force_str from django.utils.six.moves import range from django.utils.six.moves.urllib.parse import quote @override_settings(ROOT_URLCONF='middleware.urls') class CommonMiddlewareTest(SimpleTestCase): rf = RequestFactory() @override_settings(APPEND_SLASH=True) def test_append_slash_have_slash(self): """ URLs with slashes should go unmolested. """ request = self.rf.get('/slash/') self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_resource(self): """ Matches to explicit slashless URLs should go unmolested. """ request = self.rf.get('/noslash') self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponse("Here's the text of the Web page.") self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_unknown(self): """ APPEND_SLASH should not redirect to unknown resources. 
""" request = self.rf.get('/unknown') response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_redirect(self): """ APPEND_SLASH should redirect slashless URLs to a valid pattern. """ request = self.rf.get('/slash') response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/slash/') @override_settings(APPEND_SLASH=True) def test_append_slash_redirect_querystring(self): """ APPEND_SLASH should preserve querystrings when redirecting. """ request = self.rf.get('/slash?test=1') response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.url, '/slash/?test=1') @override_settings(APPEND_SLASH=True, DEBUG=True) def test_append_slash_no_redirect_on_POST_in_DEBUG(self): """ Tests that while in debug mode, an exception is raised with a warning when a failed attempt is made to POST, PUT, or PATCH to an URL which would normally be redirected to a slashed version. """ msg = "maintaining %s data. Change your form to point to testserver/slash/" request = self.rf.get('/slash') request.method = 'POST' response = HttpResponseNotFound() with six.assertRaisesRegex(self, RuntimeError, msg % request.method): CommonMiddleware().process_response(request, response) request = self.rf.get('/slash') request.method = 'PUT' with six.assertRaisesRegex(self, RuntimeError, msg % request.method): CommonMiddleware().process_response(request, response) request = self.rf.get('/slash') request.method = 'PATCH' with six.assertRaisesRegex(self, RuntimeError, msg % request.method): CommonMiddleware().process_response(request, response) @override_settings(APPEND_SLASH=False) def test_append_slash_disabled(self): """ Disabling append slash functionality should leave slashless URLs alone. 
""" request = self.rf.get('/slash') response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_quoted(self): """ URLs which require quoting should be redirected to their slash version ok. """ request = self.rf.get(quote('/needsquoting#')) response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) self.assertEqual( r.url, '/needsquoting%23/') @override_settings(APPEND_SLASH=False, PREPEND_WWW=True) def test_prepend_www(self): request = self.rf.get('/path/') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual( r.url, 'http://www.testserver/path/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_have_slash(self): request = self.rf.get('/slash/') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/slash/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_slashless(self): request = self.rf.get('/slash') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/slash/') # The following tests examine expected behavior given a custom urlconf that # overrides the default one through the request object. @override_settings(APPEND_SLASH=True) def test_append_slash_have_slash_custom_urlconf(self): """ URLs with slashes should go unmolested. 
""" request = self.rf.get('/customurlconf/slash/') request.urlconf = 'middleware.extra_urls' self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_resource_custom_urlconf(self): """ Matches to explicit slashless URLs should go unmolested. """ request = self.rf.get('/customurlconf/noslash') request.urlconf = 'middleware.extra_urls' self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponse("Here's the text of the Web page.") self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_unknown_custom_urlconf(self): """ APPEND_SLASH should not redirect to unknown resources. """ request = self.rf.get('/customurlconf/unknown') request.urlconf = 'middleware.extra_urls' self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_redirect_custom_urlconf(self): """ APPEND_SLASH should redirect slashless URLs to a valid pattern. 
""" request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertIsNotNone(r, "CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf") self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/customurlconf/slash/') @override_settings(APPEND_SLASH=True, DEBUG=True) def test_append_slash_no_redirect_on_POST_in_DEBUG_custom_urlconf(self): """ Tests that while in debug mode, an exception is raised with a warning when a failed attempt is made to POST to an URL which would normally be redirected to a slashed version. """ request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' request.method = 'POST' response = HttpResponseNotFound() with six.assertRaisesRegex(self, RuntimeError, 'end in a slash'): CommonMiddleware().process_response(request, response) @override_settings(APPEND_SLASH=False) def test_append_slash_disabled_custom_urlconf(self): """ Disabling append slash functionality should leave slashless URLs alone. """ request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' self.assertEqual(CommonMiddleware().process_request(request), None) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_quoted_custom_urlconf(self): """ URLs which require quoting should be redirected to their slash version ok. 
""" request = self.rf.get(quote('/customurlconf/needsquoting#')) request.urlconf = 'middleware.extra_urls' response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertIsNotNone(r, "CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf") self.assertEqual(r.status_code, 301) self.assertEqual( r.url, '/customurlconf/needsquoting%23/') @override_settings(APPEND_SLASH=False, PREPEND_WWW=True) def test_prepend_www_custom_urlconf(self): request = self.rf.get('/customurlconf/path/') request.urlconf = 'middleware.extra_urls' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual( r.url, 'http://www.testserver/customurlconf/path/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_have_slash_custom_urlconf(self): request = self.rf.get('/customurlconf/slash/') request.urlconf = 'middleware.extra_urls' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/customurlconf/slash/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_slashless_custom_urlconf(self): request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/customurlconf/slash/') # Other tests @override_settings(DISALLOWED_USER_AGENTS=[re.compile(r'foo')]) def test_disallowed_user_agents(self): request = self.rf.get('/slash') request.META['HTTP_USER_AGENT'] = 'foo' with self.assertRaisesMessage(PermissionDenied, 'Forbidden user agent'): CommonMiddleware().process_request(request) def test_non_ascii_query_string_does_not_crash(self): """Regression test for #15152""" request = self.rf.get('/slash') request.META['QUERY_STRING'] = force_str('drink=café') r = 
CommonMiddleware().process_request(request) self.assertIsNone(r) response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) def test_response_redirect_class(self): request = self.rf.get('/slash') response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/slash/') self.assertIsInstance(r, HttpResponsePermanentRedirect) def test_response_redirect_class_subclass(self): class MyCommonMiddleware(CommonMiddleware): response_redirect_class = HttpResponseRedirect request = self.rf.get('/slash') response = HttpResponseNotFound() r = MyCommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 302) self.assertEqual(r.url, '/slash/') self.assertIsInstance(r, HttpResponseRedirect) @override_settings( IGNORABLE_404_URLS=[re.compile(r'foo')], MANAGERS=['PHB@dilbert.com'], ) class BrokenLinkEmailsMiddlewareTest(SimpleTestCase): rf = RequestFactory() def setUp(self): self.req = self.rf.get('/regular_url/that/does/not/exist') self.resp = self.client.get(self.req.path) def test_404_error_reporting(self): self.req.META['HTTP_REFERER'] = '/another/url/' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) self.assertIn('Broken', mail.outbox[0].subject) def test_404_error_reporting_no_referer(self): BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) def test_404_error_reporting_ignored_url(self): self.req.path = self.req.path_info = 'foo_url/that/does/not/exist' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) @skipIf(six.PY3, "HTTP_REFERER is str type on Python 3") def test_404_error_nonascii_referrer(self): # Such referer strings should not happen, but anyway, if it happens, # let's not crash self.req.META['HTTP_REFERER'] = 
b'http://testserver/c/\xd0\xbb\xd0\xb8/' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) @skipIf(six.PY3, "HTTP_USER_AGENT is str type on Python 3") def test_404_error_nonascii_user_agent(self): # Such user agent strings should not happen, but anyway, if it happens, # let's not crash self.req.META['HTTP_REFERER'] = '/another/url/' self.req.META['HTTP_USER_AGENT'] = b'\xd0\xbb\xd0\xb8\xff\xff' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) self.assertIn('User agent: \u043b\u0438\ufffd\ufffd\n', mail.outbox[0].body) def test_custom_request_checker(self): class SubclassedMiddleware(BrokenLinkEmailsMiddleware): ignored_user_agent_patterns = (re.compile(r'Spider.*'), re.compile(r'Robot.*')) def is_ignorable_request(self, request, uri, domain, referer): '''Check user-agent in addition to normal checks.''' if super(SubclassedMiddleware, self).is_ignorable_request(request, uri, domain, referer): return True user_agent = request.META['HTTP_USER_AGENT'] return any(pattern.search(user_agent) for pattern in self.ignored_user_agent_patterns) self.req.META['HTTP_REFERER'] = '/another/url/' self.req.META['HTTP_USER_AGENT'] = 'Spider machine 3.4' SubclassedMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) self.req.META['HTTP_USER_AGENT'] = 'My user agent' SubclassedMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) def test_referer_equal_to_requested_url(self): """ Some bots set the referer to the current URL to avoid being blocked by an referer check (#25302). 
""" self.req.META['HTTP_REFERER'] = self.req.path BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) # URL with scheme and domain should also be ignored self.req.META['HTTP_REFERER'] = 'http://testserver%s' % self.req.path BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) def test_referer_equal_to_requested_url_on_another_domain(self): self.req.META['HTTP_REFERER'] = 'http://anotherserver%s' % self.req.path BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) @override_settings(ROOT_URLCONF='middleware.cond_get_urls') class ConditionalGetMiddlewareTest(SimpleTestCase): def setUp(self): self.req = RequestFactory().get('/') self.resp = self.client.get(self.req.path_info) # Tests for the Date header def test_date_header_added(self): self.assertNotIn('Date', self.resp) self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertIn('Date', self.resp) # Tests for the Content-Length header def test_content_length_header_added(self): content_length = len(self.resp.content) self.assertNotIn('Content-Length', self.resp) self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertIn('Content-Length', self.resp) self.assertEqual(int(self.resp['Content-Length']), content_length) def test_content_length_header_not_added(self): resp = StreamingHttpResponse('content') self.assertNotIn('Content-Length', resp) resp = ConditionalGetMiddleware().process_response(self.req, resp) self.assertNotIn('Content-Length', resp) def test_content_length_header_not_changed(self): bad_content_length = len(self.resp.content) + 10 self.resp['Content-Length'] = bad_content_length self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(int(self.resp['Content-Length']), bad_content_length) # Tests for the ETag header def 
test_if_none_match_and_no_etag(self): self.req.META['HTTP_IF_NONE_MATCH'] = 'spam' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_no_if_none_match_and_etag(self): self.resp['ETag'] = 'eggs' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_if_none_match_and_same_etag(self): self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 304) def test_if_none_match_and_different_etag(self): self.req.META['HTTP_IF_NONE_MATCH'] = 'spam' self.resp['ETag'] = 'eggs' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_if_none_match_and_redirect(self): self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam' self.resp['Location'] = '/' self.resp.status_code = 301 self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 301) def test_if_none_match_and_client_error(self): self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam' self.resp.status_code = 400 self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 400) @override_settings(USE_ETAGS=True) def test_etag(self): req = HttpRequest() res = HttpResponse('content') self.assertTrue( CommonMiddleware().process_response(req, res).has_header('ETag')) @override_settings(USE_ETAGS=True) def test_etag_streaming_response(self): req = HttpRequest() res = StreamingHttpResponse(['content']) res['ETag'] = 'tomatoes' self.assertEqual( CommonMiddleware().process_response(req, res).get('ETag'), 'tomatoes') @override_settings(USE_ETAGS=True) def test_no_etag_streaming_response(self): req = HttpRequest() res = StreamingHttpResponse(['content']) 
self.assertFalse( CommonMiddleware().process_response(req, res).has_header('ETag')) # Tests for the Last-Modified header def test_if_modified_since_and_no_last_modified(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_no_if_modified_since_and_last_modified(self): self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_if_modified_since_and_same_last_modified(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 304) def test_if_modified_since_and_last_modified_in_the_past(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 304) def test_if_modified_since_and_last_modified_in_the_future(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:41:44 GMT' self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 200) def test_if_modified_since_and_redirect(self): self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT' self.resp['Location'] = '/' self.resp.status_code = 301 self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 301) def test_if_modified_since_and_client_error(self): 
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT' self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT' self.resp.status_code = 400 self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp) self.assertEqual(self.resp.status_code, 400) class XFrameOptionsMiddlewareTest(SimpleTestCase): """ Tests for the X-Frame-Options clickjacking prevention middleware. """ def test_same_origin(self): """ Tests that the X_FRAME_OPTIONS setting can be set to SAMEORIGIN to have the middleware use that value for the HTTP header. """ with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') with override_settings(X_FRAME_OPTIONS='sameorigin'): r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') def test_deny(self): """ Tests that the X_FRAME_OPTIONS setting can be set to DENY to have the middleware use that value for the HTTP header. """ with override_settings(X_FRAME_OPTIONS='DENY'): r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'DENY') with override_settings(X_FRAME_OPTIONS='deny'): r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'DENY') def test_defaults_sameorigin(self): """ Tests that if the X_FRAME_OPTIONS setting is not set then it defaults to SAMEORIGIN. """ with override_settings(X_FRAME_OPTIONS=None): del settings.X_FRAME_OPTIONS # restored by override_settings r = XFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') def test_dont_set_if_set(self): """ Tests that if the X-Frame-Options header is already set then the middleware does not attempt to override it. 
""" with override_settings(X_FRAME_OPTIONS='DENY'): response = HttpResponse() response['X-Frame-Options'] = 'SAMEORIGIN' r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): response = HttpResponse() response['X-Frame-Options'] = 'DENY' r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'DENY') def test_response_exempt(self): """ Tests that if the response has a xframe_options_exempt attribute set to False then it still sets the header, but if it's set to True then it does not. """ with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): response = HttpResponse() response.xframe_options_exempt = False r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') response = HttpResponse() response.xframe_options_exempt = True r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r.get('X-Frame-Options', None), None) def test_is_extendable(self): """ Tests that the XFrameOptionsMiddleware method that determines the X-Frame-Options header value can be overridden based on something in the request or response. """ class OtherXFrameOptionsMiddleware(XFrameOptionsMiddleware): # This is just an example for testing purposes... 
def get_xframe_options_value(self, request, response): if getattr(request, 'sameorigin', False): return 'SAMEORIGIN' if getattr(response, 'sameorigin', False): return 'SAMEORIGIN' return 'DENY' with override_settings(X_FRAME_OPTIONS='DENY'): response = HttpResponse() response.sameorigin = True r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') request = HttpRequest() request.sameorigin = True r = OtherXFrameOptionsMiddleware().process_response(request, HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'DENY') class GZipMiddlewareTest(SimpleTestCase): """ Tests the GZip middleware. """ short_string = b"This string is too short to be worth compressing." compressible_string = b'a' * 500 uncompressible_string = b''.join(six.int2byte(random.randint(0, 255)) for _ in range(500)) sequence = [b'a' * 500, b'b' * 200, b'a' * 300] sequence_unicode = ['a' * 500, 'é' * 200, 'a' * 300] def setUp(self): self.req = RequestFactory().get('/') self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate' self.req.META['HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1' self.resp = HttpResponse() self.resp.status_code = 200 self.resp.content = self.compressible_string self.resp['Content-Type'] = 'text/html; charset=UTF-8' self.stream_resp = StreamingHttpResponse(self.sequence) self.stream_resp['Content-Type'] = 'text/html; charset=UTF-8' self.stream_resp_unicode = StreamingHttpResponse(self.sequence_unicode) self.stream_resp_unicode['Content-Type'] = 'text/html; charset=UTF-8' @staticmethod def decompress(gzipped_string): with gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)) as f: return f.read() def test_compress_response(self): """ Tests that compression is performed on 
responses with compressible content. """ r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(self.decompress(r.content), self.compressible_string) self.assertEqual(r.get('Content-Encoding'), 'gzip') self.assertEqual(r.get('Content-Length'), str(len(r.content))) def test_compress_streaming_response(self): """ Tests that compression is performed on responses with streaming content. """ r = GZipMiddleware().process_response(self.req, self.stream_resp) self.assertEqual(self.decompress(b''.join(r)), b''.join(self.sequence)) self.assertEqual(r.get('Content-Encoding'), 'gzip') self.assertFalse(r.has_header('Content-Length')) def test_compress_streaming_response_unicode(self): """ Tests that compression is performed on responses with streaming Unicode content. """ r = GZipMiddleware().process_response(self.req, self.stream_resp_unicode) self.assertEqual(self.decompress(b''.join(r)), b''.join(x.encode('utf-8') for x in self.sequence_unicode)) self.assertEqual(r.get('Content-Encoding'), 'gzip') self.assertFalse(r.has_header('Content-Length')) def test_compress_file_response(self): """ Tests that compression is performed on FileResponse. """ open_file = lambda: open(__file__, 'rb') with open_file() as file1: file_resp = FileResponse(file1) file_resp['Content-Type'] = 'text/html; charset=UTF-8' r = GZipMiddleware().process_response(self.req, file_resp) with open_file() as file2: self.assertEqual(self.decompress(b''.join(r)), file2.read()) self.assertEqual(r.get('Content-Encoding'), 'gzip') self.assertIsNot(r.file_to_stream, file1) def test_compress_non_200_response(self): """ Tests that compression is performed on responses with a status other than 200. See #10762. 
""" self.resp.status_code = 404 r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(self.decompress(r.content), self.compressible_string) self.assertEqual(r.get('Content-Encoding'), 'gzip') def test_no_compress_short_response(self): """ Tests that compression isn't performed on responses with short content. """ self.resp.content = self.short_string r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(r.content, self.short_string) self.assertEqual(r.get('Content-Encoding'), None) def test_no_compress_compressed_response(self): """ Tests that compression isn't performed on responses that are already compressed. """ self.resp['Content-Encoding'] = 'deflate' r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(r.content, self.compressible_string) self.assertEqual(r.get('Content-Encoding'), 'deflate') def test_no_compress_uncompressible_response(self): """ Tests that compression isn't performed on responses with uncompressible content. """ self.resp.content = self.uncompressible_string r = GZipMiddleware().process_response(self.req, self.resp) self.assertEqual(r.content, self.uncompressible_string) self.assertEqual(r.get('Content-Encoding'), None) @override_settings(USE_ETAGS=True) class ETagGZipMiddlewareTest(SimpleTestCase): """ Tests if the ETag middleware behaves correctly with GZip middleware. """ rf = RequestFactory() compressible_string = b'a' * 500 def test_compress_response(self): """ Tests that ETag is changed after gzip compression is performed. 
""" request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate') response = GZipMiddleware().process_response(request, CommonMiddleware().process_response(request, HttpResponse(self.compressible_string))) gzip_etag = response.get('ETag') request = self.rf.get('/', HTTP_ACCEPT_ENCODING='') response = GZipMiddleware().process_response(request, CommonMiddleware().process_response(request, HttpResponse(self.compressible_string))) nogzip_etag = response.get('ETag') self.assertNotEqual(gzip_etag, nogzip_etag)
bsd-3-clause
dancingdan/tensorflow
tensorflow/python/profiler/option_builder.py
29
16611
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Utilities for building profiler options.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import copy from tensorflow.python.profiler import tfprof_logger from tensorflow.python.util.tf_export import tf_export @tf_export('profiler.ProfileOptionBuilder') class ProfileOptionBuilder(object): # pylint: disable=line-too-long """Option Builder for Profiling API. For tutorial on the options, see https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/options.md ```python # Users can use pre-built options: opts = ( tf.profiler.ProfileOptionBuilder.trainable_variables_parameter()) # Or, build your own options: opts = (tf.profiler.ProfileOptionBuilder() .with_max_depth(10) .with_min_micros(1000) .select(['accelerator_micros']) .with_stdout_output() .build() # Or customize the pre-built options: opts = (tf.profiler.ProfileOptionBuilder( tf.profiler.ProfileOptionBuilder.time_and_memory()) .with_displaying_options(show_name_regexes=['.*rnn.*']) .build()) # Finally, profiling with the options: _ = tf.profiler.profile(tf.get_default_graph(), run_meta=run_meta, cmd='scope', options=opts) ``` """ # pylint: enable=line-too-long def __init__(self, options=None): """Constructor. 
Args: options: Optional initial option dict to start with. """ if options is not None: self._options = copy.deepcopy(options) else: self._options = {'max_depth': 100, 'min_bytes': 0, 'min_micros': 0, 'min_params': 0, 'min_float_ops': 0, 'min_occurrence': 0, 'order_by': 'name', 'account_type_regexes': ['.*'], 'start_name_regexes': ['.*'], 'trim_name_regexes': [], 'show_name_regexes': ['.*'], 'hide_name_regexes': [], 'account_displayed_op_only': False, 'select': ['micros'], 'step': -1, 'output': 'stdout'} @staticmethod def trainable_variables_parameter(): """Options used to profile trainable variable parameters. Normally used together with 'scope' view. Returns: A dict of profiling options. """ return {'max_depth': 10000, 'min_bytes': 0, 'min_micros': 0, 'min_params': 0, 'min_float_ops': 0, 'min_occurrence': 0, 'order_by': 'name', 'account_type_regexes': [tfprof_logger.TRAINABLE_VARIABLES], 'start_name_regexes': ['.*'], 'trim_name_regexes': [], 'show_name_regexes': ['.*'], 'hide_name_regexes': [], 'account_displayed_op_only': True, 'select': ['params'], 'step': -1, 'output': 'stdout'} @staticmethod def float_operation(): # pylint: disable=line-too-long """Options used to profile float operations. Please see https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/profile_model_architecture.md on the caveats of calculating float operations. Returns: A dict of profiling options. 
""" # pylint: enable=line-too-long return {'max_depth': 10000, 'min_bytes': 0, 'min_micros': 0, 'min_params': 0, 'min_float_ops': 1, 'min_occurrence': 0, 'order_by': 'float_ops', 'account_type_regexes': ['.*'], 'start_name_regexes': ['.*'], 'trim_name_regexes': [], 'show_name_regexes': ['.*'], 'hide_name_regexes': [], 'account_displayed_op_only': True, 'select': ['float_ops'], 'step': -1, 'output': 'stdout'} @staticmethod def time_and_memory(min_micros=1, min_bytes=1, min_accelerator_micros=0, min_cpu_micros=0, min_peak_bytes=0, min_residual_bytes=0, min_output_bytes=0): """Show operation time and memory consumptions. Args: min_micros: Only show profiler nodes with execution time no less than this. It sums accelerator and cpu times. min_bytes: Only show profiler nodes requested to allocate no less bytes than this. min_accelerator_micros: Only show profiler nodes spend no less than this time on accelerator (e.g. GPU). min_cpu_micros: Only show profiler nodes spend no less than this time on cpu. min_peak_bytes: Only show profiler nodes using no less than this bytes at peak (high watermark). For profiler nodes consist of multiple graph nodes, it sums the graph nodes' peak_bytes. min_residual_bytes: Only show profiler nodes have no less than this bytes not being de-allocated after Compute() ends. For profiler nodes consist of multiple graph nodes, it sums the graph nodes' residual_bytes. min_output_bytes: Only show profiler nodes have no less than this bytes output. The output are not necessarily allocated by this profiler nodes. Returns: A dict of profiling options. 
""" return {'max_depth': 10000, 'min_bytes': min_bytes, 'min_peak_bytes': min_peak_bytes, 'min_residual_bytes': min_residual_bytes, 'min_output_bytes': min_output_bytes, 'min_micros': min_micros, 'min_accelerator_micros': min_accelerator_micros, 'min_cpu_micros': min_cpu_micros, 'min_params': 0, 'min_float_ops': 0, 'min_occurrence': 0, 'order_by': 'micros', 'account_type_regexes': ['.*'], 'start_name_regexes': ['.*'], 'trim_name_regexes': [], 'show_name_regexes': ['.*'], 'hide_name_regexes': [], 'account_displayed_op_only': True, 'select': ['micros', 'bytes'], 'step': -1, 'output': 'stdout'} def build(self): """Build a profiling option. Returns: A dict of profiling options. """ return copy.deepcopy(self._options) def with_max_depth(self, max_depth): """Set the maximum depth of display. The depth depends on profiling view. For 'scope' view, it's the depth of name scope hierarchy (tree), for 'op' view, it's the number of operation types (list), etc. Args: max_depth: Maximum depth of the data structure to display. Returns: self """ self._options['max_depth'] = max_depth return self def with_min_memory(self, min_bytes=0, min_peak_bytes=0, min_residual_bytes=0, min_output_bytes=0): """Only show profiler nodes consuming no less than 'min_bytes'. Args: min_bytes: Only show profiler nodes requested to allocate no less bytes than this. min_peak_bytes: Only show profiler nodes using no less than this bytes at peak (high watermark). For profiler nodes consist of multiple graph nodes, it sums the graph nodes' peak_bytes. min_residual_bytes: Only show profiler nodes have no less than this bytes not being de-allocated after Compute() ends. For profiler nodes consist of multiple graph nodes, it sums the graph nodes' residual_bytes. min_output_bytes: Only show profiler nodes have no less than this bytes output. The output are not necessarily allocated by this profiler nodes. 
Returns: self """ self._options['min_bytes'] = min_bytes self._options['min_peak_bytes'] = min_peak_bytes self._options['min_residual_bytes'] = min_residual_bytes self._options['min_output_bytes'] = min_output_bytes return self def with_min_execution_time(self, min_micros=0, min_accelerator_micros=0, min_cpu_micros=0): """Only show profiler nodes consuming no less than 'min_micros'. Args: min_micros: Only show profiler nodes with execution time no less than this. It sums accelerator and cpu times. min_accelerator_micros: Only show profiler nodes spend no less than this time on accelerator (e.g. GPU). min_cpu_micros: Only show profiler nodes spend no less than this time on cpu. Returns: self """ self._options['min_micros'] = min_micros self._options['min_accelerator_micros'] = min_accelerator_micros self._options['min_cpu_micros'] = min_cpu_micros return self def with_min_parameters(self, min_params): """Only show profiler nodes holding no less than 'min_params' parameters. 'Parameters' normally refers the weights of in TensorFlow variables. It reflects the 'capacity' of models. Args: min_params: Only show profiler nodes holding number parameters no less than this. Returns: self """ self._options['min_params'] = min_params return self def with_min_occurrence(self, min_occurrence): # pylint: disable=line-too-long """Only show profiler nodes including no less than 'min_occurrence' graph nodes. A "node" means a profiler output node, which can be a python line (code view), an operation type (op view), or a graph node (graph/scope view). A python line includes all graph nodes created by that line, while an operation type includes all graph nodes of that type. Args: min_occurrence: Only show nodes including no less than this. 
Returns: self """ # pylint: enable=line-too-long self._options['min_occurrence'] = min_occurrence return self def with_min_float_operations(self, min_float_ops): # pylint: disable=line-too-long """Only show profiler nodes consuming no less than 'min_float_ops'. Please see https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/profile_model_architecture.md on the caveats of calculating float operations. Args: min_float_ops: Only show profiler nodes with float operations no less than this. Returns: self """ # pylint: enable=line-too-long self._options['min_float_ops'] = min_float_ops return self def with_accounted_types(self, account_type_regexes): """Selectively counting statistics based on node types. Here, 'types' means the profiler nodes' properties. Profiler by default consider device name (e.g. /job:xx/.../device:GPU:0) and operation type (e.g. MatMul) as profiler nodes' properties. User can also associate customized 'types' to profiler nodes through OpLogProto proto. For example, user can select profiler nodes placed on gpu:0 with: `account_type_regexes=['.*gpu:0.*']` If none of a node's properties match the specified regexes, the node is not displayed nor accounted. Args: account_type_regexes: A list of regexes specifying the types. Returns: self. """ self._options['account_type_regexes'] = copy.copy(account_type_regexes) return self def with_node_names(self, start_name_regexes=None, show_name_regexes=None, hide_name_regexes=None, trim_name_regexes=None): """Regular expressions used to select profiler nodes to display. After 'with_accounted_types' is evaluated, 'with_node_names' are evaluated as follows: For a profile data structure, profiler first finds the profiler nodes matching 'start_name_regexes', and starts displaying profiler nodes from there. Then, if a node matches 'show_name_regexes' and doesn't match 'hide_name_regexes', it's displayed. 
If a node matches 'trim_name_regexes', profiler stops further searching that branch. Args: start_name_regexes: list of node name regexes to start displaying. show_name_regexes: list of node names regexes to display. hide_name_regexes: list of node_names regexes that should be hidden. trim_name_regexes: list of node name regexes from where to stop. Returns: self """ if start_name_regexes is not None: self._options['start_name_regexes'] = copy.copy(start_name_regexes) if show_name_regexes is not None: self._options['show_name_regexes'] = copy.copy(show_name_regexes) if hide_name_regexes is not None: self._options['hide_name_regexes'] = copy.copy(hide_name_regexes) if trim_name_regexes is not None: self._options['trim_name_regexes'] = copy.copy(trim_name_regexes) return self def account_displayed_op_only(self, is_true): """Whether only account the statistics of displayed profiler nodes. Args: is_true: If true, only account statistics of nodes eventually displayed by the outputs. Otherwise, a node's statistics are accounted by its parents as long as it's types match 'account_type_regexes', even if it is hidden from the output, say, by hide_name_regexes. Returns: self """ self._options['account_displayed_op_only'] = is_true return self def with_empty_output(self): """Do not generate side-effect outputs.""" self._options['output'] = 'none' return self def with_stdout_output(self): """Print the result to stdout.""" self._options['output'] = 'stdout' return self def with_file_output(self, outfile): """Print the result to a file.""" self._options['output'] = 'file:outfile=%s' % outfile return self def with_timeline_output(self, timeline_file): """Generate a timeline json file.""" self._options['output'] = 'timeline:outfile=%s' % timeline_file return self def with_pprof_output(self, pprof_file): """Generate a pprof profile gzip file. 
To use the pprof file: pprof -png --nodecount=100 --sample_index=1 <pprof_file> Args: pprof_file: filename for output, usually suffixed with .pb.gz. Returns: self. """ self._options['output'] = 'pprof:outfile=%s' % pprof_file return self def order_by(self, attribute): # pylint: disable=line-too-long """Order the displayed profiler nodes based on a attribute. Supported attribute includes micros, bytes, occurrence, params, etc. https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/options.md Args: attribute: An attribute the profiler node has. Returns: self """ # pylint: enable=line-too-long self._options['order_by'] = attribute return self def select(self, attributes): # pylint: disable=line-too-long """Select the attributes to display. See https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/options.md for supported attributes. Args: attributes: A list of attribute the profiler node has. Returns: self """ # pylint: enable=line-too-long self._options['select'] = copy.copy(attributes) return self def with_step(self, step): """Which profile step to use for profiling. The 'step' here refers to the step defined by `Profiler.add_step()` API. Args: step: When multiple steps of profiles are available, select which step's profile to use. If -1, use average of all available steps. Returns: self """ self._options['step'] = step return self
apache-2.0
mmbtba/odoo
addons/l10n_uy/__openerp__.py
260
1807
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (c) 2011 Openerp.uy <openerp.uy@lists.launchpad.net> # Proyecto de Localización de OperERP para Uruguay # $Id$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Uruguay - Chart of Accounts', 'version': '0.1', 'author': 'Uruguay l10n Team & Guillem Barba', 'category': 'Localization/Account Charts', 'website': 'https://launchpad.net/openerp-uruguay', 'description': """ General Chart of Accounts. ========================== Provide Templates for Chart of Accounts, Taxes for Uruguay. """, 'license': 'AGPL-3', 'depends': ['account'], 'data': [ 'account_types.xml', 'taxes_code_template.xml', 'account_chart_template.xml', 'taxes_template.xml', 'l10n_uy_wizard.xml', ], 'demo': [], 'auto_install': False, 'installable': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
BT-astauder/odoo
addons/l10n_ec/__openerp__.py
260
1695
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2010-2012 Cristian Salamea Gnuthink Software Labs Cia. Ltda # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Ecuador - Accounting', 'version': '1.1', 'category': 'Localization/Account Charts', 'description': """ This is the base module to manage the accounting chart for Ecuador in OpenERP. ============================================================================== Accounting chart and localization for Ecuador. """, 'author': 'Gnuthink Co.Ltd.', 'depends': [ 'account', 'base_vat', 'base_iban', 'account_chart', ], 'data': [ 'account_tax_code.xml', 'account_chart.xml', 'account_tax.xml', 'l10n_chart_ec_wizard.xml', ], 'demo': [], 'installable': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
NaokiXie/android_kernel_samsung_wilcox
scripts/tracing/draw_functrace.py
14676
3560
#!/usr/bin/python """ Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com> Licensed under the terms of the GNU GPL License version 2 This script parses a trace provided by the function tracer in kernel/trace/trace_functions.c The resulted trace is processed into a tree to produce a more human view of the call stack by drawing textual but hierarchical tree of calls. Only the functions's names and the the call time are provided. Usage: Be sure that you have CONFIG_FUNCTION_TRACER # mount -t debugfs nodev /sys/kernel/debug # echo function > /sys/kernel/debug/tracing/current_tracer $ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func Wait some times but not too much, the script is a bit slow. Break the pipe (Ctrl + Z) $ scripts/draw_functrace.py < raw_trace_func > draw_functrace Then you have your drawn trace in draw_functrace """ import sys, re class CallTree: """ This class provides a tree representation of the functions call stack. If a function has no parent in the kernel (interrupt, syscall, kernel thread...) then it is attached to a virtual parent called ROOT. """ ROOT = None def __init__(self, func, time = None, parent = None): self._func = func self._time = time if parent is None: self._parent = CallTree.ROOT else: self._parent = parent self._children = [] def calls(self, func, calltime): """ If a function calls another one, call this method to insert it into the tree at the appropriate place. @return: A reference to the newly created child node. """ child = CallTree(func, calltime, self) self._children.append(child) return child def getParent(self, func): """ Retrieve the last parent of the current node that has the name given by func. If this function is not on a parent, then create it as new child of root @return: A reference to the parent. 
""" tree = self while tree != CallTree.ROOT and tree._func != func: tree = tree._parent if tree == CallTree.ROOT: child = CallTree.ROOT.calls(func, None) return child return tree def __repr__(self): return self.__toString("", True) def __toString(self, branch, lastChild): if self._time is not None: s = "%s----%s (%s)\n" % (branch, self._func, self._time) else: s = "%s----%s\n" % (branch, self._func) i = 0 if lastChild: branch = branch[:-1] + " " while i < len(self._children): if i != len(self._children) - 1: s += "%s" % self._children[i].__toString(branch +\ " |", False) else: s += "%s" % self._children[i].__toString(branch +\ " |", True) i += 1 return s class BrokenLineException(Exception): """If the last line is not complete because of the pipe breakage, we want to stop the processing and ignore this line. """ pass class CommentLineException(Exception): """ If the line is a comment (as in the beginning of the trace file), just ignore it. """ pass def parseLine(line): line = line.strip() if line.startswith("#"): raise CommentLineException m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line) if m is None: raise BrokenLineException return (m.group(1), m.group(2), m.group(3)) def main(): CallTree.ROOT = CallTree("Root (Nowhere)", None, None) tree = CallTree.ROOT for line in sys.stdin: try: calltime, callee, caller = parseLine(line) except BrokenLineException: break except CommentLineException: continue tree = tree.getParent(caller) tree = tree.calls(callee, calltime) print CallTree.ROOT if __name__ == "__main__": main()
gpl-2.0
mezz64/home-assistant
homeassistant/components/homeassistant/triggers/homeassistant.py
9
1725
"""Offer Home Assistant core automation rules.""" import voluptuous as vol from homeassistant.const import CONF_EVENT, CONF_PLATFORM, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HassJob, callback # mypy: allow-untyped-defs EVENT_START = "start" EVENT_SHUTDOWN = "shutdown" TRIGGER_SCHEMA = vol.Schema( { vol.Required(CONF_PLATFORM): "homeassistant", vol.Required(CONF_EVENT): vol.Any(EVENT_START, EVENT_SHUTDOWN), } ) async def async_attach_trigger(hass, config, action, automation_info): """Listen for events based on configuration.""" event = config.get(CONF_EVENT) job = HassJob(action) if event == EVENT_SHUTDOWN: @callback def hass_shutdown(event): """Execute when Home Assistant is shutting down.""" hass.async_run_hass_job( job, { "trigger": { "platform": "homeassistant", "event": event, "description": "Home Assistant stopping", } }, event.context, ) return hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, hass_shutdown) # Automation are enabled while hass is starting up, fire right away # Check state because a config reload shouldn't trigger it. if automation_info["home_assistant_start"]: hass.async_run_hass_job( job, { "trigger": { "platform": "homeassistant", "event": event, "description": "Home Assistant starting", } }, ) return lambda: None
apache-2.0
wavefrontHQ/python-client
test/test_policy_rule_write_model.py
1
1334
# coding: utf-8 """ Wavefront REST API <p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer &lt;&lt;API-TOKEN&gt;&gt;\" to your HTTP requests.</p> # noqa: E501 OpenAPI spec version: v2 Contact: chitimba@wavefront.com Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import unittest import wavefront_api_client from wavefront_api_client.models.policy_rule_write_model import PolicyRuleWriteModel # noqa: E501 from wavefront_api_client.rest import ApiException class TestPolicyRuleWriteModel(unittest.TestCase): """PolicyRuleWriteModel unit test stubs""" def setUp(self): pass def tearDown(self): pass def testPolicyRuleWriteModel(self): """Test PolicyRuleWriteModel""" # FIXME: construct object with mandatory attributes with example values # model = wavefront_api_client.models.policy_rule_write_model.PolicyRuleWriteModel() # noqa: E501 pass if __name__ == '__main__': unittest.main()
apache-2.0
mikewiebe-ansible/ansible
lib/ansible/modules/storage/hpe3par/ss_3par_cpg.py
35
9416
#!/usr/bin/python # Copyright: (c) 2018, Hewlett Packard Enterprise Development LP # GNU General Public License v3.0+ # (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- short_description: Manage HPE StoreServ 3PAR CPG author: - Farhan Nomani (@farhan7500) - Gautham P Hegde (@gautamphegde) description: - Create and delete CPG on HPE 3PAR. module: ss_3par_cpg options: cpg_name: description: - Name of the CPG. type: str required: true disk_type: choices: - FC - NL - SSD description: - Specifies that physical disks must have the specified device type. type: str domain: description: - Specifies the name of the domain in which the object will reside. type: str growth_increment: description: - Specifies the growth increment(in MiB, GiB or TiB) the amount of logical disk storage created on each auto-grow operation. type: str growth_limit: description: - Specifies that the autogrow operation is limited to the specified storage amount that sets the growth limit(in MiB, GiB or TiB). type: str growth_warning: description: - Specifies that the threshold(in MiB, GiB or TiB) of used logical disk space when exceeded results in a warning alert. type: str high_availability: choices: - PORT - CAGE - MAG description: - Specifies that the layout must support the failure of one port pair, one cage, or one magazine. type: str raid_type: choices: - R0 - R1 - R5 - R6 description: - Specifies the RAID type for the logical disk. type: str set_size: description: - Specifies the set size in the number of chunklets. type: int state: choices: - present - absent description: - Whether the specified CPG should exist or not. required: true type: str secure: description: - Specifies whether the certificate needs to be validated while communicating. 
type: bool default: no extends_documentation_fragment: hpe3par version_added: '2.8' ''' EXAMPLES = r''' - name: Create CPG sample_cpg ss_3par_cpg: storage_system_ip: 10.10.10.1 storage_system_username: username storage_system_password: password state: present cpg_name: sample_cpg domain: sample_domain growth_increment: 32000 MiB growth_limit: 64000 MiB growth_warning: 48000 MiB raid_type: R6 set_size: 8 high_availability: MAG disk_type: FC secure: no - name: Delete CPG sample_cpg ss_3par_cpg: storage_system_ip: 10.10.10.1 storage_system_username: username storage_system_password: password state: absent cpg_name: sample_cpg secure: no ''' RETURN = r''' ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.storage.hpe3par import hpe3par try: from hpe3par_sdk import client from hpe3parclient import exceptions HAS_3PARCLIENT = True except ImportError: HAS_3PARCLIENT = False def validate_set_size(raid_type, set_size): if raid_type: set_size_array = client.HPE3ParClient.RAID_MAP[raid_type]['set_sizes'] if set_size in set_size_array: return True return False def cpg_ldlayout_map(ldlayout_dict): if ldlayout_dict['RAIDType'] is not None and ldlayout_dict['RAIDType']: ldlayout_dict['RAIDType'] = client.HPE3ParClient.RAID_MAP[ ldlayout_dict['RAIDType']]['raid_value'] if ldlayout_dict['HA'] is not None and ldlayout_dict['HA']: ldlayout_dict['HA'] = getattr( client.HPE3ParClient, ldlayout_dict['HA']) return ldlayout_dict def create_cpg( client_obj, cpg_name, domain, growth_increment, growth_limit, growth_warning, raid_type, set_size, high_availability, disk_type): try: if not validate_set_size(raid_type, set_size): return (False, False, "Set size %s not part of RAID set %s" % (set_size, raid_type)) if not client_obj.cpgExists(cpg_name): disk_patterns = [] if disk_type: disk_type = getattr(client.HPE3ParClient, disk_type) disk_patterns = [{'diskType': disk_type}] ld_layout = { 'RAIDType': raid_type, 'setSize': set_size, 'HA': high_availability, 
'diskPatterns': disk_patterns} ld_layout = cpg_ldlayout_map(ld_layout) if growth_increment is not None: growth_increment = hpe3par.convert_to_binary_multiple( growth_increment) if growth_limit is not None: growth_limit = hpe3par.convert_to_binary_multiple( growth_limit) if growth_warning is not None: growth_warning = hpe3par.convert_to_binary_multiple( growth_warning) optional = { 'domain': domain, 'growthIncrementMiB': growth_increment, 'growthLimitMiB': growth_limit, 'usedLDWarningAlertMiB': growth_warning, 'LDLayout': ld_layout} client_obj.createCPG(cpg_name, optional) else: return (True, False, "CPG already present") except exceptions.ClientException as e: return (False, False, "CPG creation failed | %s" % (e)) return (True, True, "Created CPG %s successfully." % cpg_name) def delete_cpg( client_obj, cpg_name): try: if client_obj.cpgExists(cpg_name): client_obj.deleteCPG(cpg_name) else: return (True, False, "CPG does not exist") except exceptions.ClientException as e: return (False, False, "CPG delete failed | %s" % e) return (True, True, "Deleted CPG %s successfully." 
% cpg_name) def main(): module = AnsibleModule(argument_spec=hpe3par.cpg_argument_spec(), required_together=[['raid_type', 'set_size']]) if not HAS_3PARCLIENT: module.fail_json(msg='the python hpe3par_sdk library is required (https://pypi.org/project/hpe3par_sdk)') if len(module.params["cpg_name"]) < 1 or len(module.params["cpg_name"]) > 31: module.fail_json(msg="CPG name must be at least 1 character and not more than 31 characters") storage_system_ip = module.params["storage_system_ip"] storage_system_username = module.params["storage_system_username"] storage_system_password = module.params["storage_system_password"] cpg_name = module.params["cpg_name"] domain = module.params["domain"] growth_increment = module.params["growth_increment"] growth_limit = module.params["growth_limit"] growth_warning = module.params["growth_warning"] raid_type = module.params["raid_type"] set_size = module.params["set_size"] high_availability = module.params["high_availability"] disk_type = module.params["disk_type"] secure = module.params["secure"] wsapi_url = 'https://%s:8080/api/v1' % storage_system_ip try: client_obj = client.HPE3ParClient(wsapi_url, secure) except exceptions.SSLCertFailed: module.fail_json(msg="SSL Certificate Failed") except exceptions.ConnectionError: module.fail_json(msg="Connection Error") except exceptions.UnsupportedVersion: module.fail_json(msg="Unsupported WSAPI version") except Exception as e: module.fail_json(msg="Initializing client failed. 
%s" % e) if storage_system_username is None or storage_system_password is None: module.fail_json(msg="Storage system username or password is None") if cpg_name is None: module.fail_json(msg="CPG Name is None") # States if module.params["state"] == "present": try: client_obj.login(storage_system_username, storage_system_password) return_status, changed, msg = create_cpg( client_obj, cpg_name, domain, growth_increment, growth_limit, growth_warning, raid_type, set_size, high_availability, disk_type ) except Exception as e: module.fail_json(msg="CPG create failed | %s" % e) finally: client_obj.logout() elif module.params["state"] == "absent": try: client_obj.login(storage_system_username, storage_system_password) return_status, changed, msg = delete_cpg( client_obj, cpg_name ) except Exception as e: module.fail_json(msg="CPG create failed | %s" % e) finally: client_obj.logout() if return_status: module.exit_json(changed=changed, msg=msg) else: module.fail_json(msg=msg) if __name__ == '__main__': main()
gpl-3.0
wujuguang/scrapy
scrapy/middleware.py
3
2729
from collections import defaultdict, deque import logging import pprint from scrapy.exceptions import NotConfigured from scrapy.utils.misc import create_instance, load_object from scrapy.utils.defer import process_parallel, process_chain, process_chain_both logger = logging.getLogger(__name__) class MiddlewareManager(object): """Base class for implementing middleware managers""" component_name = 'foo middleware' def __init__(self, *middlewares): self.middlewares = middlewares self.methods = defaultdict(deque) for mw in middlewares: self._add_middleware(mw) @classmethod def _get_mwlist_from_settings(cls, settings): raise NotImplementedError @classmethod def from_settings(cls, settings, crawler=None): mwlist = cls._get_mwlist_from_settings(settings) middlewares = [] enabled = [] for clspath in mwlist: try: mwcls = load_object(clspath) mw = create_instance(mwcls, settings, crawler) middlewares.append(mw) enabled.append(clspath) except NotConfigured as e: if e.args: clsname = clspath.split('.')[-1] logger.warning("Disabled %(clsname)s: %(eargs)s", {'clsname': clsname, 'eargs': e.args[0]}, extra={'crawler': crawler}) logger.info("Enabled %(componentname)ss:\n%(enabledlist)s", {'componentname': cls.component_name, 'enabledlist': pprint.pformat(enabled)}, extra={'crawler': crawler}) return cls(*middlewares) @classmethod def from_crawler(cls, crawler): return cls.from_settings(crawler.settings, crawler) def _add_middleware(self, mw): if hasattr(mw, 'open_spider'): self.methods['open_spider'].append(mw.open_spider) if hasattr(mw, 'close_spider'): self.methods['close_spider'].appendleft(mw.close_spider) def _process_parallel(self, methodname, obj, *args): return process_parallel(self.methods[methodname], obj, *args) def _process_chain(self, methodname, obj, *args): return process_chain(self.methods[methodname], obj, *args) def _process_chain_both(self, cb_methodname, eb_methodname, obj, *args): return process_chain_both(self.methods[cb_methodname], \ 
self.methods[eb_methodname], obj, *args) def open_spider(self, spider): return self._process_parallel('open_spider', spider) def close_spider(self, spider): return self._process_parallel('close_spider', spider)
bsd-3-clause
ZhangXinNan/tensorflow
tensorflow/contrib/autograph/converters/break_statements.py
3
4731
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Lowers break statements to conditionals.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.autograph.core import converter from tensorflow.contrib.autograph.pyct import anno from tensorflow.contrib.autograph.pyct import templates from tensorflow.contrib.autograph.pyct.static_analysis.annos import NodeAnno class _Break(object): def __init__(self): self.used = False self.control_var_name = None def __repr__(self): return 'used: %s, var: %s' % (self.used, self.control_var_name) class BreakTransformer(converter.Base): """Canonicalizes break statements into additional conditionals.""" def visit_Break(self, node): self.state[_Break].used = True var_name = self.state[_Break].control_var_name # TODO(mdan): This will fail when expanded inside a top-level else block. 
template = """ var_name = tf.constant(True) continue """ return templates.replace(template, var_name=var_name) def _guard_if_present(self, block, var_name): """Prevents the block from executing if var_name is set.""" if not block: return block template = """ if not var_name: block """ node = templates.replace( template, var_name=var_name, block=block) return node def _process_body(self, nodes, break_var): self.state[_Break].enter() self.state[_Break].control_var_name = break_var nodes = self.visit_block(nodes) break_used = self.state[_Break].used self.state[_Break].exit() return nodes, break_used def visit_While(self, node): scope = anno.getanno(node, NodeAnno.BODY_SCOPE) break_var = self.ctx.namer.new_symbol('break_', scope.referenced) node.test = self.visit(node.test) node.body, break_used = self._process_body(node.body, break_var) # A break in the else clause applies to the containing scope. node.orelse = self.visit_block(node.orelse) if break_used: # Python's else clause only triggers if the loop exited cleanly (e.g. # break did not trigger). guarded_orelse = self._guard_if_present(node.orelse, break_var) template = """ var_name = tf.constant(False) while test and not var_name: body else: orelse """ node = templates.replace( template, var_name=break_var, test=node.test, body=node.body, orelse=guarded_orelse) return node def visit_For(self, node): scope = anno.getanno(node, NodeAnno.BODY_SCOPE) break_var = self.ctx.namer.new_symbol('break_', scope.referenced) node.target = self.visit(node.target) node.iter = self.visit(node.iter) node.body, break_used = self._process_body(node.body, break_var) # A break in the else clause applies to the containing scope. node.orelse = self.visit_block(node.orelse) if break_used: # Python's else clause only triggers if the loop exited cleanly (e.g. # break did not trigger). 
guarded_orelse = self._guard_if_present(node.orelse, break_var) extra_test = templates.replace_as_expression( 'not var_name', var_name=break_var) # The extra test is hidden in the AST, which will confuse the static # analysis. To mitigate that, we insert a no-op statement that ensures # the control variable is marked as used. # TODO(mdan): Use a marker instead, e.g. ag__.condition_loop_on(var_name) template = """ var_name = tf.constant(False) for target in iter_: (var_name,) body else: orelse """ node = templates.replace( template, var_name=break_var, iter_=node.iter, target=node.target, body=node.body, orelse=guarded_orelse) anno.setanno(node[1], 'extra_test', extra_test) return node def transform(node, ctx): return BreakTransformer(ctx).visit(node)
apache-2.0
mitya57/django
tests/test_client/views.py
26
10894
"""Views used as fixtures by Django's test-client test suite.

Each view exercises one aspect of the test client (GET/POST handling, raw
request bodies, redirects, forms/formsets, auth decorators, sessions, mail,
exceptions, uploads). The rendered template strings are asserted verbatim by
the tests, so their exact contents are significant.
"""
from urllib.parse import urlencode
from xml.dom.minidom import parseString

from django.contrib.auth.decorators import login_required, permission_required
from django.core import mail
from django.forms import fields
from django.forms.forms import Form, ValidationError
from django.forms.formsets import BaseFormSet, formset_factory
from django.http import (
    HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed,
    HttpResponseNotFound, HttpResponseRedirect,
)
from django.shortcuts import render
from django.template import Context, Template
from django.test import Client
from django.utils.decorators import method_decorator


def get_view(request):
    "A simple view that expects a GET request, and returns a rendered template"
    t = Template('This is a test. {{ var }} is the value.', name='GET Template')
    # Falls back to 42 when no ?var= parameter is supplied.
    c = Context({'var': request.GET.get('var', 42)})
    return HttpResponse(t.render(c))


def trace_view(request):
    """
    A simple view that expects a TRACE request and echoes its status line.

    TRACE requests should not have an entity; the view will return a 400
    status response if it is present.
    """
    if request.method.upper() != "TRACE":
        return HttpResponseNotAllowed("TRACE")
    elif request.body:
        # Per the HTTP spec, a TRACE request must not carry a body.
        return HttpResponseBadRequest("TRACE requests MUST NOT include an entity")
    else:
        protocol = request.META["SERVER_PROTOCOL"]
        t = Template(
            '{{ method }} {{ uri }} {{ version }}',
            name="TRACE Template",
        )
        c = Context({
            'method': request.method,
            'uri': request.path,
            'version': protocol,
        })
        return HttpResponse(t.render(c))


def post_view(request):
    """A view that expects a POST, and returns a different template depending on
    whether any POST data is available
    """
    if request.method == 'POST':
        if request.POST:
            t = Template('Data received: {{ data }} is the value.', name='POST Template')
            c = Context({'data': request.POST['value']})
        else:
            # POST with an empty payload.
            t = Template('Viewing POST page.', name='Empty POST Template')
            c = Context()
    else:
        t = Template('Viewing GET page.', name='Empty GET Template')
        c = Context()
    return HttpResponse(t.render(c))


def view_with_header(request):
    "A view that has a custom header"
    response = HttpResponse()
    response['X-DJANGO-TEST'] = 'Slartibartfast'
    return response


def raw_post_view(request):
    """A view which expects raw XML to be posted and returns content extracted
    from the XML"""
    if request.method == 'POST':
        # Expected payload shape: a root element wrapping one book element
        # with exactly two children (title, author) — TODO confirm against
        # the posting tests.
        root = parseString(request.body)
        first_book = root.firstChild.firstChild
        title, author = [n.firstChild.nodeValue for n in first_book.childNodes]
        t = Template("{{ title }} - {{ author }}", name="Book template")
        c = Context({"title": title, "author": author})
    else:
        t = Template("GET request.", name="Book GET template")
        c = Context()
    return HttpResponse(t.render(c))


def redirect_view(request):
    "A view that redirects all requests to the GET view"
    if request.GET:
        # doseq=True so multi-valued query parameters survive the redirect.
        query = '?' + urlencode(request.GET, True)
    else:
        query = ''
    return HttpResponseRedirect('/get_view/' + query)


def view_with_secure(request):
    "A view that indicates if the request was secure"
    response = HttpResponse()
    # Exposed as response attributes so tests can inspect the request the
    # view actually saw.
    response.test_was_secure_request = request.is_secure()
    response.test_server_port = request.META.get('SERVER_PORT', 80)
    return response


def double_redirect_view(request):
    "A view that redirects all requests to a redirection view"
    return HttpResponseRedirect('/permanent_redirect_view/')


def bad_view(request):
    "A view that returns a 404 with some error content"
    return HttpResponseNotFound('Not found!. This page contains some MAGIC content')


# Shared choice tuples for the ChoiceField/MultipleChoiceField below.
TestChoices = (
    ('a', 'First Choice'),
    ('b', 'Second Choice'),
    ('c', 'Third Choice'),
    ('d', 'Fourth Choice'),
    ('e', 'Fifth Choice')
)


class TestForm(Form):
    """Simple form fixture covering several field types and a non-field error."""
    text = fields.CharField()
    email = fields.EmailField()
    value = fields.IntegerField()
    single = fields.ChoiceField(choices=TestChoices)
    multi = fields.MultipleChoiceField(choices=TestChoices)

    def clean(self):
        cleaned_data = self.cleaned_data
        # Magic sentinel the tests use to force a non-field (form-level) error.
        if cleaned_data.get("text") == "Raise non-field error":
            raise ValidationError("Non-field error.")
        return cleaned_data


def form_view(request):
    "A view that tests a simple form"
    if request.method == 'POST':
        form = TestForm(request.POST)
        if form.is_valid():
            t = Template('Valid POST data.', name='Valid POST Template')
            c = Context()
        else:
            t = Template('Invalid POST data. {{ form.errors }}', name='Invalid POST Template')
            c = Context({'form': form})
    else:
        form = TestForm(request.GET)
        t = Template('Viewing base form. {{ form }}.', name='Form GET Template')
        c = Context({'form': form})
    return HttpResponse(t.render(c))


def form_view_with_template(request):
    "A view that tests a simple form"
    if request.method == 'POST':
        form = TestForm(request.POST)
        if form.is_valid():
            message = 'POST data OK'
        else:
            message = 'POST data has errors'
    else:
        form = TestForm()
        message = 'GET form page'
    # Unlike form_view above, this renders through a template file.
    return render(request, 'form_view.html', {
        'form': form,
        'message': message,
    })


class BaseTestFormSet(BaseFormSet):
    """Formset fixture enforcing a cross-form uniqueness constraint."""

    def clean(self):
        """No two email addresses are the same."""
        if any(self.errors):
            # Don't bother validating the formset unless each form is valid
            return

        emails = []
        for i in range(0, self.total_form_count()):
            form = self.forms[i]
            email = form.cleaned_data['email']
            if email in emails:
                raise ValidationError(
                    "Forms in a set must have distinct email addresses."
                )
            emails.append(email)


TestFormSet = formset_factory(TestForm, BaseTestFormSet)


def formset_view(request):
    "A view that tests a simple formset"
    if request.method == 'POST':
        formset = TestFormSet(request.POST)
        if formset.is_valid():
            t = Template('Valid POST data.', name='Valid POST Template')
            c = Context()
        else:
            t = Template('Invalid POST data. {{ my_formset.errors }}',
                         name='Invalid POST Template')
            c = Context({'my_formset': formset})
    else:
        # NOTE(review): the GET branch builds a single TestForm but renders it
        # under the my_formset context name — looks like it should be
        # TestFormSet(request.GET); confirm against the test suite before
        # changing, since tests may only exercise the POST path.
        formset = TestForm(request.GET)
        t = Template('Viewing base formset. {{ my_formset }}.',
                     name='Formset GET Template')
        c = Context({'my_formset': formset})
    return HttpResponse(t.render(c))


@login_required
def login_protected_view(request):
    "A simple view that is login protected."
    t = Template('This is a login protected test. Username is {{ user.username }}.',
                 name='Login Template')
    c = Context({'user': request.user})
    return HttpResponse(t.render(c))


@login_required(redirect_field_name='redirect_to')
def login_protected_view_changed_redirect(request):
    "A simple view that is login protected with a custom redirect field set"
    t = Template('This is a login protected test. Username is {{ user.username }}.',
                 name='Login Template')
    c = Context({'user': request.user})
    return HttpResponse(t.render(c))


def _permission_protected_view(request):
    "A simple view that is permission protected."
    t = Template('This is a permission protected test. '
                 'Username is {{ user.username }}. '
                 'Permissions are {{ user.get_all_permissions }}.',
                 name='Permissions Template')
    c = Context({'user': request.user})
    return HttpResponse(t.render(c))


# Two public wrappers over the same view: redirect-on-failure vs. 403-raising.
permission_protected_view = permission_required('permission_not_granted')(_permission_protected_view)
permission_protected_view_exception = (
    permission_required('permission_not_granted', raise_exception=True)(_permission_protected_view)
)


class _ViewManager:
    """Holds method-based views to exercise method_decorator with auth decorators."""

    @method_decorator(login_required)
    def login_protected_view(self, request):
        t = Template('This is a login protected test using a method. '
                     'Username is {{ user.username }}.',
                     name='Login Method Template')
        c = Context({'user': request.user})
        return HttpResponse(t.render(c))

    @method_decorator(permission_required('permission_not_granted'))
    def permission_protected_view(self, request):
        t = Template('This is a permission protected test using a method. '
                     'Username is {{ user.username }}. '
                     'Permissions are {{ user.get_all_permissions }}.',
                     name='Permissions Template')
        c = Context({'user': request.user})
        return HttpResponse(t.render(c))


# Bound methods exposed as module-level view callables for the URLconf.
_view_manager = _ViewManager()
login_protected_method_view = _view_manager.login_protected_view
permission_protected_method_view = _view_manager.permission_protected_view


def session_view(request):
    "A view that modifies the session"
    request.session['tobacconist'] = 'hovercraft'
    t = Template('This is a view that modifies the session.',
                 name='Session Modifying View Template')
    c = Context()
    return HttpResponse(t.render(c))


def broken_view(request):
    """A view which just raises an exception, simulating a broken view."""
    raise KeyError("Oops! Looks like you wrote some bad code.")


def mail_sending_view(request):
    """Sends a single test email and reports success."""
    mail.EmailMessage(
        "Test message",
        "This is a test email",
        "from@example.com",
        ['first@example.com', 'second@example.com']).send()
    return HttpResponse("Mail sent")


def mass_mail_sending_view(request):
    """Sends two test emails over one shared connection."""
    m1 = mail.EmailMessage(
        'First Test message',
        'This is the first test email',
        'from@example.com',
        ['first@example.com', 'second@example.com'])
    m2 = mail.EmailMessage(
        'Second Test message',
        'This is the second test email',
        'from@example.com',
        ['second@example.com', 'third@example.com'])

    c = mail.get_connection()
    c.send_messages([m1, m2])

    return HttpResponse("Mail sent")


def nesting_exception_view(request):
    """
    A view that uses a nested client to call another view and then raises an
    exception.
    """
    client = Client()
    client.get('/get_view/')
    raise Exception('exception message')


def django_project_redirect(request):
    # External (absolute-URL) redirect fixture.
    return HttpResponseRedirect('https://www.djangoproject.com/')


def upload_view(request):
    """Prints keys of request.FILES to the response."""
    return HttpResponse(', '.join(request.FILES.keys()))
bsd-3-clause